text
stringlengths
2.5k
6.39M
kind
stringclasses
3 values
// ===========================================================================
// ESLint language-client wiring for the VS Code extension.
//
// Contents:
//  - Validator: decides per TextDocument whether ESLint validation is on,
//    probed, or off, driven by the 'eslint.enable' / 'eslint.validate' /
//    'eslint.probe' settings and a cache of documents whose probe failed.
//  - namespace ESLintClient: settings migration helpers and create(), which
//    constructs and fully wires the LanguageClient (custom notifications and
//    requests, middleware, notebook support, configuration resolution, and
//    the status-bar item).
//
// NOTE(review): line breaks have been restored in this revision. In the
// flattened original, several `//` comments sat mid physical line, which
// commented out the code that followed them on the same line. Tokens are
// unchanged; only whitespace and comments differ.
// ===========================================================================
import * as path from 'path';
import { workspace as Workspace, window as Window, languages as Languages, Uri, TextDocument, CodeActionContext, Diagnostic, ProviderResult, Command, CodeAction, MessageItem, ConfigurationTarget, env as Env, CodeActionKind, WorkspaceConfiguration, NotebookCell, commands, ExtensionContext, StatusBarAlignment, ThemeColor } from 'vscode';
import { LanguageClient, LanguageClientOptions, TransportKind, ErrorHandler, ErrorHandlerResult, CloseAction, CloseHandlerResult, RevealOutputChannelOn, ServerOptions, DocumentFilter, DidCloseTextDocumentNotification, DidOpenTextDocumentNotification, State, VersionedTextDocumentIdentifier, ExecuteCommandParams, ExecuteCommandRequest, ConfigurationParams, NotebookDocumentSyncRegistrationType } from 'vscode-languageclient/node';
import { CodeActionsOnSave, LegacyDirectoryItem, Migration, PatternItem, ValidateItem } from './settings';
import { ExitCalled, NoConfigRequest, NoESLintLibraryRequest, OpenESLintDocRequest, ProbeFailedRequest, ShowOutputChannel, Status, StatusNotification, StatusParams } from './shared/customMessages';
import { CodeActionsOnSaveMode, CodeActionsOnSaveRules, ConfigurationSettings, DirectoryItem, ESLintSeverity, ModeItem, RuleCustomization, Validate } from './shared/settings';
import { convert2RegExp, Is, Semaphore, toOSPath, toPosixPath } from './node-utils';
import { pickFolder } from './vscode-utils';

/**
 * Decides whether a given text document should be validated by ESLint.
 * Keeps a set of document URIs for which language-id probing failed so they
 * are not re-probed until clear() is called (e.g. on configuration change).
 */
export class Validator {
	// URIs (as strings) whose probe failed; checked before returning Validate.probe.
	private readonly probeFailed: Set<string> = new Set();

	/** Forgets all recorded probe failures. */
	public clear(): void { this.probeFailed.clear(); }

	/** Records that probing the given document URI failed. */
	public add(uri: Uri): void { this.probeFailed.add(uri.toString()); }

	/**
	 * Maps a document to Validate.on / Validate.probe / Validate.off:
	 * off when 'eslint.enable' is false or the document previously failed
	 * probing; on when its language id is listed in 'eslint.validate';
	 * probe when listed in 'eslint.probe'; otherwise off.
	 */
	public check(textDocument: TextDocument): Validate {
		const config = Workspace.getConfiguration('eslint', textDocument.uri);
		if (!config.get('enable', true)) { return Validate.off; }
		const languageId = textDocument.languageId;
		// 'eslint.validate' entries are either plain language-id strings or ValidateItem objects.
		const validate = config.get<(ValidateItem | string)[]>('validate');
		if (Array.isArray(validate)) { for (const item of validate) { if (Is.string(item) && item === languageId) { return Validate.on; } else if (ValidateItem.is(item) && item.language === languageId) { return Validate.on; } } }
		const uri: string = textDocument.uri.toString();
		if (this.probeFailed.has(uri)) { return Validate.off; }
		const probe: string[] | undefined = config.get<string[]>('probe');
		if (Array.isArray(probe)) { for (const item of probe) { if (item === languageId) { return Validate.probe; } } }
		return Validate.off;
	}
}

// Persisted global-state shape for the "ESLint library not found" message,
// tracking whether it was already shown globally and per workspace folder.
type NoESLintState = { global?: boolean; workspaces?: { [key: string]: boolean }; };

export namespace ESLintClient {

	// Logs a migration error and offers to open the ESLint output channel.
	function migrationFailed(client: LanguageClient, error: any): void {
		client.error(error.message ?? 'Unknown error', error);
		void Window.showErrorMessage('ESLint settings migration failed. Please see the ESLint output channel for further details', 'Open Channel').then((selected) => { if (selected === undefined) { return; } client.outputChannel.show(); });
	}

	/**
	 * Lets the user pick a workspace folder and migrates its legacy ESLint
	 * settings (Migration.record/needsUpdate/update). Errors are reported via
	 * migrationFailed.
	 */
	export async function migrateSettings(client: LanguageClient): Promise<void> {
		const folders = Workspace.workspaceFolders;
		if (folders === undefined) { void Window.showErrorMessage('ESLint settings can only be converted if VS Code is opened on a workspace folder.'); return; }
		const folder = await pickFolder(folders, 'Pick a folder to convert its settings');
		if (folder === undefined) { return; }
		const migration = new Migration(folder.uri);
		migration.record();
		if (migration.needsUpdate()) { try { await migration.update(); } catch (error) { migrationFailed(client, error); } }
	}

	/**
	 * Creates the ESLint LanguageClient and wires all client-side behavior:
	 * custom notifications/requests, notebook sync, state/status-bar updates,
	 * the 'eslint.executeAutofix' command, and the middleware that gates
	 * document sync on Validator.check. Returns the (not yet started) client.
	 */
	export function create(context: ExtensionContext, validator: Validator): LanguageClient {
		// Filters for client options
		const packageJsonFilter: DocumentFilter = { scheme: 'file', pattern: '**/package.json' };
		const configFileFilter: DocumentFilter = { scheme: 'file', pattern: '**/.eslintr{c.js,c.yaml,c.yml,c,c.json}' };
		// Code-action kinds the provideCodeActions middleware will forward to the server.
		const supportedQuickFixKinds: Set<string> = new Set([CodeActionKind.Source.value, CodeActionKind.SourceFixAll.value, `${CodeActionKind.SourceFixAll.value}.eslint`, CodeActionKind.QuickFix.value]);
		// A map of documents synced to the server
		const syncedDocuments: Map<string, TextDocument> = new Map();
		// The actual ESLint client
		const client: LanguageClient = new LanguageClient('ESLint', createServerOptions(context.extensionUri), createClientOptions());
		// The default error handler.
		const defaultErrorHandler: ErrorHandler = client.createDefaultErrorHandler();
		// Whether the server called process.exit(), which is intercepted and
		// reported to the client (suppresses automatic restarts, see errorHandler.closed).
		let serverCalledProcessExit: boolean = false;
		// A semaphore to ensure we are only running one migration at a time
		const migrationSemaphore: Semaphore<void> = new Semaphore<void>(1);
		// The actual migration code if any.
		let migration: Migration | undefined;
		// Whether migration should happen now ('Not now' / 'Open Readme' chosen by the user).
		let notNow: boolean = false;
		// The client's status bar item.
		const statusBarItem = Window.createStatusBarItem('generalStatus', StatusBarAlignment.Right, 0);
		let serverRunning: boolean | undefined;
		const starting = 'ESLint server is starting.';
		const running = 'ESLint server is running.';
		const stopped = 'ESLint server stopped.';
		statusBarItem.name = 'ESLint';
		statusBarItem.text = 'ESLint';
		statusBarItem.command = 'eslint.showOutputChannel';
		// Per-document validation status reported by the server via StatusNotification.
		const documentStatus: Map<string, Status> = new Map();

		// If the workspace configuration changes we need to update the synced documents since the
		// list of probe language type can change: close documents that became
		// invalid, open not-yet-synced documents that became valid.
		context.subscriptions.push(Workspace.onDidChangeConfiguration(() => { validator.clear(); for (const textDocument of syncedDocuments.values()) { if (validator.check(textDocument) === Validate.off) { const provider = client.getFeature(DidCloseTextDocumentNotification.method).getProvider(textDocument); provider?.send(textDocument).catch((error) => client.error(`Sending close notification failed.`, error)); } } for (const textDocument of Workspace.textDocuments) { if (!syncedDocuments.has(textDocument.uri.toString()) && validator.check(textDocument) !== Validate.off) { const provider = client.getFeature(DidOpenTextDocumentNotification.method).getProvider(textDocument); provider?.send(textDocument).catch((error) => client.error(`Sending open notification failed.`, error)); } } }));

		// Server asked us to reveal its output channel.
		client.onNotification(ShowOutputChannel.type, () => { client.outputChannel.show(); });

		// Server reports per-document validation status; reflected in the status bar.
		client.onNotification(StatusNotification.type, (params) => { updateDocumentStatus(params); });

		// Server intercepted process.exit(): log it and offer the output channel.
		client.onNotification(ExitCalled.type, (params) => { serverCalledProcessExit = true; client.error(`Server process exited with code ${params[0]}. This usually indicates a misconfigured ESLint setup.`, params[1]); void Window.showErrorMessage(`ESLint server shut down itself. See 'ESLint' output channel for details.`, { title: 'Open Output', id: 1}).then((value) => { if (value !== undefined && value.id === 1) { client.outputChannel.show(); } }); });

		// No ESLint configuration found for a file: warn (message depends on
		// whether the file is inside a workspace folder) and mark it errored.
		client.onRequest(NoConfigRequest.type, (params) => { const document = Uri.parse(params.document.uri); const workspaceFolder = Workspace.getWorkspaceFolder(document); const fileLocation = document.fsPath; if (workspaceFolder) { client.warn([ '', `No ESLint configuration (e.g .eslintrc) found for file: ${fileLocation}`, `File will not be validated. 
Consider running 'eslint --init' in the workspace folder ${workspaceFolder.name}`, `Alternatively you can disable ESLint by executing the 'Disable ESLint' command.` ].join('\n')); } else { client.warn([ '', `No ESLint configuration (e.g .eslintrc) found for file: ${fileLocation}`, `File will not be validated. Alternatively you can disable ESLint by executing the 'Disable ESLint' command.` ].join('\n')); } updateDocumentStatus({ uri: params.document.uri, state: Status.error }); return {}; });

		// The ESLint library could not be loaded: log install instructions and
		// show a one-time popup per workspace folder / globally (tracked in globalState).
		client.onRequest(NoESLintLibraryRequest.type, (params) => { const key = 'noESLintMessageShown'; const state = context.globalState.get<NoESLintState>(key, {}); const uri: Uri = Uri.parse(params.source.uri); const workspaceFolder = Workspace.getWorkspaceFolder(uri); const packageManager = Workspace.getConfiguration('eslint', uri).get('packageManager', 'npm'); const localInstall = { npm: 'npm install eslint', pnpm: 'pnpm install eslint', yarn: 'yarn add eslint', }; const globalInstall = { npm: 'npm install -g eslint', pnpm: 'pnpm install -g eslint', yarn: 'yarn global add eslint' }; const isPackageManagerNpm = packageManager === 'npm'; interface ButtonItem extends MessageItem { id: number; } const outputItem: ButtonItem = { title: 'Go to output', id: 1 }; if (workspaceFolder) { client.info([ '', `Failed to load the ESLint library for the document ${uri.fsPath}`, '', `To use ESLint please install eslint by running ${localInstall[packageManager]} in the workspace folder ${workspaceFolder.name}`, `or globally using '${globalInstall[packageManager]}'. You need to reopen the workspace after installing eslint.`, '', isPackageManagerNpm ? 'If you are using yarn or pnpm instead of npm set the setting `eslint.packageManager` to either `yarn` or `pnpm`' : null, `Alternatively you can disable ESLint for the workspace folder ${workspaceFolder.name} by executing the 'Disable ESLint' command.` ].filter((str => (str !== null))).join('\n')); if (state.workspaces === undefined) { state.workspaces = {}; } if (!state.workspaces[workspaceFolder.uri.toString()]) { state.workspaces[workspaceFolder.uri.toString()] = true; void context.globalState.update(key, state); void Window.showInformationMessage(`Failed to load the ESLint library for the document ${uri.fsPath}. See the output for more information.`, outputItem).then((item) => { if (item && item.id === 1) { client.outputChannel.show(true); } }); } } else { client.info([ `Failed to load the ESLint library for the document ${uri.fsPath}`, `To use ESLint for single JavaScript file install eslint globally using '${globalInstall[packageManager]}'.`, isPackageManagerNpm ? 'If you are using yarn or pnpm instead of npm set the setting `eslint.packageManager` to either `yarn` or `pnpm`' : null, 'You need to reopen VS Code after installing eslint.', ].filter((str => (str !== null))).join('\n')); if (!state.global) { state.global = true; void context.globalState.update(key, state); void Window.showInformationMessage(`Failed to load the ESLint library for the document ${uri.fsPath}. 
See the output for more information.`, outputItem).then((item) => { if (item && item.id === 1) { client.outputChannel.show(true); } }); } } return {}; });

		// Open a rule's documentation URL in the default handler.
		client.onRequest(OpenESLintDocRequest.type, async (params) => { await commands.executeCommand('vscode.open', Uri.parse(params.url)); return {}; });

		// Probing a document failed on the server: remember it and close the document.
		client.onRequest(ProbeFailedRequest.type, (params) => { validator.add(client.protocol2CodeConverter.asUri(params.textDocument.uri)); const closeFeature = client.getFeature(DidCloseTextDocumentNotification.method); for (const document of Workspace.textDocuments) { if (document.uri.toString() === params.textDocument.uri) { closeFeature.getProvider(document)?.send(document).catch((error) => client.error(`Sending close notification failed`, error)); } } });

		// Register notebook sync for file-scheme notebooks when the feature is available.
		const notebookFeature = client.getFeature(NotebookDocumentSyncRegistrationType.method);
		if (notebookFeature !== undefined) { notebookFeature.register({ id: String(Date.now()), registerOptions: { notebookSelector: [{ notebook: { scheme: 'file' } }] } }); }

		// Track server lifecycle for the status bar tooltip / error state.
		client.onDidChangeState((event) => { if (event.newState === State.Starting) { client.info('ESLint server is starting'); serverRunning = undefined; } else if (event.newState === State.Running) { client.info(running); serverRunning = true; } else { client.info(stopped); serverRunning = false; } updateStatusBar(undefined); });

		// Editor/document events that affect the status bar, and the
		// 'eslint.executeAutofix' command which runs 'eslint.applyAllFixes'
		// on the active editor's document (versioned, so stale fixes are dropped server-side).
		context.subscriptions.push( Window.onDidChangeActiveTextEditor(() => { updateStatusBar(undefined); }), Workspace.onDidCloseTextDocument((document) => { const uri = document.uri.toString(); documentStatus.delete(uri); updateStatusBar(undefined); }), commands.registerCommand('eslint.executeAutofix', async () => { const textEditor = Window.activeTextEditor; if (!textEditor) { return; } const textDocument: VersionedTextDocumentIdentifier = { uri: textEditor.document.uri.toString(), version: textEditor.document.version }; const params: ExecuteCommandParams = { command: 'eslint.applyAllFixes', arguments: [textDocument] }; await client.start(); client.sendRequest(ExecuteCommandRequest.type, params).then(undefined, () => { void Window.showErrorMessage('Failed to apply ESLint fixes to the document. Please consider opening an issue with steps to reproduce.'); }); }) );

		return client;

		// Builds run/debug ServerOptions for the node IPC server from the
		// 'eslint.debug', 'eslint.runtime', 'eslint.execArgv' and 'eslint.nodeEnv'
		// settings. Debug mode adds --nolazy --inspect=6011.
		function createServerOptions(extensionUri: Uri): ServerOptions {
			const serverModule = Uri.joinPath(extensionUri, 'server', 'out', 'eslintServer.js').fsPath;
			const eslintConfig = Workspace.getConfiguration('eslint');
			const debug = sanitize(eslintConfig.get<boolean>('debug', false) ?? false, 'boolean', false);
			const runtime = sanitize(eslintConfig.get<string | null>('runtime', null) ?? undefined, 'string', undefined);
			const execArgv = sanitize(eslintConfig.get<string[] | null>('execArgv', null) ?? undefined, 'string', undefined);
			const nodeEnv = sanitize(eslintConfig.get('nodeEnv', null) ?? undefined, 'string', undefined);
			let env: { [key: string]: string | number | boolean } | undefined;
			if (debug) { env = env || {}; env.DEBUG = 'eslint:*,-eslint:code-path'; }
			if (nodeEnv !== undefined) { env = env || {}; env.NODE_ENV = nodeEnv; }
			const debugArgv = ['--nolazy', '--inspect=6011'];
			const result: ServerOptions = { run: { module: serverModule, transport: TransportKind.ipc, runtime, options: { execArgv, cwd: process.cwd(), env } }, debug: { module: serverModule, transport: TransportKind.ipc, runtime, options: { execArgv: execArgv !== undefined ? execArgv.concat(debugArgv) : debugArgv, cwd: process.cwd(), env } } };
			return result;
		}

		// Guards a user-supplied setting value: arrays are filtered element-wise
		// by typeof; a scalar of the wrong typeof falls back to `def`.
		function sanitize<T, D>(value: T, type: 'bigint' | 'boolean' | 'function' | 'number' | 'object' | 'string' | 'symbol' | 'undefined', def: D): T | D {
			if (Array.isArray(value)) { return value.filter(item => typeof item === type) as unknown as T; } else if (typeof value !== type) { return def; }
			return value;
		}

		// Assembles LanguageClientOptions: file/untitled selector, config-file
		// watchers, error handling (no restart after an intercepted process.exit),
		// and middleware that gates document sync and code actions on the
		// syncedDocuments map and Validator.check.
		function createClientOptions(): LanguageClientOptions {
			const clientOptions: LanguageClientOptions = { documentSelector: [{ scheme: 'file' }, { scheme: 'untitled' }], diagnosticCollectionName: 'eslint', revealOutputChannelOn: RevealOutputChannelOn.Never, initializationOptions: { }, progressOnInitialization: true, synchronize: { fileEvents: [ Workspace.createFileSystemWatcher('**/.eslintr{c.js,c.yaml,c.yml,c,c.json}'), Workspace.createFileSystemWatcher('**/.eslintignore'), Workspace.createFileSystemWatcher('**/package.json') ] }, initializationFailedHandler: (error) => { client.error('Server initialization failed.', error); client.outputChannel.show(true); return false; }, errorHandler: { error: (error, message, count): ErrorHandlerResult => { return defaultErrorHandler.error(error, message, count); }, closed: (): CloseHandlerResult => { if (serverCalledProcessExit) { return { action: CloseAction.DoNotRestart }; } return defaultErrorHandler.closed(); } },
				// didOpen only forwards documents that are package.json, an eslintrc
				// file, or pass Validator.check; the other sync middleware forwards
				// only for documents present in syncedDocuments.
				middleware: { didOpen: async (document, next) => { if (Languages.match(packageJsonFilter, document) || Languages.match(configFileFilter, document) || validator.check(document) !== Validate.off) { const result = next(document); syncedDocuments.set(document.uri.toString(), document); return result; } }, didChange: async (event, next) => { if (syncedDocuments.has(event.document.uri.toString())) { return next(event); } }, willSave: async (event, next) => { if (syncedDocuments.has(event.document.uri.toString())) { return next(event); } }, willSaveWaitUntil: (event, next) => { if (syncedDocuments.has(event.document.uri.toString())) { return next(event); } else { return Promise.resolve([]); } }, didSave: async (document, next) => { if (syncedDocuments.has(document.uri.toString())) { return next(document); } }, didClose: async (document, next) => { const uri = document.uri.toString(); if (syncedDocuments.has(uri)) { syncedDocuments.delete(uri); return next(document); } },
				// Notebook cell open/close keeps syncedDocuments in step with cell documents.
				notebooks: { didOpen: (notebookDocument, cells, next) => { const result = next(notebookDocument, cells); for (const cell of cells) { syncedDocuments.set(cell.document.uri.toString(), cell.document); } return result; }, didChange: (event, next) => { if (event.cells?.structure?.didOpen !== undefined) { for (const open of event.cells.structure.didOpen) { syncedDocuments.set(open.document.uri.toString(), open.document); } } if (event.cells?.structure?.didClose !== undefined) { for (const closed of event.cells.structure.didClose) { syncedDocuments.delete(closed.document.uri.toString()); } } return next(event); }, didClose: (document, cells, next) => { for (const cell of cells) { const key = cell.document.uri.toString(); syncedDocuments.delete(key); } return next(document, cells); } },
				// Only forward code-action requests for synced documents, supported
				// kinds, and (for non-'only' requests) contexts with ESLint diagnostics;
				// the context sent onward carries only the ESLint-sourced diagnostics.
				provideCodeActions: (document, range, context, token, next): ProviderResult<(Command | CodeAction)[]> => { if (!syncedDocuments.has(document.uri.toString())) { return []; } if (context.only !== undefined && !supportedQuickFixKinds.has(context.only.value)) { return []; } if (context.only === undefined && (!context.diagnostics || context.diagnostics.length === 0)) { return []; } const eslintDiagnostics: Diagnostic[] = []; for (const diagnostic of context.diagnostics) { if (diagnostic.source === 'eslint') { eslintDiagnostics.push(diagnostic); } } if (context.only === undefined && eslintDiagnostics.length === 0) { return []; } const newContext: CodeActionContext = Object.assign({}, context, { diagnostics: eslintDiagnostics }); return next(document, range, newContext, token); },
				workspace: { didChangeWatchedFile: (event, next) => { validator.clear(); return next(event); }, didChangeConfiguration: async (sections, next) => { if (migration !== undefined && (sections === undefined || sections.length === 0)) { migration.captureDidChangeSetting(() => { return next(sections); }); } else { return next(sections); } }, configuration: (params) => { return readConfiguration(params); } } },
				// Notebook cells are only validated if they would be validated as standalone documents.
				notebookDocumentOptions: { filterCells: (_notebookDocument, cells) => { const result: NotebookCell[] = []; for (const cell of cells) { const document = cell.document; if (Languages.match(packageJsonFilter, document) || Languages.match(configFileFilter, document) || validator.check(document) !== Validate.off) { result.push(cell); } } return result; } } };
			return clientOptions;
		}

		// Resolves the server's workspace/configuration requests into
		// ConfigurationSettings per scope URI. Also runs the one-shot 2.x
		// settings-migration prompt (serialized via migrationSemaphore) and
		// computes the effective working directory from 'eslint.workingDirectories'.
		async function readConfiguration(params: ConfigurationParams): Promise<(ConfigurationSettings | null)[]> {
			if (params.items === undefined) { return []; }
			const result: (ConfigurationSettings | null)[] = [];
			for (const item of params.items) {
				if (item.section || !item.scopeUri) { result.push(null); continue; }
				const resource = client.protocol2CodeConverter.asUri(item.scopeUri);
				const textDocument = getTextDocument(resource);
				const config = Workspace.getConfiguration('eslint', textDocument ?? resource);
				// Untitled documents fall back to the first workspace folder.
				const workspaceFolder = resource.scheme === 'untitled' ? Workspace.workspaceFolders !== undefined ? Workspace.workspaceFolders[0] : undefined : Workspace.getWorkspaceFolder(resource);
				// One migration prompt at a time; skipped once the user picked
				// 'Not now' / 'Open Readme' (notNow) or disabled it via 'migration.2_x'.
				await migrationSemaphore.lock(async () => { const globalMigration = Workspace.getConfiguration('eslint').get('migration.2_x', 'on'); if (notNow === false && globalMigration === 'on') { try { migration = new Migration(resource); migration.record(); interface Item extends MessageItem { id: 'yes' | 'no' | 'readme' | 'global' | 'local'; } if (migration.needsUpdate()) { const folder = workspaceFolder?.name; const file = path.basename(resource.fsPath); const selected = await Window.showInformationMessage<Item>( [ `The ESLint 'autoFixOnSave' setting needs to be migrated to the new 'editor.codeActionsOnSave' setting`, folder !== undefined ? `for the workspace folder: ${folder}.` : `for the file: ${file}.`, `For compatibility reasons the 'autoFixOnSave' remains and needs to be removed manually.`, `Do you want to migrate the setting?` ].join(' '), { modal: true}, { id: 'yes', title: 'Yes'}, { id: 'global', title: 'Never migrate Settings' }, { id: 'readme', title: 'Open Readme' }, { id: 'no', title: 'Not now', isCloseAffordance: true } ); if (selected !== undefined) { if (selected.id === 'yes') { try { await migration.update(); } catch (error) { migrationFailed(client, error); } } else if (selected.id === 'no') { notNow = true; } else if (selected.id === 'global') { await config.update('migration.2_x', 'off', ConfigurationTarget.Global); } else if (selected.id === 'readme') { notNow = true; void Env.openExternal(Uri.parse('https://github.com/microsoft/vscode-eslint#settings-migration')); } } } } finally { migration = undefined; } } });
				// Base settings; validate/format/codeActionOnSave are refined below
				// once we know the document is synced and validation is enabled.
				const settings: ConfigurationSettings = { validate: Validate.off, packageManager: config.get('packageManager', 'npm'), useESLintClass: config.get('useESLintClass', false), codeActionOnSave: { enable: false, mode: CodeActionsOnSaveMode.all }, format: false, quiet: config.get('quiet', false), onIgnoredFiles: ESLintSeverity.from(config.get<string>('onIgnoredFiles', ESLintSeverity.off)), options: config.get('options', {}), rulesCustomizations: getRuleCustomizations(config, resource), run: config.get('run', 'onType'), nodePath: config.get<string | undefined>('nodePath', undefined) ?? null, workingDirectory: undefined, workspaceFolder: undefined, codeAction: { disableRuleComment: config.get('codeAction.disableRuleComment', { enable: true, location: 'separateLine' as 'separateLine' }), showDocumentation: config.get('codeAction.showDocumentation', { enable: true }) } };
				const document: TextDocument | undefined = syncedDocuments.get(item.scopeUri);
				if (document === undefined) { result.push(settings); continue; }
				// NOTE(review): reads 'enabled' here while Validator.check reads
				// 'enable' — verify which setting key is intended.
				if (config.get('enabled', true)) { settings.validate = validator.check(document); }
				if (settings.validate !== Validate.off) { settings.format = !!config.get('format.enable', false); settings.codeActionOnSave.enable = readCodeActionsOnSaveSetting(document); settings.codeActionOnSave.mode = CodeActionsOnSaveMode.from(config.get('codeActionsOnSave.mode', CodeActionsOnSaveMode.all)); settings.codeActionOnSave.rules = CodeActionsOnSaveRules.from(config.get('codeActionsOnSave.rules', null)); }
				if (workspaceFolder !== undefined) { settings.workspaceFolder = { name: workspaceFolder.name, uri: client.code2ProtocolConverter.asUri(workspaceFolder.uri) }; }
				// 'eslint.workingDirectories' entries may be strings, legacy items,
				// directory items, glob patterns, or a mode item; the longest
				// matching directory prefix for the document wins.
				const workingDirectories = config.get<(string | LegacyDirectoryItem | DirectoryItem | PatternItem | ModeItem)[] | undefined>('workingDirectories', undefined);
				if (Array.isArray(workingDirectories)) { let workingDirectory: ModeItem | DirectoryItem | undefined = undefined; const workspaceFolderPath = workspaceFolder && workspaceFolder.uri.scheme === 'file' ? workspaceFolder.uri.fsPath : undefined; for (const entry of workingDirectories) { let directory: string | undefined; let pattern: string | undefined; let noCWD = false; if (Is.string(entry)) { directory = entry; } else if (LegacyDirectoryItem.is(entry)) { directory = entry.directory; noCWD = !entry.changeProcessCWD; } else if (DirectoryItem.is(entry)) { directory = entry.directory; if (entry['!cwd'] !== undefined) { noCWD = entry['!cwd']; } } else if (PatternItem.is(entry)) { pattern = entry.pattern; if (entry['!cwd'] !== undefined) { noCWD = entry['!cwd']; } } else if (ModeItem.is(entry)) { workingDirectory = entry; continue; } let itemValue: string | undefined; if (directory !== undefined || pattern !== undefined) { const filePath = document.uri.scheme === 'file' ? document.uri.fsPath : undefined; if (filePath !== undefined) { if (directory !== undefined) { directory = toOSPath(directory); if (!path.isAbsolute(directory) && workspaceFolderPath !== undefined) { directory = path.join(workspaceFolderPath, directory); } if (directory.charAt(directory.length - 1) !== path.sep) { directory = directory + path.sep; } if (filePath.startsWith(directory)) { itemValue = directory; } } else if (pattern !== undefined && pattern.length > 0) { if (!path.posix.isAbsolute(pattern) && workspaceFolderPath !== undefined) { pattern = path.posix.join(toPosixPath(workspaceFolderPath), pattern); } if (pattern.charAt(pattern.length - 1) !== path.posix.sep) { pattern = pattern + path.posix.sep; } const regExp: RegExp | undefined = convert2RegExp(pattern); if (regExp !== undefined) { const match = regExp.exec(filePath); if (match !== null && match.length > 0) { itemValue = match[0]; } } } } } if (itemValue !== undefined) { if (workingDirectory === undefined || ModeItem.is(workingDirectory)) { workingDirectory = { directory: itemValue, '!cwd': noCWD }; } else { if (workingDirectory.directory.length < itemValue.length) { workingDirectory.directory = itemValue; workingDirectory['!cwd'] = noCWD; } } } } settings.workingDirectory = workingDirectory; }
				result.push(settings);
			}
			return result;
		}

		// Keeps only entries with string 'severity' and 'rule'; anything else is dropped.
		function parseRulesCustomizations(rawConfig: unknown): RuleCustomization[] {
			if (!rawConfig || !Array.isArray(rawConfig)) { return []; }
			return rawConfig.map(rawValue => { if (typeof rawValue.severity === 'string' && typeof rawValue.rule === 'string') { return { severity: rawValue.severity, rule: rawValue.rule, }; } return undefined; }).filter((value): value is RuleCustomization => !!value);
		}

		// Notebook cells prefer 'eslint.notebooks.rules.customizations', falling
		// back to 'eslint.rules.customizations'.
		function getRuleCustomizations(config: WorkspaceConfiguration, uri: Uri): RuleCustomization[] {
			let customizations: any = undefined;
			if (uri.scheme === 'vscode-notebook-cell') { customizations = config.get('notebooks.rules.customizations', undefined); }
			if (customizations === undefined || customizations === null) { customizations = config.get('rules.customizations'); }
			return parseRulesCustomizations(customizations);
		}

		// True when 'editor.codeActionsOnSave' enables source.fixAll.eslint
		// (directly, or via source.fixAll) for the given document.
		function readCodeActionsOnSaveSetting(document: TextDocument): boolean {
			let result: boolean | undefined = undefined;
			function isEnabled(value: CodeActionsOnSave | string[]): boolean | undefined { if (value === undefined || value === null) { return undefined; } if (Array.isArray(value)) { const result = value.some((element) => { return element === 'source.fixAll.eslint' || element === 'source.fixAll'; }); return result === true ? true : undefined; } else { return value['source.fixAll.eslint'] ?? value['source.fixAll']; } }
			const codeActionsOnSave = Workspace.getConfiguration('editor', document).get<CodeActionsOnSave>('codeActionsOnSave');
			if (codeActionsOnSave !== undefined) { result = isEnabled(codeActionsOnSave); }
			return result ?? false;
		}

		// Looks up a document in the synced map by URI.
		function getTextDocument(uri: Uri): TextDocument | undefined { return syncedDocuments.get(uri.toString()); }

		// Records a server-reported document status and refreshes the status bar.
		function updateDocumentStatus(params: StatusParams): void { documentStatus.set(params.uri, params.state); updateStatusBar(params.uri); }

		// Renders the status bar item: error when the server is down, otherwise
		// the status of the given (or active-editor) document; hidden when ok
		// unless 'eslint.alwaysShowStatus' is set.
		function updateStatusBar(uri: string | undefined) {
			const status = function() { if (serverRunning === false) { return Status.error; } if (uri === undefined) { uri = Window.activeTextEditor?.document.uri.toString(); } return (uri !== undefined ? documentStatus.get(uri) : undefined) ?? Status.ok; }();
			let icon: string| undefined;
			let tooltip: string | undefined;
			let text: string = 'ESLint';
			let backgroundColor: ThemeColor | undefined;
			let foregroundColor: ThemeColor | undefined;
			switch (status) { case Status.ok: icon = undefined; foregroundColor = new ThemeColor('statusBarItem.foreground'); backgroundColor = new ThemeColor('statusBarItem.background'); break; case Status.warn: icon = '$(alert)'; foregroundColor = new ThemeColor('statusBarItem.warningForeground'); backgroundColor = new ThemeColor('statusBarItem.warningBackground'); break; case Status.error: icon = '$(issue-opened)'; foregroundColor = new ThemeColor('statusBarItem.errorForeground'); backgroundColor = new ThemeColor('statusBarItem.errorBackground'); break; }
			statusBarItem.text = icon !== undefined ? `${icon} ${text}` : text;
			statusBarItem.color = foregroundColor;
			statusBarItem.backgroundColor = backgroundColor;
			statusBarItem.tooltip = tooltip ? tooltip : serverRunning === undefined ? starting : serverRunning === true ? running : stopped;
			const alwaysShow = Workspace.getConfiguration('eslint').get('alwaysShowStatus', false);
			if (alwaysShow || status !== Status.ok) { statusBarItem.show(); } else { statusBarItem.hide(); }
		}
	}
}
the_stack
import { ConfigService } from '@nestjs/config'; import { barTimeframe } from '~blockml/barrels/bar-timeframe'; import { common } from '~blockml/barrels/common'; import { constants } from '~blockml/barrels/constants'; import { enums } from '~blockml/barrels/enums'; import { helper } from '~blockml/barrels/helper'; import { interfaces } from '~blockml/barrels/interfaces'; import { types } from '~blockml/barrels/types'; import { BmError } from '~blockml/models/bm-error'; let func = enums.FuncEnum.TransformTimes; export function transformTimes<T extends types.vmType>( item: { entities: T[]; weekStart: common.ProjectWeekStartEnum; errors: BmError[]; structId: string; caller: enums.CallerEnum; }, cs: ConfigService<interfaces.Config> ): T[] { let { caller, structId } = item; helper.log(cs, caller, func, structId, enums.LogTypeEnum.Input, item); let newEntities: T[] = []; item.entities.forEach(x => { let errorsOnStart = item.errors.length; if (x.fileExt === common.FileExtensionEnum.Dashboard) { newEntities.push(x); return; } let newFields: interfaces.FieldAny[] = []; x.fields.forEach(field => { if (field.fieldClass !== common.FieldClassEnum.Time) { newFields.push(field); return; } if (common.isUndefined(field.timeframes)) { field.timeframes = [ enums.TimeframeEnum.Time, enums.TimeframeEnum.Date, enums.TimeframeEnum.HourOfDay, enums.TimeframeEnum.Hour, enums.TimeframeEnum.TimeOfDay, enums.TimeframeEnum.DayOfWeek, enums.TimeframeEnum.DayOfWeekIndex, enums.TimeframeEnum.DayOfYear, enums.TimeframeEnum.Week, enums.TimeframeEnum.WeekOfYear, enums.TimeframeEnum.DayOfMonth, enums.TimeframeEnum.Month, enums.TimeframeEnum.MonthNum, enums.TimeframeEnum.MonthName, enums.TimeframeEnum.Year, enums.TimeframeEnum.Quarter, enums.TimeframeEnum.QuarterOfYear, enums.TimeframeEnum.Minute, enums.TimeframeEnum.YesNoHasValue ]; field.timeframes_line_num = 0; } let groupLabel = field.group_label; if (common.isUndefined(groupLabel)) { groupLabel = 
common.MyRegex.replaceUnderscoresWithSpaces(field.name); groupLabel = groupLabel .split(' ') .map(word => common.capitalizeFirstLetter(word)) .join(' '); } let groupDescription = field.group_description; let ts; if ( common.isUndefined(field.source) || field.source === enums.TimeSourceEnum.Timestamp ) { ts = field.sql; } else if (field.source === enums.TimeSourceEnum.Epoch) { ts = barTimeframe.makeTsFromSourceEpoch({ sql: field.sql, connection: x.connection }); } else if (field.source === enums.TimeSourceEnum.YYYYMMDD) { ts = barTimeframe.makeTsFromSourceYYYYMMDD({ sql: field.sql, connection: x.connection }); } else { item.errors.push( new BmError({ title: enums.ErTitleEnum.WRONG_TIME_SOURCE, message: `possible values for "${enums.ParameterEnum.Source}" of time field are: "${enums.TimeSourceEnum.Timestamp}", "${enums.TimeSourceEnum.Epoch}", "${enums.TimeSourceEnum.YYYYMMDD}"`, lines: [ { line: field.source_line_num, name: x.fileName, path: x.filePath } ] }) ); return; } let sqlTimestamp = `${constants.MPROVE_TIMESTAMP_START}${ts}${constants.MPROVE_TIMESTAMP_END}`; field.timeframes.forEach(timeframe => { let sqlTransformed: string; let name: string; let label: string; let result: common.FieldResultEnum; switch (true) { case timeframe === enums.TimeframeEnum.DayOfWeek: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.DayOfWeek; sqlTransformed = barTimeframe.makeTimeframeDayOfWeek({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.DayOfWeek; break; } case timeframe === enums.TimeframeEnum.DayOfWeekIndex: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.DayOfWeekIndex; sqlTransformed = barTimeframe.makeTimeframeDayOfWeekIndex({ sqlTimestamp: sqlTimestamp, connection: x.connection, weekStart: item.weekStart }); result = common.FieldResultEnum.DayOfWeekIndex; break; } case timeframe === enums.TimeframeEnum.DayOfYear: { name = field.name + 
common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.DayOfYear; // no need for weekStart sqlTransformed = barTimeframe.makeTimeframeDayOfYear({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Number; break; } case timeframe === enums.TimeframeEnum.Week: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Week; sqlTransformed = barTimeframe.makeTimeframeWeek({ sqlTimestamp: sqlTimestamp, connection: x.connection, weekStart: item.weekStart }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.WeekOfYear: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.WeekOfYear; sqlTransformed = barTimeframe.makeTimeframeWeekOfYear({ sqlTimestamp: sqlTimestamp, connection: x.connection, weekStart: item.weekStart }); result = common.FieldResultEnum.Number; break; } case timeframe === enums.TimeframeEnum.Date: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Date; sqlTransformed = barTimeframe.makeTimeframeDate({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.DayOfMonth: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.DayOfMonth; sqlTransformed = barTimeframe.makeTimeframeDayOfMonth({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Number; break; } case timeframe === enums.TimeframeEnum.Hour: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Hour; sqlTransformed = barTimeframe.makeTimeframeHour({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.HourOfDay: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.HourOfDay; sqlTransformed = 
barTimeframe.makeTimeframeHourOfDay({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Number; break; } case timeframe === enums.TimeframeEnum.Hour2 || timeframe === enums.TimeframeEnum.Hour3 || timeframe === enums.TimeframeEnum.Hour4 || timeframe === enums.TimeframeEnum.Hour6 || timeframe === enums.TimeframeEnum.Hour8 || timeframe === enums.TimeframeEnum.Hour12: { let reg = common.MyRegex.CAPTURE_DIGITS_G(); let r = reg.exec(timeframe); let num = r[1]; name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = timeframe; sqlTransformed = barTimeframe.makeTimeframeHourNum({ num: num, sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.Minute: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Minute; sqlTransformed = barTimeframe.makeTimeframeMinute({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.Minute2 || timeframe === enums.TimeframeEnum.Minute3 || timeframe === enums.TimeframeEnum.Minute5 || timeframe === enums.TimeframeEnum.Minute10 || timeframe === enums.TimeframeEnum.Minute15 || timeframe === enums.TimeframeEnum.Minute30: { let reg = common.MyRegex.CAPTURE_DIGITS_G(); let r = reg.exec(timeframe); let num = r[1]; name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = timeframe; sqlTransformed = barTimeframe.makeTimeframeMinuteNum({ num: num, sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.Month: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Month; sqlTransformed = barTimeframe.makeTimeframeMonth({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.MonthName: { name 
= field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.MonthName; sqlTransformed = barTimeframe.makeTimeframeMonthName({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.MonthName; break; } case timeframe === enums.TimeframeEnum.MonthNum: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.MonthNum; sqlTransformed = barTimeframe.makeTimeframeMonthNum({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Number; break; } case timeframe === enums.TimeframeEnum.Quarter: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Quarter; sqlTransformed = barTimeframe.makeTimeframeQuarter({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.QuarterOfYear: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.QuarterOfYear; sqlTransformed = barTimeframe.makeTimeframeQuarterOfYear({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.QuarterOfYear; break; } case timeframe === enums.TimeframeEnum.Time: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Time; sqlTransformed = barTimeframe.makeTimeframeTime({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.TimeOfDay: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.TimeOfDay; sqlTransformed = barTimeframe.makeTimeframeTimeOfDay({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.String; break; } case timeframe === enums.TimeframeEnum.Year: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.Year; sqlTransformed = barTimeframe.makeTimeframeYear({ sqlTimestamp: 
sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Ts; break; } case timeframe === enums.TimeframeEnum.YesNoHasValue: { name = field.name + common.TRIPLE_UNDERSCORE + timeframe; label = enums.TimeLabelEnum.YesNoHasValue; sqlTransformed = barTimeframe.makeTimeframeYesNoHasValue({ sqlTimestamp: sqlTimestamp, connection: x.connection }); result = common.FieldResultEnum.Yesno; break; } default: { item.errors.push( new BmError({ title: enums.ErTitleEnum.WRONG_TIMEFRAMES_ELEMENT, message: `Element "${timeframe}" is wrong`, lines: [ { line: field.timeframes_line_num, name: x.fileName, path: x.filePath } ] }) ); return; // next timeframe } } let newDimension: interfaces.Dimension = { hidden: field.hidden, hidden_line_num: 0, label: label, label_line_num: 0, // description: undefined, sql: sqlTransformed, sql_line_num: field.sql_line_num, type: common.FieldTypeEnum.Custom, type_line_num: 0, result: result, result_line_num: 0, unnest: field.unnest, unnest_line_num: 0, format_number: result === common.FieldResultEnum.Number ? ',' : undefined, format_number_line_num: 0, currency_prefix: result === common.FieldResultEnum.Number ? '$' : undefined, currency_prefix_line_num: 0, currency_suffix: result === common.FieldResultEnum.Number ? '' : undefined, currency_suffix_line_num: 0, // group_label: groupLabel, group_label_line_num: 0, group_description: groupDescription, group_description_line_num: 0, // name: name, name_line_num: field.name_line_num, fieldClass: common.FieldClassEnum.Dimension, // sqlReal: undefined, groupId: field.name, sqlTimestamp: result === common.FieldResultEnum.Ts ? sqlTimestamp : undefined, sqlTimestampName: result === common.FieldResultEnum.Ts ? 
field.name + common.TRIPLE_UNDERSCORE + constants.TIMESTAMP : undefined // sqlTimestampReal: undefined, }; newFields.push(newDimension); }); // no push of time field (transformed to dimensions) }); if (errorsOnStart === item.errors.length) { x.fields = newFields; newEntities.push(x); } }); helper.log(cs, caller, func, structId, enums.LogTypeEnum.Errors, item.errors); helper.log( cs, caller, func, structId, enums.LogTypeEnum.Entities, newEntities ); return newEntities; }
the_stack
'use strict'; import React, { Component } from 'react'; import { StyleSheet, Text, TouchableOpacity, View, ActivityIndicator, PanResponder, ToastAndroid, } from 'react-native'; import { Picker } from '@react-native-picker/picker' import Video, { TextTrackType } from 'react-native-video'; class VideoPlayer extends Component { state = { rate: 1, volume: 1, muted: false, resizeMode: 'contain', duration: 0.0, currentTime: 0.0, videoWidth: 0, videoHeight: 0, paused: false, fullscreen: true, decoration: true, isLoading: false, seekerFillWidth: 0, seekerPosition: 0, seekerOffset: 0, seeking: false, audioTracks: [], textTracks: [], selectedAudioTrack: undefined, selectedTextTrack: undefined, srcListId: 0, loop: false, }; seekerWidth = 0 srcList = [ require('./broadchurch.mp4'), { description: '(dash) sintel subtitles', uri: 'https://bitmovin-a.akamaihd.net/content/sintel/sintel.mpd', }, { description: '(mp4) big buck bunny', uri: 'http://d23dyxeqlo5psv.cloudfront.net/big_buck_bunny.mp4', }, { description: '(hls|live) red bull tv', uri: 'https://rbmn-live.akamaized.net/hls/live/590964/BoRB-AT/master_928.m3u8' }, { description: '(mp4|subtitles) demo with sintel Subtitles', uri: 'http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&ipbits=0&expire=19000000000&signature=51AF5F39AB0CEC3E5497CD9C900EBFEAECCCB5C7.8506521BFC350652163895D4C26DEE124209AA9E&key=ik0', type: 'mpd', }, { description: '(no url) Stopped playback', uri: undefined }, { description: '(no view) no View', noView: true, }, ] video: Video; seekPanResponder: PanResponder | undefined; onLoad = (data: any) => { this.setState({ duration: data.duration, loading: false, }); this.onAudioTracks(data.audioTracks) this.onTextTracks(data.textTracks) }; onProgress = (data: any) => { if (!this.state.seeking) { const position = this.calculateSeekerPosition() this.setSeekerPosition(position) } this.setState({ 
currentTime: data.currentTime }) }; onVideoLoadStart = () => { console.log('onVideoLoadStart') this.setState({ isLoading: true }) } onAudioTracks = (data: any) => { const selectedTrack = data.audioTracks?.find((x: any) => { return x.selected }) this.setState({ audioTracks: data, }) if (selectedTrack?.language) { this.setState({ selectedAudioTrack: { type: 'language', value: selectedTrack?.language, }, }) } } onTextTracks = (data: any) => { const selectedTrack = data.textTracks?.find((x: any) => { return x.selected }) this.setState({ textTracks: data, }) if (selectedTrack?.language) { this.setState({ textTracks: data, selectedTextTrack: { type: 'language', value: selectedTrack?.language, }, }) } } onAspectRatio = (data: any) => { console.log('onAspectRadio called ' + JSON.stringify(data)) this.setState({ videoWidth: data.width, videoHeight: data.height, }) } onVideoBuffer = (param: any) => { console.log('onVideoBuffer') this.setState({ isLoading: param.isBuffering }) } onReadyForDisplay = () => { console.log('onReadyForDisplay') this.setState({ isLoading: false }) } onAudioBecomingNoisy = () => { this.setState({ paused: true }) }; onAudioFocusChanged = (event: { hasAudioFocus: boolean }) => { this.setState({ paused: !event.hasAudioFocus }) }; getCurrentTimePercentage = () => { if (this.state.currentTime > 0 && this.state.duration !== 0) { return this.state.currentTime / this.state.duration; } return 0; }; renderRateControl(rate: number) { const isSelected = (this.state.rate === rate); return ( <TouchableOpacity onPress={() => { this.setState({ rate }) }}> <Text style={[styles.controlOption, { fontWeight: isSelected ? 'bold' : 'normal' }]}> {rate} </Text> </TouchableOpacity> ); } renderResizeModeControl(resizeMode: string) { const isSelected = (this.state.resizeMode === resizeMode); return ( <TouchableOpacity onPress={() => { this.setState({ resizeMode }) }}> <Text style={[styles.controlOption, { fontWeight: isSelected ? 
'bold' : 'normal' }]}> {resizeMode} </Text> </TouchableOpacity> ) } renderVolumeControl(volume: number) { const isSelected = (this.state.volume === volume); return ( <TouchableOpacity onPress={() => { this.setState({ volume }) }}> <Text style={[styles.controlOption, { fontWeight: isSelected ? 'bold' : 'normal' }]}> {volume * 100}% </Text> </TouchableOpacity> ) } toast = (visible: boolean, message: string) => { if (visible) { ToastAndroid.showWithGravityAndOffset( message, ToastAndroid.LONG, ToastAndroid.BOTTOM, 25, 50, ) return null } return null } onError = (err: any) => { console.log(JSON.stringify(err)) this.toast(true, 'error: ' + err?.error?.code) } onEnd = () => { this.channelUp() }; toggleFullscreen() { this.setState({ fullscreen: !this.state.fullscreen }) } toggleDecoration() { this.setState({ decoration: !this.state.decoration }) if (this.state.decoration) { this.video.dismissFullscreenPlayer() } else { this.video.presentFullscreenPlayer() } } goToChannel(channel: any) { this.setState({ srcListId: channel, duration: 0.0, currentTime: 0.0, videoWidth: 0, videoHeight: 0, isLoading: false, audioTracks: [], textTracks: [], selectedAudioTrack: undefined, selectedTextTrack: undefined, }) } channelUp() { console.log('channel up') this.goToChannel((this.state.srcListId + 1) % this.srcList.length) } channelDown() { console.log('channel down') this.goToChannel((this.state.srcListId + this.srcList.length - 1) % this.srcList.length) } componentDidMount() { this.initSeekPanResponder() } renderDecorationsControl() { return ( <TouchableOpacity onPress={() => { this.toggleDecoration() }} > <Text style={[styles.controlOption]}>{'decoration'}</Text> </TouchableOpacity> ) } renderFullScreenControl() { return ( <TouchableOpacity onPress={() => { this.toggleFullscreen() }} > <Text style={[styles.controlOption]}>{'fullscreen'}</Text> </TouchableOpacity> ) } renderPause() { return ( <TouchableOpacity onPress={() => { this.setState({ paused: !this.state.paused }) }} > <Text 
style={[styles.controlOption]}> {this.state.paused ? 'pause' : 'playing'} </Text> </TouchableOpacity> ) } renderRepeatModeControl() { return ( <TouchableOpacity onPress={() => { this.setState({ loop: !this.state.loop }) }} > <Text style={[styles.controlOption]}> {this.state.loop ? 'loop enable' : 'loop disable'} </Text> </TouchableOpacity> ) } renderLeftControl() { return ( <View> <TouchableOpacity onPress={() => { this.channelDown() }} > <Text style={[styles.leftRightControlOption]}>{'ChDown'}</Text> </TouchableOpacity> </View> // onTimelineUpdated ) } renderRightControl() { return ( <View> <TouchableOpacity onPress={() => { this.channelUp() }} > <Text style={[styles.leftRightControlOption]}>{'ChUp'}</Text> </TouchableOpacity> </View> ) } /** * Render the seekbar and attach its handlers */ /** * Constrain the location of the seeker to the * min/max value based on how big the * seeker is. * * @param {float} val position of seeker handle in px * @return {float} constrained position of seeker handle in px */ constrainToSeekerMinMax(val = 0) { if (val <= 0) { return 0 } else if (val >= this.seekerWidth) { return this.seekerWidth } return val } /** * Set the position of the seekbar's components * (both fill and handle) according to the * position supplied. * * @param {float} position position in px of seeker handle} */ setSeekerPosition(position = 0) { const state = this.state position = this.constrainToSeekerMinMax(position) state.seekerFillWidth = position state.seekerPosition = position if (!state.seeking) { state.seekerOffset = position } this.setState(state) } /** * Calculate the position that the seeker should be * at along its track. * * @return {float} position of seeker handle in px based on currentTime */ calculateSeekerPosition() { const percent = this.state.currentTime / this.state.duration return this.seekerWidth * percent } /** * Return the time that the video should be at * based on where the seeker handle is. 
* * @return {float} time in ms based on seekerPosition. */ calculateTimeFromSeekerPosition() { const percent = this.state.seekerPosition / this.seekerWidth return this.state.duration * percent } /** * Get our seekbar responder going */ initSeekPanResponder() { this.seekPanResponder = PanResponder.create({ // Ask to be the responder. onStartShouldSetPanResponder: (evt, gestureState) => true, onMoveShouldSetPanResponder: (evt, gestureState) => true, /** * When we start the pan tell the machine that we're * seeking. This stops it from updating the seekbar * position in the onProgress listener. */ onPanResponderGrant: (evt, gestureState) => { const state = this.state // this.clearControlTimeout() const position = evt.nativeEvent.locationX this.setSeekerPosition(position) state.seeking = true this.setState(state) }, /** * When panning, update the seekbar position, duh. */ onPanResponderMove: (evt, gestureState) => { const position = this.state.seekerOffset + gestureState.dx this.setSeekerPosition(position) }, /** * On release we update the time and seek to it in the video. * If you seek to the end of the video we fire the * onEnd callback */ onPanResponderRelease: (evt, gestureState) => { const time = this.calculateTimeFromSeekerPosition() const state = this.state if (time >= state.duration && !state.isLoading) { state.paused = true this.onEnd() } else { this.video?.seek(time) state.seeking = false } this.setState(state) }, }) } renderSeekBar() { if (!this.seekPanResponder) { return null } return ( <View style={styles.seekbarContainer} {...this.seekPanResponder.panHandlers} {...styles.generalControls} > <View style={styles.seekbarTrack} onLayout={(event) => (this.seekerWidth = event.nativeEvent.layout.width)} pointerEvents={'none'} > <View style={[ styles.seekbarFill, { width: this.state.seekerFillWidth > 0 ? 
this.state.seekerFillWidth : 0, backgroundColor: '#FFF', }, ]} pointerEvents={'none'} /> </View> <View style={[ styles.seekbarHandle, { left: this.state.seekerPosition > 0 ? this.state.seekerPosition : 0 }, ]} pointerEvents={'none'} > <View style={[ styles.seekbarCircle, { backgroundColor: '#FFF' }, ]} pointerEvents={'none'} /> </View> </View> ) } IndicatorLoadingView() { if (this.state.isLoading) return <ActivityIndicator color="#3235fd" size="large" style={styles.IndicatorStyle} /> else return <View /> } renderOverlay() { return ( <> {this.IndicatorLoadingView()} <View style={styles.topControls}> <Text style={[styles.controlOption]}> {this.srcList[this.state.srcListId]?.description || 'local file'} </Text> </View> <View style={styles.leftControls}> <View style={styles.resizeModeControl}>{this.renderLeftControl()}</View> </View> <View style={styles.rightControls}> <View style={styles.resizeModeControl}>{this.renderRightControl()}</View> </View> <View style={styles.bottomControls}> <View style={styles.generalControls}> <View style={styles.resizeModeControl}>{this.renderPause()}</View> <View style={styles.resizeModeControl}> {this.renderRepeatModeControl()} </View> <View style={styles.resizeModeControl}> {this.renderFullScreenControl()} </View> <View style={styles.resizeModeControl}> {this.renderDecorationsControl()} </View> </View> <View style={styles.generalControls}> <View style={styles.rateControl}> {this.renderRateControl(0.25)} {this.renderRateControl(0.5)} {this.renderRateControl(1.0)} {this.renderRateControl(1.5)} {this.renderRateControl(2.0)} </View> <View style={styles.volumeControl}> {this.renderVolumeControl(0.5)} {this.renderVolumeControl(1)} {this.renderVolumeControl(1.5)} </View> <View style={styles.resizeModeControl}> {this.renderResizeModeControl('cover')} {this.renderResizeModeControl('contain')} {this.renderResizeModeControl('stretch')} </View> </View> {this.renderSeekBar()} <View style={styles.generalControls}> <Text 
style={styles.controlOption}>AudioTrack</Text> {this.state.audioTracks?.length <= 0 ? ( <Text style={styles.controlOption}>empty</Text> ) : ( <Picker style={styles.picker} selectedValue={this.state.selectedAudioTrack?.value} onValueChange={(itemValue, itemIndex) => { console.log('on audio value change ' + itemValue) this.setState({ selectedAudioTrack: { type: 'language', value: itemValue, }, }) }} > {this.state.audioTracks.map((track) => { return ( <Picker.Item label={track.language} value={track.language} key={track.language} /> ) })} </Picker> )} <Text style={styles.controlOption}>TextTrack</Text> {this.state.textTracks?.length <= 0 ? ( <Text style={styles.controlOption}>empty</Text> ) : ( <Picker style={styles.picker} selectedValue={this.state.selectedTextTrack?.value} onValueChange={(itemValue, itemIndex) => { console.log('on value change ' + itemValue) this.setState({ selectedTextTrack: { type: 'language', value: itemValue, }, }) }} > <Picker.Item label={'none'} value={'none'} key={'none'} /> {this.state.textTracks.map((track) => ( <Picker.Item label={track.language} value={track.language} key={track.language} /> ))} </Picker> )} </View> </View> </> ) } renderVideoView() { const viewStyle = this.state.fullscreen ? 
styles.fullScreen : styles.halfScreen return ( <TouchableOpacity style={viewStyle}> <Video ref={(ref: Video) => { this.video = ref }} source={this.srcList[this.state.srcListId]} style={viewStyle} rate={this.state.rate} paused={this.state.paused} volume={this.state.volume} muted={this.state.muted} resizeMode={this.state.resizeMode} onLoad={this.onLoad} onProgress={this.onProgress} onEnd={this.onEnd} progressUpdateInterval={1000} onError={this.onError} onAudioBecomingNoisy={this.onAudioBecomingNoisy} onAudioFocusChanged={this.onAudioFocusChanged} onLoadStart={this.onVideoLoadStart} onVideoAspectRatio={this.onAspectRatio} onReadyForDisplay={this.onReadyForDisplay} onBuffer={this.onVideoBuffer} repeat={this.state.loop} selectedTextTrack={this.state.selectedTextTrack} selectedAudioTrack={this.state.selectedAudioTrack} /> </TouchableOpacity> ) } render() { return ( <View style={styles.container}> {this.srcList[this.state.srcListId]?.noView ? null : this.renderVideoView()} {this.renderOverlay()} </View> ) } } const styles = StyleSheet.create({ container: { flex: 1, justifyContent: 'center', alignItems: 'center', backgroundColor: 'black', }, halfScreen: { position: 'absolute', top: 50, left: 50, bottom: 100, right: 100, }, fullScreen: { position: 'absolute', top: 0, left: 0, bottom: 0, right: 0, }, bottomControls: { backgroundColor: 'transparent', borderRadius: 5, position: 'absolute', bottom: 20, left: 20, right: 20, }, leftControls: { backgroundColor: 'transparent', borderRadius: 5, position: 'absolute', top: 20, bottom: 20, left: 20, }, rightControls: { backgroundColor: 'transparent', borderRadius: 5, position: 'absolute', top: 20, bottom: 20, right: 20, }, topControls: { backgroundColor: 'transparent', borderRadius: 4, position: 'absolute', top: 20, left: 20, right: 20, flex: 1, flexDirection: 'row', overflow: 'hidden', paddingBottom: 10, }, generalControls: { flex: 1, flexDirection: 'row', borderRadius: 4, overflow: 'hidden', paddingBottom: 10, }, rateControl: { flex: 
1, flexDirection: 'row', justifyContent: 'center', }, volumeControl: { flex: 1, flexDirection: 'row', justifyContent: 'center', }, resizeModeControl: { flex: 1, flexDirection: 'row', alignItems: 'center', justifyContent: 'center', }, leftRightControlOption: { alignSelf: 'center', fontSize: 11, color: 'white', padding: 10, lineHeight: 12, }, controlOption: { alignSelf: 'center', fontSize: 11, color: 'white', paddingLeft: 2, paddingRight: 2, lineHeight: 12, }, IndicatorStyle: { flex: 1, justifyContent: 'center', }, seekbarContainer: { flex: 1, flexDirection: 'row', borderRadius: 4, height: 30, }, seekbarTrack: { backgroundColor: '#333', height: 1, position: 'relative', top: 14, width: '100%', }, seekbarFill: { backgroundColor: '#FFF', height: 1, width: '100%', }, seekbarHandle: { position: 'absolute', marginLeft: -7, height: 28, width: 28, }, seekbarCircle: { borderRadius: 12, position: 'relative', top: 8, left: 8, height: 12, width: 12, }, picker: { color: 'white', flex: 1, flexDirection: 'row', justifyContent: 'center', }, }); export default VideoPlayer
the_stack
// Tests for the <apollo-client> custom element: propagation of the Apollo
// client to descendant Apollo elements/controllers (including through shadow
// roots), client creation from the `uri` attribute, and interop with the
// FAST / Lit / Polymer / Haunted / Hybrids view-layer packages.
import type * as I from '@apollo-elements/core/types';
import type { SinonSpy } from 'sinon';
import type {
  NonNullableParamQueryData,
  NonNullableParamQueryVariables,
  NoParamQueryData,
  NoParamQueryVariables,
} from '@apollo-elements/test';
import type { ApolloQueryController, ApolloQueryElement } from '@apollo-elements/core';

import { gql, NormalizedCacheObject, TypePolicies } from '@apollo/client/core';
import { ApolloClient, TypedDocumentNode } from '@apollo/client/core';
import { ApolloController } from '@apollo-elements/core';

import * as S from '@apollo-elements/test/schema';
import * as Core from '@apollo-elements/core';
import * as FAST from '@apollo-elements/fast';
import * as Lit from '@apollo-elements/lit-apollo';
import * as LitDeco from 'lit/decorators.js';
import * as Haunted from '@apollo-elements/haunted';
import * as Hybrids from '@apollo-elements/hybrids';

import '@apollo-elements/polymer/polymer-apollo-query';

import { aTimeout, defineCE, expect, fixture, fixtureSync, nextFrame, oneEvent, } from '@open-wc/testing';
import { html, unsafeStatic } from 'lit/static-html.js';
import { ApolloQueryMixin, ApolloMutationMixin } from '@apollo-elements/mixins';
import { ApolloClientElement } from './apollo-client';
import { makeClient, teardownClient } from '@apollo-elements/test';
import { spy, stub, SinonStub } from 'sinon';

import './apollo-client';

// Replace window.fetch with a sinon stub so no real network requests are made.
function mockFetch() { stub(window, 'fetch'); }

// Undo mockFetch(); optional-chained so it is a no-op when fetch was never stubbed.
function restoreFetch() { (window.fetch as SinonStub).restore?.(); }

describe('<apollo-client>', function() {
  describe('with basic elements', function() {
    // Mutation element whose shadow root contains <deep-element>; used to
    // prove client assignment crosses one level of shadow DOM.
    class ShallowElement<D extends I.MaybeTDN = I.MaybeTDN, V = I.MaybeVariables<D>> extends ApolloMutationMixin(HTMLElement)<D, V> {
      declare shadowRoot: ShadowRoot;

      constructor() {
        super();
        this.attachShadow({ mode: 'open' }).innerHTML = ` <deep-element></deep-element> <div> <span></span> </div> `;
        this.shadowRoot.appendChild(new Text('yipee'));
      }
    }

    // Plain (non-Apollo) element between the shallow and query elements;
    // must be traversed but never registered as a controller host.
    class DeepElement extends HTMLElement {
      declare shadowRoot: ShadowRoot;

      constructor() {
        super();
        this.attachShadow({ mode: 'open' }).innerHTML = /* html */` <query-element></query-element> <div id="fake"></div> <svg></svg> `;
        this.shadowRoot.appendChild(new Text('wheee'));
      }
    }

    // Query element nested two shadow roots deep.
    class QueryElement<D extends I.MaybeTDN = I.MaybeTDN, V = I.MaybeVariables<D>> extends ApolloQueryMixin(HTMLElement)<D, V> {
      query = S.NoParamQuery;

      constructor() {
        super();
        this.attachShadow({ mode: 'open' }).innerHTML = /* html */ ` <div id="fake"></div> `;
      }
    }

    before(function() {
      customElements.define('shallow-element', ShallowElement);
      customElements.define('deep-element', DeepElement);
      customElements.define('query-element', QueryElement);
    });

    let client: ApolloClient<NormalizedCacheObject> | undefined;
    // Holds any pre-existing global client so tests can restore it afterwards.
    let cached: ApolloClient<NormalizedCacheObject> | undefined;
    let element: ApolloClientElement | null;
    let shallow: ShallowElement | null;
    let deep: DeepElement | null;
    let query: QueryElement<NoParamQueryData, NoParamQueryVariables> | null;

    afterEach(function() {
      (QueryElement.prototype.subscribe as SinonSpy)?.restore?.();
      delete window.__APOLLO_CLIENT__;
      client = undefined;
      element = null;
      shallow = null;
      deep = null;
      query = null;
    });

    describe('without client', function() {
      beforeEach(async function() {
        spy(QueryElement.prototype, 'subscribe');
        element = await fixture<ApolloClientElement>(html` <apollo-client> <shallow-element></shallow-element> <deep-element></deep-element> <query-element></query-element> </apollo-client> `);
        shallow = element.querySelector('shallow-element');
        deep = shallow!.shadowRoot.querySelector('deep-element');
        query = deep!.shadowRoot.querySelector('query-element');
        expect(window.__APOLLO_CLIENT__, 'no global client').to.be.undefined;
      });

      it('does not initialize elements', function() {
        expect(query!.subscribe).to.not.have.been.called;
      });
    });

    describe('with client', function() {
      beforeEach(async function setupElements() {
        spy(QueryElement.prototype, 'subscribe');
        // Stash and clear the global client so only the element-provided
        // client can reach the children.
        cached = window.__APOLLO_CLIENT__;
        delete window.__APOLLO_CLIENT__;
        client = makeClient();
        spy(client, 'watchQuery');
        element = await fixture<ApolloClientElement>(html` <apollo-client> <shallow-element></shallow-element> <deep-element></deep-element> <query-element></query-element> </apollo-client> `);
        element.client = client as ApolloClientElement['client'];
        shallow = element.querySelector('shallow-element');
        deep = shallow!.shadowRoot.querySelector('deep-element');
        query = deep!.shadowRoot.querySelector('query-element');
        expect(window.__APOLLO_CLIENT__, 'no global client').to.be.undefined;
      });

      afterEach(function() {
        (client?.watchQuery as SinonSpy).restore?.();
        window.__APOLLO_CLIENT__ = cached;
      });

      beforeEach(nextFrame);

      it('lists all child controllers', function() {
        expect(element!.controllers).to.include(shallow!.controller);
        expect(element!.controllers).to.not.include(deep);
        expect(element!.controllers).to.include(query!.controller);
        expect(element!.controllers.every(x => x instanceof ApolloController), 'ApolloController').to.be.true;
      });

      it('assigns client to shallow elements', function() {
        expect(shallow!.client, 'shallow').to.equal(client);
      });

      it('assigns client to deep elements', function() {
        expect(query!.client, 'deep').to.equal(client);
      });

      it('subscribes elements', function() {
        expect(client!.watchQuery).to.have.been.called;
      });

      describe('when setting client', function() {
        const next = makeClient() as ApolloClientElement['client'];

        beforeEach(function() { element!.client = next; });

        it('reassigns client to shallow elements', function() {
          expect(shallow!.client, 'shallow').to.equal(next);
          expect(shallow!.client, 'shallow').to.not.equal(client);
        });

        it('reassigns client to deep elements', function() {
          expect(query!.client, 'deep').to.equal(next);
          expect(query!.client, 'deep').to.not.equal(client);
        });

        describe('when element is a query element', function() {
          beforeEach(nextFrame);
          it('subscribes', function() {
            expect(query!.data).to.be.ok;
          });
        });
      });

      describe('moving the deep element out of scope', function() {
        beforeEach(function() { document.body.append(deep!); });
        afterEach(function() { deep!.remove(); });

        it('deletes deep element client', function() {
          expect(query!.client).to.not.be.ok;
        });

        it('removes deep from elements list', function() {
          expect(element!.controllers).to.not.include(deep);
        });
      });

      describe('when a non-apollo-element fires event', function() {
        let controllers: readonly ApolloController[];

        beforeEach(function() { ({ controllers } = element!); });

        beforeEach(function() {
          deep!.shadowRoot.getElementById('fake')!
            .dispatchEvent(new Event('apollo-element-connected', { bubbles: true, composed: true }));
        });

        it('does nothing', function() {
          expect(element!.controllers).to.deep.equal(controllers);
        });
      });

      describe('setting typePolicies', function() {
        const user = Symbol('user');

        const typePolicies: TypePolicies = {
          Query: {
            fields: {
              user() { return user; },
            },
          },
        };

        beforeEach(function() { element!.typePolicies = typePolicies; });
        afterEach(function() { element!.typePolicies = undefined; });

        it('loads the type policies', function() {
          expect(client?.readQuery({ query: gql`{ user @client }` }).user).to.equal(user);
        });

        it('returns the set typePolicies', function() {
          expect(element!.typePolicies).to.equal(typePolicies);
        });
      });

      describe('setting non-string uri', function() {
        beforeEach(function() {
          spy(element!, 'createApolloClient');
          // @ts-expect-error: bad input
          element.uri = 1;
        });

        afterEach(function() {
          // @ts-expect-error: spy
          element.createApolloClient.restore();
        });

        it('does nothing', function() {
          expect(element!.uri).to.be.undefined;
          expect(element!.createApolloClient).to.not.have.been.called;
        });
      });
    });

    describe('with uri', function() {
      beforeEach(mockFetch);
      afterEach(restoreFetch);

      it('creates a new client', async function() {
        element = fixtureSync<ApolloClientElement>(html` <apollo-client uri="/graphql"></apollo-client> `);
        const { detail } = await oneEvent(element, 'client-changed');
        expect(detail.value).to.be.an.instanceOf(ApolloClient);
        expect(detail.value).to.eq(element.client);
      });
    });

    describe('with uri and validate-variables', function() {
      beforeEach(mockFetch);
      afterEach(restoreFetch);

      class ApolloQueryEl extends ApolloQueryMixin(HTMLElement)< NonNullableParamQueryData, NonNullableParamQueryVariables > { }

      const tag = defineCE(ApolloQueryEl);
      const tagName = unsafeStatic(tag);

      it('creates a new client', async function() {
        element = await fixture<ApolloClientElement>(html` <apollo-client uri="/graphql" validate-variables> <${tagName} .query="${S.NonNullableParamQuery}" .variables="${{ 'nullable': true }}" ></${tagName}> </apollo-client> `);
        await aTimeout(100);
        expect(element.querySelector<ApolloQueryEl>(tag)!.query).to.be.ok;
        expect(element.querySelector<ApolloQueryEl>(tag)!.variables).to.be.ok;
        // first call is to introspect, and occurs regardless of operations
        expect(window.fetch).to.not.have.been.calledTwice;
      });
    });
  });

  describe('with various children', function() {
    // Query whose single variable is the host's tag name, so each fixture's
    // request body identifies which element made it.
    const TagNameQuery: TypedDocumentNode<{ tagName: string }, { tagName: string }> = gql`query TagNameQuery($tagName: String!) { tagName(tagName: $tagName) { tagName } }`;

    beforeEach(teardownClient);
    beforeEach(mockFetch);
    afterEach(restoreFetch);

    describe('that implement the apollo-query interface', function() {
      const TAG_NAMES = {
        fast: 'fast-apollo-query',
        lit: 'lit-apollo-query',
        polymer: 'polymer-apollo-query',
      };

      /* eslint-disable @typescript-eslint/no-unused-vars */
      @FAST.customElement({ name: TAG_NAMES.fast })
      class FASTApolloQuery extends FAST.ApolloQuery { }

      @LitDeco.customElement(TAG_NAMES.lit)
      class LitApolloQuery extends Lit.ApolloQuery { }
      /* eslint-enable @typescript-eslint/no-unused-vars */

      Object.values(TAG_NAMES).forEach(tagName => {
        describe(tagName, function() {
          beforeEach(async function(this: Mocha.Context) {
            const name = unsafeStatic(tagName);
            await fixture(html` <apollo-client uri="/graphql"> <${name} .query="${TagNameQuery}" .variables="${{ tagName }}"></${name}> </apollo-client> `);
          });

          beforeEach(() => aTimeout(50));

          it('fetches the query', async function() {
            expect(window.fetch).to.have.been.calledOnce;
            const [, { body }] = (window.fetch as SinonSpy).firstCall.args;
            expect(JSON.parse(body).variables.tagName).to.equal(tagName);
          });
        });
      });
    });

    describe('that implement an ApolloQuery controller', function() {
      // Each fixture pairs a tag name with a lazy `register` thunk so element
      // definition happens inside beforeEach, not at module load.
      const FIXTURES = {
        haunted: {
          tagName: 'haunted-use-query',
          register: () => {
            const { tagName } = FIXTURES.haunted;
            customElements.define(tagName, Haunted.component( function HauntedApolloQuery(this: HTMLElement) { const { data } = Haunted.useQuery(TagNameQuery, { hostElement: this, variables: { tagName } }); return JSON.stringify(data); } as any));
          },
        },
        lit: {
          tagName: 'lit-apollo-query-controller',
          register: () => {
            const { tagName } = FIXTURES.lit;
            @LitDeco.customElement(tagName)
            class LitApolloQueryController extends Lit.LitElement {
              query = new Core.ApolloQueryController(this, TagNameQuery, { variables: { tagName } });
            }
          },
        },
        hybrids: {
          tagName: 'hybrids-apollo-query-controller',
          register: () => {
            const { tagName } = FIXTURES.hybrids;
            Hybrids.define(tagName, { render: host => Hybrids.html`${JSON.stringify(host.query.data)}`, query: Hybrids.query(TagNameQuery, { variables: { tagName } }), } as Hybrids.Hybrids<HTMLElement & { query: ApolloQueryController<typeof TagNameQuery> }>);
          },
        },
      };

      Object.values(FIXTURES).forEach(({ tagName, register }) => {
        describe(tagName, function() {
          beforeEach(register);

          beforeEach(async function() {
            await fixture(` <apollo-client uri="/graphql"> <${tagName}></${tagName}> </apollo-client> `);
          });

          beforeEach(() => aTimeout(50));

          it('fetches the query', function() {
            expect(window.fetch).to.have.been.calledOnce;
            const [, init] = (window.fetch as SinonSpy).firstCall.args;
            expect(JSON.parse(init.body).variables.tagName).to.equal(tagName);
          });
        });
      });
    });
  });
});
the_stack
import { ANTLRErrorListener } from "./ANTLRErrorListener";
import { CharStream } from "./CharStream";
import { CommonTokenFactory } from "./CommonTokenFactory";
import { IntegerStack } from "./misc/IntegerStack";
import { Interval } from "./misc/Interval";
import { IntStream } from "./IntStream";
import { LexerATNSimulator } from "./atn/LexerATNSimulator";
import { LexerNoViableAltException } from "./LexerNoViableAltException";
import { Override } from "./Decorators";
import { RecognitionException } from "./RecognitionException";
import { Recognizer } from "./Recognizer";
import { Token } from "./Token";
import { TokenFactory } from "./TokenFactory";
import { TokenSource } from "./TokenSource";

/** A lexer is recognizer that draws input symbols from a character stream.
 *  lexer grammars result in a subclass of this object. A Lexer object
 *  uses simplified match() and error recovery mechanisms in the interest
 *  of speed.
 */
export abstract class Lexer extends Recognizer<number, LexerATNSimulator> implements TokenSource {
	public static readonly DEFAULT_MODE: number = 0;
	// Sentinel rule outcomes: MORE continues matching into the same token,
	// SKIP discards the matched text and starts over.
	public static readonly MORE: number = -2;
	public static readonly SKIP: number = -3;

	static get DEFAULT_TOKEN_CHANNEL(): number {
		return Token.DEFAULT_CHANNEL;
	}

	static get HIDDEN(): number {
		return Token.HIDDEN_CHANNEL;
	}

	// Valid Unicode code point range accepted by the lexer.
	public static readonly MIN_CHAR_VALUE: number = 0x0000;
	public static readonly MAX_CHAR_VALUE: number = 0x10FFFF;

	public _input: CharStream;

	// Paired (source, stream) passed to the token factory for every token.
	protected _tokenFactorySourcePair: { source: TokenSource, stream: CharStream };

	/** How to create token objects */
	protected _factory: TokenFactory = CommonTokenFactory.DEFAULT;

	/** The goal of all lexer rules/methods is to create a token object.
	 *  This is an instance variable as multiple rules may collaborate to
	 *  create a single token. nextToken will return this object after
	 *  matching lexer rule(s). If you subclass to allow multiple token
	 *  emissions, then set this to the last token to be matched or
	 *  something non-undefined so that the auto token emit mechanism will not
	 *  emit another token.
	 */
	public _token: Token | undefined;

	/** What character index in the stream did the current token start at?
	 *  Needed, for example, to get the text for current token. Set at
	 *  the start of nextToken.
	 */
	public _tokenStartCharIndex: number = -1;

	/** The line on which the first character of the token resides */
	public _tokenStartLine: number = 0;

	/** The character position of first character within the line */
	public _tokenStartCharPositionInLine: number = 0;

	/** Once we see EOF on char stream, next token will be EOF.
	 *  If you have DONE : EOF ; then you see DONE EOF.
	 */
	public _hitEOF: boolean = false;

	/** The channel number for the current token */
	public _channel: number = 0;

	/** The token type for the current token */
	public _type: number = 0;

	// Stack of saved modes for pushMode/popMode nesting.
	public readonly _modeStack: IntegerStack = new IntegerStack();
	public _mode: number = Lexer.DEFAULT_MODE;

	/** You can set the text for the current token to override what is in
	 *  the input char buffer. Set `text` or can set this instance var.
	 */
	public _text: string | undefined;

	constructor(input: CharStream) {
		super();
		this._input = input;
		this._tokenFactorySourcePair = { source: this, stream: input };
	}

	public reset(): void;
	public reset(resetInput: boolean): void;
	// Resets all per-token and per-mode state; when resetInput is omitted or
	// true, also rewinds the underlying char stream to position 0.
	public reset(resetInput?: boolean): void {
		// wack Lexer state variables
		if (resetInput === undefined || resetInput) {
			this._input.seek(0); // rewind the input
		}

		this._token = undefined;
		this._type = Token.INVALID_TYPE;
		this._channel = Token.DEFAULT_CHANNEL;
		this._tokenStartCharIndex = -1;
		this._tokenStartCharPositionInLine = -1;
		this._tokenStartLine = -1;
		this._text = undefined;

		this._hitEOF = false;
		this._mode = Lexer.DEFAULT_MODE;
		this._modeStack.clear();

		this.interpreter.reset();
	}

	/** Return a token from this source; i.e., match a token on the char
	 *  stream.
	 */
	@Override
	public nextToken(): Token {
		if (this._input == null) {
			throw new Error("nextToken requires a non-null input stream.");
		}

		// Mark start location in char stream so unbuffered streams are
		// guaranteed at least have text of current token
		let tokenStartMarker: number = this._input.mark();
		try {
			// Outer loop restarts whenever a rule result is SKIP; the inner
			// do/while re-runs the ATN while rules report MORE.
			outer:
			while (true) {
				if (this._hitEOF) {
					return this.emitEOF();
				}

				this._token = undefined;
				this._channel = Token.DEFAULT_CHANNEL;
				this._tokenStartCharIndex = this._input.index;
				this._tokenStartCharPositionInLine = this.interpreter.charPositionInLine;
				this._tokenStartLine = this.interpreter.line;
				this._text = undefined;
				do {
					this._type = Token.INVALID_TYPE;
					// System.out.println("nextToken line "+tokenStartLine+" at "+((char)input.LA(1))+
					// " in mode "+mode+
					// " at index "+input.index);
					let ttype: number;
					try {
						ttype = this.interpreter.match(this._input, this._mode);
					} catch (e) {
						if (e instanceof LexerNoViableAltException) {
							this.notifyListeners(e); // report error
							this.recover(e);
							ttype = Lexer.SKIP;
						} else {
							throw e;
						}
					}

					if (this._input.LA(1) === IntStream.EOF) {
						this._hitEOF = true;
					}

					if (this._type === Token.INVALID_TYPE) {
						this._type = ttype;
					}

					if (this._type === Lexer.SKIP) {
						continue outer;
					}
				} while (this._type === Lexer.MORE);

				// A rule action may already have emitted; only auto-emit when
				// no token was produced.
				if (this._token == null) {
					return this.emit();
				}

				return this._token;
			}
		} finally {
			// make sure we release marker after match or
			// unbuffered char stream will keep buffering
			this._input.release(tokenStartMarker);
		}
	}

	/** Instruct the lexer to skip creating a token for current lexer rule
	 *  and look for another token. nextToken() knows to keep looking when
	 *  a lexer rule finishes with token set to SKIP_TOKEN. Recall that
	 *  if token==undefined at end of any token rule, it creates one for you
	 *  and emits it.
	 */
	public skip(): void {
		this._type = Lexer.SKIP;
	}

	public more(): void {
		this._type = Lexer.MORE;
	}

	public mode(m: number): void {
		this._mode = m;
	}

	public pushMode(m: number): void {
		if (LexerATNSimulator.debug) {
			console.log("pushMode " + m);
		}

		this._modeStack.push(this._mode);
		this.mode(m);
	}

	public popMode(): number {
		if (this._modeStack.isEmpty) {
			throw new Error("EmptyStackException");
		}

		if (LexerATNSimulator.debug) {
			console.log("popMode back to " + this._modeStack.peek());
		}

		this.mode(this._modeStack.pop());
		return this._mode;
	}

	@Override
	get tokenFactory(): TokenFactory {
		return this._factory;
	}

	// @Override
	set tokenFactory(factory: TokenFactory) {
		this._factory = factory;
	}

	@Override
	get inputStream(): CharStream {
		return this._input;
	}

	/** Set the char stream and reset the lexer */
	set inputStream(input: CharStream) {
		// Reset without rewinding (the new stream replaces the old one).
		this.reset(false);
		this._input = input;
		this._tokenFactorySourcePair = { source: this, stream: this._input };
	}

	@Override
	get sourceName(): string {
		return this._input.sourceName;
	}

	/** The standard method called to automatically emit a token at the
	 *  outermost lexical rule. The token object should point into the
	 *  char buffer start..stop. If there is a text override in 'text',
	 *  use that to set the token's text. Override this method to emit
	 *  custom Token objects or provide a new factory.
	 */
	public emit(token: Token): Token;

	/** By default does not support multiple emits per nextToken invocation
	 *  for efficiency reasons. Subclass and override this method, nextToken,
	 *  and getToken (to push tokens into a list and pull from that list
	 *  rather than a single variable as this implementation does).
	 */
	public emit(): Token;

	public emit(token?: Token): Token {
		if (!token) {
			token = this._factory.create(
				this._tokenFactorySourcePair, this._type, this._text, this._channel,
				this._tokenStartCharIndex, this.charIndex - 1, this._tokenStartLine,
				this._tokenStartCharPositionInLine);
		}

		this._token = token;
		return token;
	}

	public emitEOF(): Token {
		let cpos: number = this.charPositionInLine;
		let line: number = this.line;
		// EOF token spans [index, index-1], i.e. an empty range at the end.
		let eof: Token = this._factory.create(
			this._tokenFactorySourcePair, Token.EOF, undefined,
			Token.DEFAULT_CHANNEL, this._input.index, this._input.index - 1,
			line, cpos);
		this.emit(eof);
		return eof;
	}

	@Override
	get line(): number {
		return this.interpreter.line;
	}

	set line(line: number) {
		this.interpreter.line = line;
	}

	@Override
	get charPositionInLine(): number {
		return this.interpreter.charPositionInLine;
	}

	set charPositionInLine(charPositionInLine: number) {
		this.interpreter.charPositionInLine = charPositionInLine;
	}

	/** What is the index of the current character of lookahead? */
	get charIndex(): number {
		return this._input.index;
	}

	/** Return the text matched so far for the current token or any
	 *  text override.
	 */
	get text(): string {
		if (this._text != null) {
			return this._text;
		}

		return this.interpreter.getText(this._input);
	}

	/** Set the complete text of this token; it wipes any previous
	 *  changes to the text.
	 */
	set text(text: string) {
		this._text = text;
	}

	/** Override if emitting multiple tokens. */
	get token(): Token | undefined {
		return this._token;
	}

	set token(_token: Token | undefined) {
		this._token = _token;
	}

	set type(ttype: number) {
		this._type = ttype;
	}

	get type(): number {
		return this._type;
	}

	set channel(channel: number) {
		this._channel = channel;
	}

	get channel(): number {
		return this._channel;
	}

	public abstract readonly channelNames: string[];

	public abstract readonly modeNames: string[];

	/** Return a list of all Token objects in input char stream.
	 *  Forces load of all tokens. Does not include EOF token.
	 */
	public getAllTokens(): Token[] {
		let tokens: Token[] = [];
		let t: Token = this.nextToken();
		while (t.type !== Token.EOF) {
			tokens.push(t);
			t = this.nextToken();
		}

		return tokens;
	}

	// Report a no-viable-alternative error for the current (partial) token
	// to the registered error listener, quoting the offending text.
	public notifyListeners(e: LexerNoViableAltException): void {
		let text: string = this._input.getText(
			Interval.of(this._tokenStartCharIndex, this._input.index));
		let msg: string = "token recognition error at: '" + this.getErrorDisplay(text) + "'";

		let listener: ANTLRErrorListener<number> = this.getErrorListenerDispatch();
		if (listener.syntaxError) {
			listener.syntaxError(this, undefined, this._tokenStartLine, this._tokenStartCharPositionInLine, msg, e);
		}
	}

	// Render a character code or string for error messages, escaping
	// newline/tab/carriage-return and the EOF sentinel.
	public getErrorDisplay(s: string | number): string {
		if (typeof s === "number") {
			switch (s) {
			case Token.EOF:
				return "<EOF>";
			case 0x0a:
				return "\\n";
			case 0x09:
				return "\\t";
			case 0x0d:
				return "\\r";
			}

			return String.fromCharCode(s);
		}

		return s.replace(/\n/g, "\\n")
			.replace(/\t/g, "\\t")
			.replace(/\r/g, "\\r");
	}

	public getCharErrorDisplay(c: number): string {
		let s: string = this.getErrorDisplay(c);
		return "'" + s + "'";
	}

	/** Lexers can normally match any char in it's vocabulary after matching
	 *  a token, so do the easy thing and just kill a character and hope
	 *  it all works out. You can instead use the rule invocation stack
	 *  to do sophisticated error recovery if you are in a fragment rule.
	 */
	public recover(re: RecognitionException): void;
	public recover(re: LexerNoViableAltException): void;
	public recover(re: RecognitionException): void {
		if (re instanceof LexerNoViableAltException) {
			if (this._input.LA(1) !== IntStream.EOF) {
				// skip a char and try again
				this.interpreter.consume(this._input);
			}
		} else {
			//System.out.println("consuming char "+(char)input.LA(1)+" during recovery");
			//re.printStackTrace();
			// TODO: Do we lose character or line position information?
			this._input.consume();
		}
	}
}
the_stack
import Taro, { Component, Config } from "@tarojs/taro";
import { View, Text, Image, OpenData, Button } from "@tarojs/components";
import "./index.less";
import { userInfo } from "../orderDetails/service";
import { login } from "../home/service";
// import { myCoupons } from "../../packageA/pages/myCoupons/service";
import { freightPrice, myCoupons } from "./service";
import { AtModal, AtModalHeader, AtModalContent, AtModalAction, AtIcon } from "taro-ui";

// Local state for the "Mine" (personal center) page.
interface IState {
  authorization: boolean;
  query: any;          // logged-in user's profile (null until userInfo() resolves)
  aboutQuery: {
    data?: any;
  };
  visible: boolean;    // whether the login-prompt modal is shown
  couponsTotal?: any;  // pagination object for the user's usable coupons
}

// Personal-center page: shows the user's profile, balance/coupons/points,
// recent orders, and links to secondary services. Default-exported Taro page.
export default class Mine extends Component<null, IState> {
  config: Config = {
    navigationBarTitleText: "个人中心",
    // custom status bar
    navigationStyle: "custom",
    // white status bar text
    navigationBarTextStyle: "white",
  };

  state = {
    authorization: false,
    query: null,
    aboutQuery: { data: null },
    visible: false,
    couponsTotal: null,
  };

  // Load profile, freight info and coupon count when the page mounts.
  async componentWillMount() {
    const token = Taro.getStorageSync('accessToken');
    console.log('token', token); // NOTE(review): debug logging left in
    if (token) {
      const { data } = await userInfo();
      console.log('data', data);
      this.setState({ query: data.userInfo });
      this.setState({ visible: false });
    }
    const aboutResult = await freightPrice();
    this.setState({ aboutQuery: aboutResult });
    // NOTE(review): this reads state synchronously right after setState, so
    // `query` here reflects the pre-update value — confirm the toast ordering
    // is intended.
    const { query } = this.state;
    if (query === null) {
      Taro.showToast({ title: '请先登录!', icon: 'none' });
    }
    const couponsLen = await myCoupons('useable', 1, 1);
    const { pagination } = couponsLen.data.coupons;
    this.setState({ couponsTotal: pagination })
  }

  // Bind phone number
  handleBind() {
    Taro.navigateTo({ url: "../bindPhoneNum/index" });
  }

  // Jump to a specific order-status tab
  handleOrder1(id) {
    Taro.navigateTo({ url: `../theorder/index?id=${id}` });
  }

  // Jump to the full order list
  handleOrderMore() {
    Taro.navigateTo({ url: `../theorder/index?id=0` });
  }

  // WeChat authorization callback: log in with the profile returned by the
  // open-type="getUserInfo" button, cache the access token, then reload state.
  async onGetUserInfo(e) {
    const imageUrl = e.detail.userInfo.avatarUrl;
    const nickname = e.detail.userInfo.nickName;
    const res = await Taro.login();
    const code = res.code;
    const result = await login(code, imageUrl, nickname);
    const token = result.data.accessToken;
    // store the returned token in local cache
    Taro.setStorage({ key: "accessToken", data: token });
    Taro.showTabBar({});
    Taro.showShareMenu({ withShareTicket: true });
    const setAuthorization = () => {
      this.setState({ visible: true });
    };
    // geolocation authorization
    await new Promise(resolve => {
      Taro.getLocation({
        type: "wgs84",
        success(res) {
          resolve(res);
        }
      });
    });
    // check whether user-info permission was granted
    Taro.getSetting({
      success(res) {
        if (res.authSetting["scope.userInfo"]) {
          // already authorized: getUserInfo may be called directly for avatar/nickname
          Taro.showTabBar({});
        } else {
          setAuthorization();
          Taro.hideTabBar({});
        }
      }
    });
    const { data } = await userInfo();
    this.setState({ query: data.userInfo });
    this.setState({ visible: false });
    if (!Taro.getStorageSync("storeId")) {
      Taro.navigateTo({ url: "../nearbystores/index" });
    }
  }

  // Navigation for the balance / coupons / points / favorites row
  handleJump(id) {
    const { query } = this.state;
    const userId = query.id;
    switch (id) {
      case 1:
        Taro.navigateTo({ url: `../../packageA/pages/balance/index?id=${userId}` });
        break;
      case 2:
        Taro.navigateTo({ url: "../../packageA/pages/myCoupons/index" });
        break;
      case 3:
        Taro.navigateTo({ url: `../../packageA/pages/integral/index?id=${userId}` });
        break;
      case 4:
        Taro.navigateTo({ url: "../collection/index" });
        break;
      default:
        break;
    }
  }

  // More services (address book, about, agreement, business qualification)
  handle(id) {
    switch (id) {
      case 1:
        // NOTE(review): empty success callback — chooseAddress result is discarded
        Taro.chooseAddress({ async success() { } });
        break;
      case 4:
        Taro.navigateTo({ url: `../aboutUs/index` });
        break;
      case 5:
        Taro.navigateTo({ url: "../agreement/index" });
        break;
      case 6:
        Taro.navigateTo({ url: "../../packageA/pages/business/index" });
        break;
    }
  }

  // Plus-membership banner tap: members go to the member center, others to top-up.
  lookForward() {
    const { query } = this.state;
    Taro.checkSession({
      success() {
        if (query.role === "member") {
          Taro.navigateTo({ url: '../theMemberCenter/index' });
        } else {
          Taro.navigateTo({ url: '../../packageA/pages/topup/index' });
        }
      },
      fail() {
        Taro.showToast({ title: "请登录", icon: "none" });
      }
    })
  }

  technicalSupport() {
    Taro.navigateToMiniProgram({ appId: "wx4a96aca05249ba58" });
  }

  // Pull-down refresh: re-fetch profile and coupon count
  async onPullDownRefresh() {
    const { data } = await userInfo();
    this.setState({ query: data.userInfo });
    const couponsLen = await myCoupons('useable', 1, 1);
    const { pagination } = couponsLen.data.coupons;
    this.setState({ couponsTotal: pagination })
    setTimeout(() => {
      Taro.stopPullDownRefresh(); // stop the pull-down refresh animation
    }, 1000);
  }

  // "Not now" in the login modal
  temporary() {
    this.setState({ visible: false });
  }

  OpenLogin() {
    this.setState({ visible: true })
  }

  integralExchange() {
    Taro.navigateTo({ url: '../../packageA/pages/activityCoupons/index' });
  }

  render() {
    const { query, visible, couponsTotal } = this.state;
    console.log('mine query', query); // NOTE(review): debug logging left in
    // NOTE(review): balance/point divided by 100 — amounts appear to be stored
    // in cents; confirm against the service layer.
    const otherInformation = [
      { id: 1, number: `${query ? (query.balance / 100) : (0)}`, name: "余额" },
      { id: 2, number: `${couponsTotal ? (couponsTotal.total) : (0)}`, name: "优惠券" },
      { id: 3, number: `${query ? (query.point / 100) : (0)}`, name: "积分" },
      { id: 4, number: `${query ? (query.follow) : (0)}`, name: "收藏" },
    ];
    const orders = [
      { id: 0, name: '已完成', icon: 'https://mengmao-qingying-files.oss-cn-hangzhou.aliyuncs.com/wc.png' },
      { id: 1, name: '已付款', icon: 'https://mengmao-qingying-files.oss-cn-hangzhou.aliyuncs.com/fk.png' },
      { id: 2, name: '待取货', icon: 'https://mengmao-qingying-files.oss-cn-hangzhou.aliyuncs.com/qh.png' },
      { id: 3, name: '待配送', icon: 'https://mengmao-qingying-files.oss-cn-hangzhou.aliyuncs.com/fh.png' },
    ];
    const services = [
      { id: 1, text: "收货地址" },
      { id: 2, text: "在线客服" },
      { id: 5, text: "用户协议" },
      { id: 6, text: "平台资质" }
    ];
    return (
      <View className="index">
        <View className='topInformation'>
          <View className='personalTitle'>个人中心</View>
          {/* top profile section */}
          <View className="userInformation">
            <View className="head">
              <OpenData type="userAvatarUrl" />
            </View>
            <View className="mine">
              {query ? (
                <View className="mine">
                  <OpenData type="userNickName" className="title" />
                  <Text className="phone" onClick={this.handleBind}> {query.phone ? query.phone : '点击绑定手机'} </Text>
                </View>
              ) : (
                <Text className="title2" onClick={this.OpenLogin}> 立即登录 </Text>
              )}
            </View>
            <View className="couponOther">
              <Button className="integralExchange" onClick={this.integralExchange} >兑换</Button>
            </View>
          </View>
          {/* balance / points / rewards / coupons row */}
          <View className="ortherInformationBox top_padding_bottom">
            {otherInformation.map(item => (
              <View className="otherInformation" key={item.id} onClick={this.handleJump.bind(this, item.id)} >
                <Text className="number">{item.number}</Text>
                <Text className="name">{item.name}</Text>
              </View>
            ))}
          </View>
        </View>
        <View className="huiYuan" onClick={this.lookForward}>
          <Text className='huiYuanFirst'>Plus会员</Text>
          <Text className='huiYuanSecond'>【专享】领取会员专享优惠券</Text>
          <Text className='huiYuanThird'>立即领取 > </Text>
        </View>
        {/* my orders */}
        <View className="myStoreBox">
          <View className="myOrdersTopLine">
            <Text className="myOrderText">我的订单</Text>
            <View onClick={this.handleOrderMore}>
              <Text className="textMore">查看更多</Text>
              <AtIcon value='chevron-right' size='20' className='rightArrow'></AtIcon>
            </View>
          </View>
          <View className="ortherInformationBox">
            {orders.map(order => (
              <View className="otherInformation" key={order.id} onClick={this.handleOrder1.bind(this, order.id)} >
                <Image src={order.icon} className="iconImage" />
                <Text className="orderName">{order.name}</Text>
              </View>
            ))}
          </View>
        </View>
        {/* my services */}
        <View className="myServicesBox">
          <View>
            {services.map(service => (
              <View key={service.id} className={ service.id !== 2 ? "one-service" : "one-service-other" } onClick={this.handle.bind(this, service.id)} >
                <View className="service-left-box">
                  {/* NOTE(review): `services` items define no `img` field, so
                      src is undefined here — likely a bug; confirm intended
                      icons or remove this Image. */}
                  <Image src={service.img} className={service.id == 1 ? "service-img" : "serviceImg"} />
                  {service.id !== 2 ? (
                    <Text className="service-left-box-text"> {service.text} </Text>
                  ) : (
                    <Button open-type="contact" className="button"> {service.text} </Button>
                  )}
                </View>
                <AtIcon value='chevron-right' className='rightArrow'></AtIcon>
              </View>
            ))}
          </View>
        </View>
        <Text className="support" onClick={this.technicalSupport}> 萌猫智科提供技术支持 </Text>
        {visible ? (
          <AtModal isOpened={visible} closeOnClickOverlay >
            <AtModalHeader>您还未登录</AtModalHeader>
            <AtModalContent>
              <Text className="tit">请先登录再进行操作</Text>
            </AtModalContent>
            <AtModalAction>
              <Button onClick={this.temporary}>暂不登录</Button>
              <Button openType="getUserInfo" onGetUserInfo={this.onGetUserInfo}>立即登录</Button>
            </AtModalAction>
          </AtModal>
        ) : null}
      </View>
    );
  }
}
the_stack
import { INumberHash } from '../lookup'; import { LSTMTimeStep } from './lstm-time-step'; import { Matrix } from './matrix'; import { Equation } from './matrix/equation'; import { IRNNStatus } from './rnn'; import { RNNTimeStep, IRNNTimeStepJSON } from './rnn-time-step'; // TODO: break out LSTMTimeStep into its own tests describe('RNNTimeStep', () => { describe('.constructor()', () => { describe('when using options.json', () => { let fromJSONSpy: jest.SpyInstance; beforeEach(() => { fromJSONSpy = jest.spyOn(RNNTimeStep.prototype, 'fromJSON'); }); afterEach(() => { fromJSONSpy.mockRestore(); }); it('calls this.fromJSON with this value', () => { const json: IRNNTimeStepJSON = { type: 'RNNTimeStep', options: { inputSize: 1, inputRange: 1, hiddenLayers: [1], outputSize: 1, decayRate: 1, smoothEps: 1, regc: 1, clipval: 1, maxPredictionLength: 1, }, hiddenLayers: [ { weight: { rows: 1, columns: 1, weights: [1] }, transition: { rows: 1, columns: 1, weights: [1], }, bias: { rows: 1, columns: 1, weights: [1] }, }, ], outputConnector: { rows: 1, columns: 1, weights: [1], }, output: { rows: 1, columns: 1, weights: [1] }, inputLookup: { a: 0 }, inputLookupLength: 1, outputLookup: { a: 0 }, outputLookupLength: 1, }; // eslint-disable-next-line no-new new RNNTimeStep({ json }); expect(fromJSONSpy).toHaveBeenCalledWith(json); }); }); }); describe('.createInputMatrix()', () => { it('throws', () => { expect(() => { new RNNTimeStep().createInputMatrix(); }).toThrow(); }); }); describe('.createOutputMatrix()', () => { it('creates the outputConnector and output for model', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [9, 11], outputSize: 5, }); const { outputConnector, output } = net.createOutputMatrices(); expect(outputConnector.rows).toBe(5); expect(outputConnector.columns).toBe(11); expect(output.rows).toBe(5); expect(output.columns).toBe(1); }); }); describe('.bindEquation()', () => { let getEquationSpy: jest.SpyInstance; beforeEach(() => { getEquationSpy = 
jest.spyOn(RNNTimeStep.prototype, 'getEquation'); }); afterEach(() => { getEquationSpy.mockRestore(); }); it('calls static getEquation method', () => { const net = new RNNTimeStep(); net.initialize(); net.bindEquation(); expect(getEquationSpy).toBeCalled(); }); it('adds equations as expected', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [9, 11], outputSize: 5, }); net.initialize(); net.mapModel(); expect(net.model.equations.length).toBe(0); net.bindEquation(); expect(net.model.equations.length).toBe(1); net.bindEquation(); expect(net.model.equations.length).toBe(2); net.bindEquation(); expect(net.model.equations.length).toBe(3); }); }); describe('.mapModel()', () => { describe('when .createHiddenLayers() does not provide model.hiddenLayers', () => { it('throws', () => { const net = new RNNTimeStep({ hiddenLayers: [] }); expect(() => { net.mapModel(); }).not.toThrow(); }); }); it('maps models to model.allMatrices', () => { const net = new RNNTimeStep(); const model = net.mapModel(); expect(model.allMatrices.length).toBe(5); }); }); describe('.backpropagate()', () => { let equationsBackpropagateSpy: jest.SpyInstance; beforeEach(() => { equationsBackpropagateSpy = jest.spyOn( Equation.prototype, 'backpropagate' ); }); afterEach(() => { equationsBackpropagateSpy.mockRestore(); }); it('steps through model.equations in reverse, calling model.equations[index].backpropagate', () => { const net = new RNNTimeStep(); for (let i = 0; i < 3; i++) { const equation = new Equation(); equation.add(new Matrix(1, 1), new Matrix(1, 1)); net.model.equations.push(equation); } net.backpropagate(); expect(equationsBackpropagateSpy).toHaveBeenCalledTimes(3); }); }); describe('.run()', () => { describe('when called with unknown data shape', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); net.train([[1, 2]], { iterations: 1 }); expect(() => { net.run({ one: 1, two: 2 }); }).toThrow(); }); }); describe('when 
called with array,number data shape', () => { let runArraySpy: jest.SpyInstance; beforeEach(() => { runArraySpy = jest.spyOn(RNNTimeStep.prototype, 'runArray'); }); afterEach(() => { runArraySpy.mockRestore(); }); it('calls this.runArray() and returns value from there', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); net.train([[1, 2]], { iterations: 1 }); const result = net.run([1, 2]); expect(result).toBeGreaterThan(0); expect(runArraySpy).toHaveBeenCalledWith([1, 2]); }); }); describe('when called with array,array,number data shape', () => { let runArrayOfArraySpy: jest.SpyInstance; beforeEach(() => { runArrayOfArraySpy = jest.spyOn( RNNTimeStep.prototype, 'runArrayOfArray' ); }); afterEach(() => { runArrayOfArraySpy.mockRestore(); }); it('calls this.runArrayOfArray()', () => { const net = new RNNTimeStep({ inputSize: 4, outputSize: 4 }); net.initialize(); const item1 = [ [1, 2, 3, 4], [4, 3, 2, 1], ]; const item2 = [ [4, 3, 2, 1], [1, 2, 3, 4], ]; net.train([item1, item2], { iterations: 1 }); net.run(item1); expect(runArrayOfArraySpy).toHaveBeenCalledWith(item1); }); }); describe('when called with array,object,number data shape', () => { let runArrayOfObjectSpy: jest.SpyInstance; beforeEach(() => { runArrayOfObjectSpy = jest.spyOn( RNNTimeStep.prototype, 'runArrayOfObject' ); }); afterEach(() => { runArrayOfObjectSpy.mockRestore(); }); it('calls this.runArrayOfArray()', () => { const net = new RNNTimeStep({ inputSize: 4, outputSize: 4 }); net.initialize(); const oneToFour = { low: 1, high: 2, mid: 3, total: 4 }; const fourToOne = { low: 4, high: 3, mid: 2, total: 1 }; const item1 = [oneToFour, fourToOne]; const item2 = [fourToOne, oneToFour]; net.train([item1, item2], { iterations: 1 }); net.run(item1); expect(runArrayOfObjectSpy).toHaveBeenCalledWith(item1); }); }); }); describe('.runArrayOfArray()', () => { describe('when network is not runnable', () => { it('throws', () => { expect(() => { const net = new 
RNNTimeStep(); net.runArrayOfArray([Float32Array.from([1])]); }).toThrow(); }); }); describe('when network is runnable', () => { let runInputSpy: jest.SpyInstance; beforeEach(() => { runInputSpy = jest.spyOn(Equation.prototype, 'runInput'); }); afterEach(() => { runInputSpy.mockRestore(); }); it('sets up equations for length of input plus 1 for internal of 0', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [2], outputSize: 2, }); net.initialize(); net.bindEquation(); expect(net.model.equations.length).toBe(1); net.runArrayOfArray([ Float32Array.from([1, 3]), Float32Array.from([2, 2]), Float32Array.from([3, 1]), ]); expect(net.model.equations.length).toBe(4); }); it('sets calls equation.runInput() with value in array for each input plus 1 for 0 (to end) output', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [2], outputSize: 2, }); net.initialize(); net.bindEquation(); net.runArrayOfArray( [ [1, 3], [2, 2], [3, 1], ].map((v) => Float32Array.from(v)) ); expect(runInputSpy.mock.instances.length).toBe(4); expect(runInputSpy.mock.calls.length).toBe(4); expect(runInputSpy.mock.calls[0][0]).toEqual(Float32Array.from([1, 3])); expect(runInputSpy.mock.calls[1][0]).toEqual(Float32Array.from([2, 2])); expect(runInputSpy.mock.calls[2][0]).toEqual(Float32Array.from([3, 1])); expect(runInputSpy.mock.calls[3][0]).toEqual(Float32Array.from([0, 0])); }); it('sets calls this.end() after calls equations.runInput', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [2], outputSize: 2, }); const stub = (net.end = jest.fn()); net.initialize(); net.bindEquation(); net.runArrayOfArray([ Float32Array.from([1, 3]), Float32Array.from([2, 2]), Float32Array.from([3, 1]), ]); expect(stub).toBeCalled(); }); }); }); describe('.train()', () => { it('throws on array,datum,array w/ inputSize of 2', () => { const data = [{ input: [1, 2], output: [3, 4] }]; const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 1, }); expect(() 
=> { net.train(data); }).toThrow('manually set inputSize and outputSize mismatch'); }); it('throws on array,datum,array w/ outputSize of 2', () => { const data = [{ input: [1, 2], output: [3, 4] }]; const net = new RNNTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 2, }); expect(() => { net.train(data); }).toThrow('manually set inputSize and outputSize mismatch'); }); it('throws on array,datum,object w/ inputSize of 2', () => { const data = [{ input: { a: 1, b: 2 }, output: { c: 3, d: 4 } }]; const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); expect(() => { net.train(data); }).toThrow('inputSize must be 1 for this data size'); }); describe('automatically setting options.inputSize and options.outputSize', () => { describe('array', () => { it('will set inputSize & outputSize if from data', () => { const data = [[0.1, 0.2, 0.3, 0.4, 0.5]]; const options = { iterations: 0, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(1); expect(net.options.outputSize).toBe(1); }); }); describe('array of array', () => { it('will set inputSize & outputSize if from data', () => { const data = [ [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ], ]; const options = { iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(2); expect(net.options.outputSize).toBe(2); }); }); describe('array of object in single long array', () => { it('will set inputSize & outputSize if from data', () => { const data = [{ low: 0.1, med: 0.25, high: 0.5 }]; const options = { iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(1); expect(net.options.outputSize).toBe(1); }); }); describe('array of object in multiple array', () => { it('will set inputSize & outputSize if from data', () => { const data = [ [ { low: 0.1, med: 0.25, high: 0.5 }, { low: 0.5, med: 0.25, high: 0.1 }, ], ]; const options = { 
iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(3); expect(net.options.outputSize).toBe(3); }); }); describe('input/output numbers', () => { it('will set inputSize & outputSize if from data', () => { const data = [{ input: [0.1, 0.2, 0.3, 0.4], output: [0.5] }]; const options = { iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(1); expect(net.options.outputSize).toBe(1); }); }); describe('input/output arrays', () => { it('will set inputSize & outputSize if from data', () => { const data = [ { input: [[0.1, 0.5]], output: [[0.5, 0.1]], }, ]; const options = { iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(2); expect(net.options.outputSize).toBe(2); }); }); describe('input/output object', () => { it('will set inputSize & outputSize if from data', () => { const data = [ { input: { low: 0.1, high: 0.5 }, output: { low: 0.5, high: 0.1 }, }, ]; const options = { iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(1); expect(net.options.outputSize).toBe(1); }); }); describe('datum', () => { it('will set inputSize & outputSize if from data', () => { const data = [ { input: [{ low: 0.1, high: 0.5 }], output: [{ low: 0.5, high: 0.1 }], }, ]; const options = { iterations: 1, }; const net = new RNNTimeStep(); net.train(data, options); expect(net.options.inputSize).toBe(2); expect(net.options.outputSize).toBe(2); }); }); it('will not set inputSize & outputSize if already set larger than 1', () => { const net = new RNNTimeStep({ inputSize: 99, outputSize: 88 }); net.initialize = () => { throw new Error('got passed size check'); }; expect(() => { net.train([ [0, 1, 2, 3, 4], [4, 3, 2, 1, 0], ]); }).toThrow(); expect(net.options.inputSize).toBe(99); expect(net.options.outputSize).toBe(88); }); }); describe('calling using arrays', () => { 
describe('training data with 1D arrays', () => { describe('end to end', () => { let trainArraysSpy: jest.SpyInstance; let predictTargetSpy: jest.SpyInstance; beforeEach(() => { trainArraysSpy = jest.spyOn( RNNTimeStep.prototype, 'trainArrayOfArray' ); predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { trainArraysSpy.mockRestore(); predictTargetSpy.mockRestore(); }); it('uses .runInputNumbers with correct arguments', () => { const net = new RNNTimeStep({ inputSize: 1, hiddenLayers: [1], outputSize: 1, }); const trainingData = [ [0.1, 0.2, 0.3, 0.4, 0.5], [0.5, 0.4, 0.3, 0.2, 0.1], ]; net.train(trainingData, { iterations: 1 }); expect(trainArraysSpy.mock.calls.length).toBe(2); expect(trainArraysSpy.mock.calls[0].length).toBe(1); expect(trainArraysSpy.mock.calls[0][0]).toEqual( trainingData[0].map((value) => Float32Array.from([value])) ); expect(trainArraysSpy.mock.calls[1][0]).toEqual( trainingData[1].map((value) => Float32Array.from([value])) ); expect(predictTargetSpy.mock.calls.length).toBe(8); expect(net.model.equations.length).toBe(5); // first array expect(predictTargetSpy.mock.calls[0][0]).toEqual( Float32Array.from([0.1]) ); expect(predictTargetSpy.mock.calls[0][1]).toEqual( Float32Array.from([0.2]) ); expect(predictTargetSpy.mock.calls[1][0]).toEqual( Float32Array.from([0.2]) ); expect(predictTargetSpy.mock.calls[1][1]).toEqual( Float32Array.from([0.3]) ); expect(predictTargetSpy.mock.calls[2][0]).toEqual( Float32Array.from([0.3]) ); expect(predictTargetSpy.mock.calls[2][1]).toEqual( Float32Array.from([0.4]) ); expect(predictTargetSpy.mock.calls[3][0]).toEqual( Float32Array.from([0.4]) ); expect(predictTargetSpy.mock.calls[3][1]).toEqual( Float32Array.from([0.5]) ); // second array expect(predictTargetSpy.mock.calls[4][0]).toEqual( Float32Array.from([0.5]) ); expect(predictTargetSpy.mock.calls[4][1]).toEqual( Float32Array.from([0.4]) ); expect(predictTargetSpy.mock.calls[5][0]).toEqual( Float32Array.from([0.4]) ); 
expect(predictTargetSpy.mock.calls[5][1]).toEqual( Float32Array.from([0.3]) ); expect(predictTargetSpy.mock.calls[6][0]).toEqual( Float32Array.from([0.3]) ); expect(predictTargetSpy.mock.calls[6][1]).toEqual( Float32Array.from([0.2]) ); expect(predictTargetSpy.mock.calls[7][0]).toEqual( Float32Array.from([0.2]) ); expect(predictTargetSpy.mock.calls[7][1]).toEqual( Float32Array.from([0.1]) ); }); }); it('can learn basic logic', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); const trainingData = [ [0.1, 0.2, 0.3, 0.4, 0.5], [0.5, 0.4, 0.3, 0.2, 0.1], ]; const result = net.train(trainingData, { errorThresh: 0.005, iterations: 1000, }); expect(result.error).toBeLessThan(0.005); expect(result.iterations).toBeLessThan(1000); const result1 = net.forecast([0.1, 0.2, 0.3], 2); expect(result1[0]).toBeCloseTo(0.4, 1); expect(result1[1]).toBeCloseTo(0.5, 1); const result2 = net.forecast([0.5, 0.4, 0.3], 2); expect(result2[0]).toBeCloseTo(0.2, 1); expect(result2[1]).toBeCloseTo(0.1, 1); }); }); describe('training data with 2D arrays', () => { let trainArraysSpy: jest.SpyInstance; let predictTargetSpy: jest.SpyInstance; beforeEach(() => { trainArraysSpy = jest.spyOn( RNNTimeStep.prototype, 'trainArrayOfArray' ); predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { trainArraysSpy.mockRestore(); predictTargetSpy.mockRestore(); }); it('uses .trainArrays with correct arguments', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [1], outputSize: 2, }); const trainingData = [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ]; const trainingDataFormatted = trainingData.map((array) => Float32Array.from(array) ); net.train(trainingData, { iterations: 1 }); expect(trainArraysSpy.mock.calls.length).toBe(1); expect(trainArraysSpy.mock.calls[0].length).toBe(1); expect(trainArraysSpy.mock.calls[0][0]).toEqual( trainingDataFormatted ); 
expect(predictTargetSpy.mock.calls.length).toBe(4); expect(net.model.equations.length).toBe(5); // first array expect(predictTargetSpy.mock.calls[0][0]).toEqual( Float32Array.from([0.1, 0.5]) ); expect(predictTargetSpy.mock.calls[0][1]).toEqual( Float32Array.from([0.2, 0.4]) ); // second array expect(predictTargetSpy.mock.calls[1][0]).toEqual( Float32Array.from([0.2, 0.4]) ); expect(predictTargetSpy.mock.calls[1][1]).toEqual( Float32Array.from([0.3, 0.3]) ); // third array expect(predictTargetSpy.mock.calls[2][0]).toEqual( Float32Array.from([0.3, 0.3]) ); expect(predictTargetSpy.mock.calls[2][1]).toEqual( Float32Array.from([0.4, 0.2]) ); // forth array expect(predictTargetSpy.mock.calls[3][0]).toEqual( Float32Array.from([0.4, 0.2]) ); expect(predictTargetSpy.mock.calls[3][1]).toEqual( Float32Array.from([0.5, 0.1]) ); }); it('can learn basic logic', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [20], outputSize: 2, }); const trainingData = [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ]; const result = net.train(trainingData, { errorThresh: 0.05 }); expect(result.error).toBeLessThan(0.05); expect(result.iterations).toBeLessThan(4000); }); }); describe('training data with 3D arrays', () => { let trainArraysSpy: jest.SpyInstance; let predictTargetSpy: jest.SpyInstance; beforeEach(() => { trainArraysSpy = jest.spyOn( RNNTimeStep.prototype, 'trainArrayOfArray' ); predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { trainArraysSpy.mockRestore(); predictTargetSpy.mockRestore(); }); it('uses .trainArrays with correct arguments', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [1], outputSize: 2, }); const trainingData = [ [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ], [ [0.5, 0.9], [0.6, 0.8], [0.7, 0.7], [0.8, 0.6], [0.9, 0.5], ], ]; const trainingDataFormatted0 = trainingData[0].map((array) => Float32Array.from(array) ); const trainingDataFormatted1 = 
trainingData[1].map((array) => Float32Array.from(array) ); net.train(trainingData, { iterations: 1 }); expect(trainArraysSpy.mock.calls.length).toBe(2); expect(trainArraysSpy.mock.calls[0].length).toBe(1); expect(trainArraysSpy.mock.calls[0][0]).toEqual( trainingDataFormatted0 ); expect(trainArraysSpy.mock.calls[1][0]).toEqual( trainingDataFormatted1 ); expect(predictTargetSpy.mock.calls.length).toBe(8); expect(net.model.equations.length).toBe(5); // first set, first array expect(predictTargetSpy.mock.calls[0][0]).toEqual( Float32Array.from([0.1, 0.5]) ); expect(predictTargetSpy.mock.calls[0][1]).toEqual( Float32Array.from([0.2, 0.4]) ); // first set, second array expect(predictTargetSpy.mock.calls[1][0]).toEqual( Float32Array.from([0.2, 0.4]) ); expect(predictTargetSpy.mock.calls[1][1]).toEqual( Float32Array.from([0.3, 0.3]) ); // first set, third array expect(predictTargetSpy.mock.calls[2][0]).toEqual( Float32Array.from([0.3, 0.3]) ); expect(predictTargetSpy.mock.calls[2][1]).toEqual( Float32Array.from([0.4, 0.2]) ); // first set, forth array expect(predictTargetSpy.mock.calls[3][0]).toEqual( Float32Array.from([0.4, 0.2]) ); expect(predictTargetSpy.mock.calls[3][1]).toEqual( Float32Array.from([0.5, 0.1]) ); // second set, first array expect(predictTargetSpy.mock.calls[4][0]).toEqual( Float32Array.from([0.5, 0.9]) ); expect(predictTargetSpy.mock.calls[4][1]).toEqual( Float32Array.from([0.6, 0.8]) ); // second set, second array expect(predictTargetSpy.mock.calls[5][0]).toEqual( Float32Array.from([0.6, 0.8]) ); expect(predictTargetSpy.mock.calls[5][1]).toEqual( Float32Array.from([0.7, 0.7]) ); // second set, third array expect(predictTargetSpy.mock.calls[6][0]).toEqual( Float32Array.from([0.7, 0.7]) ); expect(predictTargetSpy.mock.calls[6][1]).toEqual( Float32Array.from([0.8, 0.6]) ); // second set, forth array expect(predictTargetSpy.mock.calls[7][0]).toEqual( Float32Array.from([0.8, 0.6]) ); expect(predictTargetSpy.mock.calls[7][1]).toEqual( 
Float32Array.from([0.9, 0.5]) ); }); it('can learn basic logic', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [30], outputSize: 2, }); const trainingData = [ [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ], [ [0.5, 0.9], [0.6, 0.8], [0.7, 0.7], [0.8, 0.6], [0.9, 0.5], ], ]; const result = net.train(trainingData, { errorThresh: 0.05 }); expect(result.error).toBeLessThan(0.05); expect(result.iterations).toBeLessThan(4000); }); }); }); describe('calling using training datum', () => { describe('training data with objects', () => { let trainArraysSpy: jest.SpyInstance; let predictTargetSpy: jest.SpyInstance; beforeEach(() => { trainArraysSpy = jest.spyOn( RNNTimeStep.prototype, 'trainArrayOfArray' ); predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { trainArraysSpy.mockRestore(); predictTargetSpy.mockRestore(); }); it('uses .runInputOutput with correct arguments', () => { const net = new RNNTimeStep({ inputSize: 1, hiddenLayers: [1], outputSize: 1, }); // average temp const trainingData = [ // Washington DC { input: { jan: 42, feb: 44, mar: 53, apr: 64, }, output: { may: 75, jun: 83, }, }, // Bluff Utah { input: { jan: 44, feb: 52, mar: 63, apr: 72, }, output: { may: 82, jun: 92, }, }, ]; net.train(trainingData, { iterations: 1 }); expect(trainArraysSpy.mock.calls.length).toBe(2); expect(trainArraysSpy.mock.calls[0].length).toBe(1); expect(trainArraysSpy.mock.calls[0][0]).toEqual( [42, 44, 53, 64, 75, 83].map((v: number) => Float32Array.from([v])) ); expect(trainArraysSpy.mock.calls[1][0]).toEqual( [44, 52, 63, 72, 82, 92].map((v: number) => Float32Array.from([v])) ); expect(predictTargetSpy.mock.calls.length).toBe(10); expect(net.model.equations.length).toBe(6); // first array expect(predictTargetSpy.mock.calls[0][0]).toEqual( new Float32Array([42]) ); expect(predictTargetSpy.mock.calls[0][1]).toEqual( new Float32Array([44]) ); expect(predictTargetSpy.mock.calls[1][0]).toEqual( new 
Float32Array([44]) ); expect(predictTargetSpy.mock.calls[1][1]).toEqual( new Float32Array([53]) ); expect(predictTargetSpy.mock.calls[2][0]).toEqual( new Float32Array([53]) ); expect(predictTargetSpy.mock.calls[2][1]).toEqual( new Float32Array([64]) ); expect(predictTargetSpy.mock.calls[3][0]).toEqual( new Float32Array([64]) ); expect(predictTargetSpy.mock.calls[3][1]).toEqual( new Float32Array([75]) ); expect(predictTargetSpy.mock.calls[4][0]).toEqual( new Float32Array([75]) ); expect(predictTargetSpy.mock.calls[4][1]).toEqual( new Float32Array([83]) ); // second array expect(predictTargetSpy.mock.calls[5][0]).toEqual( new Float32Array([44]) ); expect(predictTargetSpy.mock.calls[5][1]).toEqual( new Float32Array([52]) ); expect(predictTargetSpy.mock.calls[6][0]).toEqual( new Float32Array([52]) ); expect(predictTargetSpy.mock.calls[6][1]).toEqual( new Float32Array([63]) ); expect(predictTargetSpy.mock.calls[7][0]).toEqual( new Float32Array([63]) ); expect(predictTargetSpy.mock.calls[7][1]).toEqual( new Float32Array([72]) ); expect(predictTargetSpy.mock.calls[8][0]).toEqual( new Float32Array([72]) ); expect(predictTargetSpy.mock.calls[8][1]).toEqual( new Float32Array([82]) ); expect(predictTargetSpy.mock.calls[9][0]).toEqual( new Float32Array([82]) ); expect(predictTargetSpy.mock.calls[9][1]).toEqual( new Float32Array([92]) ); }); }); describe('training data with 1D arrays', () => { let trainArraysSpy: jest.SpyInstance; let predictTargetSpy: jest.SpyInstance; beforeEach(() => { trainArraysSpy = jest.spyOn( RNNTimeStep.prototype, 'trainArrayOfArray' ); predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { trainArraysSpy.mockRestore(); predictTargetSpy.mockRestore(); }); it('uses .runInputOutput with correct arguments', () => { const net = new RNNTimeStep({ inputSize: 1, hiddenLayers: [1], outputSize: 1, }); const trainingData = [ { input: [1, 2, 3, 4], output: [5] }, { input: [5, 4, 3, 2], output: [1] }, ]; const 
trainingDataFormatted0 = [1, 2, 3, 4, 5].map((v: number) => Float32Array.from([v]) ); const trainingDataFormatted1 = [5, 4, 3, 2, 1].map((v: number) => Float32Array.from([v]) ); net.train(trainingData, { iterations: 1 }); expect(trainArraysSpy.mock.calls.length).toBe(2); expect(trainArraysSpy.mock.calls[0].length).toBe(1); expect(trainArraysSpy.mock.calls[0][0]).toEqual( trainingDataFormatted0 ); expect(trainArraysSpy.mock.calls[1][0]).toEqual( trainingDataFormatted1 ); expect(predictTargetSpy.mock.calls.length).toBe(8); expect(net.model.equations.length).toBe(5); // first array expect(predictTargetSpy.mock.calls[0][0]).toEqual( Float32Array.from([1]) ); expect(predictTargetSpy.mock.calls[0][1]).toEqual( Float32Array.from([2]) ); expect(predictTargetSpy.mock.calls[1][0]).toEqual( Float32Array.from([2]) ); expect(predictTargetSpy.mock.calls[1][1]).toEqual( Float32Array.from([3]) ); expect(predictTargetSpy.mock.calls[2][0]).toEqual( Float32Array.from([3]) ); expect(predictTargetSpy.mock.calls[2][1]).toEqual( Float32Array.from([4]) ); expect(predictTargetSpy.mock.calls[3][0]).toEqual( Float32Array.from([4]) ); expect(predictTargetSpy.mock.calls[3][1]).toEqual( Float32Array.from([5]) ); // second array expect(predictTargetSpy.mock.calls[4][0]).toEqual( Float32Array.from([5]) ); expect(predictTargetSpy.mock.calls[4][1]).toEqual( Float32Array.from([4]) ); expect(predictTargetSpy.mock.calls[5][0]).toEqual( Float32Array.from([4]) ); expect(predictTargetSpy.mock.calls[5][1]).toEqual( Float32Array.from([3]) ); expect(predictTargetSpy.mock.calls[6][0]).toEqual( Float32Array.from([3]) ); expect(predictTargetSpy.mock.calls[6][1]).toEqual( Float32Array.from([2]) ); expect(predictTargetSpy.mock.calls[7][0]).toEqual( Float32Array.from([2]) ); expect(predictTargetSpy.mock.calls[7][1]).toEqual( Float32Array.from([1]) ); }); }); describe('training data with 2D arrays', () => { let trainArraysSpy: jest.SpyInstance; let predictTargetSpy: jest.SpyInstance; beforeEach(() => { 
trainArraysSpy = jest.spyOn( RNNTimeStep.prototype, 'trainArrayOfArray' ); predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { trainArraysSpy.mockRestore(); predictTargetSpy.mockRestore(); }); it('uses .runInputOutputArray with correct arguments', () => { const net = new RNNTimeStep({ inputSize: 2, hiddenLayers: [1], outputSize: 2, }); const trainingData = [ { input: [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], ], output: [[0.5, 0.1]], }, { input: [ [0.5, 0.9], [0.6, 0.8], [0.7, 0.7], [0.8, 0.6], ], output: [[0.9, 0.5]], }, ]; const trainingDataFormatted0 = [ ...trainingData[0].input.map((value) => Float32Array.from(value)), ...trainingData[0].output.map((value) => Float32Array.from(value)), ]; const trainingDataFormatted1 = [ ...trainingData[1].input.map((value) => Float32Array.from(value)), ...trainingData[1].output.map((value) => Float32Array.from(value)), ]; net.train(trainingData, { iterations: 1 }); expect(trainArraysSpy.mock.calls.length).toBe(2); expect(trainArraysSpy.mock.calls[0].length).toBe(1); expect(trainArraysSpy.mock.calls[0][0]).toEqual( trainingDataFormatted0 ); expect(trainArraysSpy.mock.calls[1][0]).toEqual( trainingDataFormatted1 ); expect(predictTargetSpy.mock.calls.length).toBe(8); expect(net.model.equations.length).toBe(5); // first set, first array expect(predictTargetSpy.mock.calls[0][0]).toEqual( Float32Array.from([0.1, 0.5]) ); expect(predictTargetSpy.mock.calls[0][1]).toEqual( Float32Array.from([0.2, 0.4]) ); // first set, second array expect(predictTargetSpy.mock.calls[1][0]).toEqual( Float32Array.from([0.2, 0.4]) ); expect(predictTargetSpy.mock.calls[1][1]).toEqual( Float32Array.from([0.3, 0.3]) ); // first set, third array expect(predictTargetSpy.mock.calls[2][0]).toEqual( Float32Array.from([0.3, 0.3]) ); expect(predictTargetSpy.mock.calls[2][1]).toEqual( Float32Array.from([0.4, 0.2]) ); // first set, forth array expect(predictTargetSpy.mock.calls[3][0]).toEqual( Float32Array.from([0.4, 
0.2]) ); expect(predictTargetSpy.mock.calls[3][1]).toEqual( Float32Array.from([0.5, 0.1]) ); // second set, first array expect(predictTargetSpy.mock.calls[4][0]).toEqual( Float32Array.from([0.5, 0.9]) ); expect(predictTargetSpy.mock.calls[4][1]).toEqual( Float32Array.from([0.6, 0.8]) ); // second set, second array expect(predictTargetSpy.mock.calls[5][0]).toEqual( Float32Array.from([0.6, 0.8]) ); expect(predictTargetSpy.mock.calls[5][1]).toEqual( Float32Array.from([0.7, 0.7]) ); // second set, third array expect(predictTargetSpy.mock.calls[6][0]).toEqual( Float32Array.from([0.7, 0.7]) ); expect(predictTargetSpy.mock.calls[6][1]).toEqual( Float32Array.from([0.8, 0.6]) ); // second set, forth array expect(predictTargetSpy.mock.calls[7][0]).toEqual( Float32Array.from([0.8, 0.6]) ); expect(predictTargetSpy.mock.calls[7][1]).toEqual( Float32Array.from([0.9, 0.5]) ); }); }); }); describe('prediction using arrays', () => { it('can train and predict linear numeric, single input, 1 to 5, and 5 to 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [20, 20], outputSize: 1, }); const trainingData = [ [0.1, 0.2, 0.3, 0.4, 0.5], [0.5, 0.4, 0.3, 0.2, 0.1], ]; const result = net.train(trainingData); expect(result.error).toBeLessThan(0.05); const closeToFive = net.run([0.1, 0.2, 0.3, 0.4]); const closeToOne = net.run([0.5, 0.4, 0.3, 0.2]); expect(closeToOne.toFixed(1)).toBe('0.1'); expect(closeToFive.toFixed(1)).toBe('0.5'); }); it('can train and predict single linear array, two input, 1 to 5, and 5 to 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [20], outputSize: 2, }); // Same test as previous, but combined on a single set const trainingData = [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ]; const result = net.train(trainingData, { errorThresh: 0.01, }); expect(result.error).toBeLessThan(0.01); const closeToFiveAndOne = net.run([ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], ]); 
expect(closeToFiveAndOne[0].toFixed(1)).toBe('0.5'); expect(closeToFiveAndOne[1].toFixed(1)).toBe('0.1'); }); it('can train and predict multiple linear array, two input, 1 to 5, 5 to 1, 5 to 9, and 9 to 5', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [40], outputSize: 2, }); // Same test as previous, but combined on a single set const trainingData = [ [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ], [ [0.5, 0.9], [0.6, 0.8], [0.7, 0.7], [0.8, 0.6], [0.9, 0.5], ], ]; const result = net.train(trainingData); expect(result.error).toBeLessThan(0.05); const closeToFiveAndOne = net.run([ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], ]); expect(closeToFiveAndOne[0].toFixed(1)).toBe('0.5'); expect(closeToFiveAndOne[1].toFixed(1)).toBe('0.1'); const closeToNineAndFive = net.run([ [0.5, 0.9], [0.6, 0.8], [0.7, 0.7], [0.8, 0.6], ]); expect(closeToNineAndFive[0].toFixed(1)).toBe('0.9'); expect(closeToNineAndFive[1].toFixed(1)).toBe('0.5'); }); }); describe('prediction using input/output', () => { describe('with objects', () => { it('can train and predict input/output linear array avg weather data', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [5], outputSize: 1, }); // average temp const trainingData = [ // Washington DC { input: { jan: 0.42, feb: 0.44, mar: 0.53, apr: 0.64, }, output: { may: 0.75, jun: 0.83, }, }, // Bluff Utah { input: { jan: 0.44, feb: 0.52, mar: 0.63, apr: 0.72, }, output: { may: 0.82, jun: 0.92, }, }, ]; const result = net.train(trainingData); const washington = net.runObject({ jan: 0.42, feb: 0.44, mar: 0.53, apr: 0.64, }); const bluff = net.runObject({ jan: 0.44, feb: 0.52, mar: 0.63, apr: 0.72, }); expect(result.error).toBeLessThan(0.05); expect(washington.may.toFixed(2).indexOf('0.7')).toBeGreaterThan(-1); expect(washington.jun.toFixed(2).indexOf('0.8')).toBeGreaterThan(-1); expect(bluff.may.toFixed(2).indexOf('0.8')).toBeGreaterThan(-1); 
expect(bluff.jun.toFixed(2).indexOf('0.9')).toBeGreaterThan(-1); }); }); describe('with arrays', () => { it('can use inputs(4) and output(1)', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [20, 20], outputSize: 1, }); // Same test as previous, but combined on a single set const trainingData = [ { input: [0.1, 0.2, 0.3, 0.4], output: [0.5], }, { input: [0.5, 0.4, 0.3, 0.2], output: [0.1], }, ]; const result = net.train(trainingData); expect(result.error).toBeLessThan(0.09); const closeToFive = net.run([0.1, 0.2, 0.3, 0.4]); const closeToOne = net.run([0.5, 0.4, 0.3, 0.2]); expect(closeToFive.toFixed(1)).toBe('0.5'); expect(closeToOne.toFixed(1)).toBe('0.1'); }); it('can train and predict using array of input and output, two input, 1 to 5, and 5 to 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [20], outputSize: 2, }); // Same test as previous, but combined on a single set const trainingData = [ { input: [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], ], output: [[0.5, 0.1]], }, ]; const result = net.train(trainingData, { errorThresh: 0.01 }); expect(result.error).toBeLessThan(0.01); const closeToFiveAndOne = net.run([ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], ]); expect(closeToFiveAndOne[0].toFixed(1)).toBe('0.5'); expect(closeToFiveAndOne[1].toFixed(1)).toBe('0.1'); }); }); }); }); describe('.trainArrayOfArray()', () => { describe('when preparing equation length', () => { let bindEquationSpy: jest.SpyInstance; beforeEach(() => { bindEquationSpy = jest.spyOn(RNNTimeStep.prototype, 'bindEquation'); }); afterEach(() => { bindEquationSpy.mockRestore(); }); it('calls .bindEquation() to match the input length', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); net.trainArrayOfArray([ Float32Array.from([1]), Float32Array.from([1]), Float32Array.from([1]), ]); expect(bindEquationSpy).toHaveBeenCalledTimes(3); }); }); describe('when reading in input', () => { let predictTargetSpy: 
jest.SpyInstance; beforeEach(() => { predictTargetSpy = jest.spyOn(Equation.prototype, 'predictTarget'); }); afterEach(() => { predictTargetSpy.mockRestore(); }); it('calls .predictTarget() with expected current and next values from input argument', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); net.trainArrayOfArray([ Float32Array.from([1]), Float32Array.from([2]), Float32Array.from([3]), ]); expect(predictTargetSpy.mock.calls.length).toBe(2); expect(predictTargetSpy.mock.calls[0]).toEqual([ Float32Array.from([1]), Float32Array.from([2]), ]); expect(predictTargetSpy.mock.calls[1]).toEqual([ Float32Array.from([2]), Float32Array.from([3]), ]); }); }); describe('after reading in input', () => { let endSpy: jest.SpyInstance; beforeEach(() => { endSpy = jest.spyOn(RNNTimeStep.prototype, 'end'); }); afterEach(() => { endSpy.mockRestore(); }); it('calls .end()', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); net.trainArrayOfArray([Float32Array.from([1]), Float32Array.from([1])]); expect(endSpy).toHaveBeenCalledTimes(1); }); }); describe('when given an array of length less than 2', () => { it('throws with descriptive message', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); expect(() => { net.trainArrayOfArray([Float32Array.from([1])]); }).toThrow('input must be an array of 2 or more'); }); }); it('returns a number that is the error', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 1 }); net.initialize(); const error = net.trainArrayOfArray([ Float32Array.from([1]), Float32Array.from([2]), ]); expect(error).toBeGreaterThan(0); }); }); describe('.forecastArray()', () => { it('returns null when this.isRunnable returns false', () => { expect(() => { new RNNTimeStep().forecastArray(Float32Array.from([1])); }).toThrow(); }); it('sets up equations for length of input plus count plus 1 for internal of 0', () => { const net = new RNNTimeStep({ 
inputSize: 1,
hiddenLayers: [1],
outputSize: 1,
});
net.initialize();
net.bindEquation();
expect(net.model.equations.length).toBe(1);
net.forecastArray(Float32Array.from([1, 2, 3]), 2);
expect(net.model.equations.length).toBe(6);
});
it('sets calls this.end() after calls equations.runInput', () => {
  const net = new RNNTimeStep({
    inputSize: 1,
    hiddenLayers: [1],
    outputSize: 1,
  });
  const stub = (net.end = jest.fn());
  net.initialize();
  net.bindEquation();
  net.forecastArray(Float32Array.from([1, 2, 3]), 2);
  // toHaveBeenCalled() replaces the deprecated toBeCalled() alias.
  expect(stub).toHaveBeenCalled();
});
it('outputs the length of required forecast', () => {
  const net = new RNNTimeStep({
    inputSize: 1,
    hiddenLayers: [1],
    outputSize: 1,
  });
  net.initialize();
  net.bindEquation();
  const result = net.forecastArray(Float32Array.from([1, 2, 3]), 2);
  expect(result.length).toBe(2);
});
it('outputs a flat array of numbers', () => {
  const net = new RNNTimeStep({
    inputSize: 1,
    hiddenLayers: [1],
    outputSize: 1,
  });
  net.initialize();
  net.bindEquation();
  const result = net.forecastArray(Float32Array.from([1, 2, 3]), 2);
  expect(typeof result[0]).toBe('number');
  expect(typeof result[1]).toBe('number');
});
});
describe('.forecastArrayOfArray', () => {
  it('returns null when this.isRunnable returns false', () => {
    expect(() => {
      new RNNTimeStep().forecastArrayOfArray([Float32Array.from([1])]);
    }).toThrow();
  });
  it('sets up equations for length of input plus count plus 1 for internal of 0', () => {
    const net = new RNNTimeStep({
      inputSize: 3,
      hiddenLayers: [1],
      outputSize: 3,
    });
    net.initialize();
    net.bindEquation();
    expect(net.model.equations.length).toBe(1);
    net.forecastArrayOfArray([Float32Array.from([1, 2, 3])], 2);
    expect(net.model.equations.length).toBe(4);
  });
  it('sets calls this.end() after calls equations.runInput', () => {
    const net = new RNNTimeStep({
      inputSize: 3,
      hiddenLayers: [1],
      outputSize: 3,
    });
    const stub = (net.end = jest.fn());
    net.initialize();
    net.bindEquation();
    net.forecastArrayOfArray([Float32Array.from([1, 2, 3])], 2);
    // toHaveBeenCalled() replaces the deprecated toBeCalled() alias.
    expect(stub).toHaveBeenCalled();
  });
  it('outputs the length of required forecast', () => {
    const net = new RNNTimeStep({
      inputSize: 3,
      hiddenLayers: [1],
      outputSize: 3,
    });
    net.initialize();
    net.bindEquation();
    const result = net.forecastArrayOfArray([Float32Array.from([1, 2, 3])], 2);
    expect(result.length).toBe(2);
  });
  it('outputs a nested array of numbers', () => {
    const net = new RNNTimeStep({
      inputSize: 3,
      hiddenLayers: [1],
      outputSize: 3,
    });
    net.initialize();
    net.bindEquation();
    const result = net.forecastArrayOfArray([Float32Array.from([1, 2, 3])], 2);
    expect(result.length).toBe(2);
    expect(result[0].length).toBe(3);
    expect(result[1].length).toBe(3);
    expect(typeof result[0][0]).toBe('number');
    expect(typeof result[0][1]).toBe('number');
    expect(typeof result[0][2]).toBe('number');
    expect(typeof result[1][0]).toBe('number');
    expect(typeof result[1][1]).toBe('number');
    expect(typeof result[1][2]).toBe('number');
  });
});
describe('.forecastArrayOfObject()', () => {
  // NOTE(review): this spy is configured but never asserted in the test
  // below — confirm whether an assertion on it was intended.
  let forecastArrayObjectSpy: jest.SpyInstance;
  beforeEach(() => {
    forecastArrayObjectSpy = jest.spyOn(
      RNNTimeStep.prototype,
      'forecastArrayOfObject'
    );
  });
  afterEach(() => {
    forecastArrayObjectSpy.mockRestore();
  });
  it('maps values correctly', () => {
    const trainingData = [
      [
        { low: 0.1, high: 0.9 },
        { low: 0.2, high: 0.8 },
        { low: 0.3, high: 0.7 },
      ],
      [
        { low: 0.9, high: 0.1 },
        { low: 0.8, high: 0.2 },
        { low: 0.7, high: 0.3 },
      ],
    ];
    const net = new RNNTimeStep({
      inputSize: 2,
      outputSize: 2,
    });
    net.train(trainingData, { iterations: 1000 });
    const result = net.forecast([{ low: 0.1, high: 0.9 }], 2);
    expect(result.length).toBe(2);
    expect(result[0].low).toBeGreaterThan(0);
    expect(result[0].high).toBeGreaterThan(0);
    expect(result[1].low).toBeGreaterThan(0);
    expect(result[1].high).toBeGreaterThan(0);
  });
});
describe('.forecast()', () => {
  describe('when called with unrecognized data shape', () => {
    it('throws', () => {
      expect(() => {
        const net = new RNNTimeStep();
        net.train([[1, 2, 3]], { iterations: 1 });
// @ts-expect-error need to infer types net.forecast({ one: [1] }, 2); }).toThrow('Unrecognized data shape object,array,number'); }); }); describe('when called with array,number', () => { let forecastArraysSpy: jest.SpyInstance; beforeEach(() => { forecastArraysSpy = jest.spyOn(RNNTimeStep.prototype, 'forecastArray'); }); afterEach(() => { forecastArraysSpy.mockRestore(); }); it('calls this.forecastArray with input and count', () => { const net = new RNNTimeStep(); net.train([[1, 2, 3]], { iterations: 1 }); net.forecast([1], 2); expect(forecastArraysSpy).toBeCalledWith([1], 2); }); }); describe('when called with array,array,number', () => { let forecastArraysOfArraySpy: jest.SpyInstance; beforeEach(() => { forecastArraysOfArraySpy = jest.spyOn( RNNTimeStep.prototype, 'forecastArrayOfArray' ); }); afterEach(() => { forecastArraysOfArraySpy.mockRestore(); }); it('calls this.forecastArrayOfArray with input and count', () => { const net = new RNNTimeStep(); net.train( [ [ [1, 2, 3], [4, 5, 6], [7, 8, 9], ], ], { iterations: 1 } ); net.forecast([[1, 2, 3]], 2); expect(forecastArraysOfArraySpy).toBeCalledWith([[1, 2, 3]], 2); }); }); describe('when called with array,object,number', () => { let forecastArrayOfObjectSpy: jest.SpyInstance; beforeEach(() => { forecastArrayOfObjectSpy = jest.spyOn( RNNTimeStep.prototype, 'forecastArrayOfObject' ); }); afterEach(() => { forecastArrayOfObjectSpy.mockRestore(); }); it('calls this.forecastArrayOfObject with input and count', () => { const net = new RNNTimeStep(); net.train( [ [ { low: 1, high: 2, med: 3 }, { low: 4, high: 5, med: 6 }, { low: 7, high: 8, med: 9 }, ], ], { iterations: 1 } ); net.forecast([{ low: 1, high: 2, med: 3 }], 2); expect(forecastArrayOfObjectSpy).toBeCalledWith( [{ low: 1, high: 2, med: 3 }], 2 ); }); }); }); describe('.formatData()', () => { describe('when called with array,number data shape', () => { let formatArraySpy: jest.SpyInstance; beforeEach(() => { formatArraySpy = 
jest.spyOn(RNNTimeStep.prototype, 'formatArray'); }); afterEach(() => { formatArraySpy.mockRestore(); }); it('calls this.formatNumber with data', () => { const net = new RNNTimeStep(); const data = [1]; net.formatData(data); expect(formatArraySpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,array,number data shape', () => { let formatArrayOfArraySpy: jest.SpyInstance; beforeEach(() => { formatArrayOfArraySpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfArray' ); }); afterEach(() => { formatArrayOfArraySpy.mockRestore(); }); it('calls this.formatArrayOfArray with data', () => { const net = new RNNTimeStep({ inputSize: 1 }); const data = [[1]]; net.formatData(data); expect(formatArrayOfArraySpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,object,number data shape', () => { describe('when this.inputSize = 1', () => { let formatArrayOfObjectSpy: jest.SpyInstance; beforeEach(() => { formatArrayOfObjectSpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfObject' ); }); afterEach(() => { formatArrayOfObjectSpy.mockRestore(); }); it('calls this.formatArrayOfObject with data', () => { const net = new RNNTimeStep({ inputSize: 1 }); const data = [{ low: 1, high: 2 }]; net.formatData(data); expect(formatArrayOfObjectSpy).toHaveBeenCalledWith(data); }); }); describe('when this.inputSize > 1', () => { let formatArrayOfObjectMultiSpy: jest.SpyInstance; beforeEach(() => { formatArrayOfObjectMultiSpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfObjectMulti' ); }); afterEach(() => { formatArrayOfObjectMultiSpy.mockRestore(); }); it('calls this.formatArrayOfObjectMulti with data', () => { const net = new RNNTimeStep({ inputSize: 2 }); const data = [{ low: 1, high: 2 }]; net.formatData(data); expect(formatArrayOfObjectMultiSpy).toHaveBeenCalledWith(data); }); }); }); describe('when called with array,datum,array,number data shape', () => { let formatArrayOfDatumOfArraySpy: jest.SpyInstance; beforeEach(() => { 
formatArrayOfDatumOfArraySpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfDatumOfArray' ); }); afterEach(() => { formatArrayOfDatumOfArraySpy.mockRestore(); }); it('calls this.formatArrayOfDatumOfArray with data', () => { const net = new RNNTimeStep(); const data = [ { input: [1, 2], output: [3, 4], }, ]; net.formatData(data); expect(formatArrayOfDatumOfArraySpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,datum,object,number data shape', () => { let formatArrayOfDatumOfObjectSpy: jest.SpyInstance; beforeEach(() => { formatArrayOfDatumOfObjectSpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfDatumOfObject' ); }); afterEach(() => { formatArrayOfDatumOfObjectSpy.mockRestore(); }); it('calls this.formatArrayOfDatumOfArray with data', () => { const net = new RNNTimeStep(); const data = [ { input: { low: 1, high: 2 }, output: { low: 3, high: 4 }, }, ]; net.formatData(data); expect(formatArrayOfDatumOfObjectSpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,array,array,number data shape', () => { let formatArrayOfArrayOfArraySpy: jest.SpyInstance; beforeEach(() => { formatArrayOfArrayOfArraySpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfArrayOfArray' ); }); afterEach(() => { formatArrayOfArrayOfArraySpy.mockRestore(); }); it('calls this.formatArrayOfArrayOfArray with data', () => { const net = new RNNTimeStep(); const data = [[[1, 2, 3]], [[3, 4, 5]]]; net.formatData(data); expect(formatArrayOfArrayOfArraySpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,array,object,number data shape', () => { let formatArrayOfArrayOfObjectSpy: jest.SpyInstance; beforeEach(() => { formatArrayOfArrayOfObjectSpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfArrayOfObject' ); }); afterEach(() => { formatArrayOfArrayOfObjectSpy.mockRestore(); }); it('calls this.formatArrayOfArrayOfObject with data', () => { const net = new RNNTimeStep(); const data = [ [ { h: 1, l: 2, m: 3 }, { h: 3, l: 2, m: 3 
}, ], [ { h: 3, l: 4, m: 5 }, { h: 4, l: 4, m: 4 }, ], ]; net.formatData(data); expect(formatArrayOfArrayOfObjectSpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,datum,array,array,number data shape', () => { let formatArrayOfDatumOfArrayOfArraySpy: jest.SpyInstance; beforeEach(() => { formatArrayOfDatumOfArrayOfArraySpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfDatumOfArrayOfArray' ); }); afterEach(() => { formatArrayOfDatumOfArrayOfArraySpy.mockRestore(); }); it('calls this.formatArrayOfArrayOfObject with data', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 2, }); const data = [ { input: [ [1, 2], [3, 4], ], output: [ [3, 4], [2, 1], ], }, ]; net.formatData(data); expect(formatArrayOfDatumOfArrayOfArraySpy).toHaveBeenCalledWith(data); }); }); describe('when called with array,datum,array,object,number data shape', () => { let formatArrayOfDatumOfArrayOfObjectSpy: jest.SpyInstance; beforeEach(() => { formatArrayOfDatumOfArrayOfObjectSpy = jest.spyOn( RNNTimeStep.prototype, 'formatArrayOfDatumOfArrayOfObject' ); }); afterEach(() => { formatArrayOfDatumOfArrayOfObjectSpy.mockRestore(); }); it('calls this.formatArrayOfDatumOfArrayOfObject with data', () => { const net = new RNNTimeStep(); const data = [ { input: [ { h: 1, l: 2 }, { h: 1, l: 2 }, ], output: [ { h: 2, l: 1 }, { h: 2, l: 1 }, ], }, ]; net.formatData(data); expect(formatArrayOfDatumOfArrayOfObjectSpy).toHaveBeenCalledWith(data); }); }); }); describe('.formatArray()', () => { it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArray([1, 2, 3]); expect(result).toEqual([ [[1], [2], [3]].map((v) => Float32Array.from(v)), ]); }); }); describe('.formatArrayOfArray()', () => { describe('when this.options.inputSize and this.options.outputSize = 1', () => { it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArrayOfArray([[1, 2, 3]]); expect(result).toEqual([ 
[[1], [2], [3]].map((v) => Float32Array.from(v)), ]); }); }); describe('when this.options.inputSize and this.options.outputSize > 1', () => { describe('when inputSize does not match data length', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 3 }); it('throws', () => { expect(() => { net.formatArrayOfArray([[1, 2, 3]]); }).toThrow('inputSize must match data input size'); }); }); describe('when outputSize does not match data length', () => { const net = new RNNTimeStep({ inputSize: 3, outputSize: 2 }); it('throws', () => { expect(() => { net.formatArrayOfArray([[1, 2, 3]]); }).toThrow('outputSize must match data output size'); }); }); it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep({ inputSize: 3, outputSize: 3 }); const result = net.formatArrayOfArray([[1, 2, 3]]); expect(result).toEqual([[[1, 2, 3]].map((v) => Float32Array.from(v))]); }); }); }); describe('.formatArrayOfObject()', () => { describe('when this.options.inputSize > 1', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 1 }); expect(() => { net.formatArrayOfObject([{ a: 1 }]); }).toThrow('inputSize must be 1 for this data size'); }); }); describe('when this.options.outputSize > 1', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 2 }); expect(() => { net.formatArrayOfObject([{ a: 1 }]); }).toThrow('outputSize must be 1 for this data size'); }); }); describe('when this.inputLookup is null', () => { it('sets this.inputLookup & this.inputLookupLength', () => { const net = new RNNTimeStep(); expect(net.inputLookup).toBe(null); expect(net.inputLookupLength).toBe(0); net.formatArrayOfObject([{ a: 1 }]); expect(net.inputLookup).toEqual({ a: 0 }); expect(net.inputLookupLength).toBe(1); }); }); describe('when this.inputLookup is set', () => { it('does not set this.inputLookup or this.inputLookupLength', () => { const net = new RNNTimeStep(); const inputLookup = { a: 0 }; net.inputLookup = 
inputLookup; net.inputLookupLength = 2; net.formatArrayOfObject([{ a: 1 }]); expect(net.inputLookup).toBe(inputLookup); expect(net.inputLookupLength).toBe(2); }); }); it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArrayOfObject([{ one: 1, two: 2, three: 3 }]); expect(result).toEqual([ [[1], [2], [3]].map((v) => Float32Array.from(v)), ]); }); }); describe('.formatArrayOfObjectMulti()', () => { describe('when this.inputLookup is null', () => { it('sets this.inputLookup & this.inputLookupLength', () => { const net = new RNNTimeStep(); expect(net.inputLookup).toBe(null); expect(net.inputLookupLength).toBe(0); net.formatArrayOfObjectMulti([{ a: 1, b: 2 }]); expect(net.inputLookup).toEqual({ a: 0, b: 1 }); expect(net.inputLookupLength).toBe(2); }); }); describe('when this.inputLookup is set', () => { it('does not set this.inputLookup or this.inputLookupLength', () => { const net = new RNNTimeStep(); const inputLookup = { a: 0, b: 1 }; net.inputLookup = inputLookup; net.inputLookupLength = 3; net.formatArrayOfObjectMulti([{ a: 1, b: 2 }]); expect(net.inputLookup).toBe(inputLookup); expect(net.inputLookupLength).toBe(3); }); }); it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArrayOfObjectMulti([ { one: 1, two: 2, three: 3 }, ]); expect(result).toEqual([[Float32Array.from([1, 2, 3])]]); }); }); describe('.formatArrayOfDatumOfArray()', () => { describe('when this.options.inputSize > 1', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 1 }); expect(() => { net.formatArrayOfDatumOfArray([]); }).toThrow('inputSize must be 1 for this data size'); }); }); describe('when this.options.outputSize > 1', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 2 }); expect(() => { net.formatArrayOfDatumOfArray([]); }).toThrow('outputSize must be 1 for this data size'); }); }); it('returns a proper 
Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArrayOfDatumOfArray([ { input: [1, 2, 3], output: [4, 5, 6] }, ]); expect(result).toEqual([ [[1], [2], [3], [4], [5], [6]].map((v) => Float32Array.from(v)), ]); }); }); describe('.formatArrayOfDatumOfObject()', () => { describe('when this.options.inputSize > 1', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 1 }); expect(() => { net.formatArrayOfDatumOfObject([]); }).toThrow('inputSize must be 1 for this data size'); }); }); describe('when this.options.outputSize > 1', () => { it('throws', () => { const net = new RNNTimeStep({ inputSize: 1, outputSize: 2 }); expect(() => { net.formatArrayOfDatumOfObject([]); }).toThrow('outputSize must be 1 for this data size'); }); }); describe('when this.inputLookup is null', () => { it('sets this.inputLookup & this.inputLookupLength', () => { const net = new RNNTimeStep(); expect(net.inputLookup).toBe(null); expect(net.inputLookupLength).toBe(0); net.formatArrayOfDatumOfObject([ { input: { a: 1, b: 2 }, output: { a: 1, b: 2 }, }, ]); expect(net.inputLookup).toEqual({ a: 0, b: 1 }); expect(net.inputLookupLength).toBe(2); }); }); describe('when this.inputLookup is set', () => { it('does not set this.inputLookup or this.inputLookupLength', () => { const net = new RNNTimeStep(); const inputLookup = { a: 0, b: 1 }; net.inputLookup = inputLookup; net.inputLookupLength = 3; net.formatArrayOfDatumOfObject([ { input: { a: 1, b: 2 }, output: { a: 1, b: 2 }, }, ]); expect(net.inputLookup).toBe(inputLookup); expect(net.inputLookupLength).toBe(3); }); }); it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArrayOfDatumOfObject([ { input: { a: 1, b: 2 }, output: { a: 1, b: 2 } }, ]); expect(result).toEqual([ [[1], [2], [1], [2]].map((v) => Float32Array.from(v)), ]); }); }); describe('.formatArrayOfArrayOfArray()', () => { it('returns a proper Float32Array[][]', () => { 
const net = new RNNTimeStep(); const result = net.formatArrayOfArrayOfArray([ [ [1, 2, 3, 4], [4, 3, 2, 1], ], ]); expect(result).toEqual([ [ [1, 2, 3, 4], [4, 3, 2, 1], ].map((v) => Float32Array.from(v)), ]); }); }); describe('.formatArrayOfArrayOfObject()', () => { describe('when this.inputLookup is null', () => { it('sets this.inputLookup & this.inputLookupLength', () => { const net = new RNNTimeStep(); expect(net.inputLookup).toBe(null); expect(net.inputLookupLength).toBe(0); net.formatArrayOfArrayOfObject([ [ { a: 1, b: 2 }, { a: 2, b: 1 }, ], ]); expect(net.inputLookup).toEqual({ a: 0, b: 1 }); expect(net.inputLookupLength).toBe(2); }); }); describe('when this.inputLookup is set', () => { it('does not set this.inputLookup or this.inputLookupLength', () => { const net = new RNNTimeStep(); const inputLookup = { a: 0, b: 1 }; net.inputLookup = inputLookup; net.inputLookupLength = 3; net.formatArrayOfArrayOfObject([ [ { a: 1, b: 2 }, { a: 2, b: 1 }, ], ]); expect(net.inputLookup).toBe(inputLookup); expect(net.inputLookupLength).toBe(3); }); }); it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep(); const result = net.formatArrayOfArrayOfObject([ [ { a: 1, b: 2 }, { a: 2, b: 1 }, ], ]); expect(result).toEqual([ [ [1, 2], [2, 1], ].map((v) => Float32Array.from(v)), ]); }); }); describe('.formatArrayOfDatumOfArrayOfArray()', () => { describe('when inputSize does not match data length', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 3 }); it('throws', () => { expect(() => { net.formatArrayOfDatumOfArrayOfArray([ { input: [[1, 2, 3]], output: [[1, 2, 3]] }, ]); }).toThrow('inputSize must match data input size'); }); }); describe('when outputSize does not match data length', () => { const net = new RNNTimeStep({ inputSize: 3, outputSize: 2 }); it('throws', () => { expect(() => { net.formatArrayOfDatumOfArrayOfArray([ { input: [[1, 2, 3]], output: [[1, 2, 3]] }, ]); }).toThrow('outputSize must match data output size'); }); }); 
it('returns a proper Float32Array[][]', () => { const net = new RNNTimeStep({ inputSize: 2, outputSize: 2 }); const result = net.formatArrayOfDatumOfArrayOfArray([ { input: [ [1, 2], [3, 4], ], output: [ [4, 3], [2, 1], ], }, { input: [ [4, 3], [2, 1], ], output: [ [1, 2], [3, 4], ], }, ]); expect(result).toEqual([ [ [1, 2], [3, 4], [4, 3], [2, 1], ].map((v) => Float32Array.from(v)), [ [4, 3], [2, 1], [1, 2], [3, 4], ].map((v) => Float32Array.from(v)), ]); }); }); describe('.toFunction()', () => { it('processes array same as net w/ inputSize of 1', () => { const data = [{ input: [1, 2], output: [3, 4] }]; const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(data, { iterations: 100, errorThresh: 0.05 }); const fn = net.toFunction(); const expected = net.run(data[0].input); const result = fn(data[0].input); expect(typeof result).toBe('number'); expect(result).toEqual(expected); }); it('processes object same as net w/ inputSize of 1', () => { const data = [{ input: { a: 1, b: 2 }, output: { c: 3, d: 4 } }]; const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(data, { iterations: 100, errorThresh: 0.05 }); const fn = net.toFunction(); const expected = net.run(data[0].input); expect(fn(data[0].input)).toEqual(expected); }); it('processes array,object same as net', () => { const data = [ { input: [ { a: 1, b: 4 }, { a: 2, b: 3 }, ], output: [ { c: 3, d: 2 }, { c: 4, d: 1 }, ], }, ]; const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(data, { iterations: 100, errorThresh: 0.05 }); const fn = net.toFunction(); const expected = net.run(data[0].input); expect(fn(data[0].input)).toEqual(expected); }); it('processes array same as net', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); // Same test as previous, but combined on a single set const trainingData = [ [0.1, 0.2, 0.3, 0.4, 0.5], [0.5, 0.4, 0.3, 0.2, 0.1], 
];
const trainResult = net.train(trainingData);
expect(trainResult.error).toBeLessThan(0.09);
const closeToFive = net.run([0.1, 0.2, 0.3, 0.4]);
const closeToOne = net.run([0.5, 0.4, 0.3, 0.2]);
const fn = net.toFunction();
expect(closeToFive.toFixed(1)).toBe('0.5');
expect(closeToOne.toFixed(1)).toBe('0.1');
expect(fn([0.1, 0.2, 0.3, 0.4])).toBeCloseTo(closeToFive);
expect(fn([0.5, 0.4, 0.3, 0.2])).toBeCloseTo(closeToOne);
});
it('processes array,array same as net', () => {
  const net = new LSTMTimeStep({
    inputSize: 2,
    hiddenLayers: [10],
    outputSize: 2,
  });
  // Same test as previous, but combined on a single set
  const trainingData = [
    [0.1, 0.5],
    [0.2, 0.4],
    [0.3, 0.3],
    [0.4, 0.2],
    [0.5, 0.1],
  ];
  const trainResult = net.train(trainingData);
  expect(trainResult.error).toBeLessThan(0.09);
  const closeToFiveAndOne = net.run([
    [0.1, 0.5],
    [0.2, 0.4],
    [0.3, 0.3],
    [0.4, 0.2],
  ]);
  const fn = net.toFunction();
  const result = fn([
    [0.1, 0.5],
    [0.2, 0.4],
    [0.3, 0.3],
    [0.4, 0.2],
  ]);
  expect(closeToFiveAndOne[0].toFixed(1)).toBe('0.5');
  expect(closeToFiveAndOne[1].toFixed(1)).toBe('0.1');
  expect(result[0]).toBe(closeToFiveAndOne[0]);
  expect(result[1]).toBe(closeToFiveAndOne[1]);
});
it('processes object same as net', () => {
  const net = new LSTMTimeStep({
    inputSize: 1,
    hiddenLayers: [10],
    outputSize: 1,
  });
  // Same test as previous, but combined on a single set
  const trainingData = [
    {
      input: { monday: 0.1, tuesday: 0.2, wednesday: 0.3, thursday: 0.4 },
      output: { friday: 0.5 },
    },
    {
      input: { monday: 0.5, tuesday: 0.4, wednesday: 0.3, thursday: 0.2 },
      output: { friday: 0.1 },
    },
  ];
  const trainResult = net.train(trainingData);
  expect(trainResult.error).toBeLessThan(0.09);
  const closeToFive = net.runObject({
    monday: 0.1,
    tuesday: 0.2,
    wednesday: 0.3,
    thursday: 0.4,
  });
  const closeToOne = net.runObject({
    monday: 0.5,
    tuesday: 0.4,
    wednesday: 0.3,
    thursday: 0.2,
  });
  const fn = net.toFunction();
  expect(closeToFive.friday.toFixed(1)).toBe('0.5');
expect(closeToOne.friday.toFixed(1)).toBe('0.1'); expect( (fn as (input: INumberHash) => INumberHash)({ monday: 0.1, tuesday: 0.2, wednesday: 0.3, thursday: 0.4, }).friday ).toBe(closeToFive.friday); expect( (fn as (input: INumberHash) => INumberHash)({ monday: 0.5, tuesday: 0.4, wednesday: 0.3, thursday: 0.2, }).friday ).toBe(closeToOne.friday); }); it('handles array,object to array,object with lookup tables being same w/ inputSize of 1', () => { const inputSize = 1; const hiddenLayers = [10]; const outputSize = 1; const net = new RNNTimeStep({ inputSize, hiddenLayers, outputSize, }); net.train([ { monday: 1, tuesday: 2, wednesday: 3, thursday: 4, friday: 5 }, ]); const fn = net.toFunction(); const result = (fn as (input: INumberHash) => INumberHash)({ monday: 1, tuesday: 2, wednesday: 3, thursday: 4, }); expect(result).toEqual( net.run({ monday: 1, tuesday: 2, wednesday: 3, thursday: 4 }) ); expect(Object.keys(result).length).toBe(1); expect(result.friday.toFixed(0)).toBe('5'); }); }); describe('.test()', () => { let runSpy: jest.SpyInstance; beforeEach(() => { runSpy = jest.spyOn(LSTMTimeStep.prototype, 'run'); }); afterEach(() => { runSpy.mockRestore(); }); describe('with any data shape', () => { let formatDataSpy: jest.SpyInstance; beforeEach(() => { formatDataSpy = jest.spyOn(RNNTimeStep.prototype, 'formatData'); }); afterEach(() => { formatDataSpy.mockRestore(); }); it('calls .formatData()', () => { const data = [[1, 2]]; const net = new RNNTimeStep(); net.train(data); formatDataSpy.mockClear(); net.test(data); expect(formatDataSpy).toHaveBeenCalledWith(data); }); }); describe('using array,array,number', () => { const trainingData = [[0.1, 0.2, 0.3, 0.4, 0.5]]; describe('inputSize of 1', () => { it('accumulates no error or misclasses when no error', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const testResult = net.test(trainingData); 
expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); it('accumulates error and misclasses when error', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const misclass = [1, 2, 3, 4, 5]; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThan(0.1); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); describe('inputSize of 2', () => { it('throws', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 5, }); expect(() => { net.test(trainingData); }).toThrow('inputSize must match data input size'); }); }); describe('outputSize of 2', () => { it('throws', () => { const net = new LSTMTimeStep({ inputSize: 5, hiddenLayers: [10], outputSize: 2, }); expect(() => { net.test(trainingData); }).toThrow('outputSize must match data output size'); }); }); }); describe('using array,array,array,number', () => { const trainingData = [ [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], [0.5, 0.1], ], ]; describe('inputSize of 2', () => { describe('no error', () => { it('can test', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData, { iterations: 500 }); const testResult = net.test(trainingData); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData, { iterations: 500 }); const misclass = [ [1, 5], [2, 4], [3, 3], [4, 2], [5, 1], ]; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThanOrEqual(0.1); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, 
actual: runSpy.mock.results[0].value, }, ]); }); }); }); }); describe('using array,object,number', () => { const trainingData = [ { monday: 0.1, tuesday: 0.1, wednesday: 0.2, thursday: 0.3, friday: 0.4, }, ]; describe('inputSize of 1', () => { describe('no error', () => { it('can test w/ forecastNumbers of 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const testResult = net.test(trainingData); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test w/ forecastNumbers of 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const misclass = { monday: 1, tuesday: 2, wednesday: 3, thursday: 4, friday: 5, }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThanOrEqual(0.08); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); }); }); describe('using array,array,object,number', () => { const trainingData = [ [ { low: 0.1, high: 0.5 }, { low: 0.2, high: 0.4 }, { low: 0.3, high: 0.3 }, { low: 0.4, high: 0.2 }, { low: 0.5, high: 0.1 }, ], ]; describe('inputSize of 2', () => { describe('no error', () => { it('can test w/ run of 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData); const testResult = net.test(trainingData); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test w/ run of 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData, { iterations: 500 }); const misclass = [ { low: 1, high: 5 }, { low: 2, high: 4 }, { low: 3, high: 3 }, { low: 4, high: 
2 }, { low: 5, high: 1 }, ]; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThan(0.3); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); }); }); describe('using array,datum,array,number', () => { const trainingData = [{ input: [0.1, 0.2, 0.3, 0.4], output: [0.5] }]; describe('no error', () => { it('can test w/ forecast of 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const testResult = net.test(trainingData); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test w/ forecast of 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const misclass = { input: [1, 2, 3, 4], output: [5] }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThanOrEqual(0.08); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); }); describe('using array,datum,object,number', () => { const trainingData = [ { input: { monday: 0.1, tuesday: 0.2, wednesday: 0.3, thursday: 0.4, }, output: { friday: 0.5 }, }, ]; describe('inputSize of 1', () => { describe('no error', () => { it('can test w/ forecastNumbers of 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); net.train(trainingData, { iterations: 500 }); const testResult = net.test(trainingData); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test w/ forecastNumbers of 1', () => { const net = new LSTMTimeStep({ inputSize: 1, hiddenLayers: [10], outputSize: 1, }); 
net.train(trainingData); const misclass = { input: { monday: 1, tuesday: 2, wednesday: 3, thursday: 4, }, output: { friday: 5 }, }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThanOrEqual(0.08); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); }); }); describe('using array,datum,array,array', () => { const trainingData1 = [ { input: [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], [0.4, 0.2], ], output: [[0.5, 0.1]], }, ]; const trainingData2 = [ { input: [ [0.1, 0.5], [0.2, 0.4], [0.3, 0.3], ], output: [ [0.4, 0.2], [0.5, 0.1], ], }, ]; describe('no error', () => { it('can test w/ forecast of 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData1, { iterations: 500 }); const testResult = net.test(trainingData1); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); it('can test w/ forecast of 2', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData2, { iterations: 500 }); const testResult = net.test(trainingData2); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test w/ forecast of 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData1, { iterations: 500 }); const misclass = { input: [ [1, 5], [2, 4], [3, 3], [4, 2], ], output: [[5, 1]], }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThan(0.1); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); it('can test w/ forecast of 2', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); 
net.train(trainingData2, { iterations: 500 }); const misclass = { input: [ [1, 5], [2, 4], [3, 3], ], output: [ [4, 2], [5, 1], ], }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThanOrEqual(0.08); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); }); describe('using array,datum,array,object,number', () => { const trainingData1 = [ { input: [ { low: 0.1, high: 0.5 }, { low: 0.2, high: 0.4 }, { low: 0.3, high: 0.3 }, { low: 0.4, high: 0.2 }, ], output: [{ low: 0.5, high: 0.1 }], }, ]; const trainingData2 = [ { input: [ { low: 0.1, high: 0.5 }, { low: 0.2, high: 0.4 }, { low: 0.3, high: 0.3 }, ], output: [ { low: 0.4, high: 0.2 }, { low: 0.5, high: 0.1 }, ], }, ]; describe('no error', () => { it('can test w/ forecast of 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData1, { iterations: 500 }); const testResult = net.test(trainingData1); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); it('can test w/ forecast of 2', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData2, { iterations: 500 }); const testResult = net.test(trainingData2); expect(testResult.error).toBeLessThan(0.001); expect(testResult.misclasses.length).toBe(0); }); }); describe('some error', () => { it('can test w/ forecast of 1', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData1, { iterations: 500 }); const misclass = { input: [ { low: 1, high: 5 }, { low: 2, high: 4 }, { low: 3, high: 3 }, { low: 4, high: 2 }, ], output: [{ low: 0.5, high: 0.1 }], }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThan(0.1); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { 
value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); it('can test w/ forecast of 2', () => { const net = new LSTMTimeStep({ inputSize: 2, hiddenLayers: [10], outputSize: 2, }); net.train(trainingData2, { iterations: 500 }); const misclass = { input: [ { low: 1, high: 5 }, { low: 2, high: 4 }, { low: 3, high: 3 }, ], output: [ { low: 4, high: 2 }, { low: 5, high: 1 }, ], }; const testResult = net.test([misclass]); expect(testResult.error).toBeGreaterThanOrEqual(0.08); expect(testResult.misclasses.length).toBe(1); expect(testResult.misclasses).toEqual([ { value: misclass, actual: runSpy.mock.results[0].value, }, ]); }); }); }); }); describe('.addFormat()', () => { it('array,array,number', () => { const instance = {}; RNNTimeStep.prototype.addFormat.call(instance, [[0]]); expect(instance).toEqual({}); }); it('datum,array,array,number', () => { const instance = {}; RNNTimeStep.prototype.addFormat.call(instance, { input: [[0]], output: [[0]], }); expect(instance).toEqual({}); }); it('array,number', () => { const instance = {}; RNNTimeStep.prototype.addFormat.call(instance, [0]); expect(instance).toEqual({}); }); it('datum,array,number', () => { const instance = {}; RNNTimeStep.prototype.addFormat.call(instance, { input: [0], output: [0], }); expect(instance).toEqual({}); }); it('datum,object,number', () => { const instance = { inputLookup: { inputOne: 0 }, outputLookup: { outputOne: 0 }, }; RNNTimeStep.prototype.addFormat.call(instance, { input: { inputTwo: 1, inputThree: 2 }, output: { outputTwo: 1, outputThree: 2 }, }); expect(instance).toEqual({ inputLookup: { inputOne: 0, inputTwo: 1, inputThree: 2 }, inputLookupLength: 3, outputLookup: { outputOne: 0, outputTwo: 1, outputThree: 2 }, outputLookupLength: 3, }); }); it('object,number', () => { const instance = { inputLookup: { inputOne: 0 }, }; RNNTimeStep.prototype.addFormat.call(instance, { inputTwo: 1, inputThree: 2, }); expect(instance).toEqual({ inputLookup: { inputOne: 0, inputTwo: 1, inputThree: 2 
}, inputLookupLength: 3, outputLookup: { inputOne: 0, inputTwo: 1, inputThree: 2 }, outputLookupLength: 3, }); }); // it('array,object,number', () => {}); // it('datum,array,object,number', () => {}); }); describe('.toJSON()', () => { it('saves network dimensions to json', () => { const inputSize = 4; const hiddenLayers = [1, 2, 3]; const outputSize = 5; const net = new RNNTimeStep({ inputSize, hiddenLayers, outputSize, }); const { inputLookup, inputLookupLength, outputLookup, outputLookupLength, } = net; net.initialize(); const json = net.toJSON(); expect(json.options.inputSize).toBe(inputSize); expect(json.options.hiddenLayers).toEqual(hiddenLayers); expect(json.options.outputSize).toBe(outputSize); expect(json.inputLookup).toBe(inputLookup); expect(json.inputLookupLength).toBe(inputLookupLength); expect(json.outputLookup).toBe(outputLookup); expect(json.outputLookupLength).toBe(outputLookupLength); }); }); describe('.fromJSON()', () => { it('restores network dimensions from json', () => { const inputSize = 45; const hiddenLayers = [1, 2, 3, 4, 5, 6, 7, 8, 9]; const outputSize = 20; const net = new RNNTimeStep({ inputSize, hiddenLayers, outputSize, }); net.initialize(); const json = net.toJSON(); const { inputLookup, inputLookupLength, outputLookup, outputLookupLength, } = json; const serializedNet = new RNNTimeStep(); serializedNet.fromJSON(json); expect(serializedNet.options.inputSize).toBe(inputSize); expect(serializedNet.options.hiddenLayers).toEqual(hiddenLayers); expect(serializedNet.options.outputSize).toBe(outputSize); expect(serializedNet.inputLookup).toBe(inputLookup); expect(serializedNet.inputLookupLength).toBe(inputLookupLength); expect(serializedNet.outputLookup).toBe(outputLookup); expect(serializedNet.outputLookupLength).toBe(outputLookupLength); }); it('error rate stays same after serialization', () => { const inputSize = 1; const hiddenLayers = [10]; const outputSize = 1; const net = new RNNTimeStep({ inputSize, hiddenLayers, outputSize, }); let 
lastNetStatus: IRNNStatus = { error: Infinity, iterations: -1 }; const trainingData = [ { monday: 1, tuesday: 2, wednesday: 3, thursday: 4, friday: 5 }, ]; net.train(trainingData, { callback: (status) => { lastNetStatus = status; }, iterations: 50, }); net.run({ monday: 1, tuesday: 2, wednesday: 3, thursday: 4 }); const json = net.toJSON(); const serializedNet = new RNNTimeStep(); serializedNet.fromJSON(json); let lastSerializedNetStatus: IRNNStatus = { error: Infinity, iterations: -1, }; serializedNet.train(trainingData, { iterations: 1, callback: (status: IRNNStatus) => { lastSerializedNetStatus = status; }, }); expect(lastSerializedNetStatus.error).toBeLessThan(lastNetStatus.error); }); }); });
the_stack
import { createElement } from '@syncfusion/ej2-base'; import { Diagram } from '../../../src/diagram/diagram'; import { NodeModel, BpmnShapeModel, BpmnSubProcessModel, BpmnActivityModel } from '../../../src/diagram/objects/node-model'; import { BpmnDiagrams } from '../../../src/diagram/objects/bpmn'; import { TextStyleModel, MarginModel } from '../../../src/diagram/core/appearance-model'; import { HorizontalAlignment, VerticalAlignment, NodeConstraints } from '../../../src/diagram/enum/enum'; import { ConnectorModel, BpmnFlowModel } from '../../../src/diagram/objects/connector-model'; import { BpmnFlow } from '../../../src/diagram/index'; import {profile , inMB, getMemoryProfile} from '../../../spec/common.spec'; Diagram.Inject(BpmnDiagrams); /** * BPMN Shape property changes */ describe('Diagram Control', () => { describe('Property Change - BPMN Shape - datobject and gateway', () => { let diagram: Diagram; let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 }; let ele: HTMLElement; beforeAll((): void => { const isDef = (o: any) => o !== undefined && o !== null; if (!isDef(window.performance)) { console.log("Unsupported environment, window.performance.memory is unavailable"); this.skip(); //Skips test (in Chai) return; } ele = createElement('div', { id: 'bpmn2' }); document.body.appendChild(ele); //gateway let node0: NodeModel = { id: 'node0', width: 100, height: 100, offsetX: 100, offsetY: 100, shape: { type: 'Bpmn', shape: 'Gateway', gateway: { type: 'Exclusive' } }, }; //gateway let node1: NodeModel = { id: 'node1', width: 100, height: 100, offsetX: 300, offsetY: 100, shape: { type: 'Bpmn', shape: 'Gateway', gateway: { type: 'EventBased' } }, }; //gateway let node2: NodeModel = { id: 'node2', width: 100, height: 100, offsetX: 500, offsetY: 100, shape: { type: 'Bpmn', shape: 'Gateway', gateway: { type: 'None' } }, }; //dataobject let node3: NodeModel = { id: 'node3', width: 100, height: 100, offsetX: 100, offsetY: 300, shape: { type: 'Bpmn', 
shape: 'DataObject', dataObject: { collection: true, type: 'Input' } } as BpmnShapeModel, }; //dataobject-collection and type let node4: NodeModel = { id: 'node4', width: 100, height: 100, offsetX: 300, offsetY: 300, shape: { type: 'Bpmn', shape: 'DataObject', dataObject: { collection: false, type: 'Output' } } as BpmnShapeModel, }; //dataobject-collection and type let node5: NodeModel = { id: 'node5', width: 100, height: 100, offsetX: 500, offsetY: 300, shape: { type: 'Bpmn', shape: 'DataObject', dataObject: { collection: false, type: 'None' } } as BpmnShapeModel, }; //dataobject-collection and type let node6: NodeModel = { id: 'node6', width: 100, height: 100, offsetX: 700, offsetY: 300, shape: { type: 'Bpmn', shape: 'DataObject', dataObject: { collection: true, type: 'Input' } } as BpmnShapeModel, }; diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node0, node1, node2, node3, node4, node5, node6] }); diagram.appendTo('#bpmn2'); }); afterAll((): void => { diagram.destroy(); ele.remove(); }); it('Checking gateway type change - complex', (done: Function) => { // gateway- gateway: { type: 'Exclusive' } ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).gateway.type = 'Complex'; diagram.dataBind(); diagram.nodes[0].width = 150; diagram.nodes[0].height = 150; diagram.nodes[3].width = 150; diagram.nodes[3].height = 150; diagram.nodes[6].width = 150; diagram.nodes[6].height = 150; expect(((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).gateway.type === 'Complex').toBe(true); done(); }); it('Checking gateway type change - None', (done: Function) => { // // gateway: { type: 'EventBased' } ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).gateway.type = 'None'; diagram.dataBind(); expect(((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).gateway.type === 'None').toBe(true); done(); }); it('Checking gateway type change-parallel', (done: Function) => { // // gateway: { type: 'EventBased' } ((diagram.nodes[2] as NodeModel).shape 
as BpmnShapeModel).gateway.type = 'Parallel'; diagram.dataBind(); expect(((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).gateway.type === 'Parallel').toBe(true); done(); }); it('Checking DataObject type change', (done: Function) => { (diagram.nodes[3] as NodeModel).style.fill = 'lightblue'; (diagram.nodes[3] as NodeModel).style.opacity = 0.75; ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).dataObject.type = 'Output'; ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).dataObject.collection = false; diagram.dataBind(); expect(((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).dataObject.type === 'Output' && ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).dataObject.collection === false).toBe(true); done(); }); it('Checking DataObject type change', (done: Function) => { // //dataobject dataObject: { collection: false, type: 'Output' } ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).dataObject.type = 'Input'; ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).dataObject.collection = true; // dataobject - dataObject: { collection: true, type: 'Input' } diagram.dataBind(); expect(((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).dataObject.type === 'Input' && ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).dataObject.collection === true).toBe(true); done(); }); it('Checking DataObject type change', (done: Function) => { // //dataobject dataObject: { collection: false, type: 'None' } ((diagram.nodes[5] as NodeModel).shape as BpmnShapeModel).dataObject.type = 'Input'; ((diagram.nodes[5] as NodeModel).shape as BpmnShapeModel).dataObject.collection = true; // //dataobject dataObject: { collection: true, type: 'Input' } ((diagram.nodes[6] as NodeModel).shape as BpmnShapeModel).dataObject.type = 'None'; ((diagram.nodes[6] as NodeModel).shape as BpmnShapeModel).dataObject.collection = false; diagram.dataBind(); expect(((diagram.nodes[6] as NodeModel).shape as BpmnShapeModel).dataObject.type === 
'None' && ((diagram.nodes[6] as NodeModel).shape as BpmnShapeModel).dataObject.collection === false).toBe(true); done(); }); }); describe('Property Change - BPMN Shape - events', () => { let diagram: Diagram; let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 }; let ele: HTMLElement; beforeAll((): void => { const isDef = (o: any) => o !== undefined && o !== null; if (!isDef(window.performance)) { console.log("Unsupported environment, window.performance.memory is unavailable"); this.skip(); //Skips test (in Chai) return; } ele = createElement('div', { id: 'bpmn3' }); document.body.appendChild(ele); //event- event and trigger let node0: NodeModel = { id: 'node0', width: 100, height: 100, offsetX: 100, offsetY: 100, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'End', trigger: 'None' } }, }; let node1: NodeModel = { id: 'node1', width: 100, height: 100, offsetX: 300, offsetY: 100, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'Intermediate', trigger: 'None' } }, }; let node2: NodeModel = { id: 'node2', width: 100, height: 100, offsetX: 500, offsetY: 100, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'NonInterruptingIntermediate', trigger: 'None' } }, }; let node3: NodeModel = { id: 'node3', width: 100, height: 100, offsetX: 700, offsetY: 100, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'NonInterruptingStart', trigger: 'None' } }, }; let node4: NodeModel = { id: 'node4', width: 100, height: 100, offsetX: 900, offsetY: 100, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'Start', trigger: 'None' } }, }; let node5: NodeModel = { id: 'node5', width: 100, height: 100, offsetX: 100, offsetY: 300, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'ThrowingIntermediate', trigger: 'None' } }, }; let node6: NodeModel = { id: 'node6', width: 100, height: 100, offsetX: 300, offsetY: 300, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'Intermediate', trigger: 'Escalation' } }, }; let node7: NodeModel = { 
id: 'node7', width: 100, height: 100, offsetX: 500, offsetY: 300, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'Start', trigger: 'None' } }, }; let node8: NodeModel = { id: 'node8', width: 100, height: 100, offsetX: 700, offsetY: 300, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'Start', trigger: 'Conditional' } }, }; let node9: NodeModel = { id: 'node9', width: 100, height: 100, offsetX: 900, offsetY: 300, shape: { type: 'Bpmn', shape: 'Event', event: { event: 'Start', trigger: 'Message' } }, }; diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node0, node1, node2, node3, node4, node5, node6, node7, node8, node9] }); diagram.appendTo('#bpmn3'); diagram.dataBind(); }); afterAll((): void => { diagram.destroy(); ele.remove(); }); it('Checking event property changes', (done: Function) => { diagram.nodes[0].width = 150; diagram.nodes[0].height = 150; diagram.nodes[9].width = 150; diagram.nodes[9].height = 150; // event: { event: 'End', trigger: 'None' } }, diagram.nodes[1].style.fill = 'lightblue'; diagram.nodes[1].style.opacity = 0.75; diagram.nodes[1].style.strokeColor = 'red'; diagram.nodes[6].style.fill = 'lightblue'; diagram.nodes[6].style.opacity = 0.75; diagram.nodes[6].style.strokeColor = 'red'; ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).event.event = 'Intermediate'; // event: { event: 'Intermediate', trigger: 'None' } }, ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).event.event = 'End'; // event: { event: 'NonInterruptingIntermediate', trigger: 'None' } }, ((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).event.event = 'NonInterruptingStart'; // event: { event: 'NonInterruptingStart', trigger: 'None' } } ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).event.event = 'NonInterruptingIntermediate'; // event: { event: 'Start', trigger: 'None' } }, ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).event.event = 'ThrowingIntermediate'; // event: { event: 'ThrowingIntermediate', 
trigger: 'None' } }, ((diagram.nodes[5] as NodeModel).shape as BpmnShapeModel).event.event = 'Start'; // event: { event: 'Intermediate', trigger: 'Escalation' } }, ((diagram.nodes[6] as NodeModel).shape as BpmnShapeModel).event.trigger = 'Compensation'; // event: { event: 'Start', trigger: 'None' } }, ((diagram.nodes[7] as NodeModel).shape as BpmnShapeModel).event.trigger = 'Compensation'; //event: { event: 'Start', trigger: 'Conditional' } }, ((diagram.nodes[8] as NodeModel).shape as BpmnShapeModel).event.trigger = 'None'; diagram.dataBind(); expect(((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).event.event === 'Intermediate').toBe(true); done(); }); }); describe('Property Change - BPMN Shape - task and loop', () => { let diagram: Diagram; let ele: HTMLElement; let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 }; beforeAll((): void => { const isDef = (o: any) => o !== undefined && o !== null; if (!isDef(window.performance)) { console.log("Unsupported environment, window.performance.memory is unavailable"); this.skip(); //Skips test (in Chai) return; } ele = createElement('div', { id: 'bpmn4' }); document.body.appendChild(ele); let node0: NodeModel = { id: 'node0', width: 100, height: 100, offsetX: 100, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'Task', task: { type: 'BusinessRule', loop: 'ParallelMultiInstance' } } }, }; let node1: NodeModel = { id: 'node1', width: 100, height: 100, offsetX: 300, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'Task', task: { type: 'None', loop: 'None' } } }, }; let node2: NodeModel = { id: 'node2', width: 100, height: 100, offsetX: 500, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'Task', task: { type: 'InstantiatingReceive', loop: 'Standard' } } }, }; let node3: NodeModel = { id: 'node3', width: 100, height: 100, offsetX: 700, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { 
activity: 'Task', task: { type: 'Receive', loop: 'Standard', call: true } } }, }; let node4: NodeModel = { id: 'node4', width: 100, height: 100, offsetX: 900, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'Task', task: { type: 'None', loop: 'None', call: false } } }, }; let node5: NodeModel = { id: 'node5', width: 100, height: 100, offsetX: 100, offsetY: 300, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'Task', task: { type: 'None', loop: 'None', compensation: true } } }, }; let node6: NodeModel = { id: 'node6', width: 100, height: 100, offsetX: 300, offsetY: 300, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'Task', task: { type: 'None', loop: 'None', compensation: false } } }, }; diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node0, node1, node2, node3, node4, node5, node6] }); diagram.appendTo('#bpmn4'); }); afterAll((): void => { diagram.destroy(); ele.remove(); }); it('Checking - task-service', (done: Function) => { diagram.nodes[0].width = 150; diagram.nodes[0].height = 150; //issue //activity - task task: { type: 'BusinessRule', loop: 'ParallelMultiInstance' } ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.type = 'Service'; ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.loop = 'SequenceMultiInstance'; //activity - task: { type: 'None', loop: 'None' } ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.task.type = 'InstantiatingReceive'; ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.task.loop = 'ParallelMultiInstance'; //task: { type: 'InstantiatingReceive', loop: 'Standard' } ((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).activity.task.type = 'None'; ((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).activity.task.loop = 'None'; // // event: { event: 'NonInterruptingStart', trigger: 'None' } } ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).activity.task.call 
= false; // event: { event: 'Start', trigger: 'None' } }, ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).activity.task.call = true; // event: { event: 'ThrowingIntermediate', trigger: 'None' } }, ((diagram.nodes[5] as NodeModel).shape as BpmnShapeModel).activity.task.compensation = false; // event: { event: 'Intermediate', trigger: 'Escalation' } }, ((diagram.nodes[6] as NodeModel).shape as BpmnShapeModel).activity.task.compensation = true; diagram.dataBind(); expect(((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.type === 'Service' && (((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.loop === 'SequenceMultiInstance' )).toBe(true); done(); }); }); describe('Property Change - BPMN Shape - adhoc and boundary', () => { let diagram: Diagram; let ele: HTMLElement; let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 }; beforeAll((): void => { const isDef = (o: any) => o !== undefined && o !== null; if (!isDef(window.performance)) { console.log("Unsupported environment, window.performance.memory is unavailable"); this.skip(); //Skips test (in Chai) return; } ele = createElement('div', { id: 'bpmn5' }); document.body.appendChild(ele); //activity- subprocess - adhoc let node1: NodeModel = { id: 'node1', width: 100, height: 100, offsetX: 100, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', adhoc: true, events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Intermediate', trigger: 'Compensation' }] } } }, }; //activity-adhoc let node2: NodeModel = { id: 'node2', width: 100, height: 100, offsetX: 300, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', adhoc: false, events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Intermediate', trigger: 'Compensation' }] } } }, }; //event- boundary let node4: 
NodeModel = { id: 'node4', width: 100, height: 100, offsetX: 500, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', boundary: 'Call', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Intermediate', trigger: 'Compensation' }] } } }, }; //event-boundary let node5: NodeModel = { id: 'node5', width: 100, height: 100, offsetX: 700, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', boundary: 'Default', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Intermediate', trigger: 'Compensation' }] } } }, }; //event- boundary let node6: NodeModel = { id: 'node6', width: 100, height: 100, offsetX: 900, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', boundary: 'Event', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Intermediate', trigger: 'Compensation' }] } } }, }; diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node1, node2, node4, node5, node6] }); diagram.appendTo('#bpmn5'); }); afterAll((): void => { diagram.destroy(); ele.remove(); }); it('Checking subprocess adhoc', (done: Function) => { //adhoc ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.adhoc = false; //adhoc ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.adhoc = true; //boundary ((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).activity.subProcess.boundary = 'Default'; //boundary ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).activity.subProcess.boundary = 'Event'; //boundary ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).activity.subProcess.boundary = 'Call'; diagram.dataBind(); expect(((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.adhoc === 
false).toBe(true); done(); }); }); describe('Property Change - BPMN Shape - loop, collapsed, compensation', () => { let diagram: Diagram; let ele: HTMLElement; let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 }; beforeAll((): void => { const isDef = (o: any) => o !== undefined && o !== null; if (!isDef(window.performance)) { console.log("Unsupported environment, window.performance.memory is unavailable"); this.skip(); //Skips test (in Chai) return; } ele = createElement('div', { id: 'bpmn6' }); document.body.appendChild(ele); //activity- subprocess - loop let node1: NodeModel = { id: 'node1', width: 100, height: 100, offsetX: 100, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', loop: 'ParallelMultiInstance', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Start', trigger: 'None' }] } } }, }; //activity-collapsed let node2: NodeModel = { id: 'node2', width: 100, height: 100, offsetX: 300, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: true, type: 'Event', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Start', trigger: 'None' }] } } }, }; //event- collapsed let node4: NodeModel = { id: 'node4', width: 100, height: 100, offsetX: 500, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { collapsed: false, type: 'Event', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Start', trigger: 'None' }] } } }, }; //event-compensation let node5: NodeModel = { id: 'node5', width: 100, height: 100, offsetX: 700, offsetY: 100, shape: { type: 'Bpmn', shape: 'Activity', activity: { activity: 'SubProcess', subProcess: { compensation: true, type: 'Event', boundary: 'Default', events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Intermediate', trigger: 'Compensation' }] } } 
}, };
// event-compensation
let node5a: NodeModel = {
    id: 'node5a', width: 100, height: 100, offsetX: 900, offsetY: 100,
    shape: {
        type: 'Bpmn', shape: 'Activity',
        activity: {
            activity: 'SubProcess',
            subProcess: {
                compensation: false, type: 'Event', boundary: 'Default',
                events: [{ height: 20, width: 20, offset: { x: 0, y: 0 }, event: 'Intermediate', trigger: 'Conditional' }]
            }
        }
    },
};
diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node1, node2, node4, node5, node5a] });
diagram.appendTo('#bpmn6');
});
afterAll((): void => {
    diagram.destroy();
    ele.remove();
});
// Flips loop/collapsed/compensation flags at runtime and checks the model reflects the change.
it('Checking subprocess-loop', (done: Function) => {
    // subprocess loop
    ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.loop = 'SequenceMultiInstance';
    // collapsed
    ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.collapsed = false;
    // collapsed
    ((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).activity.subProcess.collapsed = true;
    // compensation
    ((diagram.nodes[3] as NodeModel).shape as BpmnShapeModel).activity.subProcess.compensation = false;
    // compensation
    ((diagram.nodes[4] as NodeModel).shape as BpmnShapeModel).activity.subProcess.compensation = true;
    diagram.dataBind();
    expect(((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.loop === 'SequenceMultiInstance').toBe(true);
    done();
});
});

// Runtime property changes on annotations and ports of BPMN sub-process events.
describe('Property Change - BPMN Shape', () => {
    let diagram: Diagram;
    let ele: HTMLElement;
    let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 };
    beforeAll((): void => {
        const isDef = (o: any) => o !== undefined && o !== null;
        if (!isDef(window.performance)) {
            console.log("Unsupported environment, window.performance.memory is unavailable");
            // NOTE(review): inside an arrow function `this` is not the test-framework
            // context, so this.skip() presumably does not skip as intended — confirm.
            this.skip(); // Skips test (in Chai)
            return;
        }
        ele = createElement('div', { id: 'bpmn7' });
        document.body.appendChild(ele);
        // activity- annotations
        let node8: NodeModel = {
            id: 'node8', width: 100, height: 100, offsetX: 100, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event',
                        events: [{
                            height: 20, width: 20, offset: { x: 0.9, y: 0.4 },
                            annotations: [{ id: 'label3', horizontalAlignment: 'Center', verticalAlignment: 'Top', content: 'Error', offset: { x: 0.5, y: 1 }, style: { color: 'black', fontSize: 12, fill: 'white', strokeColor: 'white', whiteSpace: 'PreserveAll' } as TextStyleModel }],
                            ports: [{ shape: 'Square', id: 'port4', margin: sourceMargin, width: 6, height: 6, offset: { x: 0, y: 0 }, style: { fill: 'red', strokeColor: 'black', strokeWidth: 2, opacity: 1 } }],
                            event: 'Intermediate', trigger: 'Error'
                        }]
                    }
                }
            },
        };
        // activity- ports
        let node9: NodeModel = {
            id: 'node9', width: 100, height: 100, offsetX: 300, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event',
                        events: [{
                            height: 20, width: 20, offset: { x: 0.9, y: 0.4 },
                            annotations: [{ id: 'label3', horizontalAlignment: 'Center', verticalAlignment: 'Top', content: 'Error', offset: { x: 0.5, y: 1 }, style: { color: 'black', fontSize: 12, fill: 'white', strokeColor: 'white', whiteSpace: 'PreserveAll' } as TextStyleModel }],
                            ports: [{ shape: 'Square', id: 'port4', margin: sourceMargin, width: 6, height: 6, offset: { x: 0, y: 0 }, style: { fill: 'red', strokeColor: 'black', strokeWidth: 2, opacity: 1 } }],
                            event: 'Intermediate', trigger: 'Error'
                        }]
                    }
                }
            },
        };
        diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node8, node9] });
        diagram.appendTo('#bpmn7');
    });
    afterAll((): void => {
        diagram.destroy();
        ele.remove();
    });
    it('Checking sub event annotation property change', (done: Function) => {
        // annotations
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].annotations[0].content = 'Link';
        // ports
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].ports[0].shape = 'Circle';
        diagram.dataBind();
        expect((((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].annotations[0].content) === 'Link' ).toBe(true);
        done();
    });
});

// Runtime changes of event size, event type and event offset on sub-process events.
describe('Property Change - BPMN Shape - width and height, event, offset', () => {
    let diagram: Diagram;
    let ele: HTMLElement;
    let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 };
    beforeAll((): void => {
        const isDef = (o: any) => o !== undefined && o !== null;
        if (!isDef(window.performance)) {
            console.log("Unsupported environment, window.performance.memory is unavailable");
            this.skip(); // Skips test (in Chai)
            return;
        }
        ele = createElement('div', { id: 'bpmn8' });
        document.body.appendChild(ele);
        // activity- width and height
        let node10: NodeModel = {
            id: 'node10', width: 100, height: 100, offsetX: 100, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event',
                        events: [{
                            height: 20, width: 20, offset: { x: 0.9, y: 0.4 },
                            annotations: [{ id: 'label3', horizontalAlignment: 'Center', verticalAlignment: 'Top', content: 'Error', offset: { x: 0.5, y: 1 }, style: { color: 'black', fontSize: 12, fill: 'white', strokeColor: 'white', whiteSpace: 'PreserveAll' } as TextStyleModel }],
                            ports: [{ shape: 'Square', id: 'port4', margin: sourceMargin, width: 6, height: 6, offset: { x: 0, y: 0 }, style: { fill: 'red', strokeColor: 'black', strokeWidth: 2, opacity: 1 } }],
                            event: 'Intermediate', trigger: 'Error'
                        }]
                    }
                }
            },
        };
        // activity- event
        let node15: NodeModel = {
            id: 'node15', width: 100, height: 100, offsetX: 300, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event', loop: 'ParallelMultiInstance',
                        events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Start', trigger: 'None' }]
                    }
                }
            },
        };
        // activity- offset
        let node16: NodeModel = {
            id: 'node16', width: 100, height: 100, offsetX: 500, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event', loop: 'ParallelMultiInstance',
                        events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, event: 'Start', trigger: 'None' }]
                    }
                }
            },
        };
        diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node10, node15, node16] });
        diagram.appendTo('#bpmn8');
    });
    afterAll((): void => {
        diagram.destroy();
        ele.remove();
    });
    it('Checking sub event property change', (done: Function) => {
        // width and height
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].width = 38;
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].height = 38;
        // event
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].event = 'Intermediate';
        // offset
        ((diagram.nodes[2] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].offset = { x: 0, y: 0 };
        diagram.dataBind();
        expect((((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].width === 38) && (((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].height === 38) ).toBe(true);
        done();
    });
});

// Runtime changes of event margin and alignment on sub-process events.
// NOTE(review): this suite reuses the div id 'bpmn8' used by the previous suite;
// afterAll removes the element, so there is presumably no clash — confirm.
describe('Property Change - BPMN Shape - margin and Alignmnet', () => {
    let diagram: Diagram;
    let ele: HTMLElement;
    let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 };
    let subeventMargin: MarginModel = { left: 10, top: 10 };
    beforeAll((): void => {
        const isDef = (o: any) => o !== undefined && o !== null;
        if (!isDef(window.performance)) {
            console.log("Unsupported environment, window.performance.memory is unavailable");
            this.skip(); // Skips test (in Chai)
            return;
        }
        ele = createElement('div', { id: 'bpmn8' });
        document.body.appendChild(ele);
        // activity- margin
        let node10: NodeModel = {
            id: 'node10', width: 100, height: 100, offsetX: 100, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event',
                        events: [{
                            height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, margin: subeventMargin,
                            annotations: [{ id: 'label3', horizontalAlignment: 'Center', verticalAlignment: 'Top', content: 'Error', offset: { x: 0.5, y: 1 }, style: { color: 'black', fontSize: 12, fill: 'white', strokeColor: 'white', whiteSpace: 'PreserveAll' } as TextStyleModel }],
                            ports: [{ shape: 'Square', id: 'port4', margin: sourceMargin, width: 6, height: 6, offset: { x: 0, y: 0 }, style: { fill: 'red', strokeColor: 'black', strokeWidth: 2, opacity: 1 } }],
                            event: 'Intermediate', trigger: 'Error'
                        }]
                    }
                }
            },
        };
        // activity- alignmnet
        let node15: NodeModel = {
            id: 'node15', width: 100, height: 100, offsetX: 300, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event', loop: 'ParallelMultiInstance',
                        events: [{ height: 20, width: 20, offset: { x: 0.9, y: 0.4 }, horizontalAlignment: 'Left', verticalAlignment: 'Top', event: 'Start', trigger: 'None' }]
                    }
                }
            },
        };
        diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node10, node15] });
        diagram.appendTo('#bpmn8');
    });
    afterAll((): void => {
        diagram.destroy();
        ele.remove();
    });
    it('Checking Subprocess - event - property change', (done: Function) => {
        // margin - { left: 10, top: 10 }
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].margin.left = 25;
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].margin.top = 25;
        // horizontalAlignment: HorizontalAlignment.Left, verticalAlignment: VerticalAlignment.Top,
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel ).activity.subProcess.events[0].horizontalAlignment = 'Right';
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel ).activity.subProcess.events[0].verticalAlignment = 'Center';
        diagram.dataBind();
        expect((((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].margin.left === 25) && (((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events[0].margin.top === 25) ).toBe(true);
        done();
    });
});

// Toggling the activity kind between 'SubProcess' and 'Task' at runtime.
describe('Property Change - BPMN Shape -toggle - task subprocess', () => {
    let diagram: Diagram;
    let ele: HTMLElement;
    let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 };
    beforeAll((): void => {
        const isDef = (o: any) => o !== undefined && o !== null;
        if (!isDef(window.performance)) {
            console.log("Unsupported environment, window.performance.memory is unavailable");
            this.skip(); // Skips test (in Chai)
            return;
        }
        ele = createElement('div', { id: 'bpmn18' });
        document.body.appendChild(ele);
        // activity- subprocess
        let node10: NodeModel = {
            id: 'node10', width: 100, height: 100, offsetX: 100, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: {
                    activity: 'SubProcess',
                    subProcess: {
                        collapsed: true, type: 'Event',
                        events: [{
                            height: 20, width: 20, offset: { x: 0.9, y: 0.4 },
                            annotations: [{ id: 'label3', horizontalAlignment: 'Center', verticalAlignment: 'Top', content: 'Error', offset: { x: 0.5, y: 1 }, style: { color: 'black', fontSize: 12, fill: 'white', strokeColor: 'white', whiteSpace: 'PreserveAll' } as TextStyleModel }],
                            ports: [{ shape: 'Square', id: 'port4', margin: sourceMargin, width: 6, height: 6, offset: { x: 0, y: 0 }, style: { fill: 'red', strokeColor: 'black', strokeWidth: 2, opacity: 1 } }],
                            event: 'Intermediate', trigger: 'Error'
                        }]
                    }
                }
            },
        };
        // activity- event
        let node15: NodeModel = {
            id: 'node15', width: 100, height: 100, offsetX: 300, offsetY: 100,
            shape: {
                type: 'Bpmn', shape: 'Activity',
                activity: { activity: 'Task', task: { type: 'Script', loop: 'ParallelMultiInstance', call: false } }
            },
        };
        diagram = new Diagram({ width: '1500px', height: '500px', nodes: [node10, node15] });
        diagram.appendTo('#bpmn18');
    });
    afterAll((): void => {
        diagram.destroy();
        ele.remove();
    });
    it('Toggle - subprocess to task', (done: Function) => {
        // subprocess to task
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.activity = 'Task';
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.type = 'Service';
        ((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.loop = 'ParallelMultiInstance';
        // task to subprocess
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.activity = 'SubProcess';
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.type = 'Event';
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.loop = 'ParallelMultiInstance';
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.compensation = true;
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.boundary = 'Event';
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.collapsed = true;
        ((diagram.nodes[1] as NodeModel).shape as BpmnShapeModel).activity.subProcess.events = [{ event: 'Intermediate', trigger: 'Compensation', width: 25, height: 25 }];
        diagram.dataBind();
        expect((((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.activity === 'Task') && (((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.type === 'Service') && (((diagram.nodes[0] as NodeModel).shape as BpmnShapeModel).activity.task.loop === 'ParallelMultiInstance') ).toBe(true);
        done();
    });
    it('memory leak', () => {
        profile.sample();
        let average: any = inMB(profile.averageChange)
        // Check average change in memory samples to not be over 10MB
        expect(average).toBeLessThan(10);
        let memory: any = inMB(getMemoryProfile())
        // Check the final memory usage against the first usage, there should be little change if everything was properly deallocated
        expect(memory).toBeLessThan(profile.samples[0] + 0.25);
    })
});

// Runtime property changes on BPMN flow (connector) shapes.
describe('Property Change - BPMN Shape connector', () => {
    let diagram: Diagram;
    let ele: HTMLElement;
    let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 };
    beforeAll((): void => {
        const isDef = (o: any) => o !== undefined && o !== null;
        if (!isDef(window.performance)) {
            console.log("Unsupported environment, window.performance.memory is unavailable");
            this.skip(); // Skips test (in Chai)
            return;
        }
        ele = createElement('div', { id: 'bpmn5conn' });
        document.body.appendChild(ele);
        let connector1: ConnectorModel = { id: 'connector1', type: 'Straight', sourcePoint: { x: 100, y: 100 }, targetPoint: { x: 150, y: 150 }, shape: { type: 'Bpmn', flow: 'Sequence', sequence: 'Normal' } as BpmnFlowModel };
        let connector2: ConnectorModel = { id: 'connector2', type: 'Straight', sourcePoint: { x: 170, y: 170 }, targetPoint: { x: 250, y: 250 }, shape: { type: 'Bpmn', flow: 'Sequence', sequence: 'Conditional' } as BpmnFlowModel };
        let connector3: ConnectorModel = { id: 'connector3', type: 'Straight', sourcePoint: { x: 270, y: 270 }, targetPoint: { x: 330, y: 330 }, shape: { type: 'Bpmn', flow: 'Sequence', sequence: 'Default' } as BpmnFlowModel };
        let connector4: ConnectorModel = { id: 'connector4', type: 'Straight', sourcePoint: { x: 350, y: 350 }, targetPoint: { x: 400, y: 400 }, shape: { type: 'Bpmn', flow: 'Association', association: 'BiDirectional' } as BpmnFlowModel };
        let connector5: ConnectorModel = { id: 'connector5', type: 'Straight', sourcePoint: { x: 430, y: 430 }, targetPoint: { x: 500, y: 500 }, shape: { type: 'Bpmn', flow: 'Association', association: 'Directional' } as BpmnFlowModel };
        let connector6: ConnectorModel = { id: 'connector6', type: 'Straight', sourcePoint: { x: 530, y: 530 }, targetPoint: { x: 600, y: 600 }, shape: { type: 'Bpmn', flow: 'Association', association: 'Default' } as BpmnFlowModel };
        let connector7: ConnectorModel = { id: 'connector7', type: 'Straight', sourcePoint: { x: 620, y: 620 }, targetPoint: { x: 700, y: 700 }, shape: { type: 'Bpmn', flow: 'Message', message: 'Default' } as BpmnFlowModel };
        let connector8: ConnectorModel = { id: 'connector8', type: 'Straight', sourcePoint: { x: 730, y: 730 }, targetPoint: { x: 800, y: 800 }, shape: { type: 'Bpmn', flow: 'Message', message: 'InitiatingMessage' } as BpmnFlowModel };
        let connector9: ConnectorModel = { id: 'connector9', type: 'Straight', sourcePoint: { x: 830, y: 830 }, targetPoint: { x: 890, y: 890 }, shape: { type: 'Bpmn', flow: 'Message', message: 'NonInitiatingMessage' } as BpmnFlowModel };
        diagram = new Diagram({ width: '1000px', height: '1000px', connectors: [connector1, connector2, connector3, connector4, connector5, connector6, connector7, connector8, connector9] });
        diagram.appendTo('#bpmn5conn');
    });
    afterAll((): void => {
        diagram.destroy();
        ele.remove();
    });
    it('Checking BPMN Flows property change', (done: Function) => {
        ((diagram.connectors[0] as ConnectorModel).shape as BpmnFlowModel).sequence = 'Conditional';
        ((diagram.connectors[1] as ConnectorModel).shape as BpmnFlowModel).sequence = 'Default';
        ((diagram.connectors[2] as ConnectorModel).shape as BpmnFlowModel).sequence = 'Normal';
        ((diagram.connectors[3] as ConnectorModel).shape as BpmnFlowModel).association = 'Directional';
        ((diagram.connectors[4] as ConnectorModel).shape as BpmnFlowModel).association = 'Default';
        ((diagram.connectors[5] as ConnectorModel).shape as BpmnFlowModel).association = 'BiDirectional';
        ((diagram.connectors[6] as ConnectorModel).shape as BpmnFlowModel).message = 'InitiatingMessage';
        ((diagram.connectors[7] as ConnectorModel).shape as BpmnFlowModel).message = 'NonInitiatingMessage';
        ((diagram.connectors[8] as ConnectorModel).shape as BpmnFlowModel).message = 'Default';
        diagram.dataBind();
        expect((((diagram.connectors[0] as ConnectorModel).shape as BpmnFlowModel).sequence === 'Conditional') && (((diagram.connectors[3] as ConnectorModel).shape as BpmnFlowModel).association === 'Directional') && (((diagram.connectors[6] as ConnectorModel).shape as BpmnFlowModel).message === 'InitiatingMessage') ).toBe(true);
        done();
    });
    it('Property change - BPMN Flows', (done: Function) => {
        ((diagram.connectors[0] as ConnectorModel).shape as BpmnFlow).flow = 'Message';
        ((diagram.connectors[0] as ConnectorModel).shape as BpmnFlowModel).message = 'InitiatingMessage';
        ((diagram.connectors[1] as ConnectorModel).shape as BpmnFlow).flow = 'Association';
        ((diagram.connectors[1] as ConnectorModel).shape as BpmnFlowModel).association = 'Directional';
        ((diagram.connectors[3] as ConnectorModel).shape as BpmnFlowModel).flow = 'Sequence';
        ((diagram.connectors[3] as ConnectorModel).shape as BpmnFlowModel).sequence = 'Normal';
        diagram.dataBind();
        expect((((diagram.connectors[0] as ConnectorModel).shape as BpmnFlowModel).message === 'InitiatingMessage') && (((diagram.connectors[1] as ConnectorModel).shape as BpmnFlowModel).association === 'Directional') && (((diagram.connectors[3] as ConnectorModel).shape as BpmnFlowModel).sequence === 'Normal') ).toBe(true);
        done();
    });
});

// Visibility propagation from a sub-process node to its processed children.
describe('checking subprocess visibility', () => {
    let diagram: Diagram;
    let ele: HTMLElement;
    let sourceMargin: MarginModel = { left: 5, right: 5, bottom: 5, top: 5 };
    let subeventMargin: MarginModel = { left: 10, top: 10 };
    beforeAll((): void => {
        const isDef = (o: any) => o !== undefined && o !== null;
        if (!isDef(window.performance)) {
            console.log("Unsupported environment, window.performance.memory is unavailable");
            this.skip(); // Skips test (in Chai)
            return;
        }
        ele = createElement('div', { id: 'bpmn8' });
        document.body.appendChild(ele);
        diagram = new Diagram({
            width: '1500px', height: '500px',
            nodes: [{
                id: 'end',
                shape: { type: 'Bpmn', event: { event: 'NonInterruptingStart' } },
                width: 100, height: 100, margin: { left: 300, top: 50 }
            }, {
                id: 'nodea', width: 400, height: 400, maxHeight: 600, maxWidth: 600, minWidth: 300, minHeight: 300,
                constraints: NodeConstraints.Default | NodeConstraints.AllowDrop,
                offsetX: 200, offsetY: 200, visible: false,
                shape: {
                    type: 'Bpmn', shape: 'Activity',
                    activity: {
                        activity: 'SubProcess',
                        subProcess: { collapsed: false, type: 'Event', processes: ['end'] } as BpmnSubProcessModel
                    } as BpmnActivityModel,
                },
            }]
        });
        diagram.appendTo('#bpmn8');
    });
    afterAll((): void => {
        diagram.destroy();
        ele.remove();
    });
    it('Checking Subprocess - event visibility ', (done: Function) => {
        let node: NodeModel = diagram.nameTable['nodea'];
        node.visible = true;
        diagram.dataBind();
        // NOTE(review): nodes[0] is 'end', the processed child of the hidden
        // sub-process — presumably its wrapper becomes visible along with the
        // parent; confirm this is the intended assertion target.
        expect(diagram.nodes[0].wrapper.children[0].visible == true).toBe(true);
        done();
    });
});
});
the_stack
import tryParseUrl from '../try-parse-url' import { splitByWhitespace, splitByComma, splitByCommaPickFirstTokens } from './split-token-list' import { omit } from './util' import { AttributeInfo, AttributeInfoDict } from './types' // Default properties for the attributes listed below. const defaultItem: Omit<AttributeInfo, 'attribute'> = { // The name of the attribute // attribute: (no default value, required) // The elements this attribute can appear on, as an array of CSS Selectors elements: ['*'], // Parser for the attribute value, returns an array of zero, one, or multiple URLs. // Each url is an object { token, index }, to help replacing the url on the right spot. // (to e.g. replace the correct 5 in <meta http-equiv="refresh" content="5; url=5">) parse: value => { // Default is to expect a single URL (+ possibly whitespace). const url = value.trim() if (url.length === 0) return [] const index = value.indexOf(url[0]) // probably 0; otherwise the number of leading spaces. return [ { token: url, index } ] }, // Whether the attribute's URL refers to an "external resource"; i.e. something that is to be // considered "part of"/"transcluded into" the current document, rather than just referred to. // Might be slightly subjective in some cases. isSubresource: false, // How the subresource is used; corresponds to what is now called the 'destination' in the WHATWG // fetch spec (https://fetch.spec.whatwg.org/#concept-request-destination as of 2018-05-17) subresourceType: undefined, // Turn the extracted (possibly) relative URL into an absolute URL. makeAbsolute( url, element, // We allow the caller to override the document's URL and base URL. baseUrl = element.baseURI, documentURL = (element.ownerDocument !== null) ? element.ownerDocument.URL : undefined, ) { // Normally, the URL is simply relative to the document's base URL. return tryParseUrl(url, baseUrl) }, } // Helper for URL attributes that are defined to be relative to the element's 'codebase' attribute. 
// Resolves a URL against the element's 'codebase' attribute (HTML 4.0 applet/object),
// falling back to the default document-base resolution when no valid codebase exists.
const makeAbsoluteUsingCodebase: AttributeInfo['makeAbsolute'] = (url, element, ...etc) => {
    // Read the value of the codebase attribute, and turn it into an absolute URL.
    const codebaseValue = element.getAttribute('codebase')
    if (codebaseValue) {
        const [ codebaseUrlLocation ] = html40.codebase.parse(codebaseValue)
        if (codebaseUrlLocation) {
            const codebaseUrl = codebaseUrlLocation.token
            const codebaseAbsoluteUrl = html40.codebase.makeAbsolute(codebaseUrl, element, ...etc)
            return tryParseUrl(url, codebaseAbsoluteUrl)
        }
    }
    // If there is no (valid) codebase attribute, interpret relative URLs as usual.
    return defaultItem.makeAbsolute(url, element, ...etc)
}

// HTML 4.0
// Mostly derived from https://www.w3.org/TR/REC-html40/index/attributes.html
export const html40: AttributeInfoDict = {
    action: {
        ...defaultItem,
        attribute: 'action',
        elements: ['form'],
    },
    applet_archive: {
        ...defaultItem,
        attribute: 'archive',
        elements: ['applet'],
        parse: splitByComma,
        isSubresource: true,
        // subresourceType? No idea.
        makeAbsolute: makeAbsoluteUsingCodebase, // See https://www.w3.org/TR/REC-html40/struct/objects.html#adef-archive-APPLET
    },
    object_archive: {
        ...defaultItem,
        attribute: 'archive',
        elements: ['object'],
        parse: splitByWhitespace,
        isSubresource: true,
        // subresourceType? No idea.
        makeAbsolute: makeAbsoluteUsingCodebase, // See https://www.w3.org/TR/REC-html40/struct/objects.html#adef-archive-OBJECT
    },
    background: {
        ...defaultItem,
        attribute: 'background',
        elements: ['body'],
        isSubresource: true,
        subresourceType: 'image',
    },
    cite: {
        ...defaultItem,
        attribute: 'cite',
        elements: ['blockquote', 'q', 'del', 'ins'],
    },
    classid: {
        ...defaultItem,
        attribute: 'classid',
        elements: ['object'],
        isSubresource: true, // I guess?
        // subresourceType? No idea.
        makeAbsolute: makeAbsoluteUsingCodebase,
    },
    codebase: {
        ...defaultItem,
        attribute: 'codebase',
        elements: ['object', 'applet'],
    },
    data: {
        ...defaultItem,
        attribute: 'data',
        elements: ['object'],
        isSubresource: true,
        subresourceType: 'object',
        makeAbsolute: makeAbsoluteUsingCodebase, // See https://www.w3.org/TR/REC-html40/struct/objects.html#adef-data
    },
    href: {
        ...defaultItem,
        attribute: 'href',
        elements: ['a', 'area', 'base', 'link:not([rel~=icon i]):not([rel~=stylesheet i])'],
        // Note: some links are resources, see below.
    },
    link_icon_href: {
        ...defaultItem,
        attribute: 'href',
        elements: ['link[rel~=icon i]'],
        isSubresource: true,
        subresourceType: 'image',
    },
    link_stylesheet_href: {
        ...defaultItem,
        attribute: 'href',
        elements: ['link[rel~=stylesheet i]'],
        isSubresource: true,
        subresourceType: 'style',
    },
    longdesc: {
        ...defaultItem,
        attribute: 'longdesc',
        elements: ['img', 'frame', 'iframe'],
    },
    profile: {
        ...defaultItem,
        attribute: 'profile',
        elements: ['head'],
    },
    img_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['img', 'input[type=image i]'],
        isSubresource: true,
        subresourceType: 'image',
    },
    frame_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['frame', 'iframe'],
        isSubresource: true,
        subresourceType: 'document',
    },
    script_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['script'],
        isSubresource: true,
        subresourceType: 'script',
    },
    // It seems usemap can only contain within-document URLs; hence omitting it from this list.
    // usemap: {
    //     ...defaultItem,
    //     attribute: 'usemap',
    //     elements: ['img', 'input', 'object'],
    // },
    // Some attributes that are not listed as Type=%URI in
    // <https://www.w3.org/TR/REC-html40/index/attributes.html>, but seem to belong here.
    param_ref_value: {
        ...defaultItem,
        attribute: 'value',
        elements: ['param[valuetype=ref i]'],
        // Note: "The URI must be passed to the object as is, i.e., unresolved."
        // See https://www.w3.org/TR/REC-html40/struct/objects.html#adef-valuetype
    },
    meta_refresh_content: {
        ...defaultItem,
        attribute: 'content',
        elements: ['meta[http-equiv=refresh i]'],
        parse: value => {
            // Example: <meta http-equiv="refresh" content="2; url=http://www.example.com">
            // To match many historical syntax variations, we try to follow whatwg's algorithm.
            // See <https://html.spec.whatwg.org/multipage/semantics.html#shared-declarative-refresh-steps>
            const match = value.match(/^(\s*[\d.]+\s*[;,\s]\s*(?:url\s*=\s*)?('|")?\s*)(.+)/i)
            if (!match) return [] // Probably a normal refresh that stays on the same page.
            // If the URL was preceded by a quote, truncate it at the next quote.
            const quote = match[2]
            let url = match[3]
            if (quote && url.includes(quote)) {
                url = url.slice(0, url.indexOf(quote))
            }
            const index = match[1].length
            url = url.trim() // url could not start with whitespace, so index remains correct.
            return [{ token: url, index }]
        },
    },
}

// HTML 5.2.
// Derived from https://www.w3.org/TR/2017/REC-html52-20171214/fullindex.html#attributes-table
export const html52: AttributeInfoDict = {
    action: html40.action,
    cite: html40.cite,
    data: {
        ...html40.data,
        makeAbsolute: defaultItem.makeAbsolute, // html5 drops the codebase attribute
    },
    formaction: {
        ...defaultItem,
        attribute: 'formaction',
        elements: ['button', 'input'],
    },
    href: html40.href, // See https://www.w3.org/TR/2017/REC-html52-20171214/links.html#sec-link-types
    link_icon_href: html40.link_icon_href,
    link_stylesheet_href: html40.link_stylesheet_href,
    longdesc: {
        ...html40.longdesc,
        // minus frame/iframe
        elements: ['img'],
    },
    manifest: { // Note: manifest is deprecated.
        ...defaultItem,
        attribute: 'manifest',
        elements: ['html'],
        isSubresource: true,
        // subresourceType? Maybe 'manifest'? Confusion with <link rel=manifest>
        makeAbsolute(
            url,
            element,
            _,
            documentURL = (element.ownerDocument !== null) ? element.ownerDocument.URL : undefined,
        ) {
            // The manifest is not influenced by a <base href="..."> tag.
            return tryParseUrl(url, documentURL)
        },
    },
    poster: {
        ...defaultItem,
        attribute: 'poster',
        elements: ['video'],
        isSubresource: true,
        subresourceType: 'image',
    },
    audio_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['audio', 'audio>source'],
        isSubresource: true,
        subresourceType: 'audio',
    },
    embed_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['embed'],
        isSubresource: true,
        subresourceType: 'embed',
    },
    frame_src: {
        ...html40.frame_src,
        // minus the <frame> element
        elements: ['iframe'],
    },
    img_src: html40.img_src,
    script_src: html40.script_src,
    track_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['track'],
        isSubresource: true,
        subresourceType: 'track',
    },
    video_src: {
        ...defaultItem,
        attribute: 'src',
        elements: ['video', 'video>source'],
        isSubresource: true,
        subresourceType: 'video',
    },
    srcset: {
        ...defaultItem,
        attribute: 'srcset',
        elements: ['img', 'picture>source'],
        // Example: <img srcset="http://image 2x, http://other-image 1.5x" ...>
        // TODO implement more sophisticated srcset parsing.
        // See https://html.spec.whatwg.org/multipage/images.html#parsing-a-srcset-attribute
        parse: splitByCommaPickFirstTokens,
        isSubresource: true,
        subresourceType: 'image',
    },
    // Not listed in the attributes index, but seems to belong here.
    meta_refresh_content: html40.meta_refresh_content,
}

// WHATWG as of 2018-04-20
// https://html.spec.whatwg.org/multipage/indices.html#attributes-3 of 2018-04-20
export const whatwg: AttributeInfoDict = {
    // Includes all of HTML 5.2 except longdesc
    ...omit(['longdesc'])(html52),
    itemprop: {
        // Microdata's itemprop can contain absolute URLs, used as identifiers.
        // See https://html.spec.whatwg.org/multipage/microdata.html#names:-the-itemprop-attribute
        ...defaultItem,
        attribute: 'itemprop',
        parse: value => {
            return splitByWhitespace(value)
                .filter(({ token }) => token.includes(':')) // tokens without colon are property names.
        },
        // May only contain absolute URLs, so we merely check whether they are valid URLs.
        makeAbsolute: url => tryParseUrl(url),
    },
    itemtype: {
        // Note: "Except if otherwise specified by that specification, the URLs given as the item
        // types should not be automatically dereferenced."
        // See https://html.spec.whatwg.org/multipage/microdata.html#attr-itemtype
        ...defaultItem,
        attribute: 'itemtype',
        parse: splitByWhitespace,
        // May only contain absolute URLs, so we merely check whether they are valid URLs.
        makeAbsolute: url => tryParseUrl(url),
    },
    itemid: {
        ...defaultItem,
        attribute: 'itemid',
    },
    ping: {
        ...defaultItem,
        attribute: 'ping',
        elements: ['a', 'area'],
    },
}

// Notes to self about link types that declare external resources.
// Regarding link types in the WHATWG spec:
// The preloading-related links might be nice to archive if we start supporting scripts: we
// could hardcode their URL:value combination into an injected fetch replacement function.
// Preloading relation types: modulepreload, preconnect, prefetch, preload, prerender
// Another type: dns-prefetch; Seems even further off, does not actually load any resource.
// Also, rel=pingback is listed as an external resource link. No idea why.
// See https://html.spec.whatwg.org/multipage/links.html#linkTypes
// Other:
// A few other possibly interesting link relation types to external resources.
// (hand-picked from <http://microformats.org/wiki/index.php?title=existing-rel-values&oldid=66721>)
// apple-touch-icon / apple-touch-icon-precomposed / apple-touch-startup-image
// enclosure (similar to prefetch etc?)
// pgpkey / publickey
the_stack
import {APP_ID, Injectable, Inject, Renderer, RootRenderer, RenderComponentType, AnimationPlayer, ViewEncapsulation} from '@angular/core';
import {EventManager, AnimationDriver, DOCUMENT} from '@angular/platform-browser';
import {SharedCustomStylesHost} from './shared-custom-styles-host';

// Global Polymer object injected by the Polymer library loaded on the page.
// NOTE(review): typed `any` because Polymer v1 ships no typings; assumed present at startup.
const Polymer: any = (<any>window).Polymer;

// Placeholder Angular writes into component styles/attributes; it is replaced
// per component with the style-shim id (see flattenStyles and the *_ATTR consts).
const COMPONENT_REGEX = /%COMP%/g;
const COMPONENT_VARIABLE = '%COMP%';
const HOST_ATTR = `_nghost-${COMPONENT_VARIABLE}`;
const CONTENT_ATTR = `_ngcontent-${COMPONENT_VARIABLE}`;

// Recursively flattens a nested array of style strings into `target`,
// replacing every %COMP% placeholder with `styleShimId` along the way.
// Returns `target` (the same array instance) for convenience.
function flattenStyles(
    styleShimId: string, styles: Array<any|any[]>, target: string[]): string[] {
  for (let i = 0; i < styles.length; i++) {
    let style = styles[i];
    if (Array.isArray(style)) {
      flattenStyles(styleShimId, style, target);
    } else {
      style = style.replace(COMPONENT_REGEX, styleShimId);
      target.push(style);
    }
  }
  return target;
}

// Wraps an event handler so that returning `false` from it cancels the event
// (both the standard preventDefault() and the legacy IE returnValue flag).
function decoratePreventDefault(handler: Function) {
  return (event: any) => {
    const allowDefault = handler(event);
    if (allowDefault === false) {
      event.preventDefault();
      event.returnValue = false;
    }
  };
}

/**
 * The polymer renderer takes care of supporting angular > 2.2 shady DOM
 *
 * The problem:
 * Starting from v2.2, Angular uses direct DOM rendering in browser. BrowserDomAdapter is not invoked by Renderer.
 * Therefore, changing the default BrowserDomAdapter to PolymerDomAdapter trick is not helpful anymore.
 * The issue breaks setting Light DOM for Polymer elements in Shady DOM mode of Polymer v1.0.
 *
 * The solution:
 * Instead of PolymerDomAdapter, we created the PolymerRenderer by implementing the Renderer interface.
 * The PolymerRenderer calls Polymer.dom APIs instead of DOM methods. In order to make Angular use the PolymerRenderer,
 * we need to define and export custom platforms, i.e., platformPolymer and platformPolymerDynamic.
 * In practice, developers will have to switch the imports in their main.ts files to use our custom Polymer platforms
 * instead of the default platformBrowser and platformBrowserDynamic.
 *
 * DefaultPolymerRenderer used for ViewEncapsulation.None, other style encapsulation modes are implemented in subclasses.
 */
export class DefaultPolymerRenderer implements Renderer {

  constructor(
      private _eventManager: EventManager,
      private _animationDriver: AnimationDriver) { }

  /**
   * Resolves the root element from a selector or element reference and clears
   * its light DOM through the Polymer.dom API (not the native DOM).
   */
  selectRootElement(selectorOrElement: string|Element): Element {
    let el: Element;
    if (typeof selectorOrElement === 'string') {
      el = Polymer.dom(document).querySelector(selectorOrElement);
      if (!el) {
        throw new Error(`Root element for selector "${selectorOrElement}" was not found`);
      }
    } else {
      el = selectorOrElement;
    }
    // Clearing through Polymer.dom keeps Shady DOM bookkeeping consistent.
    Polymer.dom(el).textContent = '';
    return el;
  }

  // Creates an element and attaches it via Polymer.dom so Shady DOM sees it.
  createElement(parent: Element|DocumentFragment, name: string): Element {
    const el: Element = document.createElement(name);
    if (parent) {
      Polymer.dom(parent).appendChild(el);
    }
    return el;
  }

  // With ViewEncapsulation.None the view root is the host element itself.
  createViewRoot(hostElement: Element): Element|DocumentFragment {
    return hostElement;
  }

  // Anchor node used by Angular to mark view insertion points; hidden so it
  // never renders.
  createTemplateAnchor(parent: Element|DocumentFragment): Element {
    const anchor = document.createElement('template-anchor');
    anchor.setAttribute('hidden', 'hidden');
    if (parent) {
      Polymer.dom(parent).appendChild(anchor);
    }
    return anchor;
  }

  createText(parent: Element|DocumentFragment, value: string): Text {
    const node = document.createTextNode(value);
    if (parent) {
      Polymer.dom(parent).appendChild(node);
    }
    return node;
  }

  // Projects content nodes (ng-content) into `parent` through Polymer.dom.
  projectNodes(parent: Element|DocumentFragment, nodes: Node[]) {
    if (!parent) return;
    const parentDomApi: any = Polymer.dom(parent);
    for (let i = 0; i < nodes.length; i++) {
      parentDomApi.appendChild(nodes[i]);
    }
  }

  // Inserts a view's root nodes immediately after `node` (or appends them if
  // `node` has no next sibling), all via the Polymer.dom API.
  attachViewAfter(node: Node, viewRootNodes: Node[]) {
    const parent: Element = Polymer.dom(node).parentNode;
    if (!parent || viewRootNodes.length === 0) return;
    const parentDomApi = Polymer.dom(parent);
    const nextSibling: Node = Polymer.dom(node).nextSibling;
    if (nextSibling) {
      for (let i = 0; i < viewRootNodes.length; i++) {
        parentDomApi.insertBefore(viewRootNodes[i], nextSibling);
      }
    } else {
      for (let i = 0; i < viewRootNodes.length; i++) {
        parentDomApi.appendChild(viewRootNodes[i]);
      }
    }
  }

  // Detaches each view root node from its own (possibly differing) parent.
  detachView(viewRootNodes: Node[]) {
    for (let i = 0; i < viewRootNodes.length; i++) {
      const node: Node = viewRootNodes[i];
      const parent: Element = Polymer.dom(node).parentNode;
      if (parent) {
        Polymer.dom(parent).removeChild(node);
      }
    }
  }

  // No per-view cleanup needed in this encapsulation mode (overridden by
  // ShadowDomPolymerRenderer).
  destroyView(hostElement: Element|DocumentFragment, viewAllNodes: Node[]) { }

  // Registers an element event listener; returning false from the handler
  // cancels the event (see decoratePreventDefault). Returns a dispose function.
  listen(renderElement: any, name: string, callback: Function): Function {
    return this._eventManager.addEventListener(
        renderElement, name, decoratePreventDefault(callback));
  }

  // Same as listen(), but for global targets ('window', 'document', 'body').
  listenGlobal(target: string, name: string, callback: Function): Function {
    return this._eventManager.addGlobalEventListener(
        target, name, decoratePreventDefault(callback));
  }

  setElementProperty(renderElement: Element|DocumentFragment, propertyName: string, propertyValue: any): void {
    (renderElement as any)[propertyName] = propertyValue;
  }

  // null/undefined value removes the attribute instead of setting it.
  setElementAttribute(renderElement: Element|DocumentFragment, attributeName: string, attributeValue: string): void {
    if (attributeValue != null) {
      Polymer.dom(renderElement).setAttribute(attributeName, attributeValue);
    } else {
      Polymer.dom(renderElement).removeAttribute(attributeName);
    }
  }

  // Debug info is surfaced as plain attributes on the element.
  setBindingDebugInfo(renderElement: Element, propertyName: string, propertyValue: string): void {
    this.setElementAttribute(renderElement, propertyName, propertyValue);
  }

  setElementClass(renderElement: Element, className: string, isAdd: boolean) {
    if (isAdd) {
      Polymer.dom(renderElement).classList.add(className);
    } else {
      Polymer.dom(renderElement).classList.remove(className);
    }
  }

  // Falsy styleValue removes the style property; note this also removes for
  // empty string, which Angular treats as "unset".
  setElementStyle(renderElement: HTMLElement, styleName: string, styleValue: string): void {
    if (styleValue) {
      renderElement.style.setProperty(styleName, styleValue);
    } else {
      renderElement.style.removeProperty(styleName);
    }
  }

  invokeElementMethod(renderElement: Element, methodName: string, args: any[]) {
    (renderElement as any)[methodName].apply(renderElement, args);
  }

  setText(renderNode: Text, text: string): void {
    renderNode.nodeValue = text;
  }

  // Delegates to the animation driver only for elements that are attached to
  // the document and not inside a Polymer host (element.domHost).
  // NOTE(review): implicitly returns undefined otherwise, despite the declared
  // AnimationPlayer return type — callers appear to tolerate this; confirm
  // before tightening the signature.
  animate(element: any, startingStyles: any, keyframes: any[], duration: number, delay: number, easing: string, previousPlayers: AnimationPlayer[] = []): AnimationPlayer {
    if (!element.domHost && document.body.contains(element)) {
      return this._animationDriver.animate(element, startingStyles, keyframes, duration, delay, easing, previousPlayers);
    }
  }
}

/**
 * Polymer renderer for ViewEncapsulation.Emulated styles encapsulation mode (default mode)
 */
export class EmulatedEncapsulationPolymerRenderer extends DefaultPolymerRenderer {
  // Shimmed attribute names (e.g. _ngcontent-<shimId>) applied to content/host.
  private _contentAttr: string;
  private _hostAttr: string;

  constructor(
      eventManager: EventManager,
      animationDriver: AnimationDriver,
      sharedCustomStylesHost: SharedCustomStylesHost,
      componentType: RenderComponentType,
      styleShimId: string) {
    super(eventManager, animationDriver);
    // Register the component's shimmed styles once, globally.
    const styles = flattenStyles(styleShimId, componentType.styles, []);
    sharedCustomStylesHost.addStyles(styles);
    this._contentAttr = CONTENT_ATTR.replace(COMPONENT_REGEX, styleShimId);
    this._hostAttr = HOST_ATTR.replace(COMPONENT_REGEX, styleShimId);
  }

  // Marks the host element with the shimmed host attribute.
  createViewRoot(hostElement: Element): Element|DocumentFragment {
    super.setElementAttribute(hostElement, this._hostAttr, '');
    return hostElement;
  }

  // Every created element is tagged with the shimmed content attribute so the
  // emulated-scoped styles match it.
  createElement(parent: Element|DocumentFragment, name: string): Element {
    const el: Element = super.createElement(parent, name);
    super.setElementAttribute(el, this._contentAttr, '');
    return el;
  }
}

/**
 * Polymer renderer for ViewEncapsulation.Native styles encapsulation mode
 */
export class ShadowDomPolymerRenderer extends DefaultPolymerRenderer {
  private _shadowRoot: DocumentFragment;
  private _styles: string[];

  constructor(
      eventManager: EventManager,
      animationDriver: AnimationDriver,
      private sharedCustomStylesHost: SharedCustomStylesHost,
      componentType: RenderComponentType,
      styleShimId: string) {
    super(eventManager, animationDriver);
    this._styles = flattenStyles(styleShimId, componentType.styles, []);
  }

  // Creates a (legacy v0) shadow root on the host and installs the component
  // styles inside it; the shadow root becomes the view root.
  createViewRoot(hostElement: Element): Element|DocumentFragment {
    super.createViewRoot(hostElement);
    this._shadowRoot = <DocumentFragment> (hostElement as any).createShadowRoot();
    this.sharedCustomStylesHost.addHost(this._shadowRoot);
    this._styles.forEach(style => {
      const styleEl: Element = document.createElement('style');
      styleEl.textContent = style;
      this._shadowRoot.appendChild(styleEl);
    });
    return this._shadowRoot;
  }

  destroyView(hostElement: Element|DocumentFragment, viewAllNodes: Node[]) {
    this.sharedCustomStylesHost.removeHost(this._shadowRoot);
  }
}

@Injectable()
export class PolymerRootRenderer implements RootRenderer {
  // Cache of per-component renderers, keyed by component type id.
  protected registeredComponents: Map<string, Renderer> = new Map<string, Renderer>();
  private defaultRenderer: Renderer;

  constructor(
      @Inject(DOCUMENT) public document: any,
      public eventManager: EventManager,
      public sharedCustomStylesHost: SharedCustomStylesHost,
      public animationDriver: AnimationDriver,
      @Inject(APP_ID) public appId: string) {
    this.defaultRenderer = new DefaultPolymerRenderer(eventManager, animationDriver);
  }

  /**
   * Returns the renderer for a component type, choosing the implementation by
   * encapsulation mode. Emulated renderers are cached per component id;
   * Native renderers are created fresh each call (each view needs its own
   * shadow root); everything else shares the default renderer.
   */
  renderComponent(componentType: RenderComponentType): Renderer {
    const styleShimId = `${this.appId}-${componentType.id}`;
    switch (componentType.encapsulation) {
      case ViewEncapsulation.Emulated: {
        let renderer = this.registeredComponents.get(componentType.id);
        if (!renderer) {
          renderer = new EmulatedEncapsulationPolymerRenderer(
              this.eventManager, this.animationDriver, this.sharedCustomStylesHost, componentType, styleShimId);
          this.registeredComponents.set(componentType.id, renderer);
        }
        return renderer;
      }
      case ViewEncapsulation.Native: {
        return new ShadowDomPolymerRenderer(
            this.eventManager, this.animationDriver, this.sharedCustomStylesHost, componentType, styleShimId);
      }
      default: {
        // ViewEncapsulation.None: register the (unscoped) styles once, then
        // reuse the shared default renderer.
        if (!this.registeredComponents.has(componentType.id)) {
          const styles = flattenStyles(styleShimId, componentType.styles, []);
          this.sharedCustomStylesHost.addStyles(styles);
          this.registeredComponents.set(componentType.id, this.defaultRenderer);
        }
        return this.defaultRenderer;
      }
    }
  }
}
the_stack
module android.view {
import Resources = android.content.res.Resources;
import Handler = android.os.Handler;
import SystemClock = android.os.SystemClock;
import Float = java.lang.Float;
import MotionEvent = android.view.MotionEvent;
import View = android.view.View;
import ViewConfiguration = android.view.ViewConfiguration;
import TypedValue = android.util.TypedValue;

/**
 * Detects scaling transformation gestures using the supplied {@link MotionEvent}s.
 * The {@link OnScaleGestureListener} callback will notify users when a particular
 * gesture event has occurred.
 *
 * This class should only be used with {@link MotionEvent}s reported via touch.
 *
 * To use this class:
 * <ul>
 *  <li>Create an instance of the {@code ScaleGestureDetector} for your
 *      {@link View}
 *  <li>In the {@link View#onTouchEvent(MotionEvent)} method ensure you call
 *      {@link #onTouchEvent(MotionEvent)}. The methods defined in your
 *      callback will be executed when the events occur.
 * </ul>
 *
 * NOTE(review): this is a TypeScript port of AOSP's ScaleGestureDetector; the
 * state-update order below mirrors the Java original and should not be reordered.
 */
export class ScaleGestureDetector {

    private static TAG:string = "ScaleGestureDetector";

    //private mContext:Context;
    private mListener:ScaleGestureDetector.OnScaleGestureListener;

    // Focal point of the current gesture (average of the tracked pointers).
    private mFocusX:number = 0;
    private mFocusY:number = 0;

    private mQuickScaleEnabled:boolean;

    // Current/previous span values; span is the pointer-spread measure that
    // drives the scale factor (see onTouchEvent).
    private mCurrSpan:number = 0;
    private mPrevSpan:number = 0;
    private mInitialSpan:number = 0;
    private mCurrSpanX:number = 0;
    private mCurrSpanY:number = 0;
    private mPrevSpanX:number = 0;
    private mPrevSpanY:number = 0;
    private mCurrTime:number = 0;
    private mPrevTime:number = 0;
    private mInProgress:boolean;
    private mSpanSlop:number = 0;
    private mMinSpan:number = 0;

    // Bounds for recently seen values
    private mTouchUpper:number = 0;
    private mTouchLower:number = 0;
    private mTouchHistoryLastAccepted:number = 0;
    private mTouchHistoryDirection:number = 0;
    private mTouchHistoryLastAcceptedTime:number = 0;
    private mTouchMinMajor:number = 0;
    private mDoubleTapEvent:MotionEvent;
    private mDoubleTapMode:number = ScaleGestureDetector.DOUBLE_TAP_MODE_NONE;
    private mHandler:any;

    // ms
    private static TOUCH_STABILIZE_TIME:number = 128;
    private static DOUBLE_TAP_MODE_NONE:number = 0;
    private static DOUBLE_TAP_MODE_IN_PROGRESS:number = 1;
    private static SCALE_FACTOR:number = .5;

    ///**
    // * Consistency verifier for debugging purposes.
    // */
    //private mInputEventConsistencyVerifier:InputEventConsistencyVerifier = InputEventConsistencyVerifier.isInstrumentationEnabled() ? new InputEventConsistencyVerifier(this, 0) : null;

    // Used to detect the double-tap that starts a quick-scale gesture.
    private mGestureDetector:GestureDetector;

    private mEventBeforeOrAboveStartingGestureEvent:boolean;

    /**
     * Creates a ScaleGestureDetector with the supplied listener.
     * @see android.os.Handler#Handler()
     *
     * @param context the application's context
     * @param listener the listener invoked for all the callbacks, this must
     * not be null.
     * @param handler the handler to use for running deferred listener events.
     *
     * @throws NullPointerException if {@code listener} is null.
     */
    constructor(listener:ScaleGestureDetector.OnScaleGestureListener, handler?:any) {
        //this.mContext = context;
        this.mListener = listener;
        this.mSpanSlop = ViewConfiguration.get().getScaledTouchSlop() * 2;
        // Hardcoded dimension values replace the framework resources of the
        // original Android implementation.
        this.mTouchMinMajor = TypedValue.complexToDimensionPixelSize('48dp');//Resources.getDimensionPixelSize(com.android.internal.R.dimen.config_minScalingTouchMajor);
        this.mMinSpan = TypedValue.complexToDimensionPixelSize('27mm');//Resources.getDimensionPixelSize(com.android.internal.R.dimen.config_minScalingSpan);
        this.mHandler = handler;
        // Quick scale is enabled by default after JB_MR2
        this.setQuickScaleEnabled(true);
    }

    /**
     * The touchMajor/touchMinor elements of a MotionEvent can flutter/jitter on
     * some hardware/driver combos. Smooth it out to get kinder, gentler behavior.
     * @param ev MotionEvent to add to the ongoing history
     */
    private addTouchHistory(ev:MotionEvent):void {
        const currentTime:number = SystemClock.uptimeMillis();
        const count:number = ev.getPointerCount();
        // Only accept a new smoothed value after TOUCH_STABILIZE_TIME of quiet.
        let accept:boolean = currentTime - this.mTouchHistoryLastAcceptedTime >= ScaleGestureDetector.TOUCH_STABILIZE_TIME;
        let total:number = 0;
        let sampleCount:number = 0;
        for (let i:number = 0; i < count; i++) {
            // NaN marks "no accepted value yet" (see clearTouchHistory).
            const hasLastAccepted:boolean = !Number.isNaN(this.mTouchHistoryLastAccepted);
            const historySize:number = ev.getHistorySize();
            const pointerSampleCount:number = historySize + 1;
            for (let h:number = 0; h < pointerSampleCount; h++) {
                let major:number;
                if (h < historySize) {
                    major = ev.getHistoricalTouchMajor(i, h);
                } else {
                    major = ev.getTouchMajor(i);
                }
                if (major < this.mTouchMinMajor) major = this.mTouchMinMajor;
                total += major;
                if (Number.isNaN(this.mTouchUpper) || major > this.mTouchUpper) {
                    this.mTouchUpper = major;
                }
                if (Number.isNaN(this.mTouchLower) || major < this.mTouchLower) {
                    this.mTouchLower = major;
                }
                if (hasLastAccepted) {
                    // Local stand-in for Java's Math.signum().
                    function Math_signum(value:number):number{
                        if(value === 0 || Number.isNaN(value)) return value;
                        return Math.abs(value)===value ? 1 : -1;
                    }
                    const directionSig:number = Math.floor(Math_signum(major - this.mTouchHistoryLastAccepted));
                    // A direction change restarts the stabilization window.
                    if (directionSig != this.mTouchHistoryDirection || (directionSig == 0 && this.mTouchHistoryDirection == 0)) {
                        this.mTouchHistoryDirection = directionSig;
                        const time:number = h < historySize ? ev.getHistoricalEventTime(h) : ev.getEventTime();
                        this.mTouchHistoryLastAcceptedTime = time;
                        accept = false;
                    }
                }
            }
            sampleCount += pointerSampleCount;
        }
        const avg:number = total / sampleCount;
        if (accept) {
            // Blend the new average with the recent bounds, then tighten the
            // bounds toward the accepted value.
            let newAccepted:number = (this.mTouchUpper + this.mTouchLower + avg) / 3;
            this.mTouchUpper = (this.mTouchUpper + newAccepted) / 2;
            this.mTouchLower = (this.mTouchLower + newAccepted) / 2;
            this.mTouchHistoryLastAccepted = newAccepted;
            this.mTouchHistoryDirection = 0;
            this.mTouchHistoryLastAcceptedTime = ev.getEventTime();
        }
    }

    /**
     * Clear all touch history tracking. Useful in ACTION_CANCEL or ACTION_UP.
     * @see #addTouchHistory(MotionEvent)
     */
    private clearTouchHistory():void {
        this.mTouchUpper = Number.NaN;
        this.mTouchLower = Number.NaN;
        this.mTouchHistoryLastAccepted = Number.NaN;
        this.mTouchHistoryDirection = 0;
        this.mTouchHistoryLastAcceptedTime = 0;
    }

    /**
     * Accepts MotionEvents and dispatches events to a {@link OnScaleGestureListener}
     * when appropriate.
     *
     * <p>Applications should pass a complete and consistent event stream to this method.
     * A complete and consistent event stream involves all MotionEvents from the initial
     * ACTION_DOWN to the final ACTION_UP or ACTION_CANCEL.</p>
     *
     * @param event The event to process
     * @return true if the event was processed and the detector wants to receive the
     *         rest of the MotionEvents in this event stream.
     */
    onTouchEvent(event:MotionEvent):boolean {
        //if (this.mInputEventConsistencyVerifier != null) {
        //    this.mInputEventConsistencyVerifier.onTouchEvent(event, 0);
        //}
        this.mCurrTime = event.getEventTime();
        const action:number = event.getActionMasked();
        // Forward the event to check for double tap gesture
        if (this.mQuickScaleEnabled) {
            this.mGestureDetector.onTouchEvent(event);
        }
        const streamComplete:boolean = action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL;
        if (action == MotionEvent.ACTION_DOWN || streamComplete) {
            // This means the app probably didn't give us all the events. Shame on it.
            if (this.mInProgress) {
                this.mListener.onScaleEnd(this);
                this.mInProgress = false;
                this.mInitialSpan = 0;
                this.mDoubleTapMode = ScaleGestureDetector.DOUBLE_TAP_MODE_NONE;
            } else if (this.mDoubleTapMode == ScaleGestureDetector.DOUBLE_TAP_MODE_IN_PROGRESS && streamComplete) {
                this.mInProgress = false;
                this.mInitialSpan = 0;
                this.mDoubleTapMode = ScaleGestureDetector.DOUBLE_TAP_MODE_NONE;
            }
            if (streamComplete) {
                this.clearTouchHistory();
                return true;
            }
        }
        const configChanged:boolean = action == MotionEvent.ACTION_DOWN || action == MotionEvent.ACTION_POINTER_UP || action == MotionEvent.ACTION_POINTER_DOWN;
        const pointerUp:boolean = action == MotionEvent.ACTION_POINTER_UP;
        // A pointer going up is excluded from focal point / span computation.
        const skipIndex:number = pointerUp ? event.getActionIndex() : -1;
        // Determine focal point
        let sumX:number = 0, sumY:number = 0;
        const count:number = event.getPointerCount();
        const div:number = pointerUp ? count - 1 : count;
        let focusX:number;
        let focusY:number;
        if (this.mDoubleTapMode == ScaleGestureDetector.DOUBLE_TAP_MODE_IN_PROGRESS) {
            // In double tap mode, the focal pt is always where the double tap
            // gesture started
            focusX = this.mDoubleTapEvent.getX();
            focusY = this.mDoubleTapEvent.getY();
            if (event.getY() < focusY) {
                this.mEventBeforeOrAboveStartingGestureEvent = true;
            } else {
                this.mEventBeforeOrAboveStartingGestureEvent = false;
            }
        } else {
            for (let i:number = 0; i < count; i++) {
                if (skipIndex == i) continue;
                sumX += event.getX(i);
                sumY += event.getY(i);
            }
            focusX = sumX / div;
            focusY = sumY / div;
        }
        this.addTouchHistory(event);
        // Determine average deviation from focal point
        let devSumX:number = 0, devSumY:number = 0;
        for (let i:number = 0; i < count; i++) {
            if (skipIndex == i) continue;
            // Convert the resulting diameter into a radius.
            const touchSize:number = this.mTouchHistoryLastAccepted / 2;
            devSumX += Math.abs(event.getX(i) - focusX) + touchSize;
            devSumY += Math.abs(event.getY(i) - focusY) + touchSize;
        }
        const devX:number = devSumX / div;
        const devY:number = devSumY / div;
        // Span is the average distance between touch points through the focal point;
        // i.e. the diameter of the circle with a radius of the average deviation from
        // the focal point.
        const spanX:number = devX * 2;
        const spanY:number = devY * 2;
        let span:number;
        if (this.inDoubleTapMode()) {
            span = spanY;
        } else {
            span = Math.sqrt(spanX * spanX + spanY * spanY);
        }
        // Dispatch begin/end events as needed.
        // If the configuration changes, notify the app to reset its current state by beginning
        // a fresh scale event stream.
        const wasInProgress:boolean = this.mInProgress;
        this.mFocusX = focusX;
        this.mFocusY = focusY;
        if (!this.inDoubleTapMode() && this.mInProgress && (span < this.mMinSpan || configChanged)) {
            this.mListener.onScaleEnd(this);
            this.mInProgress = false;
            this.mInitialSpan = span;
            this.mDoubleTapMode = ScaleGestureDetector.DOUBLE_TAP_MODE_NONE;
        }
        if (configChanged) {
            this.mPrevSpanX = this.mCurrSpanX = spanX;
            this.mPrevSpanY = this.mCurrSpanY = spanY;
            this.mInitialSpan = this.mPrevSpan = this.mCurrSpan = span;
        }
        const minSpan:number = this.inDoubleTapMode() ? this.mSpanSlop : this.mMinSpan;
        if (!this.mInProgress && span >= minSpan && (wasInProgress || Math.abs(span - this.mInitialSpan) > this.mSpanSlop)) {
            this.mPrevSpanX = this.mCurrSpanX = spanX;
            this.mPrevSpanY = this.mCurrSpanY = spanY;
            this.mPrevSpan = this.mCurrSpan = span;
            this.mPrevTime = this.mCurrTime;
            this.mInProgress = this.mListener.onScaleBegin(this);
        }
        // Handle motion; focal point and span/scale factor are changing.
        if (action == MotionEvent.ACTION_MOVE) {
            this.mCurrSpanX = spanX;
            this.mCurrSpanY = spanY;
            this.mCurrSpan = span;
            let updatePrev:boolean = true;
            if (this.mInProgress) {
                updatePrev = this.mListener.onScale(this);
            }
            // If the listener did not "handle" the event, keep accumulating the
            // delta by leaving the previous values untouched.
            if (updatePrev) {
                this.mPrevSpanX = this.mCurrSpanX;
                this.mPrevSpanY = this.mCurrSpanY;
                this.mPrevSpan = this.mCurrSpan;
                this.mPrevTime = this.mCurrTime;
            }
        }
        return true;
    }

    private inDoubleTapMode():boolean {
        return this.mDoubleTapMode == ScaleGestureDetector.DOUBLE_TAP_MODE_IN_PROGRESS;
    }

    /**
     * Set whether the associated {@link OnScaleGestureListener} should receive onScale callbacks
     * when the user performs a doubleTap followed by a swipe. Note that this is enabled by default
     * if the app targets API 19 and newer.
     * @param scales true to enable quick scaling, false to disable
     */
    setQuickScaleEnabled(scales:boolean):void {
        this.mQuickScaleEnabled = scales;
        // Lazily create the inner GestureDetector the first time quick scale is
        // enabled; the IIFE emulates Java's anonymous inner class.
        if (this.mQuickScaleEnabled && this.mGestureDetector == null) {
            let gestureListener:GestureDetector.SimpleOnGestureListener = (()=>{
                const inner_this=this;
                class _Inner extends GestureDetector.SimpleOnGestureListener {
                    onDoubleTap(e:MotionEvent):boolean {
                        // Double tap: start watching for a swipe
                        inner_this.mDoubleTapEvent = e;
                        inner_this.mDoubleTapMode = ScaleGestureDetector.DOUBLE_TAP_MODE_IN_PROGRESS;
                        return true;
                    }
                }
                return new _Inner();
            })();
            this.mGestureDetector = new GestureDetector(gestureListener, this.mHandler);
        }
    }

    /**
     * Return whether the quick scale gesture, in which the user performs a double tap followed by a
     * swipe, should perform scaling. {@see #setQuickScaleEnabled(boolean)}.
     */
    isQuickScaleEnabled():boolean {
        return this.mQuickScaleEnabled;
    }

    /**
     * Returns {@code true} if a scale gesture is in progress.
     */
    isInProgress():boolean {
        return this.mInProgress;
    }

    /**
     * Get the X coordinate of the current gesture's focal point.
     * If a gesture is in progress, the focal point is between
     * each of the pointers forming the gesture.
     *
     * If {@link #isInProgress()} would return false, the result of this
     * function is undefined.
     *
     * @return X coordinate of the focal point in pixels.
     */
    getFocusX():number {
        return this.mFocusX;
    }

    /**
     * Get the Y coordinate of the current gesture's focal point.
     * If a gesture is in progress, the focal point is between
     * each of the pointers forming the gesture.
     *
     * If {@link #isInProgress()} would return false, the result of this
     * function is undefined.
     *
     * @return Y coordinate of the focal point in pixels.
     */
    getFocusY():number {
        return this.mFocusY;
    }

    /**
     * Return the average distance between each of the pointers forming the
     * gesture in progress through the focal point.
     *
     * @return Distance between pointers in pixels.
     */
    getCurrentSpan():number {
        return this.mCurrSpan;
    }

    /**
     * Return the average X distance between each of the pointers forming the
     * gesture in progress through the focal point.
     *
     * @return Distance between pointers in pixels.
     */
    getCurrentSpanX():number {
        return this.mCurrSpanX;
    }

    /**
     * Return the average Y distance between each of the pointers forming the
     * gesture in progress through the focal point.
     *
     * @return Distance between pointers in pixels.
     */
    getCurrentSpanY():number {
        return this.mCurrSpanY;
    }

    /**
     * Return the previous average distance between each of the pointers forming the
     * gesture in progress through the focal point.
     *
     * @return Previous distance between pointers in pixels.
     */
    getPreviousSpan():number {
        return this.mPrevSpan;
    }

    /**
     * Return the previous average X distance between each of the pointers forming the
     * gesture in progress through the focal point.
     *
     * @return Previous distance between pointers in pixels.
     */
    getPreviousSpanX():number {
        return this.mPrevSpanX;
    }

    /**
     * Return the previous average Y distance between each of the pointers forming the
     * gesture in progress through the focal point.
     *
     * @return Previous distance between pointers in pixels.
     */
    getPreviousSpanY():number {
        return this.mPrevSpanY;
    }

    /**
     * Return the scaling factor from the previous scale event to the current
     * event. This value is defined as
     * ({@link #getCurrentSpan()} / {@link #getPreviousSpan()}).
     *
     * @return The current scaling factor.
     */
    getScaleFactor():number {
        if (this.inDoubleTapMode()) {
            // Drag is moving up; the further away from the gesture
            // start, the smaller the span should be, the closer,
            // the larger the span, and therefore the larger the scale
            const scaleUp:boolean = (this.mEventBeforeOrAboveStartingGestureEvent && (this.mCurrSpan < this.mPrevSpan)) || (!this.mEventBeforeOrAboveStartingGestureEvent && (this.mCurrSpan > this.mPrevSpan));
            const spanDiff:number = (Math.abs(1 - (this.mCurrSpan / this.mPrevSpan)) * ScaleGestureDetector.SCALE_FACTOR);
            return this.mPrevSpan <= 0 ? 1 : scaleUp ? (1 + spanDiff) : (1 - spanDiff);
        }
        return this.mPrevSpan > 0 ? this.mCurrSpan / this.mPrevSpan : 1;
    }

    /**
     * Return the time difference in milliseconds between the previous
     * accepted scaling event and the current scaling event.
     *
     * @return Time difference since the last scaling event in milliseconds.
     */
    getTimeDelta():number {
        return this.mCurrTime - this.mPrevTime;
    }

    /**
     * Return the event time of the current event being processed.
     *
     * @return Current event time in milliseconds.
     */
    getEventTime():number {
        return this.mCurrTime;
    }
}

export module ScaleGestureDetector{

/**
 * The listener for receiving notifications when gestures occur.
 * If you want to listen for all the different gestures then implement
 * this interface. If you only want to listen for a subset it might
 * be easier to extend {@link SimpleOnScaleGestureListener}.
 *
 * An application will receive events in the following order:
 * <ul>
 *  <li>One {@link OnScaleGestureListener#onScaleBegin(ScaleGestureDetector)}
 *  <li>Zero or more {@link OnScaleGestureListener#onScale(ScaleGestureDetector)}
 *  <li>One {@link OnScaleGestureListener#onScaleEnd(ScaleGestureDetector)}
 * </ul>
 */
export interface OnScaleGestureListener {

    /**
     * Responds to scaling events for a gesture in progress.
     * Reported by pointer motion.
     *
     * @param detector The detector reporting the event - use this to
     *          retrieve extended info about event state.
     * @return Whether or not the detector should consider this event
     *          as handled. If an event was not handled, the detector
     *          will continue to accumulate movement until an event is
     *          handled. This can be useful if an application, for example,
     *          only wants to update scaling factors if the change is
     *          greater than 0.01.
     */
    onScale(detector:ScaleGestureDetector):boolean;

    /**
     * Responds to the beginning of a scaling gesture. Reported by
     * new pointers going down.
     *
     * @param detector The detector reporting the event - use this to
     *          retrieve extended info about event state.
     * @return Whether or not the detector should continue recognizing
     *          this gesture. For example, if a gesture is beginning
     *          with a focal point outside of a region where it makes
     *          sense, onScaleBegin() may return false to ignore the
     *          rest of the gesture.
     */
    onScaleBegin(detector:ScaleGestureDetector):boolean;

    /**
     * Responds to the end of a scale gesture. Reported by existing
     * pointers going up.
     *
     * Once a scale has ended, {@link ScaleGestureDetector#getFocusX()}
     * and {@link ScaleGestureDetector#getFocusY()} will return focal point
     * of the pointers remaining on the screen.
     *
     * @param detector The detector reporting the event - use this to
     *          retrieve extended info about event state.
     */
    onScaleEnd(detector:ScaleGestureDetector):void;
}

/**
 * A convenience class to extend when you only want to listen for a subset
 * of scaling-related events. This implements all methods in
 * {@link OnScaleGestureListener} but does nothing.
 * {@link OnScaleGestureListener#onScale(ScaleGestureDetector)} returns
 * {@code false} so that a subclass can retrieve the accumulated scale
 * factor in an overridden onScaleEnd.
 * {@link OnScaleGestureListener#onScaleBegin(ScaleGestureDetector)} returns
 * {@code true}.
 */
export class SimpleOnScaleGestureListener implements ScaleGestureDetector.OnScaleGestureListener {

    onScale(detector:ScaleGestureDetector):boolean {
        return false;
    }

    onScaleBegin(detector:ScaleGestureDetector):boolean {
        return true;
    }

    onScaleEnd(detector:ScaleGestureDetector):void {
        // Intentionally empty
    }
}
}
}
the_stack
// Test-bootstrap module: wires up the exchange/issuer NestJS modules against a
// local ganache chain and postgres instance for integration tests.
import {
    AccountService,
    AppModule,
    entities as ExchangeEntities,
    IExchangeConfigurationService,
    IExternalDeviceService,
    IExternalUserService,
    IProductInfo,
    OrderService,
    TransferService
} from '@energyweb/exchange';
import { CertificateUtils, Contracts } from '@energyweb/issuer';
import { BlockchainPropertiesService, IrecCertificationRequest } from '@energyweb/issuer-irec-api';
import { IUser, OrganizationStatus, Role, UserStatus } from '@energyweb/origin-backend-core';
import { DatabaseService, RolesGuard } from '@energyweb/origin-backend-utils';
import { getProviderWithFallback } from '@energyweb/utils-general';
import { CanActivate, ExecutionContext } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { AuthGuard } from '@nestjs/passport';
import { Test } from '@nestjs/testing';
import { TypeOrmModule } from '@nestjs/typeorm';
import { useContainer } from 'class-validator';
import { entities as ExchangeIRECEntities, usedEntities } from '../src';
import { AppModule as ExchangeIRECModule } from '../src/app.module';
import { ProductDTO } from '../src';
import { UserService } from '@energyweb/origin-backend';
import { DeviceService } from '@energyweb/origin-device-registry-irec-local-api';
import { DeviceRegistryService } from '@energyweb/origin-device-registry-api';

// Local ganache JSON-RPC endpoint used by all deployments below.
const web3 = 'http://localhost:8545';
const provider = getProviderWithFallback(web3);

// Well-known deterministic ganache test accounts (addresses + private keys are
// public test fixtures, not secrets).
// ganache account 1
export const deviceManager = {
    address: '0xd46aC0Bc23dB5e8AfDAAB9Ad35E9A3bA05E092E8',
    privateKey: '0xd9bc30dc17023fbb68fe3002e0ff9107b241544fd6d60863081c55e383f1b5a3'
};

// ganache account 2
export const registryDeployer = {
    address: '0x9442ED348b161af888e6cB999951aE8b961F7B4B',
    privateKey: '0xc4b87d68ea2b91f9d3de3fcb77c299ad962f006ffb8711900cb93d94afec3dc3'
};

// NOTE(review): original comment said "ganache account 2" again, but the key
// differs from registryDeployer — presumably a later account; verify index.
export const otherDeviceManager = {
    address: '0xB00F0793d0ce69d7b07db16F92dC982cD6Bdf651',
    privateKey: '0xca77c9b06fde68bcbcc09f603c958620613f4be79f3abb4b2032131d0229462e'
};
// Deploys the certificate registry contract with the deployer's key.
const deployRegistry = async () => {
    return Contracts.migrateRegistry(provider, registryDeployer.privateKey);
};

// Deploys the issuer contract bound to the given registry address.
const deployIssuer = async (registry: string) => {
    return Contracts.migrateIssuer(provider, registryDeployer.privateKey, registry);
};

// Deploys the private issuer contract bound to the given issuer address.
const deployPrivateIssuer = async (issuer: string) => {
    return Contracts.migratePrivateIssuer(provider, registryDeployer.privateKey, issuer);
};

// Test user ids. The string value doubles as the `test-user` HTTP header value
// that the stubbed auth guard below resolves into a fixture user.
export enum TestUser {
    UserWithoutBlockchainAccount = '1',
    OrganizationDeviceManager = '2',
    Issuer = '3',
    OtherOrganizationDeviceManager = '4',
    PlatformAdmin = '5'
}

// Fixture users keyed by TestUser id; looked up by `authGuard` from the request header.
export const testUsers = new Map([
    [
        TestUser.OrganizationDeviceManager,
        {
            id: Number(TestUser.OrganizationDeviceManager),
            organization: {
                id: 1000,
                status: OrganizationStatus.Active,
                blockchainAccountAddress: deviceManager.address
            },
            status: UserStatus.Active,
            rights: Role.OrganizationDeviceManager
        } as IUser
    ],
    [
        TestUser.UserWithoutBlockchainAccount,
        {
            // No blockchainAccountAddress on purpose — exercises the "no account" path.
            id: Number(TestUser.UserWithoutBlockchainAccount),
            organization: { id: 1001, status: OrganizationStatus.Active },
            status: UserStatus.Active,
            rights: Role.OrganizationAdmin
        } as IUser
    ],
    [
        TestUser.Issuer,
        {
            id: Number(TestUser.Issuer),
            organization: {
                id: 1003,
                status: OrganizationStatus.Active,
                blockchainAccountAddress: registryDeployer.address
            },
            status: UserStatus.Active,
            rights: Role.Issuer
        } as IUser
    ],
    [
        TestUser.OtherOrganizationDeviceManager,
        {
            // Same organization id (1000) as OrganizationDeviceManager but a different
            // blockchain account — used to test per-account (not per-org) behavior.
            id: Number(TestUser.OtherOrganizationDeviceManager),
            organization: {
                id: 1000,
                status: OrganizationStatus.Active,
                blockchainAccountAddress: otherDeviceManager.address
            },
            status: UserStatus.Active,
            rights: Role.OrganizationDeviceManager
        } as IUser
    ],
    [
        TestUser.PlatformAdmin,
        {
            id: 5,
            organization: { id: 1002 },
            status: UserStatus.Active,
            rights: Role.Admin
        } as IUser
    ]
]);

// Default "logged in" user for tests that do not set the `test-user` header themselves.
export const authenticatedUser = testUsers.get(TestUser.OrganizationDeviceManager);

// Stub guard that replaces both AuthGuard('default') and RolesGuard: it always
// grants access and injects the fixture user selected by the `test-user` header.
const authGuard: CanActivate = {
    canActivate: (context: ExecutionContext) => {
        const req = context.switchToHttp().getRequest();
        req.user = testUsers.get(req.headers['test-user']);
        return true;
    }
};

// Device-type taxonomy returned by the mocked IExchangeConfigurationService.
const deviceTypes = [
    ['Solar'],
    ['Solar', 'Photovoltaic'],
    ['Solar', 'Photovoltaic', 'Roof mounted'],
    ['Solar', 'Photovoltaic', 'Ground mounted'],
    ['Solar', 'Photovoltaic', 'Classic silicon'],
    ['Solar', 'Concentration'],
    ['Wind'],
    ['Wind', 'Onshore'],
    ['Wind', 'Offshore'],
    ['Marine'],
    ['Marine', 'Tidal'],
    ['Marine', 'Tidal', 'Inshore'],
    ['Marine', 'Tidal', 'Offshore']
];

/**
 * Boots a full exchange-IREC Nest application against a local ganache chain and
 * a local postgres database (connection overridable via DB_* env vars).
 *
 * Deploys fresh registry/issuer/private-issuer contracts, wires mocked external
 * services, replaces the auth guards with header-driven fixtures, and approves
 * the registry deployer as operator for both device-manager accounts.
 *
 * @param deviceServiceMock optional replacement for IExternalDeviceService
 * @param userServiceMock   optional replacement for IExternalUserService
 * @returns the running app plus commonly used resolved services
 */
export const bootstrapTestInstance = async (
    deviceServiceMock?: IExternalDeviceService,
    userServiceMock?: IExternalUserService
) => {
    // Fresh contract deployment per bootstrap — tests never share chain state.
    const registry = await deployRegistry();
    const issuer = await deployIssuer(registry.address);
    const privateIssuer = await deployPrivateIssuer(issuer.address);
    await issuer.setPrivateIssuer(privateIssuer.address);

    const configService = new ConfigService({
        WEB3: web3,
        // ganache account 0
        EXCHANGE_ACCOUNT_DEPLOYER_PRIV:
            '0xd9066ff9f753a1898709b568119055660a77d9aae4d7a4ad677b8fb3d2a571e5',
        // ganache account 1
        EXCHANGE_WALLET_PUB: '0xd46aC0Bc23dB5e8AfDAAB9Ad35E9A3bA05E092E8',
        EXCHANGE_WALLET_PRIV:
            '0xd9bc30dc17023fbb68fe3002e0ff9107b241544fd6d60863081c55e383f1b5a3',
        ISSUER_ID: 'Issuer ID',
        ENERGY_PER_UNIT: 1000000,
        EXCHANGE_PRICE_STRATEGY: 0
    });

    const moduleFixture = await Test.createTestingModule({
        imports: [
            TypeOrmModule.forRoot({
                type: 'postgres',
                host: process.env.DB_HOST ?? 'localhost',
                port: Number(process.env.DB_PORT ?? 5432),
                username: process.env.DB_USERNAME ?? 'postgres',
                password: process.env.DB_PASSWORD ?? 'postgres',
                database: process.env.DB_DATABASE ?? 'origin',
                entities: [
                    ...ExchangeEntities,
                    ...ExchangeIRECEntities,
                    ...usedEntities,
                    IrecCertificationRequest
                ],
                logging: ['info']
            }),
            ConfigModule,
            AppModule,
            ExchangeIRECModule
        ],
        providers: [
            DatabaseService,
            {
                // Static exchange configuration; registry/issuer addresses here are
                // fixed test values, independent of the contracts deployed above.
                provide: IExchangeConfigurationService,
                useValue: {
                    getRegistryAddress: async () => '0xd46aC0Bc23dB5e8AfDAAB9Ad35E9A3bA05E092E8',
                    getIssuerAddress: async () => '0xd46aC0Bc23dB5e8AfDAAB9Ad35E9A3bA05E092E8',
                    getDeviceTypes: async () => deviceTypes,
                    getGridOperators: async () => ['TH-PEA', 'TH-MEA']
                }
            },
            {
                // External device lookup; callers may inject their own mock.
                provide: IExternalDeviceService,
                useValue: deviceServiceMock ?? {
                    getDeviceProductInfo: async (): Promise<IProductInfo> => ({
                        deviceType: 'Solar;Photovoltaic;Classic silicon',
                        country: 'Thailand',
                        region: 'Central',
                        province: 'Nakhon Pathom',
                        operationalSince: 2016,
                        gridOperator: 'TH-PEA'
                    })
                }
            },
            {
                // External user lookup; callers may inject their own mock.
                provide: IExternalUserService,
                useValue: userServiceMock ?? {
                    getPlatformAdmin: async (): Promise<IUser> =>
                        ({ organization: { id: 1 } } as IUser)
                }
            }
        ]
    })
        .overrideProvider(ConfigService)
        .useValue(configService)
        // Both guards are replaced by the same header-driven stub.
        .overrideGuard(AuthGuard('default'))
        .useValue(authGuard)
        .overrideGuard(RolesGuard)
        .useValue(authGuard)
        .overrideProvider(UserService)
        .useValue({
            getPlatformAdmin() {
                return testUsers.get(TestUser.PlatformAdmin);
            },
            findOne(userId: TestUser) {
                return testUsers.get(userId);
            }
        })
        .overrideProvider(DeviceService)
        .useValue({
            findOne: () => ({ fuelType: '', status: 'Approved' }),
            findAll: (): object[] => [
                {
                    id: 1,
                    ownerId: 1000,
                    address: '1 Wind Farm Avenue, London',
                    capacity: 500,
                    commissioningDate: new Date('2001-08-10'),
                    countryCode: 'GB',
                    defaultAccount: 'someTradeAccount',
                    deviceType: 'TC110',
                    fuelType: 'ES200',
                    issuer: 'someIssuerCode',
                    latitude: '53.405088',
                    longitude: '-1.744222',
                    name: 'DeviceXYZ',
                    notes: 'Lorem ipsum dolor sit amet',
                    registrantOrganization: 'someRegistrantCode',
                    registrationDate: new Date('2001-09-20'),
                    status: 'Approved',
                    code: 'mockDeviceCode',
                    active: true
                }
            ]
        })
        .overrideProvider(DeviceRegistryService)
        .useValue({
            find: () => [{ id: 1, externalRegistryId: 1 }]
        })
        .compile();

    const app = moduleFixture.createNestApplication();

    const transferService = await app.resolve<TransferService>(TransferService);
    const accountService = await app.resolve<AccountService>(AccountService);
    const databaseService = await app.resolve<DatabaseService>(DatabaseService);
    const orderService = await app.resolve<OrderService<ProductDTO>>(OrderService);
    const blockchainPropertiesService = await app.resolve<BlockchainPropertiesService>(
        BlockchainPropertiesService
    );

    // Persist the freshly deployed contract addresses as the active blockchain properties.
    const blockchainProperties = await blockchainPropertiesService.create(
        provider.network.chainId,
        registry.address,
        issuer.address,
        web3,
        registryDeployer.privateKey,
        null,
        privateIssuer.address
    );

    // Let the deployer operate certificates on behalf of both device managers.
    await CertificateUtils.approveOperator(
        registryDeployer.address,
        blockchainProperties.wrap(deviceManager.privateKey)
    );
    await CertificateUtils.approveOperator(
        registryDeployer.address,
        blockchainProperties.wrap(otherDeviceManager.privateKey)
    );

    app.useLogger(['log']);
    app.enableCors();
    useContainer(app.select(AppModule), { fallbackOnErrors: true });

    return {
        transferService,
        accountService,
        databaseService,
        orderService,
        app
    };
};
the_stack
import { Component, EventEmitter, Input, OnDestroy, OnInit, Output, ViewChild } from '@angular/core';
import { FormGroup } from '@angular/forms';
import { DynamicFormControlModel, DynamicFormService, DynamicInputModel } from '@ng-dynamic-forms/core';
import { TranslateService } from '@ngx-translate/core';
import { FileUploader } from 'ng2-file-upload';
import { BehaviorSubject, combineLatest as observableCombineLatest, Subscription } from 'rxjs';
import { AuthService } from '../../../core/auth/auth.service';
import { ObjectCacheService } from '../../../core/cache/object-cache.service';
import { ComColDataService } from '../../../core/data/comcol-data.service';
import { RemoteData } from '../../../core/data/remote-data';
import { RequestService } from '../../../core/data/request.service';
import { RestRequestMethod } from '../../../core/data/rest-request-method';
import { Bitstream } from '../../../core/shared/bitstream.model';
import { Collection } from '../../../core/shared/collection.model';
import { Community } from '../../../core/shared/community.model';
import { MetadataMap, MetadataValue } from '../../../core/shared/metadata.models';
import { ResourceType } from '../../../core/shared/resource-type';
import { hasValue, isNotEmpty } from '../../empty.util';
import { NotificationsService } from '../../notifications/notifications.service';
import { UploaderOptions } from '../../uploader/uploader-options.model';
import { UploaderComponent } from '../../uploader/uploader.component';
import { Operation } from 'fast-json-patch';
import { NoContent } from '../../../core/shared/NoContent.model';
import { getFirstCompletedRemoteData } from '../../../core/shared/operators';

/**
 * A form for creating and editing Communities or Collections
 */
@Component({
  selector: 'ds-comcol-form',
  styleUrls: ['./comcol-form.component.scss'],
  templateUrl: './comcol-form.component.html'
})
export class ComColFormComponent<T extends Collection | Community> implements OnInit, OnDestroy {
  /**
   * The logo uploader component
   */
  @ViewChild(UploaderComponent) uploaderComponent: UploaderComponent;

  /**
   * DSpaceObject that the form represents
   */
  @Input() dso: T;

  /**
   * Type of DSpaceObject that the form represents.
   * Used as the prefix for label/error translation keys;
   * presumably set by the concrete subclass — not assigned in this class.
   */
  type: ResourceType;

  /**
   * @type {string} Key prefix used to generate form labels
   */
  LABEL_KEY_PREFIX = '.form.';

  /**
   * @type {string} Key prefix used to generate form error messages
   */
  ERROR_KEY_PREFIX = '.form.errors.';

  /**
   * The form model that represents the fields in the form.
   * NOTE(review): read in ngOnInit without a null check, so it must be
   * populated by the subclass before this component initializes — confirm.
   */
  formModel: DynamicFormControlModel[];

  /**
   * The form group of this form
   */
  formGroup: FormGroup;

  /**
   * The uploader configuration options
   * @type {UploaderOptions}
   */
  uploadFilesOptions: UploaderOptions = Object.assign(new UploaderOptions(), {
    autoUpload: false
  });

  /**
   * Emits DSO and Uploader when the form is submitted
   */
  @Output() submitForm: EventEmitter<{
    dso: T,
    uploader: FileUploader,
    deleteLogo: boolean,
    operations: Operation[],
  }> = new EventEmitter();

  /**
   * Event emitted on back
   */
  @Output() back: EventEmitter<any> = new EventEmitter();

  /**
   * Fires an event when the logo has finished uploading (with or without errors) or was removed
   */
  @Output() finish: EventEmitter<any> = new EventEmitter();

  /**
   * Observable keeping track whether or not the uploader has finished initializing
   * Used to start rendering the uploader component
   */
  initializedUploaderOptions = new BehaviorSubject(false);

  /**
   * Is the logo marked to be deleted?
   */
  markLogoForDeletion = false;

  /**
   * Array to track all subscriptions and unsubscribe them onDestroy
   * @type {Array}
   */
  protected subs: Subscription[] = [];

  /**
   * The service used to fetch from or send data to.
   * NOTE(review): not injected here — expected to be provided by the subclass.
   */
  protected dsoService: ComColDataService<Community | Collection>;

  public constructor(protected formService: DynamicFormService,
                     protected translate: TranslateService,
                     protected notificationsService: NotificationsService,
                     protected authService: AuthService,
                     protected requestService: RequestService,
                     protected objectCache: ObjectCacheService) {
  }

  /**
   * Seed the form with the DSO's current metadata, build the form group,
   * keep translations in sync with language changes, and configure the
   * logo uploader endpoint (a placeholder URL when the DSO is not yet created).
   */
  ngOnInit(): void {
    this.formModel.forEach(
      (fieldModel: DynamicInputModel) => {
        fieldModel.value = this.dso.firstMetadataValue(fieldModel.name);
      }
    );
    this.formGroup = this.formService.createFormGroup(this.formModel);

    this.updateFieldTranslations();
    this.translate.onLangChange
      .subscribe(() => {
        this.updateFieldTranslations();
      });

    if (hasValue(this.dso.id)) {
      this.subs.push(
        observableCombineLatest([
          this.dsoService.getLogoEndpoint(this.dso.id),
          this.dso.logo
        ]).subscribe(([href, logoRD]: [string, RemoteData<Bitstream>]) => {
          this.uploadFilesOptions.url = href;
          this.uploadFilesOptions.authToken = this.authService.buildAuthHeader();
          // If the object already contains a logo, send out a PUT request instead of POST for setting a new logo
          if (hasValue(logoRD.payload)) {
            this.uploadFilesOptions.method = RestRequestMethod.PUT;
          }
          this.initializedUploaderOptions.next(true);
        })
      );
    } else {
      // Set a placeholder URL to not break the uploader component. This will be replaced once the object is created.
      this.uploadFilesOptions.url = 'placeholder';
      this.uploadFilesOptions.authToken = this.authService.buildAuthHeader();
      this.initializedUploaderOptions.next(true);
    }
  }

  /**
   * Checks which new fields were added and sends the updated version of the DSO to the parent component.
   *
   * If the logo was marked for deletion, a DELETE request is fired first; note that
   * this subscription completes asynchronously while the submitForm emission below
   * proceeds regardless of its outcome — NOTE(review): confirm this ordering is intended.
   */
  onSubmit() {
    if (this.markLogoForDeletion && hasValue(this.dso.id) && hasValue(this.dso._links.logo)) {
      this.dsoService.deleteLogo(this.dso).pipe(
        getFirstCompletedRemoteData()
      ).subscribe((response: RemoteData<NoContent>) => {
        if (response.hasSucceeded) {
          this.notificationsService.success(
            this.translate.get(this.type.value + '.edit.logo.notifications.delete.success.title'),
            this.translate.get(this.type.value + '.edit.logo.notifications.delete.success.content')
          );
        } else {
          this.notificationsService.error(
            this.translate.get(this.type.value + '.edit.logo.notifications.delete.error.title'),
            response.errorMessage
          );
        }
        // Clear the local logo and reset the upload method for any future upload.
        this.dso.logo = undefined;
        this.uploadFilesOptions.method = RestRequestMethod.POST;
        this.refreshCache();
        this.finish.emit();
      });
    }

    // Collect the current form values into a MetadataMap keyed by field name.
    const formMetadata = {} as MetadataMap;
    this.formModel.forEach((fieldModel: DynamicInputModel) => {
      const value: MetadataValue = {
        value: fieldModel.value as string,
        language: null
      } as any;
      if (formMetadata.hasOwnProperty(fieldModel.name)) {
        formMetadata[fieldModel.name].push(value);
      } else {
        formMetadata[fieldModel.name] = [value];
      }
    });

    // NOTE(review): type is always stamped as Community.type even when T is a
    // Collection — looks suspicious; confirm against the consumers of submitForm.
    const updatedDSO = Object.assign({}, this.dso, {
      metadata: {
        ...this.dso.metadata,
        ...formMetadata
      },
      type: Community.type
    });

    // Build a JSON Patch replace operation for every field whose value changed.
    const operations: Operation[] = [];
    this.formModel.forEach((fieldModel: DynamicInputModel) => {
      if (fieldModel.value !== this.dso.firstMetadataValue(fieldModel.name)) {
        operations.push({
          op: 'replace',
          path: `/metadata/${fieldModel.name}`,
          value: {
            value: fieldModel.value,
            language: null,
          },
        });
      }
    });

    this.submitForm.emit({
      dso: updatedDSO,
      uploader: hasValue(this.uploaderComponent) ? this.uploaderComponent.uploader : undefined,
      deleteLogo: this.markLogoForDeletion,
      operations: operations,
    });
  }

  /**
   * Used the update translations of errors and labels on init and on language change
   */
  private updateFieldTranslations() {
    this.formModel.forEach(
      (fieldModel: DynamicInputModel) => {
        fieldModel.label = this.translate.instant(this.type.value + this.LABEL_KEY_PREFIX + fieldModel.id);
        if (isNotEmpty(fieldModel.validators)) {
          fieldModel.errorMessages = {};
          Object.keys(fieldModel.validators).forEach((key) => {
            fieldModel.errorMessages[key] =
              this.translate.instant(this.type.value + this.ERROR_KEY_PREFIX + fieldModel.id + '.' + key);
          });
        }
      }
    );
  }

  /**
   * Mark the logo to be deleted.
   * The actual delete request is deferred until onSubmit().
   */
  deleteLogo() {
    this.markLogoForDeletion = true;
  }

  /**
   * Undo marking the logo to be deleted
   */
  undoDeleteLogo() {
    this.markLogoForDeletion = false;
  }

  /**
   * Refresh the object's cache to ensure the latest version
   */
  private refreshCache() {
    this.requestService.removeByHrefSubstring(this.dso._links.self.href);
    this.objectCache.remove(this.dso._links.self.href);
  }

  /**
   * The request was successful, display a success notification
   */
  public onCompleteItem() {
    if (hasValue(this.dso.id)) {
      this.refreshCache();
    }
    this.notificationsService.success(null, this.translate.get(this.type.value + '.edit.logo.notifications.add.success'));
    this.finish.emit();
  }

  /**
   * The request was unsuccessful, display an error notification
   */
  public onUploadError() {
    this.notificationsService.error(null, this.translate.get(this.type.value + '.edit.logo.notifications.add.error'));
    this.finish.emit();
  }

  /**
   * Unsubscribe from open subscriptions
   */
  ngOnDestroy(): void {
    this.subs
      .filter((subscription) => hasValue(subscription))
      .forEach((subscription) => subscription.unsubscribe());
  }
}
the_stack
import { AccessLevelList } from "../shared/access-level";
import { PolicyStatement } from "../shared";

/**
 * Statement provider for service [kinesis](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkinesis.html).
 *
 * @param sid [SID](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_sid.html) of the statement
 */
export class Kinesis extends PolicyStatement {
  public servicePrefix = 'kinesis';

  /**
   * Statement provider for service [kinesis](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkinesis.html).
   *
   * @param sid [SID](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_sid.html) of the statement
   */
  constructor(sid?: string) {
    super(sid);
  }

  /**
   * Adds or updates tags for the specified Amazon Kinesis stream. Each stream can have up to 10 tags.
   *
   * Access Level: Tagging
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_AddTagsToStream.html
   */
  public toAddTagsToStream() {
    return this.to('AddTagsToStream');
  }

  /**
   * Creates a Amazon Kinesis stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_CreateStream.html
   */
  public toCreateStream() {
    return this.to('CreateStream');
  }

  /**
   * Decreases the stream's retention period, which is the length of time data records are accessible after they are added to the stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DecreaseStreamRetentionPeriod.html
   */
  public toDecreaseStreamRetentionPeriod() {
    return this.to('DecreaseStreamRetentionPeriod');
  }

  /**
   * Deletes a stream and all its shards and data.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DeleteStream.html
   */
  public toDeleteStream() {
    return this.to('DeleteStream');
  }

  /**
   * Deregisters a stream consumer with a Kinesis data stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DeregisterStreamConsumer.html
   */
  public toDeregisterStreamConsumer() {
    return this.to('DeregisterStreamConsumer');
  }

  /**
   * Describes the shard limits and usage for the account.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DescribeLimits.html
   */
  public toDescribeLimits() {
    return this.to('DescribeLimits');
  }

  /**
   * Describes the specified stream.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DescribeStream.html
   */
  public toDescribeStream() {
    return this.to('DescribeStream');
  }

  /**
   * Gets the description of a registered stream consumer.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DescribeStreamConsumer.html
   */
  public toDescribeStreamConsumer() {
    return this.to('DescribeStreamConsumer');
  }

  /**
   * Provides a summarized description of the specified Kinesis data stream without the shard list.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DescribeStreamSummary.html
   */
  public toDescribeStreamSummary() {
    return this.to('DescribeStreamSummary');
  }

  /**
   * Disables enhanced monitoring.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_DisableEnhancedMonitoring.html
   */
  public toDisableEnhancedMonitoring() {
    return this.to('DisableEnhancedMonitoring');
  }

  /**
   * Enables enhanced Kinesis data stream monitoring for shard-level metrics.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_EnableEnhancedMonitoring.html
   */
  public toEnableEnhancedMonitoring() {
    return this.to('EnableEnhancedMonitoring');
  }

  /**
   * Gets data records from a shard.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetRecords.html
   */
  public toGetRecords() {
    return this.to('GetRecords');
  }

  /**
   * Gets a shard iterator. A shard iterator expires five minutes after it is returned to the requester.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html
   */
  public toGetShardIterator() {
    return this.to('GetShardIterator');
  }

  /**
   * Increases the stream's retention period, which is the length of time data records are accessible after they are added to the stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_IncreaseStreamRetentionPeriod.html
   */
  public toIncreaseStreamRetentionPeriod() {
    return this.to('IncreaseStreamRetentionPeriod');
  }

  /**
   * Lists the shards in a stream and provides information about each shard.
   *
   * Access Level: List
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_ListShards.html
   */
  public toListShards() {
    return this.to('ListShards');
  }

  /**
   * Lists the stream consumers registered to receive data from a Kinesis stream using enhanced fan-out, and provides information about each consumer.
   *
   * Access Level: List
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_ListStreamConsumers.html
   */
  public toListStreamConsumers() {
    return this.to('ListStreamConsumers');
  }

  /**
   * Lists your streams.
   *
   * Access Level: List
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_ListStreams.html
   */
  public toListStreams() {
    return this.to('ListStreams');
  }

  /**
   * Lists the tags for the specified Amazon Kinesis stream.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_ListTagsForStream.html
   */
  public toListTagsForStream() {
    return this.to('ListTagsForStream');
  }

  /**
   * Merges two adjacent shards in a stream and combines them into a single shard to reduce the stream's capacity to ingest and transport data.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_MergeShards.html
   */
  public toMergeShards() {
    return this.to('MergeShards');
  }

  /**
   * Writes a single data record from a producer into an Amazon Kinesis stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecord.html
   */
  public toPutRecord() {
    return this.to('PutRecord');
  }

  /**
   * Writes multiple data records from a producer into an Amazon Kinesis stream in a single call (also referred to as a PutRecords request).
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html
   */
  public toPutRecords() {
    return this.to('PutRecords');
  }

  /**
   * Registers a stream consumer with a Kinesis data stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_RegisterStreamConsumer.html
   */
  public toRegisterStreamConsumer() {
    return this.to('RegisterStreamConsumer');
  }

  /**
   * Removes tags from the specified Amazon Kinesis stream.
   * (Original doc erroneously said "Description for SplitShard".)
   *
   * Access Level: Tagging
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_RemoveTagsFromStream.html
   */
  public toRemoveTagsFromStream() {
    return this.to('RemoveTagsFromStream');
  }

  /**
   * Splits a shard into two new shards in the Kinesis data stream, to increase the stream's capacity to ingest and transport data.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_SplitShard.html
   */
  public toSplitShard() {
    return this.to('SplitShard');
  }

  /**
   * Grants permission to enable or update server-side encryption using an AWS KMS key for a specified stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_StartStreamEncryption.html
   */
  public toStartStreamEncryption() {
    return this.to('StartStreamEncryption');
  }

  /**
   * Grants permission to disable server-side encryption for a specified stream.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_StopStreamEncryption.html
   */
  public toStopStreamEncryption() {
    return this.to('StopStreamEncryption');
  }

  /**
   * Listening to a specific shard with enhanced fan-out.
   *
   * Access Level: Read
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_SubscribeToShard.html
   */
  public toSubscribeToShard() {
    return this.to('SubscribeToShard');
  }

  /**
   * Updates the shard count of the specified stream to the specified number of shards.
   *
   * Access Level: Write
   *
   * https://docs.aws.amazon.com/kinesis/latest/APIReference/API_UpdateShardCount.html
   */
  public toUpdateShardCount() {
    return this.to('UpdateShardCount');
  }

  // Actions grouped by IAM access level (see the AWS Service Authorization
  // Reference for Amazon Kinesis).
  protected accessLevelList: AccessLevelList = {
    "Tagging": [
      "AddTagsToStream",
      "RemoveTagsFromStream"
    ],
    "Write": [
      "CreateStream",
      "DecreaseStreamRetentionPeriod",
      "DeleteStream",
      "DeregisterStreamConsumer",
      "DisableEnhancedMonitoring",
      "EnableEnhancedMonitoring",
      "IncreaseStreamRetentionPeriod",
      "MergeShards",
      "PutRecord",
      "PutRecords",
      "RegisterStreamConsumer",
      "SplitShard",
      "StartStreamEncryption",
      "StopStreamEncryption",
      "UpdateShardCount"
    ],
    "Read": [
      "DescribeLimits",
      "DescribeStream",
      "DescribeStreamConsumer",
      "DescribeStreamSummary",
      "GetRecords",
      "GetShardIterator",
      "ListTagsForStream",
      "SubscribeToShard"
    ],
    "List": [
      "ListShards",
      "ListStreamConsumers",
      "ListStreams"
    ]
  };

  /**
   * Adds a resource of type stream to the statement
   *
   * https://docs.aws.amazon.com/kinesis/latest/dev/amazon-kinesis-streams.html
   *
   * @param streamName - Identifier for the streamName.
   * @param account - Account of the resource; defaults to empty string: all accounts.
   * @param region - Region of the resource; defaults to empty string: all regions.
   * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
   */
  public onStream(streamName: string, account?: string, region?: string, partition?: string) {
    // `||` (not `??`) keeps the original behavior: empty strings fall back to wildcards.
    const arn = `arn:${partition || 'aws'}:kinesis:${region || '*'}:${account || '*'}:stream/${streamName}`;
    return this.on(arn);
  }

  /**
   * Adds a resource of type consumer to the statement
   *
   * https://docs.aws.amazon.com/kinesis/latest/dev/amazon-kinesis-consumers.html
   *
   * @param streamType - Identifier for the streamType.
   * @param streamName - Identifier for the streamName.
   * @param consumerName - Identifier for the consumerName.
   * @param consumerCreationTimpstamp - Identifier for the consumerCreationTimpstamp.
   * @param account - Account of the resource; defaults to empty string: all accounts.
   * @param region - Region of the resource; defaults to empty string: all regions.
   * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
   */
  public onConsumer(streamType: string, streamName: string, consumerName: string, consumerCreationTimpstamp: string, account?: string, region?: string, partition?: string) {
    const arn = `arn:${partition || 'aws'}:kinesis:${region || '*'}:${account || '*'}:${streamType}/${streamName}/consumer/${consumerName}:${consumerCreationTimpstamp}`;
    return this.on(arn);
  }

  /**
   * Adds a resource of type kmsKey to the statement
   *
   * https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys
   *
   * @param keyId - Identifier for the keyId.
   * @param account - Account of the resource; defaults to empty string: all accounts.
   * @param region - Region of the resource; defaults to empty string: all regions.
   * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
   */
  public onKmsKey(keyId: string, account?: string, region?: string, partition?: string) {
    const arn = `arn:${partition || 'aws'}:kms:${region || '*'}:${account || '*'}:key/${keyId}`;
    return this.on(arn);
  }
}
the_stack
import {createEvent, withTextBody, withSender} from "./event.js";
import {TimelineEvent} from "../matrix/storage/types";

// Pagination token representing "before the first event ever synced".
export const TIMELINE_START_TOKEN = "timeline_start";

// Deterministic event id for the i-th created event, e.g. "$event0".
export function eventId(i: number): string {
    return `$event${i}`;
}

/** `from` is included, `to` is excluded */
export function eventIds(from: number, to: number): string[] {
    return [...Array(to-from).keys()].map(i => eventId(i + from));
}

/**
 * In-memory mock of a Matrix homeserver timeline for tests.
 *
 * Events are kept in two orderings:
 *  - `_dagOrder`: topological order, served by `messages()` and `context()`;
 *    `insertAfter()` can splice events into the middle of it.
 *  - `_syncOrder`: arrival order, served by `sync()`; new events are always
 *    appended to the end, regardless of where they land in the DAG.
 */
export class TimelineMock {
    private _counter: number;           // monotonically increasing id counter
    private _dagOrder: TimelineEvent[];
    private _syncOrder: TimelineEvent[];
    private _defaultSender: string;

    constructor(defaultSender: string) {
        this._counter = 0;
        this._dagOrder = [];
        this._syncOrder = [];
        this._defaultSender = defaultSender;
    }

    // Builds a plain m.room.message text event from the default sender.
    _defaultEvent(id: string): TimelineEvent {
        return withTextBody(`This is event ${id}`, withSender(this._defaultSender, createEvent("m.room.message", id)));
    }

    // Creates one event with the next sequential id; `func` lets tests
    // customize the event instead of using the default text message.
    _createEvent(func?: (eventId: string) => TimelineEvent): TimelineEvent {
        const id = eventId(this._counter++);
        return func ? func(id) : this._defaultEvent(id);
    }

    _createEvents(n: number, func?: (eventId: string) => TimelineEvent) {
        const events: TimelineEvent[] = [];
        for (let i = 0; i < n; i++) {
            events.push(this._createEvent(func));
        }
        return events;
    }

    // Splices `n` new events into the DAG just after `token`, while still
    // appending them to the sync order. Returns the id of the last new event.
    insertAfter(token: string, n: number, func?: (eventId: string) => TimelineEvent) {
        const events = this._createEvents(n, func);
        const index = this._findIndex(token, "f", this._dagOrder);
        this._dagOrder.splice(index, 0, ...events);
        this._syncOrder.push(...events);
        return events[events.length - 1]?.event_id;
    }

    // Appends `n` new events to the end of both orderings.
    // Returns the id of the last new event.
    append(n: number, func?: (eventId: string) => TimelineEvent) {
        const events = this._createEvents(n, func);
        this._dagOrder.push(...events);
        this._syncOrder.push(...events);
        return events[events.length - 1]?.event_id;
    }

    // Maps a pagination direction to an index step: forwards (+1) / backwards (-1).
    _getStep(direction: "f" | "b") : 1 | -1 {
        return direction === "f" ? 1 : -1;
    }

    // Resolves a pagination token to the first index to read in
    // `eventOrdering`. Tokens are exclusive: for an event-id token the
    // returned index is one step past that event in `direction`.
    _findIndex(token: string, direction: "f" | "b", eventOrdering: TimelineEvent[]): number {
        const step = this._getStep(direction);
        if (token === TIMELINE_START_TOKEN) {
            const firstSyncEvent = this._syncOrder[0];
            if (!firstSyncEvent) {
                // We have no events at all. Wherever you start looking,
                // you'll stop looking right away. Zero works as well as anything else.
                return 0;
            }
            // The timeline start is wherever the first-synced event sits in
            // the requested ordering (it may have moved in DAG order).
            const orderIndex = eventOrdering.findIndex(e => e.event_id === firstSyncEvent.event_id);
            return orderIndex;
        }
        // All other tokens are (non-inclusive) event indices
        const index = eventOrdering.findIndex(e => e.event_id === token);
        if (index === -1) {
            // We didn't find this event token at all. What are we
            // even looking at?
            throw new Error("Invalid token passed to TimelineMock");
        }
        return index + step;
    }

    // Mimics the /messages endpoint: walks `_dagOrder` from `begin` in
    // `direction`, stopping at `end` (exclusive), `limit`, or either edge.
    messages(begin: string, end: string | undefined, direction: "f" | "b", limit: number = 10) {
        const step = this._getStep(direction);
        let index = this._findIndex(begin, direction, this._dagOrder);
        const chunk: TimelineEvent[] = [];
        for (; limit > 0 && index >= 0 && index < this._dagOrder.length; index += step, limit--) {
            if (this._dagOrder[index].event_id === end) {
                break;
            }
            chunk.push(this._dagOrder[index]);
        }
        return {
            start: begin,
            end: chunk[chunk.length - 1]?.event_id || begin,
            chunk,
            state: []
        };
    }

    // Mimics the /context endpoint: returns the event plus up to `limit`
    // surrounding events, expanding outwards one offset at a time so the
    // before/after lists stay balanced until an edge is hit.
    context(eventId: string, limit: number = 10) {
        if (limit <= 0) {
            throw new Error("Cannot fetch zero or less events!");
        }
        let eventIndex = this._dagOrder.findIndex(e => e.event_id === eventId);
        if (eventIndex === -1) {
            throw new Error("Fetching context for unknown event");
        }
        const event = this._dagOrder[eventIndex];
        let offset = 1;
        const eventsBefore: TimelineEvent[] = [];
        const eventsAfter: TimelineEvent[] = [];
        while (limit !== 0 && (eventIndex - offset >= 0 || eventIndex + offset < this._dagOrder.length)) {
            if (eventIndex - offset >= 0) {
                eventsBefore.push(this._dagOrder[eventIndex - offset]);
                limit--;
            }
            if (limit !== 0 && eventIndex + offset < this._dagOrder.length) {
                eventsAfter.push(this._dagOrder[eventIndex + offset]);
                limit--;
            }
            offset++;
        }
        return {
            // events_before is ordered closest-first, so its last entry is
            // the earliest event returned (and vice versa for events_after).
            start: eventsBefore[eventsBefore.length - 1]?.event_id || eventId,
            end: eventsAfter[eventsAfter.length - 1]?.event_id || eventId,
            event,
            events_before: eventsBefore,
            events_after: eventsAfter,
            state: []
        };
    }

    // Mimics /sync: returns at most the `limit` newest events after `since`
    // (in arrival order); `limited` signals that events were skipped.
    sync(since?: string, limit: number = 10) {
        const startAt = since ? this._findIndex(since, "f", this._syncOrder) : 0;
        // If more than `limit` events arrived, drop the oldest ones.
        const index = Math.max(this._syncOrder.length - limit, startAt);
        const limited = this._syncOrder.length - startAt > limit;
        const events: TimelineEvent[] = [];
        for(let i = index; i < this._syncOrder.length; i++) {
            events.push(this._syncOrder[i]);
        }
        return {
            next_batch: events[events.length - 1]?.event_id || since || TIMELINE_START_TOKEN,
            timeline: {
                prev_batch: events[0]?.event_id || since || TIMELINE_START_TOKEN,
                events,
                limited
            }
        }
    }
}

export function tests() {
    const SENDER = "@alice:hs.tdl";

    return {
        "Append events are returned via sync": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(10);
            const syncResponse = timeline.sync();
            const events = syncResponse.timeline.events.map(e => e.event_id);
            assert.deepEqual(events, eventIds(0, 10));
            assert.equal(syncResponse.timeline.limited, false);
        },
        "Limiting a sync properly limits the returned events": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(20);
            const syncResponse = timeline.sync(undefined, 10);
            const events = syncResponse.timeline.events.map(e => e.event_id);
            // Only the 10 newest of the 20 events are returned.
            assert.deepEqual(events, eventIds(10, 20));
            assert.equal(syncResponse.timeline.limited, true);
        },
        "The context endpoint returns messages in DAG order around an event": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(30);
            const context = timeline.context(eventId(15));
            assert.equal(context.event.event_id, eventId(15));
            // events_before is closest-first, hence the reverse().
            assert.deepEqual(context.events_before.map(e => e.event_id).reverse(), eventIds(10, 15));
            assert.deepEqual(context.events_after.map(e => e.event_id), eventIds(16, 21));
        },
        "The context endpoint returns the proper number of messages": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(30);
            // Exercise every limit 1..29 at the middle and both edges of the timeline.
            for (const i of new Array(29).keys()) {
                const middleFetch = timeline.context(eventId(15), i + 1);
                assert.equal(middleFetch.events_before.length + middleFetch.events_after.length, i + 1);
                const startFetch = timeline.context(eventId(1), i + 1);
                assert.equal(startFetch.events_before.length + startFetch.events_after.length, i + 1);
                const endFetch = timeline.context(eventId(28), i + 1);
                assert.equal(endFetch.events_before.length + endFetch.events_after.length, i + 1);
            }
        },
        "The previous batch from a sync returns the previous events": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(20);
            const sync = timeline.sync(undefined, 10);
            const messages = timeline.messages(sync.timeline.prev_batch, undefined, "b");
            const events = messages.chunk.map(e => e.event_id).reverse();
            assert.deepEqual(events, eventIds(0, 10));
        },
        "Two consecutive message fetches are continuous if no new events are inserted": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(30);
            const sync = timeline.sync(undefined, 10);
            const messages1 = timeline.messages(sync.timeline.prev_batch, undefined, "b");
            const events1 = messages1.chunk.map(e => e.event_id).reverse();
            assert.deepEqual(events1, eventIds(10, 20));
            const messages2 = timeline.messages(messages1.end, undefined, "b");
            const events2 = messages2.chunk.map(e => e.event_id).reverse();
            assert.deepEqual(events2, eventIds(0, 10));
        },
        "Two consecutive message fetches detect newly inserted event": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(30);
            const messages1 = timeline.messages(eventId(20), undefined, "b", 10);
            const events1 = messages1.chunk.map(e => e.event_id).reverse();
            assert.deepEqual(events1, eventIds(10, 20));
            // Event 30 is spliced into the DAG between events 9 and 10, so a
            // backwards fetch from event 10 must now include it.
            timeline.insertAfter(eventId(9), 1);
            const messages2 = timeline.messages(eventId(10), undefined, "b", 10);
            const events2 = messages2.chunk.map(e => e.event_id).reverse();
            const expectedEvents2 = eventIds(1, 10);
            expectedEvents2.push(eventId(30));
            assert.deepEqual(events2, expectedEvents2);
        },
        "A sync that receives no events has the same next batch as it started with": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(10);
            const sync1 = timeline.sync();
            const sync2 = timeline.sync(sync1.next_batch);
            assert.equal(sync1.next_batch, sync2.next_batch);
        },
        "An event inserted at the staart still shows up in a sync": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(30);
            const sync1 = timeline.sync(undefined, 10);
            const sync2 = timeline.sync(sync1.next_batch, 10)
            assert.deepEqual(sync2.timeline.events, []);
            assert.equal(sync2.timeline.limited, false);
            // Inserted at the DAG start, but still newest in sync order.
            timeline.insertAfter(TIMELINE_START_TOKEN, 1);
            const sync3 = timeline.sync(sync2.next_batch, 10)
            const events = sync3.timeline.events.map(e => e.event_id);
            assert.deepEqual(events, [eventId(30)]);
        },
        "An event inserted at the start does not show up in a non-overlapping message fetch": assert => {
            const timeline = new TimelineMock(SENDER);
            timeline.append(30);
            const sync1 = timeline.sync(undefined, 10);
            const messages1 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10);
            timeline.insertAfter(TIMELINE_START_TOKEN, 1);
            const messages2 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10);
            assert.deepEqual(messages1.chunk, messages2.chunk);
        },
    }
}
the_stack
import * as Kilt from '@kiltprotocol/sdk-js'
import { KeyRelationship, KeyringPair } from '@kiltprotocol/sdk-js'
import type {
  Credential,
  Claim,
  CType,
  ICType,
  Did,
  IAcceptCredential,
  IRequestAttestation,
  ISubmitAttestation,
  ISubmitCredential,
  IDidKeyDetails,
} from '@kiltprotocol/sdk-js'
import { mnemonicGenerate } from '@polkadot/util-crypto'

// Local development node; the demo performs real (test-token) transactions.
const NODE_URL = 'ws://127.0.0.1:9944'
// Padding character for the section banners printed by each phase.
const SEP = '_'

/**
 * Phase 1 of the demo: connects to the chain and prepares all actors and data.
 *
 * Creates an attester account + full (on-chain) DID from a fixed mnemonic,
 * registers a demo CType on chain (best-effort — it may already exist),
 * derives a claimer light DID from fixed keys, and builds a claim against
 * the CType.
 *
 * @returns the claimer/attester DIDs, the attester keypair, the CType,
 *          the claim, and the shared demo keystore.
 */
async function setup(): Promise<{
  claimerLightDid: Did.LightDidDetails
  attesterFullDid: Did.FullDidDetails
  attester: KeyringPair
  claim: Claim
  ctype: CType
  keystore: Did.DemoKeystore
}> {
  // Print a centered " SETUP " banner padded with SEP to 80 columns.
  console.log(
    ((s) => s.padEnd(40 + s.length / 2, SEP).padStart(80, SEP))(' SETUP ')
  )
  await Kilt.init({ address: NODE_URL })

  // ------------------------- Attester ----------------------------------------
  // To get an attestation, we need an Attester
  // we can generate a new keypair:
  const keyring = new Kilt.Utils.Keyring({
    // KILT has registered the ss58 prefix 38
    ss58Format: 38,
    type: 'ed25519',
  })

  // generate a Mnemonic for the attester
  const attesterMnemonic =
    'receive clutch item involve chaos clutch furnace arrest claw isolate okay together'
  // or we just use unsafe precalculated keys (just for demo purposes!):
  const attester = keyring.addFromMnemonic(
    attesterMnemonic,
    // using ed25519 bc this account has test coins from the start on the development chain spec
    { signingKeyPairType: 'ed25519' }
  )
  console.log(
    'Attester free balance is:',
    (await Kilt.Balance.getBalances(attester.address)).free.toString()
  )

  // Build an on chain DID for the attester to make transactions on the KILT chain, using our demo keystore
  const keystore = new Kilt.Did.DemoKeystore()
  const attesterFullDid = await Kilt.Did.createOnChainDidFromSeed(
    attester,
    keystore,
    attesterMnemonic,
    // using ed25519 as key type because this is how the endowed account is set up
    Kilt.Did.SigningAlgorithms.Ed25519
  )
  // Will print `did:kilt:014sxSYXakw1ZXBymzT9t3Yw91mUaqKST5bFUEjGEpvkTuckar`.
  console.log(attesterFullDid.did)

  // ------------------------- CType ----------------------------------------
  // First build a schema
  const ctypeSchema: ICType['schema'] = {
    $id: 'kilt:ctype:0x3b53bd9a535164136d2df46d0b7146b17b9821490bc46d4dfac7e06811631803',
    $schema: 'http://kilt-protocol.org/draft-01/ctype#',
    properties: {
      name: {
        type: 'string',
      },
      age: {
        type: 'integer',
      },
    },
    type: 'object',
    title: 'title',
  }
  // Generate the Hash for it
  const ctypeHash = Kilt.CTypeUtils.getHashForSchema(ctypeSchema)
  // Put everything together
  const rawCtype: ICType = {
    schema: ctypeSchema,
    hash: ctypeHash,
    owner: attesterFullDid.did,
  }

  // Build the CType object
  const ctype = new Kilt.CType(rawCtype)

  // Store ctype on blockchain
  // signAndSubmitTx can be passed SubscriptionPromise.Options, to control resolve and reject criteria, set tip value, or activate re-sign-re-send capabilities.
  // ! This costs tokens !
  // Also note, that the same ctype can only be stored once on the blockchain.
  try {
    await ctype
      .store()
      .then((tx) =>
        attesterFullDid.authorizeExtrinsic(tx, keystore, attester.address)
      )
      .then((tx) =>
        Kilt.BlockchainUtils.signAndSubmitTx(tx, attester, {
          resolveOn: Kilt.BlockchainUtils.IS_IN_BLOCK,
          reSign: true,
        })
      )
  } catch (e) {
    // Deliberately non-fatal: re-running the demo against the same chain
    // hits "ctype already exists" and the rest of the flow still works.
    console.log(
      'Error while storing CType. Probably either insufficient funds or ctype does already exist.',
      e
    )
  }

  // ------------------------- Claimer ----------------------------------------
  // How to generate an account and subsequently a derived DID from the account.
  const claimerMnemonic =
    'wish rather clinic rather connect culture frown like quote effort cart faculty'

  // Generate authentication and encryption keys used to derive a light DID from them.
  const claimerSigningKeypair = await keystore.generateKeypair({
    alg: Kilt.Did.SigningAlgorithms.Ed25519,
    seed: claimerMnemonic,
  })
  const claimerEncryptionKeypair = await keystore.generateKeypair({
    alg: Kilt.Did.EncryptionAlgorithms.NaclBox,
    seed: claimerMnemonic,
  })
  // Using the generated authentication and encryption keys to derive a light DID.
  const claimerLightDid = new Kilt.Did.LightDidDetails({
    authenticationKey: {
      publicKey: claimerSigningKeypair.publicKey,
      type: Kilt.Did.DemoKeystore.getKeypairTypeForAlg(
        claimerSigningKeypair.alg
      ),
    },
    encryptionKey: {
      publicKey: claimerEncryptionKeypair.publicKey,
      type: Kilt.Did.DemoKeystore.getKeypairTypeForAlg(
        claimerEncryptionKeypair.alg
      ),
    },
  })

  // At this point the generated account has no tokens.
  // If you want to interact with the blockchain, you will have to get some.
  // Contact faucet@kilt.io and provide the address of the account
  // All tokens generated are play tokens and hold no value

  const rawClaim = {
    name: 'Alice',
    age: 29,
  }

  const claim = Kilt.Claim.fromCTypeAndClaimContents(
    ctype,
    rawClaim,
    claimerLightDid.did
  )

  console.log('Claimer', claimerLightDid.did, '\n')
  console.log('Attester', attesterFullDid.did, '\n')
  console.log('Ctype', ctype, '\n')
  console.log('Claim', claim, '\n')

  return {
    claimerLightDid,
    attesterFullDid,
    attester,
    ctype,
    claim,
    keystore,
  }
}

/**
 * Phase 2 of the demo: the claimer requests an attestation via encrypted
 * messages, the attester verifies the request, anchors the attestation on
 * chain, and returns it; the claimer combines request + attestation into a
 * Credential.
 *
 * @returns the resulting credential.
 */
async function doAttestation(
  claimerLightDid: Did.LightDidDetails,
  attesterFullDid: Did.FullDidDetails,
  attester: KeyringPair,
  claim: Claim,
  keystore: Did.DemoKeystore
): Promise<{
  credential: Credential
}> {
  console.log(
    ((s) => s.padEnd(40 + s.length / 2, SEP).padStart(80, SEP))(' ATTESTATION ')
  )

  // ------------------------- CLAIMER -----------------------------------------
  // And we need to build a request for an attestation
  const requestForAttestation = Kilt.RequestForAttestation.fromClaim(claim)
  await requestForAttestation.signWithDid(keystore, claimerLightDid)

  // The claimer can send a message to the attester requesting to do the attestation
  const claimerRequestMessage = new Kilt.Message(
    {
      type: Kilt.Message.BodyType.REQUEST_ATTESTATION,
      content: { requestForAttestation },
    },
    claimerLightDid.did,
    attesterFullDid.did
  )

  // First key-agreement key of each DID is used for message encryption.
  const claimerEncryptionKey = claimerLightDid.getKeys(
    KeyRelationship.keyAgreement
  )[0] as IDidKeyDetails<string>
  const attesterEncryptionKey = attesterFullDid.getKeys(
    KeyRelationship.keyAgreement
  )[0] as IDidKeyDetails<string>

  // The message can be encrypted as follows
  const encryptMessage = await claimerRequestMessage.encrypt(
    claimerEncryptionKey,
    attesterEncryptionKey,
    keystore
  )

  // claimer sends [[encrypted]] to the attester

  // ------------------------- Attester ----------------------------------------
  // When the Attester receives the message, she can decrypt it,
  // internally checks the sender is the owner of the account
  // and checks the hash and signature of the message
  const reqAttestationDec = await Kilt.Message.decrypt(
    encryptMessage,
    keystore,
    { senderDetails: claimerLightDid, receiverDetails: attesterFullDid }
  )

  const claimersRequest = Kilt.RequestForAttestation.fromRequest(
    (reqAttestationDec.body as IRequestAttestation).content
      .requestForAttestation
  )
  // Attester can check the data and verify the data has not been tampered with
  if (!claimersRequest.verifyData()) {
    console.log('data is false')
  }
  // Attester can check if the signature of the claimer matches the request for attestation object
  await claimersRequest.verifySignature()

  const attestation = Kilt.Attestation.fromRequestAndDid(
    claimersRequest,
    attesterFullDid.did
  )
  console.log('the attestation: ', attestation)
  // Anchor the attestation on chain (authorized by the attester's full DID).
  await attestation
    .store()
    .then((tx) =>
      attesterFullDid.authorizeExtrinsic(tx, keystore, attester.address)
    )
    .then((tx) =>
      Kilt.BlockchainUtils.signAndSubmitTx(tx, attester, {
        resolveOn: Kilt.BlockchainUtils.IS_IN_BLOCK,
        reSign: true,
      })
    )
  // And send a message back
  const attesterAttestationMessage = new Kilt.Message(
    {
      type: Kilt.Message.BodyType.SUBMIT_ATTESTATION,
      content: { attestation },
    },
    attesterFullDid.did,
    claimerLightDid.did
  )

  const submitAttestationEnc = await attesterAttestationMessage.encrypt(
    attesterEncryptionKey,
    claimerEncryptionKey,
    keystore
  )

  // ------------------------- CLAIMER -----------------------------------------
  // internally, the decrypt checks the sender is the owner of the account
  // and checks the hash and signature of the message
  const submitAttestationDec = await Kilt.Message.decrypt(
    submitAttestationEnc,
    keystore,
    { senderDetails: attesterFullDid, receiverDetails: claimerLightDid }
  )

  const credential = Kilt.Credential.fromRequestAndAttestation(
    // The claimer has access to the request for attestation
    requestForAttestation,
    (submitAttestationDec.body as ISubmitAttestation).content.attestation
  )

  console.log('RFA Message', reqAttestationDec.body, '\n')
  console.log('Submit attestation:', submitAttestationDec.body, '\n')
  console.log('Credential', credential, '\n')

  return {
    credential,
  }
}

/**
 * Phase 3 of the demo: an ad-hoc verifier (fresh light DID each run) asks the
 * claimer for credentials of the demo CType; the claimer answers with a
 * challenge-signed presentation, which the verifier checks.
 *
 * Logs the verification result; does not throw on a failed verification.
 */
async function doVerification(
  claimerLightDid: Did.LightDidDetails,
  credential: Credential,
  keystore: Did.DemoKeystore
): Promise<void> {
  console.log(
    ((s) => s.padEnd(40 + s.length / 2, SEP).padStart(80, SEP))(
      ' VERIFICATION '
    )
  )
  const verifierMnemonic = mnemonicGenerate()

  const verifierSigningKeypair = await keystore.generateKeypair({
    alg: Kilt.Did.SigningAlgorithms.Ed25519,
    seed: verifierMnemonic,
  })
  const verifierEncryptionKeypair = await keystore.generateKeypair({
    alg: Kilt.Did.EncryptionAlgorithms.NaclBox,
    seed: verifierMnemonic,
  })
  // Generate authentication and encryption keys used to derive a light DID from them.
  const verifierLightDid = new Kilt.Did.LightDidDetails({
    authenticationKey: {
      publicKey: verifierSigningKeypair.publicKey,
      type: Kilt.Did.DemoKeystore.getKeypairTypeForAlg(
        verifierSigningKeypair.alg
      ),
    },
    encryptionKey: {
      publicKey: verifierEncryptionKeypair.publicKey,
      type: Kilt.Did.DemoKeystore.getKeypairTypeForAlg(
        verifierEncryptionKeypair.alg
      ),
    },
  })

  const claimerEncryptionKey = claimerLightDid.getKeys(
    KeyRelationship.keyAgreement
  )[0] as IDidKeyDetails<string>
  const verifierEncryptionKey = verifierLightDid.getKeys(
    KeyRelationship.keyAgreement
  )[0] as IDidKeyDetails<string>

  // ------------------------- Verifier ----------------------------------------
  const verifierAcceptedCredentialsMessage = new Kilt.Message(
    {
      type: Kilt.Message.BodyType.ACCEPT_CREDENTIAL,
      content: [credential.request.claim.cTypeHash],
    },
    verifierLightDid.did,
    claimerLightDid.did
  )

  const verifierAcceptedCredentialsMessageEnc =
    await verifierAcceptedCredentialsMessage.encrypt(
      verifierEncryptionKey,
      claimerEncryptionKey,
      keystore
    )

  // ------------------------- Claimer -----------------------------------------
  // The claimer receives a message from the verifier of the accepted ctypes
  const verifierAcceptedCredentialsMessageDec = await Kilt.Message.decrypt(
    verifierAcceptedCredentialsMessageEnc,
    keystore,
    { senderDetails: verifierLightDid, receiverDetails: claimerLightDid }
  )

  const ctypeHash = (
    verifierAcceptedCredentialsMessageDec.body as IAcceptCredential
  ).content[0]
  console.log('claimer checks the ctypeHash matches', ctypeHash)

  // Fresh challenge binds the presentation to this verification session.
  const challenge = Kilt.Utils.UUID.generate()

  const presentation = await credential.createPresentation({
    signer: keystore,
    claimerDid: claimerLightDid,
    challenge,
  })

  const claimerSubmitCredentialsMessage = new Kilt.Message(
    {
      type: Kilt.Message.BodyType.SUBMIT_CREDENTIAL,
      content: [presentation],
    },
    claimerLightDid.did,
    verifierLightDid.did
  )
  // Claimer encrypts the credentials message to the verifier
  const claimerSubmitCredentialsMessageEnc =
    await claimerSubmitCredentialsMessage.encrypt(
      claimerEncryptionKey,
      verifierEncryptionKey,
      keystore
    )

  // ------------------------- Verifier ----------------------------------------
  // The verifier needs the public account of the attester. Either he already has a list of trusted
  // attesters or he needs to resolve them differently. A Decentralized Identity (DID) would be an
  // option for that.
  const verifierSubmitCredentialsMessageDec = await Kilt.Message.decrypt(
    claimerSubmitCredentialsMessageEnc,
    keystore,
    { senderDetails: claimerLightDid, receiverDetails: verifierLightDid }
  )
  const presentationMessage = (
    verifierSubmitCredentialsMessageDec.body as ISubmitCredential
  ).content

  const verifiablePresentation = Kilt.Credential.fromCredential(
    presentationMessage[0]
  )
  const verified = await verifiablePresentation.verify({ challenge })
  console.log('Received claims: ', JSON.stringify(presentationMessage[0]))
  console.log('All valid? ', verified)
}

// do an attestation and a verification
/**
 * Runs the full demo: setup, attestation, two verifications that should
 * succeed, revocation, then two verifications that should fail.
 *
 * @returns true when the flow ran to completion.
 */
async function example(): Promise<boolean> {
  const { claimerLightDid, attesterFullDid, claim, attester, keystore } =
    await setup()

  const { credential } = await doAttestation(
    claimerLightDid,
    attesterFullDid,
    attester,
    claim,
    keystore
  )
  // should succeed
  await doVerification(claimerLightDid, credential, keystore)
  await doVerification(claimerLightDid, credential, keystore)

  // revoke
  // NOTE(review): unlike the store() calls above, this extrinsic is not
  // DID-authorized/signed/submitted here — confirm revoke() actually takes
  // effect on chain in this SDK version.
  await Kilt.Attestation.revoke(credential.getHash(), 0)

  // should fail
  await doVerification(claimerLightDid, credential, keystore)
  await doVerification(claimerLightDid, credential, keystore)
  return true
}

// connect to the blockchain, execute the examples and then disconnect
;(async () => {
  const done = await example()
  if (!done) {
    throw new Error('Example did not finish')
  }
})()
  .catch((e) => {
    console.error('Error Error Error!\n')
    // Re-throw outside the promise chain so the process exits non-zero
    // after the finally() below has had a chance to disconnect.
    setTimeout(() => {
      throw e
    }, 1)
  })
  .finally(() => Kilt.disconnect())
the_stack
import {Component, OnInit, OnDestroy, Input} from '@angular/core';
import { Subscription } from 'rxjs/Subscription';
import { PolicyAcrossApplicationService } from '../../services/policy-across-application.service';
import { AssetGroupObservableService } from '../../../core/services/asset-group-observable.service';
import { AutorefreshService } from '../../services/autorefresh.service';
import { environment } from './../../../../environments/environment';
import { LoggerService } from '../../../shared/services/logger.service';
import { ErrorHandlingService } from '../../../shared/services/error-handling.service';
import { UtilsService } from '../../../shared/services/utils.service';
import {ActivatedRoute, Router} from '@angular/router';
import {WorkflowService} from '../../../core/services/workflow.service';
import {DomainTypeObservableService} from '../../../core/services/domain-type-observable.service';

/**
 * Shows how a policy/rule is distributed across applications (and, when only
 * one application matches, across that application's environments).
 *
 * Data flow: reacts to asset-group and domain changes, fetches the
 * per-application distribution; if exactly one application is returned, a
 * second API call fetches the per-environment breakdown instead.
 */
@Component({
  selector: 'app-policy-across-application',
  templateUrl: './policy-across-application.component.html',
  styleUrls: ['./policy-across-application.component.css'],
  providers: [PolicyAcrossApplicationService, AutorefreshService]
})

export class PolicyAcrossApplicationComponent implements OnInit, OnDestroy {

  selectedAssetGroup: string;
  selectedDomain: string;
  public apiData: any;
  public applicationValue: any;
  public errorMessage: any;
  // dataComing / showLoader / seekdata drive the template's three states:
  // data shown, loading spinner, and error block respectively.
  public dataComing = true;
  public showLoader = true;
  public tableHeaderData: any;
  private subscriptionToAssetGroup: Subscription;
  private subscriptionToDomain: Subscription;
  private dataSubscription: Subscription;
  public placeHolderTextasset = '';
  public placeHolderTextenv = '';
  public returnedSearchapp = '';
  public returnedSearchenv = '';
  public seekdata = false;
  public sampleData;
  durationParams: any;
  autoRefresh: boolean;
  @Input() ruleID: any;
  public applicationName: any = '';
  apiAppData: any = {};
  apiEnvData: any = {};
  // showEnv/showAsset toggle between the per-environment and
  // per-application views in the template.
  showEnv = false;
  showAsset = true;
  private autorefreshInterval;
  @Input() pageLevel: number;
  private urlToRedirect: string;

  constructor(
    private policyAcrossApplicationService: PolicyAcrossApplicationService,
    private assetGroupObservableService: AssetGroupObservableService,
    private autorefreshService: AutorefreshService,
    private logger: LoggerService,
    private errorHandling: ErrorHandlingService,
    private utils: UtilsService,
    private router: Router,
    private activatedRoute: ActivatedRoute,
    private workflowService: WorkflowService,
    private domainObservableService: DomainTypeObservableService
  ) {
    // Asset-group changes only update state; the domain subscription is what
    // triggers a reload (NOTE(review): this assumes a domain emission always
    // follows an asset-group change — confirm against the observable services).
    this.subscriptionToAssetGroup = this.assetGroupObservableService.getAssetGroup().subscribe(
      assetGroupName => {
          this.selectedAssetGroup = assetGroupName;
      });
    this.subscriptionToDomain = this.domainObservableService.getDomainType().subscribe(domain => {
      this.selectedDomain = domain;
      this.updateComponent();
    });
  }

  ngOnInit() {
    this.urlToRedirect = this.router.routerState.snapshot.url;
    this.durationParams = this.autorefreshService.getDuration();
    this.durationParams = parseInt(this.durationParams, 10);
    this.autoRefresh = this.autorefreshService.autoRefresh;

    // Capture `this` for the classic-function setInterval callback below.
    const afterLoad = this;
    if (this.autoRefresh !== undefined) {
      if ((this.autoRefresh === true ) || (this.autoRefresh.toString() === 'true')) {
        this.autorefreshInterval = setInterval(function() {
          afterLoad.getData();
        }, this.durationParams);
      }
    }

    this.placeHolderTextasset = 'Search by App';
    this.placeHolderTextenv = 'Search by Env';
    this.updateComponent();
  }

  getappSearch(search) {
    this.returnedSearchapp = search;
  }

  getenvSearch(search) {
    this.returnedSearchenv = search;
  }

  /* Function to repaint component */

  updateComponent() {
    /* All functions variables which are required to be set for component to be reloaded should go here */
    this.showLoader = true;
    this.dataComing = false;
    this.seekdata = false;
    this.showEnv = false;
    this.showAsset = true;
    this.getData();
  }

  /* Function to get Data */

  getData() {
    /* All functions to get data should go here */
    this.getPolicyAcrossApplication();
  }

  /**
   * This function fetches data for distribution across application
   */

  getPolicyAcrossApplication() {
    // Cancel any in-flight request before starting a new one.
    if (this.dataSubscription) {
      this.dataSubscription.unsubscribe();
    }

    // ruleID is an @Input; skip fetching until the parent provides it.
    if (this.ruleID !== undefined) {
      const queryParams = {
        'ag': this.selectedAssetGroup,
        'ruleId' : this.ruleID,
        'domain': this.selectedDomain
      };

      const policyAcrossApplicationUrl = environment.policyAcrossApplication.url;
      const policyAcrossApplicationMethod = environment.policyAcrossApplication.method;

      this.dataSubscription = this.policyAcrossApplicationService.getpolicyApplication(queryParams, policyAcrossApplicationUrl, policyAcrossApplicationMethod).subscribe(
        response => {
          try {
            if (response.length === 0) {
              this.getErrorValues();
              this.errorMessage = 'policyDetailsMessage';
            } else {
              /**
               * policy across application uses 2api
               * if the DISTRIBUTION accross asset api returns only one value , 2nd api(DISTRIBUTION across envs) will be called
               */
              this.processAppData(response);
              if (response.length === 1) {
                /**
                 * call the funtion which hits the 2nd api
                 */
                this.applicationName = response[0].AppName;
                this.getPolicyAcrossEnv();
              } else {
                this.showAsset = true;
                this.showEnv = false;
                this.showLoader = false;
                this.seekdata = false;
                this.dataComing = true;
              }
            }
          }catch (e) {
            this.errorMessage = this.errorHandling.handleJavascriptError(e);
            this.getErrorValues();
          }
      },
      error => {
        this.errorMessage = error;
        this.getErrorValues();
      });
    }
  }

  /**
   * this function gets environment details for a particular application
   * this funtion is call if the api response = 1 from getPolicyAcrossApplication function
   */
  getPolicyAcrossEnv() {
    if (this.dataSubscription) {
      this.dataSubscription.unsubscribe();
    }

    if (this.ruleID !== undefined) {
      const queryParams = {
        'ag': this.selectedAssetGroup,
        'application' : this.applicationName,
        'ruleId' : this.ruleID,
        'domain': this.selectedDomain
      };

      const policyAcrossEnvUrl = environment.policyAcrossEnv.url;
      const policyAcrossEnvMethod = environment.policyAcrossEnv.method;

      this.dataSubscription = this.policyAcrossApplicationService.getpolicyApplication(queryParams, policyAcrossEnvUrl, policyAcrossEnvMethod).subscribe(
        response => {
          try {
            this.showLoader = false;
            this.seekdata = false;
            this.dataComing = true;
            if (response.length === 0) {
              // No env data: fall back to the per-application view.
              this.showEnv = false;
              this.showAsset = true;
            } else {
              this.showEnv = true;
              this.showAsset = false;
              this.processEnvData(response);
            }
          }catch (e) {
            this.errorMessage = this.errorHandling.handleJavascriptError(e);
            this.getErrorValues();
          }
      },
      error => {
        this.errorMessage = error;
        this.getErrorValues();
      });
    }
  }

  /**
   * This function is used to convert first letter to uppercase
   */
  uppercasefirst(value) {
    if (value === null) {
      return 'Not assigned';
    }
    return value.charAt(0).toUpperCase() + value.slice(1);
  }

  /**
   * This function is called to set the error blocks
   */
  getErrorValues(message?: any): void {
    this.showEnv = false;
    this.showAsset = true;
    this.showLoader = false;
    this.dataComing = false;
    this.seekdata = true;
    if (message) {
      this.errorMessage = message;
    }
  }

  /**
   * This processAppData,processEnvData process data before passing it to asset/env component
   */

  processAppData(data) {
    this.tableHeaderData = ['', 'Total asset', 'Compliant asset', 'Non-compliant asset'];
    this.apiAppData = data;
  }

  processEnvData(data) {
    this.showEnv = true;
    this.tableHeaderData = ['', 'Total asset', 'Compliant asset', 'Non-compliant asset'];
    this.apiEnvData = data;
  }

  /**
   * This function navigates the page mentioned in the routeTo variable with a querypareams
   */
  navigatePage(event) {
    try {
      this.workflowService.addRouterSnapshotToLevel(this.router.routerState.snapshot.root);
      const apiTarget = {'TypeAsset' : 'scanned'};
      // event carries [application, clicked column]; which column was
      // clicked decides the compliant/non-compliant filter.
      const localObjKeys = Object.keys(event);
      const currentApp = event[localObjKeys[0]];
      const colName = event[localObjKeys[1]];
        const eachParams = {'ruleId': this.ruleID, 'application': currentApp};
        if (colName.toLowerCase() === 'non-compliant asset') {
          eachParams['compliant'] = false;
        }
        if (colName.toLowerCase() === 'compliant asset') {
          eachParams['compliant'] = true;
        }
        let newParams = this.utils.makeFilterObj(eachParams);
        newParams = Object.assign(newParams, apiTarget);
        newParams['mandatory'] = 'ruleId';
        this.router.navigate(['../../../', 'assets', 'asset-list'], {relativeTo: this.activatedRoute, queryParams: newParams, queryParamsHandling: 'merge'});
    } catch (error) {
      this.errorMessage = this.errorHandling.handleJavascriptError(error);
      this.logger.log('error', error);
    }
  }

  // Same navigation as navigatePage, but filters by environment instead of
  // application (used by the per-environment view).
  navigatePageEnv(event) {
    try {
      this.workflowService.addRouterSnapshotToLevel(this.router.routerState.snapshot.root);
      const apiTarget = {'TypeAsset' : 'scanned'};
      const localObjKeys = Object.keys(event);
      const currentApp = event[localObjKeys[0]];
      const colName = event[localObjKeys[1]];
        const eachParams = {'ruleId': this.ruleID, 'environment': currentApp};
        if (colName.toLowerCase() === 'non-compliant asset') {
          eachParams['compliant'] = false;
        }
        if (colName.toLowerCase() === 'compliant asset') {
          eachParams['compliant'] = true;
        }
        let newParams = this.utils.makeFilterObj(eachParams);
        newParams = Object.assign(newParams, apiTarget);
        newParams['mandatory'] = 'ruleId';
        this.router.navigate(['../../../', 'assets' , 'asset-list'], {relativeTo: this.activatedRoute, queryParams: newParams, queryParamsHandling: 'merge'});
    } catch (error) {
      this.errorMessage = this.errorHandling.handleJavascriptError(error);
      this.logger.log('error', error);
    }
  }
  /* navigatePage function ends here */

  /*
    * unsubscribing component
    */
  ngOnDestroy() {
    try {
      // dataSubscription may be undefined if no fetch ever started; the
      // try/catch below absorbs that case.
      this.subscriptionToAssetGroup.unsubscribe();
      this.subscriptionToDomain.unsubscribe();
      this.dataSubscription.unsubscribe();
      clearInterval(this.autorefreshInterval);
    } catch (error) {
      this.errorMessage = this.errorHandling.handleJavascriptError(error);
      this.getErrorValues();
    }
  }
}
the_stack
import { Parser, MoveCommand, Layer } from './gcode-parser'; import * as THREE from 'three'; import * as OrbitControls from 'three-orbitcontrols'; import { LineMaterial } from './three-line2/LineMaterial'; import { LineGeometry } from './three-line2/LineGeometry'; import { LineSegments2 } from './three-line2/LineSegments2'; import { GridHelper } from './gridHelper'; import { LineBox } from './lineBox'; type RenderLayer = { extrusion: number[]; travel: number[]; z: number }; type Vector3 = { x: number; y: number; z: number }; type Point = Vector3; type BuildVolume = Vector3; type State = { x: number; y: number; z: number; e: number }; // feedrate? type WebGLPreviewOptions = { canvas?: HTMLCanvasElement; endLayer?: number; startLayer?: number; targetId?: string; // limit?: number; topLayerColor?: number; lastSegmentColor?: number; lineWidth?: number; buildVolume?: BuildVolume; initialCameraPosition?: number[]; debug?: boolean; allowDragNDrop: boolean; }; export class WebGLPreview { parser = new Parser(); // limit?: number; targetId: string; scene: THREE.Scene; camera: THREE.PerspectiveCamera; renderer: THREE.WebGLRenderer; group: THREE.Group; backgroundColor = 0xe0e0e0; travelColor = 0x990000; extrusionColor = 0x00ff00; topLayerColor?: number; lastSegmentColor?: number; container: HTMLElement; canvas: HTMLCanvasElement; renderExtrusion = true; renderTravel = false; lineWidth?: number; startLayer?: number; endLayer?: number; singleLayerMode = false; buildVolume: BuildVolume; initialCameraPosition = [-100, 400, 450]; debug = false; allowDragNDrop = false; private disposables: { dispose() : void }[] = []; constructor(opts: WebGLPreviewOptions) { this.scene = new THREE.Scene(); this.scene.background = new THREE.Color(this.backgroundColor); this.canvas = opts.canvas; this.targetId = opts.targetId; // this.endLayer = opts.limit; this.endLayer = opts.endLayer; this.startLayer = opts.startLayer; this.topLayerColor = opts.topLayerColor; this.lastSegmentColor = 
opts.lastSegmentColor; this.lineWidth = opts.lineWidth; this.buildVolume = opts.buildVolume; this.initialCameraPosition = opts.initialCameraPosition ?? this.initialCameraPosition; this.debug = opts.debug ?? this.debug; this.allowDragNDrop = opts.allowDragNDrop ?? this.allowDragNDrop; console.info('Using THREE r' + THREE.REVISION); console.debug('opts', opts); if (this.targetId) { console.warn('`targetId` is deprecated and will removed in the future. Use `canvas` instead.') } if (!this.canvas && !this.targetId) { throw Error('Set either opts.canvas or opts.targetId'); } if (!this.canvas) { const container = document.getElementById(this.targetId); if (!container) throw new Error('Unable to find element ' + this.targetId); this.renderer = new THREE.WebGLRenderer({preserveDrawingBuffer: true}); this.canvas = this.renderer.domElement; container.appendChild( this.canvas ); } else { this.renderer = new THREE.WebGLRenderer( { canvas: this.canvas, preserveDrawingBuffer: true }); } this.camera = new THREE.PerspectiveCamera( 25, this.canvas.offsetWidth/this.canvas.offsetHeight, 10, 5000 ); this.camera.position.fromArray(this.initialCameraPosition); const fogFar = (this.camera as THREE.PerspectiveCamera).far; const fogNear = fogFar * 0.8; this.scene.fog = new THREE.Fog( this.scene.background, fogNear, fogFar); this.resize(); /* eslint-disable no-unused-vars, @typescript-eslint/no-unused-vars */ const controls = new OrbitControls(this.camera, this.renderer.domElement); /* eslint-enable no-unused-vars, @typescript-eslint/no-unused-vars */ this.animate(); if (this.allowDragNDrop) this._enableDropHandler(); } get layers() :Layer[] { return this.parser.layers; } // convert from 1-based to 0-based get maxLayerIndex() : number { return (this.endLayer ?? this.layers.length) -1; } // convert from 1-based to 0-based get minLayerIndex() : number{ return this.singleLayerMode ? this.maxLayerIndex : (this.startLayer ?? 
0) - 1; } animate() : void{ requestAnimationFrame(() => this.animate()); this.renderer.render(this.scene, this.camera); } processGCode(gcode: string | string[]) : void{ this.parser.parseGCode(gcode); this.render(); } render() : void { while (this.scene.children.length > 0) { this.scene.remove(this.scene.children[0]); } while (this.disposables.length > 0) { this.disposables.pop().dispose(); } if (this.debug) { // show webgl axes const axesHelper = new THREE.AxesHelper( Math.max(this.buildVolume.x/2, this.buildVolume.y/2) + 20 ); this.scene.add( axesHelper ); } if (this.buildVolume) { this.drawBuildVolume(); } this.group = new THREE.Group(); this.group.name = 'gcode'; const state = { x: 0, y: 0, z: 0, e: 0 }; for (let index = 0; index < this.layers.length; index++) { if (index > this.maxLayerIndex) break; const currentLayer: RenderLayer = { extrusion: [], travel: [], z: state.z }; const l = this.layers[index]; for (const cmd of l.commands) { if (cmd.gcode == 'g0' || cmd.gcode == 'g1') { const g = cmd as MoveCommand; const next: State = { x: g.params.x !== undefined ? g.params.x : state.x, y: g.params.y !== undefined ? g.params.y : state.y, z: g.params.z !== undefined ? g.params.z : state.z, e: g.params.e !== undefined ? g.params.e : state.e }; if (index >= this.minLayerIndex) { const extrude = g.params.e > 0; if ( (extrude && this.renderExtrusion) || (!extrude && this.renderTravel) ) { this.addLineSegment(currentLayer, state, next, extrude); } } // update state if (g.params.x) state.x = g.params.x; if (g.params.y) state.y = g.params.y; if (g.params.z) state.z = g.params.z; if (g.params.e) state.e = g.params.e; } } if (this.renderExtrusion) { const brightness = Math.round((80 * index) / this.layers.length); const extrusionColor = new THREE.Color( `hsl(0, 0%, ${brightness}%)` ).getHex(); if (index == this.layers.length - 1) { const layerColor = this.topLayerColor ?? extrusionColor; const lastSegmentColor = this.lastSegmentColor ?? 
layerColor; const endPoint = currentLayer.extrusion.splice(-3); this.addLine(currentLayer.extrusion, layerColor); const preendPoint = currentLayer.extrusion.splice(-3); this.addLine([...preendPoint, ...endPoint], lastSegmentColor); } else { this.addLine(currentLayer.extrusion, extrusionColor); } } if (this.renderTravel) { this.addLine(currentLayer.travel, this.travelColor); } } this.group.quaternion.setFromEuler(new THREE.Euler(-Math.PI / 2, 0, 0)); if (this.buildVolume) { this.group.position.set(-this.buildVolume.x/2, 0, this.buildVolume.y/2); } else { // FIXME: this is just a very crude approximation for centering this.group.position.set(-100, 0, 100); } this.scene.add(this.group); this.renderer.render(this.scene, this.camera); } drawBuildVolume() :void { this.scene.add( new GridHelper( this.buildVolume.x, 10, this.buildVolume.y, 10 )); const geometryBox = LineBox( this.buildVolume.x, this.buildVolume.z, this.buildVolume.y, 0x888888); geometryBox.position.setY(this.buildVolume.z/2); this.scene.add( geometryBox ); } clear() :void { this.startLayer = 1; this.endLayer = Infinity; this.singleLayerMode = false; this.parser = new Parser(); } resize() :void { const [w, h] = [this.canvas.offsetWidth, this.canvas.offsetHeight]; this.camera.aspect = w / h; this.camera.updateProjectionMatrix(); this.renderer.setPixelRatio(window.devicePixelRatio); this.renderer.setSize(w, h, false); } addLineSegment(layer: RenderLayer, p1: Point, p2: Point, extrude: boolean) : void { const line = extrude ? 
layer.extrusion : layer.travel; line.push(p1.x, p1.y, p1.z, p2.x, p2.y, p2.z); } addLine(vertices: number[], color: number) : void { if (typeof this.lineWidth === 'number' && this.lineWidth > 0) { this.addThickLine(vertices, color); return; } const geometry = new THREE.BufferGeometry(); geometry.setAttribute( 'position', new THREE.Float32BufferAttribute(vertices, 3) ); this.disposables.push(geometry); const material = new THREE.LineBasicMaterial({ color: color }); this.disposables.push(material); const lineSegments = new THREE.LineSegments(geometry, material); this.group.add(lineSegments); } addThickLine(vertices: number[], color: number) : void { if (!vertices.length) return; const geometry = new LineGeometry(); this.disposables.push(geometry) const matLine = new LineMaterial({ color: color, linewidth: this.lineWidth / (1000 * window.devicePixelRatio) }); this.disposables.push(matLine) geometry.setPositions(vertices); const line = new LineSegments2(geometry, matLine); this.group.add(line); } // experimental DnD support private _enableDropHandler() { this.canvas.addEventListener('dragover', (evt) => { evt.stopPropagation(); evt.preventDefault(); evt.dataTransfer.dropEffect = 'copy'; this.canvas.classList.add('dragging'); }); this.canvas.addEventListener('dragleave', (evt) => { evt.stopPropagation(); evt.preventDefault(); this.canvas.classList.remove('dragging'); }); this.canvas.addEventListener('drop', async (evt) => { evt.stopPropagation(); evt.preventDefault(); this.canvas.classList.remove('dragging'); const files = evt.dataTransfer.files; const file = files[0]; this.clear(); await this._readFromStream(file.stream()); this.render(); }); } async _readFromStream(stream: ReadableStream) : Promise<void> { const reader = stream.getReader(); let result; let tail = ''; let size = 0; do { result = await reader.read(); size += result.value?.length ?? 
0; const str = decode(result.value); const idxNewLine = str.lastIndexOf('\n'); const maxFullLine = str.slice(0,idxNewLine); // parse increments but don't render yet this.parser.parseGCode(tail + maxFullLine); tail = str.slice(idxNewLine); } while (!result.done); console.debug('read from stream', size); } } function decode(uint8array: Uint8Array){ return new TextDecoder("utf-8").decode(uint8array); }
the_stack
// NOTE(review): this file looks auto-generated by Pulumi's TypeScript codegen
// (uniform export list, URN-rehydration switch). Prefer regenerating it over
// hand-editing.
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "./utilities";

// Export members:
export * from "./app";
export * from "./cdn";
export * from "./certificate";
export * from "./containerRegistry";
export * from "./containerRegistryDockerCredentials";
export * from "./customImage";
export * from "./databaseCluster";
export * from "./databaseConnectionPool";
export * from "./databaseDb";
export * from "./databaseFirewall";
export * from "./databaseReplica";
export * from "./databaseUser";
export * from "./dnsRecord";
export * from "./domain";
export * from "./droplet";
export * from "./dropletSnapshot";
export * from "./firewall";
export * from "./floatingIp";
export * from "./floatingIpAssignment";
export * from "./getAccount";
export * from "./getApp";
export * from "./getCertificate";
export * from "./getContainerRegistry";
export * from "./getDatabaseCluster";
export * from "./getDatabaseReplica";
export * from "./getDomain";
export * from "./getDomains";
export * from "./getDroplet";
export * from "./getDropletSnapshot";
export * from "./getDroplets";
export * from "./getFirewall";
export * from "./getFloatingIp";
export * from "./getImage";
export * from "./getImages";
export * from "./getKubernetesCluster";
export * from "./getKubernetesVersions";
export * from "./getLoadBalancer";
export * from "./getProject";
export * from "./getProjects";
export * from "./getRecord";
export * from "./getRecords";
export * from "./getRegion";
export * from "./getRegions";
export * from "./getSizes";
export * from "./getSpacesBucket";
export * from "./getSpacesBucketObject";
export * from "./getSpacesBucketObjects";
export * from "./getSpacesBuckets";
export * from "./getSshKey";
export * from "./getSshKeys";
export * from "./getTag";
export * from "./getTags";
export * from "./getVolume";
export * from "./getVolumeSnapshot";
export * from "./getVpc";
export * from "./kubernetesCluster";
export * from "./kubernetesNodePool";
export * from "./loadBalancer";
export * from "./monitorAlert";
export * from "./project";
export * from "./projectResources";
export * from "./provider";
export * from "./spacesBucket";
export * from "./spacesBucketObject";
export * from "./sshKey";
export * from "./tag";
export * from "./volume";
export * from "./volumeAttachment";
export * from "./volumeSnapshot";
export * from "./vpc";

// Export enums:
export * from "./types/enums";

// Export sub-modules:
import * as config from "./config";
import * as types from "./types";

export {
    config,
    types,
};

// Import resources to register:
import { App } from "./app";
import { Cdn } from "./cdn";
import { Certificate } from "./certificate";
import { ContainerRegistry } from "./containerRegistry";
import { ContainerRegistryDockerCredentials } from "./containerRegistryDockerCredentials";
import { CustomImage } from "./customImage";
import { DatabaseCluster } from "./databaseCluster";
import { DatabaseConnectionPool } from "./databaseConnectionPool";
import { DatabaseDb } from "./databaseDb";
import { DatabaseFirewall } from "./databaseFirewall";
import { DatabaseReplica } from "./databaseReplica";
import { DatabaseUser } from "./databaseUser";
import { DnsRecord } from "./dnsRecord";
import { Domain } from "./domain";
import { Droplet } from "./droplet";
import { DropletSnapshot } from "./dropletSnapshot";
import { Firewall } from "./firewall";
import { FloatingIp } from "./floatingIp";
import { FloatingIpAssignment } from "./floatingIpAssignment";
import { KubernetesCluster } from "./kubernetesCluster";
import { KubernetesNodePool } from "./kubernetesNodePool";
import { LoadBalancer } from "./loadBalancer";
import { MonitorAlert } from "./monitorAlert";
import { Project } from "./project";
import { ProjectResources } from "./projectResources";
import { SpacesBucket } from "./spacesBucket";
import { SpacesBucketObject } from "./spacesBucketObject";
import { SshKey } from "./sshKey";
import { Tag } from "./tag";
import { Volume } from "./volume";
import { VolumeAttachment } from "./volumeAttachment";
import { VolumeSnapshot } from "./volumeSnapshot";
import { Vpc } from "./vpc";

// Module hook that rehydrates an existing resource from its URN (used by the
// Pulumi engine during refresh/import; inputs are intentionally undefined).
const _module = {
    version: utilities.getVersion(),
    construct: (name: string, type: string, urn: string): pulumi.Resource => {
        switch (type) {
            case "digitalocean:index/app:App":
                return new App(name, <any>undefined, { urn })
            case "digitalocean:index/cdn:Cdn":
                return new Cdn(name, <any>undefined, { urn })
            case "digitalocean:index/certificate:Certificate":
                return new Certificate(name, <any>undefined, { urn })
            case "digitalocean:index/containerRegistry:ContainerRegistry":
                return new ContainerRegistry(name, <any>undefined, { urn })
            case "digitalocean:index/containerRegistryDockerCredentials:ContainerRegistryDockerCredentials":
                return new ContainerRegistryDockerCredentials(name, <any>undefined, { urn })
            case "digitalocean:index/customImage:CustomImage":
                return new CustomImage(name, <any>undefined, { urn })
            case "digitalocean:index/databaseCluster:DatabaseCluster":
                return new DatabaseCluster(name, <any>undefined, { urn })
            case "digitalocean:index/databaseConnectionPool:DatabaseConnectionPool":
                return new DatabaseConnectionPool(name, <any>undefined, { urn })
            case "digitalocean:index/databaseDb:DatabaseDb":
                return new DatabaseDb(name, <any>undefined, { urn })
            case "digitalocean:index/databaseFirewall:DatabaseFirewall":
                return new DatabaseFirewall(name, <any>undefined, { urn })
            case "digitalocean:index/databaseReplica:DatabaseReplica":
                return new DatabaseReplica(name, <any>undefined, { urn })
            case "digitalocean:index/databaseUser:DatabaseUser":
                return new DatabaseUser(name, <any>undefined, { urn })
            case "digitalocean:index/dnsRecord:DnsRecord":
                return new DnsRecord(name, <any>undefined, { urn })
            case "digitalocean:index/domain:Domain":
                return new Domain(name, <any>undefined, { urn })
            case "digitalocean:index/droplet:Droplet":
                return new Droplet(name, <any>undefined, { urn })
            case "digitalocean:index/dropletSnapshot:DropletSnapshot":
                return new DropletSnapshot(name, <any>undefined, { urn })
            case "digitalocean:index/firewall:Firewall":
                return new Firewall(name, <any>undefined, { urn })
            case "digitalocean:index/floatingIp:FloatingIp":
                return new FloatingIp(name, <any>undefined, { urn })
            case "digitalocean:index/floatingIpAssignment:FloatingIpAssignment":
                return new FloatingIpAssignment(name, <any>undefined, { urn })
            case "digitalocean:index/kubernetesCluster:KubernetesCluster":
                return new KubernetesCluster(name, <any>undefined, { urn })
            case "digitalocean:index/kubernetesNodePool:KubernetesNodePool":
                return new KubernetesNodePool(name, <any>undefined, { urn })
            case "digitalocean:index/loadBalancer:LoadBalancer":
                return new LoadBalancer(name, <any>undefined, { urn })
            case "digitalocean:index/monitorAlert:MonitorAlert":
                return new MonitorAlert(name, <any>undefined, { urn })
            case "digitalocean:index/project:Project":
                return new Project(name, <any>undefined, { urn })
            case "digitalocean:index/projectResources:ProjectResources":
                return new ProjectResources(name, <any>undefined, { urn })
            case "digitalocean:index/spacesBucket:SpacesBucket":
                return new SpacesBucket(name, <any>undefined, { urn })
            case "digitalocean:index/spacesBucketObject:SpacesBucketObject":
                return new SpacesBucketObject(name, <any>undefined, { urn })
            case "digitalocean:index/sshKey:SshKey":
                return new SshKey(name, <any>undefined, { urn })
            case "digitalocean:index/tag:Tag":
                return new Tag(name, <any>undefined, { urn })
            case "digitalocean:index/volume:Volume":
                return new Volume(name, <any>undefined, { urn })
            case "digitalocean:index/volumeAttachment:VolumeAttachment":
                return new VolumeAttachment(name, <any>undefined, { urn })
            case "digitalocean:index/volumeSnapshot:VolumeSnapshot":
                return new VolumeSnapshot(name, <any>undefined, { urn })
            case "digitalocean:index/vpc:Vpc":
                return new Vpc(name, <any>undefined, { urn })
            default:
                throw new Error(`unknown resource type ${type}`);
        }
    },
};

// Register the module hook for every resource token in this package.
pulumi.runtime.registerResourceModule("digitalocean", "index/app", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/cdn", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/certificate", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/containerRegistry", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/containerRegistryDockerCredentials", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/customImage", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/databaseCluster", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/databaseConnectionPool", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/databaseDb", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/databaseFirewall", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/databaseReplica", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/databaseUser", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/dnsRecord", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/domain", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/droplet", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/dropletSnapshot", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/firewall", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/floatingIp", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/floatingIpAssignment", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/kubernetesCluster", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/kubernetesNodePool", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/loadBalancer", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/monitorAlert", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/project", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/projectResources", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/spacesBucket", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/spacesBucketObject", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/sshKey", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/tag", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/volume", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/volumeAttachment", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/volumeSnapshot", _module)
pulumi.runtime.registerResourceModule("digitalocean", "index/vpc", _module)

import { Provider } from "./provider";

// Package-level hook: rehydrates the explicit provider resource itself.
pulumi.runtime.registerResourcePackage("digitalocean", {
    version: utilities.getVersion(),
    constructProvider: (name: string, type: string, urn: string): pulumi.ProviderResource => {
        if (type !== "pulumi:providers:digitalocean") {
            throw new Error(`unknown provider type ${type}`);
        }
        return new Provider(name, <any>undefined, { urn });
    },
});
the_stack
import { GroupBodyCreate } from '../model/groupBodyCreate'; import { GroupBodyUpdate } from '../model/groupBodyUpdate'; import { GroupEntry } from '../model/groupEntry'; import { GroupMemberEntry } from '../model/groupMemberEntry'; import { GroupMemberPaging } from '../model/groupMemberPaging'; import { GroupMembershipBodyCreate } from '../model/groupMembershipBodyCreate'; import { GroupPaging } from '../model/groupPaging'; import { BaseApi } from './base.api'; import { throwIfNotDefined } from '../../../assert'; import { buildCollectionParam } from '../../../alfrescoApiClient'; /** * Groups service. * @module GroupsApi */ export class GroupsApi extends BaseApi { /** * Create a group * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Create a group. The group id must start with \"GROUP\\_\". If this is omitted it will be added automatically. This format is also returned when listing groups or group memberships. It should be noted that the other group-related operations also expect the id to start with \"GROUP\\_\". If one or more parentIds are specified then the group will be created and become a member of each of the specified parent groups. If no parentIds are specified then the group will be created as a root group. The group will be created in the **APP.DEFAULT** and **AUTH.ALF** zones. You must have admin rights to create a group. * * @param groupBodyCreate The group to create. * @param opts Optional parameters * @param opts.include Returns additional information about the group. The following optional fields can be requested: * parentIds * zones * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. 
If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter. * @return Promise<GroupEntry> */ createGroup(groupBodyCreate: GroupBodyCreate, opts?: any): Promise<GroupEntry> { throwIfNotDefined(groupBodyCreate, 'groupBodyCreate'); opts = opts || {}; const postBody = groupBodyCreate; const pathParams = { }; const queryParams = { 'include': buildCollectionParam(opts['include'], 'csv'), 'fields': buildCollectionParam(opts['fields'], 'csv') }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups', 'POST', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts , GroupEntry); } /** * Create a group membership * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Create a group membership (for an existing person or group) within a group **groupId**. If the added group was previously a root group then it becomes a non-root group since it now has a parent. It is an error to specify an **id** that does not exist. You must have admin rights to create a group membership. * * @param groupId The identifier of a group. * @param groupMembershipBodyCreate The group membership to add (person or sub-group). * @param opts Optional parameters * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter. 
* @return Promise<GroupMemberEntry> */ createGroupMembership(groupId: string, groupMembershipBodyCreate: GroupMembershipBodyCreate, opts?: any): Promise<GroupMemberEntry> { throwIfNotDefined(groupId, 'groupId'); throwIfNotDefined(groupMembershipBodyCreate, 'groupMembershipBodyCreate'); opts = opts || {}; const postBody = groupMembershipBodyCreate; const pathParams = { 'groupId': groupId }; const queryParams = { 'fields': buildCollectionParam(opts['fields'], 'csv') }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups/{groupId}/members', 'POST', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts , GroupMemberEntry); } /** * Delete a group * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Delete group **groupId**. The option to cascade delete applies this recursively to any hierarchy of group members. In this case, removing a group member does not delete the person or sub-group itself. If a removed sub-group no longer has any parent groups then it becomes a root group. You must have admin rights to delete a group. * * @param groupId The identifier of a group. * @param opts Optional parameters * @param opts.cascade If **true** then the delete will be applied in cascade to sub-groups. (default to false) * @return Promise<{}> */ deleteGroup(groupId: string, opts?: any): Promise<any> { throwIfNotDefined(groupId, 'groupId'); opts = opts || {}; const postBody: null = null; let cascadeDelete = opts['cascade'] ? 
opts['cascade'] : false; const pathParams = { 'groupId': groupId }; const queryParams = { 'cascade': cascadeDelete }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups/{groupId}', 'DELETE', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts ); } /** * Delete a group membership * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Delete group member **groupMemberId** (person or sub-group) from group **groupId**. Removing a group member does not delete the person or sub-group itself. If a removed sub-group no longer has any parent groups then it becomes a root group. You must have admin rights to delete a group membership. * * @param groupId The identifier of a group. * @param groupMemberId The identifier of a person or group. * @return Promise<{}> */ deleteGroupMembership(groupId: string, groupMemberId: string): Promise<any> { throwIfNotDefined(groupId, 'groupId'); throwIfNotDefined(groupMemberId, 'groupMemberId'); const postBody: null = null; const pathParams = { 'groupId': groupId, 'groupMemberId': groupMemberId }; const queryParams = { }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups/{groupId}/members/{groupMemberId}', 'DELETE', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts ); } /** * Get group details * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Get details for group **groupId**. You can use the **include** parameter to return additional information. * * @param groupId The identifier of a group. * @param opts Optional parameters * @param opts.include Returns additional information about the group. 
The following optional fields can be requested: * parentIds * zones * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter. * @return Promise<GroupEntry> */ getGroup(groupId: string, opts?: any): Promise<GroupEntry> { throwIfNotDefined(groupId, 'groupId'); opts = opts || {}; const postBody: null = null; const pathParams = { 'groupId': groupId }; const queryParams = { 'include': buildCollectionParam(opts['include'], 'csv'), 'fields': buildCollectionParam(opts['fields'], 'csv') }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups/{groupId}', 'GET', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts , GroupEntry); } /** * List memberships of a group * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Gets a list of the group memberships for the group **groupId**. You can use the **where** parameter to filter the returned groups by **memberType**. Example to filter by **memberType**, use any one of: (memberType='GROUP') (memberType='PERSON') The default sort order for the returned list is for group members to be sorted by ascending displayName. You can override the default by using the **orderBy** parameter. You can specify one of the following fields in the **orderBy** parameter: * id * displayName * * @param groupId The identifier of a group. * @param opts Optional parameters * @param opts.skipCount The number of entities that exist in the collection before those included in this list. 
If not supplied then the default value is 0. (default to 0) * @param opts.maxItems The maximum number of items to return in the list. If not supplied then the default value is 100. (default to 100) * @param opts.orderBy A string to control the order of the entities returned in a list. You can use the **orderBy** parameter to sort the list by one or more fields. Each field has a default sort order, which is normally ascending order. Read the API method implementation notes above to check if any fields used in this method have a descending default search order. To sort the entities in a specific order, you can use the **ASC** and **DESC** keywords for any field. * @param opts.where A string to restrict the returned objects by using a predicate. * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter. 
* @return Promise<GroupMemberPaging> */ listGroupMemberships(groupId: string, opts?: any): Promise<GroupMemberPaging> { throwIfNotDefined(groupId, 'groupId'); opts = opts || {}; const postBody: null = null; const pathParams = { 'groupId': groupId }; const queryParams = { 'skipCount': opts['skipCount'], 'maxItems': opts['maxItems'], 'orderBy': buildCollectionParam(opts['orderBy'], 'csv'), 'where': opts['where'], 'fields': buildCollectionParam(opts['fields'], 'csv') }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups/{groupId}/members', 'GET', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts , GroupMemberPaging); } /** * List group memberships * * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Gets a list of group membership information for person **personId**. You can use the -me- string in place of <personId> to specify the currently authenticated user. You can use the **include** parameter to return additional information. You can use the **where** parameter to filter the returned groups by **isRoot**. For example, the following **where** clause will return just the root groups: (isRoot=true) The **where** parameter can also be used to filter by ***zone***. This may be combined with isRoot to narrow a result set even further. For example, the following where clause will only return groups belonging to the MY.ZONE zone. where=(zones in ('MY.ZONE')) This may be combined with the isRoot filter, as shown below: where=(isRoot=false AND zones in ('MY.ZONE')) ***Note:*** restrictions include * AND is the only supported operator when combining isRoot and zones filters * Only one zone is supported by the filter * The quoted zone name must be placed in parenthesis — a 400 error will result if these are omitted. 
The default sort order for the returned list is for groups to be sorted by ascending displayName. You can override the default by using the **orderBy** parameter. You can specify one or more of the following fields in the **orderBy** parameter: * id * displayName
 *
 * @param personId The identifier of a person.
 * @param opts Optional parameters
 * @param opts.skipCount The number of entities that exist in the collection before those included in this list. If not supplied then the default value is 0. (default to 0)
 * @param opts.maxItems The maximum number of items to return in the list. If not supplied then the default value is 100. (default to 100)
 * @param opts.orderBy A string to control the order of the entities returned in a list. You can use the **orderBy** parameter to sort the list by one or more fields. Each field has a default sort order, which is normally ascending order. Read the API method implementation notes above to check if any fields used in this method have a descending default search order. To sort the entities in a specific order, you can use the **ASC** and **DESC** keywords for any field.
 * @param opts.include Returns additional information about the group. The following optional fields can be requested: * parentIds * zones
 * @param opts.where A string to restrict the returned objects by using a predicate.
 * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter.
 * @return Promise<GroupPaging>
 */
listGroupMembershipsForPerson(personId: string, opts?: any): Promise<GroupPaging> {
    // The person id is a mandatory path parameter - fail fast when it is absent.
    throwIfNotDefined(personId, 'personId');

    const options = opts || {};

    // GET request: no request body is sent.
    const postBody: null = null;

    const pathParams = {
        'personId': personId
    };

    // List-valued parameters are serialized as comma-separated values.
    const queryParams = {
        'skipCount': options['skipCount'],
        'maxItems': options['maxItems'],
        'orderBy': buildCollectionParam(options['orderBy'], 'csv'),
        'include': buildCollectionParam(options['include'], 'csv'),
        'where': options['where'],
        'fields': buildCollectionParam(options['fields'], 'csv')
    };

    const headerParams = {};
    const formParams = {};

    const contentTypes = ['application/json'];
    const accepts = ['application/json'];

    return this.apiClient.callApi(
        '/people/{personId}/groups', 'GET',
        pathParams, queryParams, headerParams, formParams, postBody,
        contentTypes, accepts, GroupPaging);
}

/**
 * List groups
 *
 * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Gets a list of groups. You can use the **include** parameter to return additional information. You can use the **where** parameter to filter the returned groups by **isRoot**. For example, the following **where** clause will return just the root groups: (isRoot=true) The **where** parameter can also be used to filter by ***zone***. This may be combined with isRoot to narrow a result set even further. For example, the following where clause will only return groups belonging to the MY.ZONE zone. where=(zones in ('MY.ZONE')) This may be combined with the isRoot filter, as shown below: where=(isRoot=false AND zones in ('MY.ZONE')) ***Note:*** restrictions include * AND is the only supported operator when combining isRoot and zones filters * Only one zone is supported by the filter * The quoted zone name must be placed in parenthesis — a 400 error will result if these are omitted. The default sort order for the returned list is for groups to be sorted by ascending displayName. You can override the default by using the **orderBy** parameter.
You can specify one of the following fields in the **orderBy** parameter: * id * displayName
 *
 * @param opts Optional parameters
 * @param opts.skipCount The number of entities that exist in the collection before those included in this list. If not supplied then the default value is 0. (default to 0)
 * @param opts.maxItems The maximum number of items to return in the list. If not supplied then the default value is 100. (default to 100)
 * @param opts.orderBy A string to control the order of the entities returned in a list. You can use the **orderBy** parameter to sort the list by one or more fields. Each field has a default sort order, which is normally ascending order. Read the API method implementation notes above to check if any fields used in this method have a descending default search order. To sort the entities in a specific order, you can use the **ASC** and **DESC** keywords for any field.
 * @param opts.include Returns additional information about the group. The following optional fields can be requested: * parentIds * zones
 * @param opts.where A string to restrict the returned objects by using a predicate.
 * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter.
 * @return Promise<GroupPaging>
 */
listGroups(opts?: any): Promise<GroupPaging> {
    const options = opts || {};

    // GET request: no request body is sent.
    const postBody: null = null;

    // No path parameters for the collection endpoint.
    const pathParams = {};

    // List-valued parameters are serialized as comma-separated values.
    const queryParams = {
        'skipCount': options['skipCount'],
        'maxItems': options['maxItems'],
        'orderBy': buildCollectionParam(options['orderBy'], 'csv'),
        'include': buildCollectionParam(options['include'], 'csv'),
        'where': options['where'],
        'fields': buildCollectionParam(options['fields'], 'csv')
    };

    const headerParams = {};
    const formParams = {};

    const contentTypes = ['application/json'];
    const accepts = ['application/json'];

    return this.apiClient.callApi(
        '/groups', 'GET',
        pathParams, queryParams, headerParams, formParams, postBody,
        contentTypes, accepts, GroupPaging);
}

/**
 * Update group details
 *
 * **Note:** this endpoint is available in Alfresco 5.2.1 and newer versions. Update details (displayName) for group **groupId**. You must have admin rights to update a group.
 *
 * @param groupId The identifier of a group.
 * @param groupBodyUpdate The group information to update.
 * @param opts Optional parameters
 * @param opts.include Returns additional information about the group. The following optional fields can be requested: * parentIds * zones
 * @param opts.fields A list of field names. You can use this parameter to restrict the fields returned within a response if, for example, you want to save on overall bandwidth. The list applies to a returned individual entity or entries within a collection. If the API method also supports the **include** parameter, then the fields specified in the **include** parameter are returned in addition to those specified in the **fields** parameter.
* @return Promise<GroupEntry> */ updateGroup(groupId: string, groupBodyUpdate: GroupBodyUpdate, opts?: any): Promise<GroupEntry> { throwIfNotDefined(groupId, 'groupId'); throwIfNotDefined(groupBodyUpdate, 'groupBodyUpdate'); opts = opts || {}; const postBody = groupBodyUpdate; const pathParams = { 'groupId': groupId }; const queryParams = { 'include': buildCollectionParam(opts['include'], 'csv'), 'fields': buildCollectionParam(opts['fields'], 'csv') }; const headerParams = { }; const formParams = { }; const contentTypes = ['application/json']; const accepts = ['application/json']; return this.apiClient.callApi( '/groups/{groupId}', 'PUT', pathParams, queryParams, headerParams, formParams, postBody, contentTypes, accepts , GroupEntry); } }
the_stack
import Tooltip from "./MyTooltip";
import * as React from "react";
import CompactLocationURL from "./CompactLocationURL";
import { Alert, CommitData, Weakness } from "../shared/shared-types";
import {
  AlertCountComparison,
  countLocations,
  icons,
  mkCommitUrl,
  mkRuleListSentence,
  mkToolIDString,
  RelevantQueryResultCountChangeConclusion,
  SetSourceFocus
} from "./util";

// Small presentational glyph components shared by the table-cell icons below.
// Where a glyph ignores the CSS `color` property it is drawn transparent with
// a colored text shadow instead (see Cross and Box).
namespace Icons {
  // Builds an inline style carrying the given color; empty style when absent.
  function mkStyle(color?: string) {
    if (!color) {
      return {};
    }
    return { color: color };
  }
  type Props = { color?: string; style?: React.CSSProperties };
  export class Cross extends React.Component<Props> {
    render() {
      return (
        <span
          style={{
            color: "transparent",
            textShadow: "0px 0px " + this.props.color,
            ...(this.props.style || {})
          }}
        >
          {"\u274C"}
        </span>
      );
    }
  }
  // A blank cell that keeps the layout aligned with its siblings.
  export class Empty extends React.Component<Props> {
    render() {
      return (
        <span
          style={{ ...mkStyle(this.props.color), ...(this.props.style || {}) }}
        >
          &nbsp;
        </span>
      );
    }
  }
  export class Dash extends React.Component<Props> {
    render() {
      return (
        <span
          style={{ ...mkStyle(this.props.color), ...(this.props.style || {}) }}
        >
          {"--"}
        </span>
      );
    }
  }
  export class Checkmark extends React.Component<Props> {
    render() {
      return (
        <span
          style={{ ...mkStyle(this.props.color), ...(this.props.style || {}) }}
        >
          {"\u2714"}
        </span>
      );
    }
  }
  export class Box extends React.Component<Props & { color: string }> {
    render() {
      // this is actually a large white square, made transparent, and with a shadow of the right color.
      return (
        <span
          style={{
            color: "transparent",
            textShadow: "0px 0px " + this.props.color,
            ...(this.props.style || {})
          }}
        >
          {"\u2B1C"}
        </span>
      );
    }
  }
  export class QuestionMark extends React.Component<Props> {
    render() {
      return (
        <span
          style={{ ...mkStyle(this.props.color), ...(this.props.style || {}) }}
        >
          {"?"}
        </span>
      );
    }
  }
}

// Sentence/icon shown when a tool has not been run on a commit at all.
export namespace Missing {
  type Props = { toolID: string; commit: CommitData };
  export class Sentence extends React.Component<Props> {
    render() {
      return (
        <span>
          {mkToolIDString(this.props.toolID)} has not been run on{" "}
          {mkCommitUrl(this.props.commit)}
        </span>
      );
    }
  }
  export class Icon extends React.Component<Props> {
    render() {
      return (
        <Tooltip
          title={
            <Sentence toolID={this.props.toolID} commit={this.props.commit} />
          }
        >
          <span>
            <Icons.QuestionMark color="lightgray" />
          </span>
        </Tooltip>
      );
    }
  }
}

// Presentation for whether a tool flagged one specific weakness location.
export namespace SpecificDetection {
  type IconProps = {
    toolID: string;
    alerts: Alert[];
    hasRun: boolean;
    commit?: CommitData;
  };
  function getColor(_props: IconProps): string {
    return "lightgray";
  }
  export class Sentence extends React.Component<IconProps> {
    render() {
      let props = this.props;
      if (!props.hasRun) {
        // NOTE(review): props.commit may be undefined here although
        // Missing.Sentence declares commit as required — confirm callers
        // always supply a commit when hasRun is false.
        return <Missing.Sentence toolID={props.toolID} commit={props.commit} />;
      }
      let detected = props.alerts.length > 0;
      // Deduplicate by ruleID so each rule is mentioned at most once.
      let relevantRulesAndUrlsMap: Map<
        string,
        { ruleID: string; url: string }
      > = new Map();
      props.alerts.forEach(a =>
        relevantRulesAndUrlsMap.set(a.ruleID, { ruleID: a.ruleID, url: a.url })
      );
      let relevantRulesAndUrls: { ruleID: string; url: string }[] = Array.from(
        relevantRulesAndUrlsMap.values()
      );
      return detected ? (
        <span>
          {mkToolIDString(props.toolID)} flagged this with rule
          {relevantRulesAndUrls.length > 1 && "s"}:{" "}
          {mkRuleListSentence(relevantRulesAndUrls)}.
        </span>
      ) : (
        <span>{mkToolIDString(props.toolID)} did not detect this.</span>
      );
    }
  }
  export class TableCellIcon extends React.Component<IconProps> {
    constructor(props: IconProps) {
      super(props);
    }
    render() {
      let props = this.props;
      let icon;
      if (!props.hasRun) {
        icon = <Icons.QuestionMark color={getColor(props)} />;
      } else if (props.alerts.length > 0) {
        icon = <Icons.Checkmark color={getColor(props)} />;
      } else {
        icon = <Icons.Empty color={getColor(props)} />;
      }
      return (
        <Tooltip title={<Sentence {...this.props} />}>
          <span>{icon}</span>
        </Tooltip>
      );
    }
  }
}

// Whether a tool detected a CVE at all (aggregated over all its weaknesses).
export namespace Detection {
  type IconProps = {
    toolID: string;
    hasRun: boolean;
    unpatchedCommit: CommitData;
    detected: boolean;
    hits: Alert[];
    weaknesses: Weakness[];
  };
  function getColor(_props: IconProps): string {
    return "lightgray";
  }
  export class Sentence extends React.Component<IconProps> {
    render() {
      // TODO unify this with RecognitionConclusion (currently 50% duplication)
      let props = this.props;
      let detectionSummary: React.ReactNode, hitsSummary: React.ReactNode;
      if (props.detected) {
        detectionSummary = (
          <span className="tool-summary weakness-detected">
            detected this CVE
          </span>
        );
        hitsSummary = (
          <span>
            (flagging {props.weaknesses.length} weaknesses with{" "}
            {props.hits.length} alerts on {mkCommitUrl(props.unpatchedCommit)})
          </span>
        );
      } else {
        detectionSummary = (
          <span className="tool-summary weakness-not-detected">
            did not detect this CVE
          </span>
        );
      }
      return (
        <>
          {mkToolIDString(props.toolID)} {detectionSummary} {hitsSummary}.
        </>
      );
    }
  }
  export class TableCellIcon extends React.Component<IconProps> {
    constructor(props: IconProps) {
      super(props);
    }
    render() {
      let props = this.props;
      let icon;
      if (!props.hasRun) {
        icon = <Icons.QuestionMark color={getColor(props)} />;
      } else if (props.detected) {
        icon = <Icons.Checkmark color={getColor(props)} />;
      } else {
        icon = <Icons.Empty color={getColor(props)} />;
      }
      return (
        <Tooltip title={<Sentence {...this.props} />}>
          <span>{icon}</span>
        </Tooltip>
      );
    }
  }
  // "<tool> flagged this with rule(s): ..." sentence fragment.
  export function FlaggedLineSentence(props: {
    toolID: string;
    rules: { ruleID: string; url: string }[];
  }) {
    return (
      <>
        {mkToolIDString(props.toolID)} flagged this with rule
        {props.rules.length > 1 && "s"}: {mkRuleListSentence(props.rules)}
      </>
    );
  }
  // One line of the detailed breakdown: pass/fail icon, source location and,
  // when detected, the rules that flagged this weakness.
  export function DetectionConclusionLine(props: {
    toolID: string;
    weakness: Weakness;
    alerts: Alert[];
    setSourceFocus: SetSourceFocus;
  }) {
    let detected = props.alerts.length > 0;
    // Deduplicate by ruleID so each rule is mentioned at most once.
    let relevantRulesAndUrlsMap: Map<
      string,
      { ruleID: string; url: string }
    > = new Map();
    props.alerts.forEach(a =>
      relevantRulesAndUrlsMap.set(a.ruleID, { ruleID: a.ruleID, url: a.url })
    );
    let relevantRulesAndUrls: { ruleID: string; url: string }[] = Array.from(
      relevantRulesAndUrlsMap.values()
    );
    let rules = detected && (
      <FlaggedLineSentence toolID={props.toolID} rules={relevantRulesAndUrls} />
    );
    return (
      <>
        {detected ? icons.checkmark : icons.cross}
        <CompactLocationURL
          commit={props.weakness.commit}
          file={props.weakness.file}
          line={props.weakness.line}
          setSourceFocus={props.setSourceFocus}
        /> : {props.weakness.explanation}.
        {detected && rules}
      </>
    );
  }
  type Props = {
    toolID: string;
    weaknesses: Weakness[];
    alerts: Alert[];
    setSourceFocus: SetSourceFocus;
  };
  // Headline sentence: did the tool detect this CVE, and at how many of the
  // known weakness locations?
  export function ShortExplanation(props: {
    toolID: string;
    hits: Alert[];
    weaknesses: Weakness[];
    unpatchedCommit: CommitData;
  }) {
    // TODO unify this with RecognitionConclusion (currently 50% duplication)
    let hitCount = countLocations(props.hits);
    let detected = hitCount > 0;
    let detectionSummary: React.ReactNode, hitsSummary: React.ReactNode;
    if (detected) {
      detectionSummary = (
        <span className="tool-summary weakness-detected">
          detected this CVE
        </span>
      );
      hitsSummary = (
        <span>
          (flagging {hitCount} out of {countLocations(props.weaknesses)}{" "}
          weaknesses in {mkCommitUrl(props.unpatchedCommit)})
        </span>
      );
    } else {
      detectionSummary = (
        <span className="tool-summary weakness-not-detected">
          did not detect this CVE
        </span>
      );
    }
    return (
      <>
        {mkToolIDString(props.toolID)} {detectionSummary} {hitsSummary}.
      </>
    );
  }
  // Per-weakness breakdown; alerts are matched to a weakness by file + line.
  export class DetailedExplanation extends React.Component<Props> {
    render() {
      let props = this.props;
      return (
        <ul>
          {props.weaknesses.map((w, i) => (
            <li style={{ listStyleType: "none" }} key={i}>
              <DetectionConclusionLine
                toolID={props.toolID}
                weakness={w}
                alerts={props.alerts.filter(
                  a => a.file === w.file && a.line === w.line
                )}
                setSourceFocus={props.setSourceFocus}
              />
            </li>
          ))}
        </ul>
      );
    }
  }
}

// Whether a tool both detected the CVE and recognized its patch (i.e. the
// relevant alert count dropped on the patched commit).
export namespace Recognition {
  type ExplanationProps = {
    toolID: string;
    patchCommit: CommitData;
    unpatchedCommit: CommitData;
    alertCountComparison: AlertCountComparison;
    commitRuns: string[];
  };
  type IconProps = {
    conclusion: RelevantQueryResultCountChangeConclusion;
    toolID: string;
    patchCommit: CommitData;
    unpatchedCommit: CommitData;
    commitRuns: string[];
    hits: Alert[];
    weaknesses: Weakness[];
  };
  // Human-readable label for each conclusion value.
  export function getConclusionDescription(
    conclusion: RelevantQueryResultCountChangeConclusion
  ): string {
    switch (conclusion) {
      case RelevantQueryResultCountChangeConclusion.Negative:
        return "Vulnerability detected, patch recognized";
      case RelevantQueryResultCountChangeConclusion.NeutralOrPositive:
        return "Vulnerability detected, patch not recognized";
      case RelevantQueryResultCountChangeConclusion.Uncomputable:
        return "Vulnerability not detected";
      case RelevantQueryResultCountChangeConclusion.Missing:
        return "Analysis run missing";
    }
  }
  // Signal color for each conclusion value.
  export function getColor(
    conclusion: RelevantQueryResultCountChangeConclusion
  ): string {
    switch (conclusion) {
      case RelevantQueryResultCountChangeConclusion.Negative:
        return "#21b321"; // green
      case RelevantQueryResultCountChangeConclusion.NeutralOrPositive:
        return "rgb(255 175 29)"; // orange
      case RelevantQueryResultCountChangeConclusion.Uncomputable:
        return "rgb(224 9 9)"; // red
      case RelevantQueryResultCountChangeConclusion.Missing:
        return "lightgray";
    }
  }
  function getDullColor(_props: IconProps): string {
    return "lightgray";
  }
  // Full tooltip sentence covering both detection and patch recognition.
  export class Sentence extends React.Component<IconProps> {
    render() {
      let props = this.props;
      let detectionSummary: React.ReactNode,
        hitsSummary: React.ReactNode,
        separator: string,
        recognizedSummary: React.ReactNode;
      switch (props.conclusion) {
        case RelevantQueryResultCountChangeConclusion.Negative:
        case RelevantQueryResultCountChangeConclusion.NeutralOrPositive: {
          detectionSummary = (
            <span className="tool-summary weakness-detected">
              detected this CVE
            </span>
          );
          hitsSummary = (
            <span>
              (flagging {countLocations(props.hits)} out of{" "}
              {countLocations(props.weaknesses)} weakness locations in{" "}
              {mkCommitUrl(props.unpatchedCommit)})
            </span>
          );
          separator = ", ";
          if (
            props.conclusion ===
            RelevantQueryResultCountChangeConclusion.Negative
          ) {
            recognizedSummary = (
              <>
                and{" "}
                <span className="tool-summary patch-recognized">
                  correctly recognized the patch
                </span>{" "}
                in {mkCommitUrl(props.patchCommit)}
              </>
            );
          } else {
            recognizedSummary = (
              <>
                but{" "}
                <span className="tool-summary patch-not-recognized">
                  did not recognize the patch
                </span>{" "}
                in {mkCommitUrl(props.patchCommit)} (resulting in a false
                positive)
              </>
            );
          }
          break;
        }
        case RelevantQueryResultCountChangeConclusion.Uncomputable: {
          detectionSummary = (
            <span className="tool-summary weakness-not-detected">
              did not detect this CVE
            </span>
          );
          separator = ". ";
          recognizedSummary = (
            <>
              It should have produced at least one relevant alert in{" "}
              {mkCommitUrl(props.unpatchedCommit)}
            </>
          );
          break;
        }
        case RelevantQueryResultCountChangeConclusion.Missing: {
          if (!props.commitRuns.includes(props.unpatchedCommit.commitID)) {
            return (
              <Missing.Sentence
                toolID={props.toolID}
                commit={props.unpatchedCommit}
              />
            );
          }
          if (!props.commitRuns.includes(props.patchCommit.commitID)) {
            return (
              <Missing.Sentence
                toolID={props.toolID}
                commit={props.patchCommit}
              />
            );
          }
          // NOTE(review): when both runs are present we fall out of the
          // switch with separator/recognizedSummary unassigned — presumably
          // that state cannot occur for a Missing conclusion; confirm with
          // the producer of `conclusion`.
        }
      }
      return (
        <>
          {mkToolIDString(props.toolID)} {detectionSummary} {hitsSummary} {separator} {recognizedSummary}.
        </>
      );
    }
  }
  export class TableCellIcon extends React.Component<IconProps> {
    constructor(props: IconProps) {
      super(props);
    }
    render() {
      let props = this.props;
      let icon;
      switch (props.conclusion) {
        case RelevantQueryResultCountChangeConclusion.Negative:
          icon = <Icons.Checkmark color={getDullColor(props)} />;
          break;
        case RelevantQueryResultCountChangeConclusion.NeutralOrPositive:
        case RelevantQueryResultCountChangeConclusion.Uncomputable:
          icon = <Icons.Empty color={getDullColor(props)} />;
          break;
        case RelevantQueryResultCountChangeConclusion.Missing:
          icon = <Icons.QuestionMark color={getDullColor(props)} />;
          break;
      }
      return (
        <Tooltip title={<Sentence {...this.props} />}>
          <span style={{ fontSize: "200%" }}>{icon}</span>
        </Tooltip>
      );
    }
  }
  // Like TableCellIcon but using the conclusion-specific signal colors.
  export class DetailedTableCellIcon extends React.Component<IconProps> {
    constructor(props: IconProps) {
      super(props);
    }
    render() {
      let props = this.props;
      let icon;
      switch (props.conclusion) {
        case RelevantQueryResultCountChangeConclusion.Negative:
          // FIXME The thing just wouldn't turn (green). So I made it transparent and put a (green) shadow behind it.
          icon = (
            <Icons.Checkmark
              style={{
                textShadow: `0px 0px ${getColor(props.conclusion)}`,
                color: "transparent"
              }}
            />
          );
          break;
        case RelevantQueryResultCountChangeConclusion.NeutralOrPositive:
          icon = <Icons.Box color={getColor(props.conclusion)} />;
          break;
        case RelevantQueryResultCountChangeConclusion.Uncomputable:
          icon = <Icons.Cross color={getColor(props.conclusion)} />;
          break;
        case RelevantQueryResultCountChangeConclusion.Missing:
          icon = <Icons.QuestionMark color={getColor(props.conclusion)} />;
          break;
      }
      return (
        <Tooltip title={<Sentence {...this.props} />}>
          <span>{icon}</span>
        </Tooltip>
      );
    }
  }
  // Detailed patch-recognition explanation including the per-rule alert
  // count deltas between the unpatched and patched commits.
  export class DetailedExplanation extends React.Component<ExplanationProps> {
    render() {
      if (
        !this.props.commitRuns.includes(this.props.unpatchedCommit.commitID)
      ) {
        return (
          <Missing.Icon
            toolID={this.props.toolID}
            commit={this.props.unpatchedCommit}
          />
        );
      }
      if (!this.props.commitRuns.includes(this.props.patchCommit.commitID)) {
        return (
          <Missing.Icon
            toolID={this.props.toolID}
            commit={this.props.patchCommit}
          />
        );
      }
      let detected =
          this.props.alertCountComparison.conclusion ===
            RelevantQueryResultCountChangeConclusion.Negative ||
          this.props.alertCountComparison.conclusion ===
            RelevantQueryResultCountChangeConclusion.NeutralOrPositive,
        recognized =
          this.props.alertCountComparison.conclusion ===
          RelevantQueryResultCountChangeConclusion.Negative;
      let relevantRules = this.props.alertCountComparison.counts;
      // Rules whose alert count dropped on the patched commit.
      let negativeDeltaRules = this.props.alertCountComparison.counts
        .map(c => ({
          ruleID: c.ruleID,
          delta: c.after - c.before,
          ruleURL: c.ruleURL
        }))
        .filter(e => e.delta < 0);
      let negativeDeltaSum = negativeDeltaRules.reduce(
        (delta, e) => (delta += e.delta),
        0
      );
      return (
        <div>
          {detected ? (
            recognized ? (
              <>
                On patch {mkCommitUrl(this.props.patchCommit)},{" "}
                {mkToolIDString(this.props.toolID)} reported {-negativeDeltaSum}{" "}
                fewer alerts for {mkRuleListSentence(negativeDeltaRules)}.
              </>
            ) : (
              <>
                On patch {mkCommitUrl(this.props.patchCommit)},{" "}
                {mkToolIDString(this.props.toolID)} reported 0 fewer alerts for{" "}
                {mkRuleListSentence(relevantRules)}.
              </>
            )
          ) : (
            <>
              {mkToolIDString(this.props.toolID)} did not detect this CVE. It is
              therefore impossible to draw any conclusions about{" "}
              {mkToolIDString(this.props.toolID)} recognising the patch in{" "}
              {mkCommitUrl(this.props.patchCommit)}.
            </>
          )}
        </div>
      );
    }
  }
}
the_stack
import { ServiceClientOptions, RequestOptions, ServiceCallback, HttpOperationResponse, ServiceClientCredentials } from 'ms-rest'; import * as models from "./models"; import * as operations from "./operations"; declare class IntuneResourceManagementClient { /** * Initializes a new instance of the IntuneResourceManagementClient class. * @constructor * * @class * @param {credentials} credentials - Credentials needed for the client to connect to Azure. * * @param {string} [baseUri] - The base URI of the service. * * @param {object} [options] - The parameter options * * @param {Array} [options.filters] - Filters to be added to the request pipeline * * @param {object} [options.requestOptions] - Options for the underlying request object * {@link https://github.com/request/request#requestoptions-callback Options doc} * * @param {boolean} [options.noRetryPolicy] - If set to true, turn off default retry policy * * @param {string} [options.apiVersion] - Service Api Version. * * @param {string} [options.acceptLanguage] - Gets or sets the preferred language for the response. * * @param {number} [options.longRunningOperationRetryTimeout] - Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30. * * @param {boolean} [options.generateClientRequestId] - When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. * */ constructor(credentials: ServiceClientCredentials, baseUri?: string, options?: ServiceClientOptions); credentials: ServiceClientCredentials; apiVersion: string; acceptLanguage: string; longRunningOperationRetryTimeout: number; generateClientRequestId: boolean; // Operation groups ios: operations.Ios; android: operations.Android; /** * Returns location for user tenant. * * @param {object} [options] Optional Parameters. 
* * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<LocationCollection>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ getLocationsWithHttpOperationResponse(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.LocationCollection>>; /** * Returns location for user tenant. * * @param {object} [options] Optional Parameters. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {LocationCollection} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {LocationCollection} [result] - The deserialized result object if an error did not occur. * See {@link LocationCollection} for more information. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ getLocations(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.LocationCollection>; getLocations(callback: ServiceCallback<models.LocationCollection>): void; getLocations(options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.LocationCollection>): void; /** * Returns location for given tenant. * * @param {object} [options] Optional Parameters. 
* * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<Location>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ getLocationByHostNameWithHttpOperationResponse(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Location>>; /** * Returns location for given tenant. * * @param {object} [options] Optional Parameters. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {Location} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {Location} [result] - The deserialized result object if an error did not occur. * See {@link Location} for more information. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ getLocationByHostName(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Location>; getLocationByHostName(callback: ServiceCallback<models.Location>): void; getLocationByHostName(options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Location>): void; /** * Returns Intune Manageable apps. * * @param {string} hostName Location hostName for the tenant * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. 
* * @param {number} [options.top] * * @param {string} [options.select] select specific fields in entity. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<ApplicationCollection>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ getAppsWithHttpOperationResponse(hostName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ApplicationCollection>>; /** * Returns Intune Manageable apps. * * @param {string} hostName Location hostName for the tenant * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. * * @param {number} [options.top] * * @param {string} [options.select] select specific fields in entity. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {ApplicationCollection} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {ApplicationCollection} [result] - The deserialized result object if an error did not occur. * See {@link ApplicationCollection} for more information. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ getApps(hostName: string, options?: { filter? : string, top? : number, select? 
: string, customHeaders? : { [headerName: string]: string; } }): Promise<models.ApplicationCollection>; getApps(hostName: string, callback: ServiceCallback<models.ApplicationCollection>): void; getApps(hostName: string, options: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.ApplicationCollection>): void; /** * Get devices for a user. * * @param {string} hostName Location hostName for the tenant * * @param {string} userName user unique Name * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. * * @param {number} [options.top] * * @param {string} [options.select] select specific fields in entity. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<DeviceCollection>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ getMAMUserDevicesWithHttpOperationResponse(hostName: string, userName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.DeviceCollection>>; /** * Get devices for a user. * * @param {string} hostName Location hostName for the tenant * * @param {string} userName user unique Name * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. * * @param {number} [options.top] * * @param {string} [options.select] select specific fields in entity. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. 
 * Resolves with the user's DeviceCollection, or invokes the supplied
 * callback as callback(err, result, request, response).
 */
getMAMUserDevices(hostName: string, userName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<models.DeviceCollection>;
getMAMUserDevices(hostName: string, userName: string, callback: ServiceCallback<models.DeviceCollection>): void;
getMAMUserDevices(hostName: string, userName: string, options: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.DeviceCollection>): void;

/**
 * Get a unique device for a user, returning the raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param userName Unique user name.
 * @param deviceName Device name.
 * @param options `select` picks specific fields of the entity; `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the deserialized Device; rejects with Error or ServiceError.
 */
getMAMUserDeviceByDeviceNameWithHttpOperationResponse(hostName: string, userName: string, deviceName: string, options?: { select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Device>>;

/**
 * Get a unique device for a user.
 * Promise form resolves with the deserialized Device; callback forms invoke
 * callback(err, result, request, response) instead.
 */
getMAMUserDeviceByDeviceName(hostName: string, userName: string, deviceName: string, options?: { select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<models.Device>;
getMAMUserDeviceByDeviceName(hostName: string, userName: string, deviceName: string, callback: ServiceCallback<models.Device>): void;
getMAMUserDeviceByDeviceName(hostName: string, userName: string, deviceName: string, options: { select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Device>): void;

/**
 * Wipe a device for a user, returning the raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param userName Unique user name.
 * @param deviceName Device name.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the WipeDeviceOperationResult; rejects with Error or ServiceError.
 */
wipeMAMUserDeviceWithHttpOperationResponse(hostName: string, userName: string, deviceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WipeDeviceOperationResult>>;

/**
 * Wipe a device for a user.
 * Promise form resolves with the WipeDeviceOperationResult; callback forms
 * invoke callback(err, result, request, response) instead.
 */
wipeMAMUserDevice(hostName: string, userName: string, deviceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WipeDeviceOperationResult>;
wipeMAMUserDevice(hostName: string, userName: string, deviceName: string, callback: ServiceCallback<models.WipeDeviceOperationResult>): void;
wipeMAMUserDevice(hostName: string, userName: string, deviceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WipeDeviceOperationResult>): void;

/**
 * Returns operationResults, returning the raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param options `filter` restricts the operation; `top` caps the result count; `select` picks specific fields; `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the OperationResultCollection; rejects with Error or ServiceError.
 */
getOperationResultsWithHttpOperationResponse(hostName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.OperationResultCollection>>;

/**
 * Returns operationResults.
 * Promise form resolves with the OperationResultCollection; callback forms
 * invoke callback(err, result, request, response) instead.
 */
getOperationResults(hostName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<models.OperationResultCollection>;
getOperationResults(hostName: string, callback: ServiceCallback<models.OperationResultCollection>): void;
getOperationResults(hostName: string, options: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.OperationResultCollection>): void;

/**
 * Returns Intune Tenant level statuses, returning the raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the StatusesDefault; rejects with Error or ServiceError.
 */
getMAMStatusesWithHttpOperationResponse(hostName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StatusesDefault>>;

/**
 * Returns Intune Tenant level statuses.
 * Promise form resolves with the StatusesDefault; callback forms invoke
 * callback(err, result, request, response) instead.
 */
getMAMStatuses(hostName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.StatusesDefault>;
getMAMStatuses(hostName: string, callback: ServiceCallback<models.StatusesDefault>): void;
getMAMStatuses(hostName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StatusesDefault>): void;

/**
 * Returns Intune flagged user collection, returning the raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param options `filter`, `top` and `select` shape the query; `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the FlaggedUserCollection.
 * Rejects with Error or ServiceError on failure.
 */
getMAMFlaggedUsersWithHttpOperationResponse(hostName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.FlaggedUserCollection>>;

/**
 * Returns Intune flagged user collection.
 * Promise form resolves with the FlaggedUserCollection; callback forms
 * invoke callback(err, result, request, response) instead.
 *
 * @param hostName Location hostName for the tenant.
 * @param options `filter`, `top` and `select` shape the query; `customHeaders` are added to the request.
 */
getMAMFlaggedUsers(hostName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<models.FlaggedUserCollection>;
getMAMFlaggedUsers(hostName: string, callback: ServiceCallback<models.FlaggedUserCollection>): void;
getMAMFlaggedUsers(hostName: string, options: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.FlaggedUserCollection>): void;

/**
 * Returns Intune flagged user details, returning the raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param userName Flagged userName.
 * @param options `select` picks specific fields of the entity; `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the FlaggedUser; rejects with Error or ServiceError.
 */
getMAMFlaggedUserByNameWithHttpOperationResponse(hostName: string, userName: string, options?: { select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.FlaggedUser>>;

/**
 * Returns Intune flagged user details.
 * Promise form resolves with the FlaggedUser; callback forms invoke
 * callback(err, result, request, response) instead.
 */
getMAMFlaggedUserByName(hostName: string, userName: string, options?: { select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<models.FlaggedUser>;
getMAMFlaggedUserByName(hostName: string, userName: string, callback: ServiceCallback<models.FlaggedUser>): void;
getMAMFlaggedUserByName(hostName: string, userName: string, options: { select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.FlaggedUser>): void;

/**
 * Returns Intune flagged enrolled app collection for the user, returning the
 * raw HTTP response.
 *
 * @param hostName Location hostName for the tenant.
 * @param userName User name for the tenant.
 * @param options `filter`, `top` and `select` shape the query; `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the FlaggedEnrolledAppCollection; rejects with Error or ServiceError.
 */
getMAMUserFlaggedEnrolledAppsWithHttpOperationResponse(hostName: string, userName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.FlaggedEnrolledAppCollection>>;

/**
 * Returns Intune flagged enrolled app collection for the user.
 * Promise form resolves with the FlaggedEnrolledAppCollection; callback
 * forms invoke callback(err, result, request, response) instead.
 */
getMAMUserFlaggedEnrolledApps(hostName: string, userName: string, options?: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }): Promise<models.FlaggedEnrolledAppCollection>;
getMAMUserFlaggedEnrolledApps(hostName: string, userName: string, callback: ServiceCallback<models.FlaggedEnrolledAppCollection>): void;
getMAMUserFlaggedEnrolledApps(hostName: string, userName: string, options: { filter? : string, top? : number, select? : string, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.FlaggedEnrolledAppCollection>): void;

/**
 * Returns location for user tenant (continuation of a paged List operation),
 * returning the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the LocationCollection; rejects with Error or ServiceError.
 */
getLocationsNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.LocationCollection>>;

/**
 * Returns location for user tenant (continuation of a paged List operation).
 * Promise form resolves with the LocationCollection; callback forms invoke
 * callback(err, result, request, response) instead.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 */
getLocationsNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.LocationCollection>;
getLocationsNext(nextPageLink: string, callback: ServiceCallback<models.LocationCollection>): void;
getLocationsNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.LocationCollection>): void;

/**
 * Returns Intune Manageable apps (continuation of a paged List operation),
 * returning the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the ApplicationCollection; rejects with Error or ServiceError.
 */
getAppsNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ApplicationCollection>>;

/**
 * Returns Intune Manageable apps (continuation of a paged List operation).
 * Promise form resolves with the ApplicationCollection; callback forms
 * invoke callback(err, result, request, response) instead.
 */
getAppsNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.ApplicationCollection>;
getAppsNext(nextPageLink: string, callback: ServiceCallback<models.ApplicationCollection>): void;
getAppsNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.ApplicationCollection>): void;

/**
 * Get devices for a user (continuation of a paged List operation), returning
 * the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the DeviceCollection; rejects with Error or ServiceError.
 */
getMAMUserDevicesNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.DeviceCollection>>;

/**
 * Get devices for a user (continuation of a paged List operation).
 * Promise form resolves with the DeviceCollection; callback forms invoke
 * callback(err, result, request, response) instead.
 */
getMAMUserDevicesNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.DeviceCollection>;
getMAMUserDevicesNext(nextPageLink: string, callback: ServiceCallback<models.DeviceCollection>): void;
getMAMUserDevicesNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.DeviceCollection>): void;

/**
 * Returns operationResults (continuation of a paged List operation),
 * returning the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the OperationResultCollection; rejects with Error or ServiceError.
 */
getOperationResultsNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.OperationResultCollection>>;

/**
 * Returns operationResults (continuation of a paged List operation).
 * Promise form resolves with the OperationResultCollection; callback forms
 * invoke callback(err, result, request, response) instead.
 */
getOperationResultsNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.OperationResultCollection>;
getOperationResultsNext(nextPageLink: string, callback: ServiceCallback<models.OperationResultCollection>): void;
getOperationResultsNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.OperationResultCollection>): void;

/**
 * Returns Intune Tenant level statuses (continuation of a paged List
 * operation), returning the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the StatusesDefault; rejects with Error or ServiceError.
 */
getMAMStatusesNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StatusesDefault>>;

/**
 * Returns Intune Tenant level statuses (continuation of a paged List
 * operation).
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * Promise form resolves with the StatusesDefault; callback forms invoke
 * callback(err, result, request, response) instead.
 */
getMAMStatusesNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.StatusesDefault>;
getMAMStatusesNext(nextPageLink: string, callback: ServiceCallback<models.StatusesDefault>): void;
getMAMStatusesNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StatusesDefault>): void;

/**
 * Returns Intune flagged user collection (continuation of a paged List
 * operation), returning the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the FlaggedUserCollection; rejects with Error or ServiceError.
 */
getMAMFlaggedUsersNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.FlaggedUserCollection>>;

/**
 * Returns Intune flagged user collection (continuation of a paged List
 * operation).
 * Promise form resolves with the FlaggedUserCollection; callback forms
 * invoke callback(err, result, request, response) instead.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 */
getMAMFlaggedUsersNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.FlaggedUserCollection>;
getMAMFlaggedUsersNext(nextPageLink: string, callback: ServiceCallback<models.FlaggedUserCollection>): void;
getMAMFlaggedUsersNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.FlaggedUserCollection>): void;

/**
 * Returns Intune flagged enrolled app collection for the user (continuation
 * of a paged List operation), returning the raw HTTP response.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 * @param options `customHeaders` are added to the request.
 * @returns Promise resolving to the HttpOperationResponse wrapping the FlaggedEnrolledAppCollection; rejects with Error or ServiceError.
 */
getMAMUserFlaggedEnrolledAppsNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.FlaggedEnrolledAppCollection>>;

/**
 * Returns Intune flagged enrolled app collection for the user (continuation
 * of a paged List operation).
 * Promise form resolves with the FlaggedEnrolledAppCollection; callback
 * forms invoke callback(err, result, request, response) instead.
 *
 * @param nextPageLink The NextLink from the previous successful List call.
 */
getMAMUserFlaggedEnrolledAppsNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.FlaggedEnrolledAppCollection>;
getMAMUserFlaggedEnrolledAppsNext(nextPageLink: string, callback: ServiceCallback<models.FlaggedEnrolledAppCollection>): void;
getMAMUserFlaggedEnrolledAppsNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.FlaggedEnrolledAppCollection>): void;
}
export = IntuneResourceManagementClient;
// the_stack — extraction artifact: boundary between two unrelated concatenated declaration files (Azure Intune client typings above, ExtendScript global typings below)
interface Object {
    /**
     * Points to the constructor function that created this object.
     * Note that this property is treated as an XML element in the XML class.
     */
    constructor: Function;
    /**
     * Retrieves and returns the Reflection object associated with this method or a property.
     * Note that this property is treated as an XML element in the XML class.
     */
    reflect: Reflection;
    /**
     * Creates and returns a string representation of this object.
     * This function serializes the object, so that it can, for example, be passed between engines.
     * Pass the returned string back to eval() to recreate the object. Works only with built-in classes.
     */
    toSource(): string;
    /**
     * Many objects (such as Date) override this method in favor of their own implementation.
     * If an object has no string value and no user-defined toString() method, the default method
     * returns [object type], where "type" is the object type or the name of the constructor
     * function that created the object.
     */
    toString(): string;
    /**
     * Removes the watch function of a property.
     * @param name The name of the property to unwatch.
     */
    unwatch(name: string): void;
    /**
     * If the object has no primitive value, returns the object itself.
     * Note that you rarely need to call this method yourself. The JavaScript interpreter
     * automatically invokes it when encountering an object where a primitive value is expected.
     */
    valueOf(): Object;
    /**
     * Adds a watch function to a property, which is called when the value changes.
     * This function can accept, modify, or reject a new value that the user, application,
     * or a script has attempted to place in a property.
     * @param name The name of the property to watch.
     * @param func The function to be called when the value of this property changes.
     * This function must take three arguments, and return as its result the value to be
     * stored in the property. The arguments are:
     * name: the name of the property that changes.
     * oldValue: the old property value.
     * newValue: the new property value that was specified.
     */
    watch(name: string, func: Function): void;
}

interface ObjectConstructor {
    /**
     * Note that this property is treated as an XML element in the XML class.
     */
    readonly prototype: Object;
    /**
     * Reports whether an object is still valid.
     * @param what The object to check.
     */
    isValid(what: Object): boolean;
}

/**
 * The $ object provides a number of debugging facilities and informational methods.
 */
declare const $: Helper;

interface Helper extends Object {
    /**
     * The ExtendScript build information.
     */
    readonly build: string;
    /**
     * The ExtendScript build date.
     */
    readonly buildDate: Date;
    /**
     * The character used as the decimal point character in formatted numeric output.
     */
    decimalPoint: string;
    /**
     * The name of the current ExtendScript engine, if set.
     */
    readonly engineName: string;
    /**
     * The most recent run-time error information.
     * Assigning error text to this property generates a run-time error; however, the preferred
     * way to generate a run-time error is to throw an {Error} object.
     */
    error: Error;
    /**
     * The file name of the current script.
     */
    readonly fileName: string;
    /**
     * Gets or sets low-level debug output flags.
     * A logical AND of bit flag values:
     * - 0x0002 (2): Displays each line with its line number as it is executed.
     * - 0x0040 (64): Enables excessive garbage collection. Usually, garbage collection starts
     *   when the number of objects has increased by a certain amount since the last garbage
     *   collection. This flag causes ExtendScript to garbage collect after almost every
     *   statement. This impairs performance severely, but is useful when you suspect that an
     *   object gets released too soon.
     * - 0x0080 (128): Displays all calls with their arguments and the return value.
     * - 0x0100 (256): Enables extended error handling (@see strict).
     * - 0x0200 (512): Enables the localization feature of the toString method. Equivalent to
     *   the localize property.
     */
    flags: number;
    /**
     * A reference to the global object, which contains the JavaScript global namespace.
     */
    readonly global: Object;
    /**
     * A high-resolution timer, measuring the time in microseconds. The timer starts when
     * ExtendScript is initialized during the application startup sequence. Every read access
     * resets the timer to Zero.
     */
    readonly hiresTimer: number;
    /**
     * The path for include files for the current script.
     */
    readonly includePath: string;
    /**
     * The current debugging level, which enables or disables the JavaScript debugger.
     * One of:
     * - 0 (no debugging),
     * - 1 (break on runtime errors),
     * - or 2 (full debug mode).
     */
    level: number;
    /**
     * The current line number of the currently executing script.
     */
    readonly line: number;
    /**
     * Gets or sets the current locale.
     * The string contains five characters in the form LL_RR, where LL is an ISO 639 language
     * specifier, and RR is an ISO 3166 region specifier.
     * Initially, this is the value that the application or the platform returns for the current
     * user. You can set it to temporarily change the locale for testing. To return to the
     * application or platform setting, set to undefined, null, or the empty string.
     */
    locale: string;
    /**
     * Set to true to enable the extended localization features of the built-in toString() method.
     */
    localize: boolean;
    /**
     * The ExtendScript memory cache size, in bytes.
     */
    memCache: number;
    /**
     * The current operating system version information.
     * @example
     * // Result: Windows XP 5.1 Service Pack 2
     * $.os
     */
    readonly os: string;
    /**
     * An {ScreenObject} array containing information about the display screens attached to your computer.
     */
    readonly screens: Array<ScreenObject>;
    /**
     * The current stack trace.
     */
    readonly stack: string;
    /**
     * Sets or clears strict mode for object modification.
     * When true, any attempt to write to a read-only property causes a runtime error. Some
     * objects do not permit the creation of new properties when true.
     */
    strict: boolean;
    /**
     * The version number of the ExtendScript engine.
     * Formatted as a three-part number and description; for example: "3.92.95 (debug)".
     */
    readonly version: string;
    /**
     * Shows an About box for the ExtendScript component, and returns
     * the text for the box.
     */
    about(): string;
    /**
     * Breaks execution at the current position.
     * @param condition A string containing a JavaScript statement to be used as a condition.
     * If the statement evaluates to true or nonzero when this point is reached, execution stops.
     */
    bp(condition?: any): void;
    /**
     * Invokes the platform-specific color selection dialog, and returns the selected color.
     * @param color The color to be preselected in the dialog, as 0xRRGGBB, or -1 for the
     * platform default.
     */
    colorPicker(color: number): number;
    /**
     * Loads and evaluates a file.
     * @param file The file to load.
     * @param timeout An optional timeout in milliseconds.
     */
    evalFile(file: File, timeout?: number): any;
    /**
     * Initiates garbage collection in the ExtendScript engine.
     */
    gc(): void;
    /**
     * Retrieves the value of an environment variable.
     * @param name The name of the variable.
     */
    getEnv(name: string): string;
    /**
     * Sets the value of an environment variable.
     * @param name The name of the variable.
     * @param value The value of the variable.
     */
    setEnv(name: string, value: string): void;
    /**
     * Suspends the calling thread for a number of milliseconds.
     * During a sleep period, checks at 100 millisecond intervals to see whether the sleep
     * should be terminated. This can happen if there is a break request, or if the script
     * timeout has expired.
     * @param msecs Number of milliseconds to sleep.
     */
    sleep(msecs: number): void;
    /**
     * Converts this object to a string.
     */
    toString(): string;
    /**
     * Prints text to the Console.
     * @param text The text to print. All arguments are concatenated.
     */
    write(text: any): void;
    /**
     * Prints text to the Console, and adds a newline character.
     * @param text - The text to print. All arguments are concatenated.
     */
    writeln(text: any): void;
}

/**
 * Provides information about a class.
 */
declare class Reflection extends Object {
    /**
     * The long description text.
     */
    readonly description: string;
    /**
     * The short description text.
     */
    readonly help: string;
    /**
     * An array of method descriptions.
     */
    readonly methods: ReflectionInfo[];
    /**
     * The class name.
     */
    readonly name: string;
    /**
     * An array of property descriptions.
     */
    readonly properties: ReflectionInfo[];
    /**
     * Sample code, if present.
     */
    readonly sampleCode: string;
    /**
     * A file containing sample code. May be null.
     */
    readonly sampleFile: File;
    /**
     * An array of class method descriptions.
     */
    readonly staticMethods: ReflectionInfo[];
    /**
     * An array of class property descriptions.
     */
    readonly staticProperties: ReflectionInfo[];
    /**
     * Finds an element description by name.
     * @param name The name of the element to find.
     */
    find(name: string): ReflectionInfo;
    /**
     * Returns this class information as XML in OMV format.
     */
    toXML(): XML;
}

// Consider this for the ReflectionInfo.type parameter's type annotation:
// type ReflectionInfoTypeOption = "unknown" | "readonly" | "readwrite" | "createonly" | "method" | "parameter";

/**
 * Provides information about a method, a property or a method parameters.
 */
declare class ReflectionInfo extends Object {
    /**
     * The description of method or function arguments.
     */
    readonly arguments: ReflectionInfo[];
    /**
     * The data type.
     */
    readonly dataType: string;
    /**
     * The default value.
     */
    readonly defaultValue: any;
    /**
     * The long description text.
     */
    readonly description: string;
    /**
     * The short description text.
     */
    readonly help: string;
    /**
     * Contains true if the class describes a collection class.
     */
    readonly isCollection: boolean;
    /**
     * The maximum value.
     */
    readonly max: number;
    /**
     * The minimum value.
     */
    readonly min: number;
    /**
     * The element name.
     */
    readonly name: string;
    /**
     * The class object that this element belongs to.
     */
    readonly parent: Reflection;
    /**
     * Sample code, if present.
     */
    readonly sampleCode: string;
    /**
     * A file containing sample code. May be null.
     */
    readonly sampleFile: File;
    /**
     * The element type.
     * One of unknown, readonly, readwrite, createonly, method or parameter.
     */
    readonly type: string;
}

/** Describes one display screen attached to the computer. */
declare class ScreenObject extends Object {
    /**
     * Pixel position of the left side of the screen in global coordinates.
     */
    readonly left: number;
    /**
     * Pixel position of the top side of the screen in global coordinates.
     */
    readonly top: number;
    /**
     * Pixel position of the right side of the screen in global coordinates.
     */
    readonly right: number;
    /**
     * Pixel position of the bottom side of the screen in global coordinates.
     */
    readonly bottom: number;
    /**
     * True if the screen describes the primary display.
     */
    readonly primary: boolean;
}

/**
 * Represents a file in the local file system in a platform-independent manner.
 */
declare class File extends Object {
    // TODO: Fill this in.
}

/**
 * Wraps XML into an object.
 */
declare class XML extends Object {
    // TODO: Fill this in.
}
the_stack
/// <reference types="node" />

import { IncomingMessage, ServerResponse } from "http";

/** Utility type: a single value of T, or a readonly list of them. */
export type BaseOrArray<T> = T | readonly T[];

/**
 * Route handler callback.
 *
 * In synchronous mode, the handler is called with all matched tokens as
 * arguments. If a handler returns `false`, the router will skip all remaining
 * handlers.
 *
 * In asynchronous mode, the last parameter is always a continuation function
 * which accepts a single argument. If the continuation is called with a truthy
 * value or `false`, the router will skip all remaining handlers.
 */
export type Handler<ThisType> = (this: ThisType, ...args: any[]) => any;

/** A single handler, or a list of handlers, registered for one route. */
export type RouteEntry<ThisType> = BaseOrArray<Handler<ThisType>>;

/** Nested table mapping route fragments to handlers or deeper sub-tables. */
export interface RoutingTable<ThisType> {
    [route: string]: RouteEntry<ThisType> | RoutingTable<ThisType>;
}

/**
 * Router options object
 */
export interface RoutingOptions<ThisType> {
    /**
     * Controls route recursion.
     * Default is `false` client-side, and `"backward"` server-side.
     */
    recurse?: "forward" | "backward" | false | undefined;
    /**
     * If set to `false`, then trailing slashes (or other delimiters) are
     * allowed in routes. Default is `true`.
     */
    strict?: boolean | undefined;
    /**
     * Controls async routing. Default is `false`.
     */
    async?: boolean | undefined;
    /**
     * Character separator between route fragments. Default is `/`.
     */
    delimiter?: string | undefined;
    /**
     * Function to call if no route is found on a call to `router.dispatch()`.
     */
    notfound?: Handler<{ method: string; path: string }> | undefined;
    /**
     * A function (or list of functions) to call on every call to
     * `router.dispatch()` when a route is found.
     */
    on?: RouteEntry<ThisType> | undefined;
    /**
     * A function (or list of functions) to call before every call to
     * `router.dispatch()` when a route is found.
     */
    before?: RouteEntry<ThisType> | undefined;

    // Client-only options

    /**
     * (_Client Only_)
     * An object to which string-based routes will be bound. This can be
     * especially useful for late-binding to route functions (such as async
     * client-side requires).
     */
    resource?: { [handlerName: string]: Handler<ThisType>; } | undefined;
    /**
     * (_Client Only_)
     * A function (or list of functions) to call when a given route is no longer
     * the active route.
     */
    after?: RouteEntry<ThisType> | undefined;
    /**
     * (_Client Only_)
     * If set to `true` and client supports `pushState()`, then uses HTML5
     * History API instead of hash fragments.
     */
    html5history?: boolean | undefined;
    /**
     * (_Client Only_)
     * If `html5history` is enabled, the route handler by default is executed
     * upon `Router.init()` since with real URIs the router can not know if it
     * should call a route handler or not. Setting this to `false` disables the
     * route handler initial execution.
     */
    run_handler_in_init?: boolean | undefined;
    /**
     * (_Client Only_)
     * If `html5history` is enabled, the `window.location` hash by default is
     * converted to a route upon `Router.init()` since with canonical URIs the
     * router can not know if it should convert the hash to a route or not.
     * Setting this to `false` disables the hash conversion on router
     * initialisation.
     */
    convert_hash_in_init?: boolean | undefined;
}

// `director.Router` and `director.http.Router` have several methods with
// incompatible signatures. To reuse method definitions as much as possible,
// methods with compatible signatures have been moved into a separate abstract
// class named `AbstractRouterBase`.

/**
 * Abstract class that provides methods shared by all Router subclasses.
 */
export abstract class AbstractRouterBase<ThisType> {
    constructor(routes?: RoutingTable<ThisType>);
    /**
     * Configures this instance with the specified `options`.
     * @param options Options to configure this instance with
     */
    configure(options?: RoutingOptions<ThisType>): this;
    /**
     * Mounts the sanitized `routes` onto the root context for this instance.
     * @param routes Routes to mount onto this instance
     * @param path Path within the Routing Table to insert the routes into
     */
    mount(routes: RoutingTable<ThisType>, path?: BaseOrArray<string>): void;
    /**
     * Sets up a `params` function which replaces any instance of `token`,
     * inside of a given `str` with `matcher`. This is very useful if you have a
     * common regular expression throughout your code base which you wish to be
     * more DRY.
     * @param token Token which to replace (e.g. `:dog`, 'cat')
     * @param matcher Target to replace the token with
     */
    param(token: string, matcher: string | RegExp | ((substring: string, ...args: any[]) => string)): this;
    /**
     * Evaluates the `routesFn` in the given `path` scope.
     * @param path Nested scope in which to path
     * @param routesFn Function to evaluate in the new scope
     */
    path(path: string | RegExp, routesFn: (this: this, self: this) => void): void;
    /**
     * Adds a new `route` to this instance for the given `path`, using `"on"` as
     * the method.
     *
     * This is an alias for `.on(path, route)`.
     * @param path Path to set this route on
     * @param route Handler for the specified path
     */
    route(path: BaseOrArray<string | RegExp>, route: RouteEntry<ThisType>): void;
    /**
     * Adds a new `route` to this instance for the given `method` and `path`.
     *
     * This is an alias for `.on(method, path, route)`.
     * @param method Method to use
     * @param path Path to set this route on
     * @param route Handler for the specified method and path
     */
    route(method: BaseOrArray<string>, path: BaseOrArray<string | RegExp>, route: RouteEntry<ThisType>): void;
}

/**
 * Platform-independent Router class
 */
export class Router extends AbstractRouterBase<Router> {
    /**
     * Finds a set of functions on the traversal towards `method` and `path` in
     * the core routing table, then invokes them based on settings in this
     * instance.
     * @param method Method to dispatch
     * @param path Path to dispatch
     * @param callback (Optional) Continuation to respond to for async scenarios
     * @return Whether a route was matched for the given `method` and `path`
     */
    dispatch(method: string, path: string, callback?: (err?: any) => void): boolean;
    /**
     * Adds a new `route` to this instance for the given `path`, using `"on"` as
     * the method.
     * @param path Path to set this route on
     * @param route Handler for the specified path
     */
    on(path: BaseOrArray<string | RegExp>, route: RouteEntry<Router>): void;
    /**
     * Adds a new `route` to this instance for the given `method` and `path`.
     * @param method Method to use
     * @param path Path to set this route on
     * @param route Handler for the specified method and path
     */
    on(method: BaseOrArray<string>, path: BaseOrArray<string | RegExp>, route: RouteEntry<Router>): void;
}

export namespace http {
    /**
     * Type of the `this` object for HTTP route handlers.
     */
    interface HttpRouterContext {
        req: IncomingMessage;
        res: ServerResponse;
    }

    interface HttpRoutingOptions extends RoutingOptions<HttpRouterContext> {
        /**
         * If set to `true`, the router will perform routing immediately instead
         * of waiting for the `end` event, buffering and parsing the entire
         * request body.
         * This can be used when you want to manually buffer the request.
         */
        stream?: boolean | undefined;
    }

    interface HttpRouteHandlerOptions {
        /**
         * Patterns to test against the `content-type` of the incoming request.
         */
        accept?: BaseOrArray<string | RegExp> | undefined;
        /**
         * If set to `true`, the router will not buffer the request for this
         * route.
         * This can be used when you want to manually buffer the request.
         */
        stream?: boolean | undefined;
    }

    /**
     * Server-side HTTP Router class for Node.js
     */
    class Router extends AbstractRouterBase<HttpRouterContext> implements HttpRouterGeneratedMethodsCheck {
        /**
         * Ask the router to attach objects or manipulate `this` object on which
         * the function passed to the http router will get applied.
         * @param func Function to execute on `this` before applying to router
         * function
         */
        // Note: This type definition does not yet support attaching arbitrary
        // properties to the Router object. Please use type assertions to work
        // around this limitation.
        attach(func: (this: this) => void): void;
        configure(options?: HttpRoutingOptions): this;
        /**
         * Finds a set of functions on the traversal towards `method` and `path`
         * in the core routing table then invokes them based on settings in this
         * instance.
         *
         * Note: `HEAD` requests are dispatched to `get` routes.
         * @param req Incoming request to dispatch
         * @param res Outgoing response to dispatch
         * @param callback (Optional) Continuation to respond to for async
         * scenarios
         * @return Whether a route was matched for the given request
         */
        dispatch(
            req: IncomingMessage,
            res: ServerResponse,
            callback?: (err?: any, req?: IncomingMessage, res?: ServerResponse) => void,
        ): boolean;
        /**
         * Adds a new `route` to this instance for the given `method` and `path`.
         * @param method Method to use
         * @param path Path to set this route on
         * @param route Handler for the specified method and path
         */
        on(method: BaseOrArray<string>, path: string | RegExp, route: RouteEntry<HttpRouterContext>): void;
        /**
         * Adds a new `route` to this instance for the given `method` and `path`.
         * @param method Method to use
         * @param path Path to set this route on
         * @param options Additional options for this route
         * @param route Handler for the specified method and path
         */
        on(
            method: BaseOrArray<string>,
            path: string | RegExp,
            options: HttpRouteHandlerOptions | undefined | null,
            route: RouteEntry<HttpRouterContext>,
        ): void;

        // Generated helper methods
        // These are dynamically added via `Router.extend()`
        readonly options: typeof _GeneratedHelperMethod;
        readonly get: typeof _GeneratedHelperMethod;
        readonly post: typeof _GeneratedHelperMethod;
        readonly put: typeof _GeneratedHelperMethod;
        readonly delete: typeof _GeneratedHelperMethod;
        readonly trace: typeof _GeneratedHelperMethod;
        readonly connect: typeof _GeneratedHelperMethod;
        readonly propfind: typeof _GeneratedHelperMethod;
        readonly proppatch: typeof _GeneratedHelperMethod;
        readonly mkcol: typeof _GeneratedHelperMethod;
        readonly copy: typeof _GeneratedHelperMethod;
        readonly move: typeof _GeneratedHelperMethod;
        readonly lock: typeof _GeneratedHelperMethod;
        readonly unlock: typeof _GeneratedHelperMethod;
        readonly ["version-control"]: typeof _GeneratedHelperMethod;
        readonly report: typeof _GeneratedHelperMethod;
        readonly checkout: typeof _GeneratedHelperMethod;
        readonly checkin: typeof _GeneratedHelperMethod;
        readonly uncheckout: typeof _GeneratedHelperMethod;
        readonly mkworkspace: typeof _GeneratedHelperMethod;
        readonly update: typeof _GeneratedHelperMethod;
        readonly label: typeof _GeneratedHelperMethod;
        readonly merge: typeof _GeneratedHelperMethod;
        readonly ["baseline-control"]: typeof _GeneratedHelperMethod;
        readonly mkactivity: typeof _GeneratedHelperMethod;
        readonly orderpatch: typeof _GeneratedHelperMethod;
        readonly acl: typeof _GeneratedHelperMethod;
        readonly search: typeof _GeneratedHelperMethod;
        readonly patch: typeof _GeneratedHelperMethod;
        readonly before: typeof _GeneratedHelperMethod;
        readonly after: typeof _GeneratedHelperMethod;
    }

    /**
     * Adds a new `route` to this instance for a specific method and an empty
     * `path`.
     * @param route Handler for the method and path
     */
    function _GeneratedHelperMethod(this: Router, route: RouteEntry<HttpRouterContext>): void;
    /**
     * Adds a new `route` to this instance for a specific method and `path`.
     * @param path Path to set this route on
     * @param route Handler for the method and path
     */
    function _GeneratedHelperMethod(this: Router, path: string | RegExp, route: RouteEntry<HttpRouterContext>): void;
    /**
     * Adds a new `route` to this instance for a specific method and `path`.
     * @param path Path to set this route on
     * @param options Additional options for this route
     * @param route Handler for the method and path
     */
    function _GeneratedHelperMethod(
        this: Router,
        path: string | RegExp,
        options: HttpRouteHandlerOptions | undefined | null,
        route: RouteEntry<HttpRouterContext>,
    ): void;

    /**
     * Names of helper methods dynamically exposed by the HTTP Router.
     */
    type HttpRouterGeneratedMethodNames = typeof methods[number] | "before" | "after";

    /**
     * Helper interface that checks if any dynamically generated helper methods
     * were accidentally omitted from the definition of `director.http.Router`.
     */
    type HttpRouterGeneratedMethodsCheck = {
        [key in HttpRouterGeneratedMethodNames]: typeof _GeneratedHelperMethod;
    };

    // HTTP methods
    // Defined in /lib/director/http/methods.js
    const methods: readonly [
        // Hypertext Transfer Protocol -- HTTP/1.1
        // http://www.ietf.org/rfc/rfc2616.txt
        "options",
        "get",
        "post",
        "put",
        "delete",
        "trace",
        "connect",
        // HTTP Extensions for Distributed Authoring -- WEBDAV
        // http://www.ietf.org/rfc/rfc2518.txt
        "propfind",
        "proppatch",
        "mkcol",
        "copy",
        "move",
        "lock",
        "unlock",
        // Versioning Extensions to WebDAV
        // http://www.ietf.org/rfc/rfc3253.txt
        "version-control",
        "report",
        "checkout",
        "checkin",
        "uncheckout",
        "mkworkspace",
        "update",
        "label",
        "merge",
        "baseline-control",
        "mkactivity",
        // Ordered Collections Protocol (WebDAV)
        // http://www.ietf.org/rfc/rfc3648.txt
        "orderpatch",
        // Web Distributed Authoring and Versioning (WebDAV) Access Control Protocol
        // http://www.ietf.org/rfc/rfc3744.txt
        "acl",
        // Web Distributed Authoring and Versioning (WebDAV) SEARCH
        // http://www.ietf.org/rfc/rfc5323.txt
        "search",
        // PATCH Method for HTTP
        // http://www.ietf.org/rfc/rfc5789.txt
        "patch",
    ];

    // HTTP Error classes
    // Defined in /lib/director/http/responses.js

    class NotModified extends Error {
        status: 304;
        options: {
            removeContentHeaders: true;
        };
    }

    /** Exception class for erroneous requests */
    class BadRequest extends Error {
        status: 400;
        headers: {};
        body: {
            /** Error message (same as `this.message`) */
            error: string;
        };
    }

    class NotAuthorized extends Error {
        status: 401;
        headers: {};
        body: {
            /** Error message (same as `this.message`) */
            error: string;
        };
    }

    class Forbidden extends Error {
        status: 403;
        headers: {};
        body: {
            /** Error message (same as `this.message`) */
            error: string;
        };
    }

    class NotFound extends Error {
        status: 404;
        headers: {};
        body: {
            /** Error message (same as `this.message`) */
            error: string;
        };
    }

    class MethodNotAllowed extends Error {
        status: 405;
        headers: {
            /** Allowed HTTP methods */
            allow: string;
        };
        body: {
            /** Error message (same as `this.message`) */
            error: string;
        };
        message: "method not allowed.";
        /**
         * @param allowed Allowed HTTP methods
         */
        constructor(allowed: string);
    }

    class NotAcceptable extends Error {
        status: 406;
        headers: {};
        body: {
            /** Error message (same as `this.message`) */
            error: string;
            only: "application/json";
        };
    }

    class NotImplemented extends Error {
        status: 501;
        headers: {};
        body: {
            /** Error message (same as `this.message`) */
            error: string;
        };
    }
}

export namespace cli {
    /**
     * Type of the `this` object for CLI route handlers.
     */
    interface CliRouterContext<TTY> {
        tty: TTY;
        cmd: string;
    }

    /**
     * Server-side CLI Router class for Node.js
     */
    class Router<TTY = any> extends AbstractRouterBase<CliRouterContext<TTY>> {
        /**
         * Finds a set of functions on the traversal towards `method` and `path` in
         * the core routing table, then invokes them based on settings in this
         * instance.
         * @param method Method to dispatch
         * @param path Path to dispatch
         * @param tty (Optional) TTY object made available to handlers as `this.tty`
         * @param callback (Optional) Continuation to respond to for async scenarios
         * @return Whether a route was matched for the given `method` and `path`
         */
        dispatch(method: string, path: string, tty?: TTY, callback?: (err?: any) => void): boolean;
        /**
         * Adds a new `route` to this instance for the given `path`, using `"on"`
         * as the method.
         * @param path Path to set this route on
         * @param route Handler for the specified path
         */
        on(path: BaseOrArray<string | RegExp>, route: RouteEntry<CliRouterContext<TTY>>): void;
        /**
         * Adds a new `route` to this instance for the specified `method` and `path`.
         * @param method Method to use
         * @param path Path to set this route on
         * @param route Handler for the specified method and path
         */
        on(
            method: BaseOrArray<string>,
            path: BaseOrArray<string | RegExp>,
            route: RouteEntry<CliRouterContext<TTY>>,
        ): void;
    }
}
the_stack
import { BehaviorSubject, firstValueFrom, Observable, Subject, Subscription } from 'rxjs';
import { filter } from 'rxjs/operators';
import type {
    DeepReadonlyObject,
    ReplicationOptions,
    ReplicationPullHandlerResult,
    ReplicationPullOptions,
    ReplicationPushOptions,
    RxCollection,
    RxDocumentData,
    RxReplicationState,
    WithDeleted
} from '../../types';
import {
    getChangesSinceLastPushSequence,
    getLastPullDocument,
    setLastPullDocument,
    setLastPushSequence
} from './replication-checkpoint';
import {
    flatClone,
    getHeightOfRevision,
    lastOfArray,
    promiseWait
} from '../../util';
import { overwritable } from '../../overwritable';
import {
    createRevisionForPulledDocument,
    wasRevisionfromPullReplication
} from './revision-flag';
import { _handleToStorageInstance } from '../../rx-collection-helper';
import { newRxError } from '../../rx-error';
import { getDocumentDataOfRxChangeEvent } from '../../rx-change-event';

/**
 * State of one replication of one RxCollection against a remote endpoint.
 * Push and pull cycles are serialized through `runningPromise` so that
 * only one `_run()` is ever in flight.
 */
export class RxReplicationStateBase<RxDocType> {
    public readonly subs: Subscription[] = [];
    // Backed by subjects.initialReplicationComplete via the
    // Object.defineProperty() loop in the constructor, hence the `as any`.
    public initialReplicationComplete$: Observable<any> = undefined as any;

    private subjects = {
        received: new Subject(), // all documents that are received from the endpoint
        send: new Subject(), // all documents that are send to the endpoint
        error: new Subject(), // all errors that are received from the endpoint, emits new Error() objects
        canceled: new BehaviorSubject(false), // true when the replication was canceled
        active: new BehaviorSubject(false), // true when something is running, false when not
        initialReplicationComplete: new BehaviorSubject(false) // true the initial replication-cycle is over
    };
    // Chain of queued replication cycles; each run() appends to it.
    private runningPromise: Promise<void> = Promise.resolve();
    // Number of cycles currently queued on runningPromise.
    private runQueueCount: number = 0;
    /**
     * Counts how many times the run() method
     * has been called. Used in tests.
     */
    public runCount: number = 0;

    constructor(
        public readonly replicationIdentifier: string,
        public readonly collection: RxCollection<RxDocType>,
        public readonly pull?: ReplicationPullOptions<RxDocType>,
        public readonly push?: ReplicationPushOptions<RxDocType>,
        public readonly live?: boolean,
        public liveInterval?: number,
        public retryTime?: number,
    ) {
        // stop the replication when the collection gets destroyed
        this.collection.onDestroy.then(() => {
            this.cancel();
        });

        // create getters for the observables
        // (exposes received$, send$, error$, canceled$, active$, initialReplicationComplete$)
        Object.keys(this.subjects).forEach(key => {
            Object.defineProperty(this, key + '$', {
                get: function () {
                    return this.subjects[key].asObservable();
                }
            });
        });
    }

    isStopped(): boolean {
        if (this.collection.destroyed) {
            return true;
        }
        // a non-live replication counts as stopped once its initial cycle is over
        if (!this.live && this.subjects.initialReplicationComplete.getValue()) {
            return true;
        }
        // bracket access to the BehaviorSubject's internal current value
        if (this.subjects.canceled['_value']) {
            return true;
        }
        return false;
    }

    awaitInitialReplication(): Promise<true> {
        return firstValueFrom(
            this.initialReplicationComplete$.pipe(
                filter(v => v === true),
            )
        );
    }

    // Resolves false when already stopped, true when this call canceled it.
    cancel(): Promise<any> {
        if (this.isStopped()) {
            return Promise.resolve(false);
        }
        this.subs.forEach(sub => sub.unsubscribe());
        this.subjects.canceled.next(true);
        return Promise.resolve(true);
    }

    /**
     * Ensures that this._run() does not run in parallel
     */
    async run(retryOnFail = true): Promise<void> {
        if (this.isStopped()) {
            return;
        }
        // enough cycles already queued; joining the existing chain is equivalent
        if (this.runQueueCount > 2) {
            return this.runningPromise;
        }
        this.runQueueCount++;
        this.runningPromise = this.runningPromise.then(async () => {
            this.subjects.active.next(true);
            const willRetry = await this._run(retryOnFail);
            this.subjects.active.next(false);
            if (
                retryOnFail &&
                !willRetry &&
                this.subjects.initialReplicationComplete.getValue() === false
            ) {
                this.subjects.initialReplicationComplete.next(true);
            }
            this.runQueueCount--;
        });
        return this.runningPromise;
    }

    /**
     * Runs the whole cycle once,
     * first pushes the local changes to the remote,
     * then pulls the remote changes to the local.
     * Returns true if a retry must be done
     */
    async _run(retryOnFail = true): Promise<boolean> {
        this.runCount++;
        if (this.push) {
            const ok = await this.runPush();
            if (!ok && retryOnFail) {
                setTimeout(() => this.run(), this.retryTime);
                /*
                    Because we assume that conflicts are solved on the server side,
                    if push failed, do not attempt to pull before push was successful
                    otherwise we do not know how to merge changes with the local state
                */
                return true;
            }
        }
        if (this.pull) {
            const ok = await this.runPull();
            if (!ok && retryOnFail) {
                setTimeout(() => this.run(), this.retryTime);
                return true;
            }
        }
        return false;
    }

    /**
     * Pull all changes from the server,
     * start from the last pulled change.
     * @return true if successful, false if something errored
     */
    async runPull(): Promise<boolean> {
        if (!this.pull) {
            throw newRxError('SNH');
        }
        if (this.isStopped()) {
            return Promise.resolve(false);
        }
        const latestDocument = await getLastPullDocument(this.collection, this.replicationIdentifier);
        let result: ReplicationPullHandlerResult<RxDocType>;
        try {
            result = await this.pull.handler(latestDocument);
        } catch (err) {
            this.subjects.error.next(err);
            return false;
        }

        const pulledDocuments = result.documents;

        // optimization shortcut, do not proceed if there are no documents.
        if (pulledDocuments.length === 0) {
            return true;
        }

        /**
         * Run schema validation in dev-mode
         */
        if (overwritable.isDevMode()) {
            try {
                pulledDocuments.forEach((doc: any) => {
                    // _deleted is not part of the collection schema; strip it first
                    const withoutDeleteFlag = flatClone(doc);
                    delete withoutDeleteFlag._deleted;
                    this.collection.schema.validate(withoutDeleteFlag);
                });
            } catch (err) {
                this.subjects.error.next(err);
                return false;
            }
        }

        if (this.isStopped()) {
            return true;
        }
        await this.handleDocumentsFromRemote(pulledDocuments);
        pulledDocuments.map((doc: any) => this.subjects.received.next(doc));

        // NOTE(review): the length === 0 branch below is unreachable — the
        // shortcut above already returned when there were no documents.
        if (pulledDocuments.length === 0) {
            if (this.live) {
                // console.log('no more docs, wait for ping');
            } else {
                // console.log('RxGraphQLReplicationState._run(): no more docs and not live; complete = true');
            }
        } else {
            const newLatestDocument = lastOfArray(pulledDocuments);
            await setLastPullDocument(
                this.collection,
                this.replicationIdentifier,
                newLatestDocument
            );

            /**
             * We have more documents on the remote,
             * So re-run the pulling.
             */
            if (result.hasMoreDocuments) {
                await this.runPull();
            }
        }

        return true;
    }

    /**
     * Writes pulled documents into the local storage instance,
     * assigning each a new revision marked as coming from pull replication.
     */
    async handleDocumentsFromRemote(
        docs: (WithDeleted<RxDocType> | DeepReadonlyObject<WithDeleted<RxDocType>>)[]
    ): Promise<boolean> {
        const toStorageDocs: RxDocumentData<RxDocType>[] = [];
        const docIds: string[] = docs.map(doc => doc[this.collection.schema.primaryPath]) as any;
        const docsFromLocal = await this.collection.storageInstance.findDocumentsById(docIds, true);

        for (const originalDoc of docs) {
            const doc: any = flatClone(originalDoc);
            const documentId: string = doc[this.collection.schema.primaryPath];

            const docStateInLocalStorageInstance = docsFromLocal.get(documentId);
            let newRevision = createRevisionForPulledDocument(
                this.replicationIdentifier,
                doc
            );
            if (docStateInLocalStorageInstance) {
                // document exists locally: bump revision height by one
                const hasHeight = getHeightOfRevision(docStateInLocalStorageInstance._rev);
                const newRevisionHeight = hasHeight + 1;
                newRevision = newRevisionHeight + '-' + newRevision;
            } else {
                newRevision = '1-' + newRevision;
            }
            doc._rev = newRevision;

            toStorageDocs.push(doc);
        }

        if (toStorageDocs.length > 0) {
            await this.collection.database.lockedRun(
                async () => {
                    await this.collection.storageInstance.bulkAddRevisions(
                        toStorageDocs.map(doc => _handleToStorageInstance(this.collection, doc))
                    );
                }
            );
        }

        return true;
    }

    /**
     * Pushes unreplicated local changes to the remote.
     * @return true if successful, false if not
     */
    async runPush(): Promise<boolean> {
        if (!this.push) {
            throw newRxError('SNH');
        }

        const batchSize = this.push.batchSize ? this.push.batchSize : 5;
        const changesResult = await getChangesSinceLastPushSequence<RxDocType>(
            this.collection,
            this.replicationIdentifier,
            batchSize,
        );

        const pushDocs: WithDeleted<RxDocType>[] = Array
            .from(changesResult.changedDocs.values())
            .map(row => {
                const doc: WithDeleted<RxDocType> = flatClone(row.doc) as any;
                // TODO _deleted should be required on type RxDocumentData
                // so we do not need this check here
                if (!doc.hasOwnProperty('_deleted')) {
                    doc._deleted = false;
                }
                // strip storage-internal fields before handing docs to the endpoint
                delete (doc as any)._rev;
                delete (doc as any)._attachments;
                return doc;
            });

        try {
            await this.push.handler(pushDocs);
        } catch (err) {
            this.subjects.error.next(err);
            return false;
        }
        pushDocs.forEach(pushDoc => this.subjects.send.next(pushDoc));
        await setLastPushSequence(
            this.collection,
            this.replicationIdentifier,
            changesResult.lastSequence
        );

        // batch had documents so there might be more changes to replicate
        if (changesResult.changedDocs.size !== 0) {
            await this.runPush();
        }
        return true;
    }
}

export async function replicateRxCollection<RxDocType>(
    {
        replicationIdentifier,
        collection,
        pull,
        push,
        live = false,
        liveInterval = 1000 * 10,
        retryTime = 1000 * 5,
        waitForLeadership
    }: ReplicationOptions<RxDocType>
): Promise<RxReplicationState<RxDocType>> {
    if (
        waitForLeadership &&
        // do not await leadership if not multiInstance
        collection.database.multiInstance
    ) {
        await collection.database.waitForLeadership();
    }
    const replicationState = new RxReplicationStateBase<RxDocType>(
        replicationIdentifier,
        collection,
        pull,
push, live, liveInterval, retryTime, ); // trigger run once replicationState.run(); // start sync-interval if (replicationState.live) { if (pull) { (async () => { while (!replicationState.isStopped()) { await promiseWait(replicationState.liveInterval); if (replicationState.isStopped()) { return; } await replicationState.run( // do not retry on liveInterval-runs because they might stack up // when failing false ); } })(); } if (push) { /** * When a document is written to the collection, * we might have to run the replication run() once */ const changeEventsSub = collection.$.pipe( filter(cE => !cE.isLocal) ) .subscribe(changeEvent => { if (replicationState.isStopped()) { return; } const doc = getDocumentDataOfRxChangeEvent(changeEvent); const rev = doc._rev; if ( rev && !wasRevisionfromPullReplication( replicationIdentifier, rev ) ) { replicationState.run(); } }); replicationState.subs.push(changeEventsSub); } } return replicationState as any; } export * from './replication-checkpoint'; export * from './revision-flag';
the_stack
import * as debug_ from "debug";
import * as path from "path";
import * as fs from "fs";

import { isAudiobookFn, isDivinaFn, isPdfFn } from "readium-desktop/common/isManifestType";
import { inject, injectable } from "inversify";
import * as moment from "moment";
import { CoverView, PublicationView } from "readium-desktop/common/views/publication";
import {
    convertContributorArrayToStringArray,
} from "readium-desktop/main/converter/tools/localisation";
import { PublicationDocument, PublicationDocumentWithoutTimestampable } from "readium-desktop/main/db/document/publication";
import { diSymbolTable } from "readium-desktop/main/diSymbolTable";
import { PublicationStorage } from "readium-desktop/main/storage/publication-storage";
import { tryCatchSync } from "readium-desktop/utils/tryCatch";

import { TaJsonDeserialize, TaJsonSerialize } from "@r2-lcp-js/serializable";
import { Publication as R2Publication } from "@r2-shared-js/models/publication";
import { PublicationParsePromise } from "@r2-shared-js/parser/publication-parser";

import { diMainGet } from "../di";
import { lcpLicenseIsNotWellFormed } from "readium-desktop/common/lcp";
import { LCP } from "@r2-lcp-js/parser/epub/lcp";

// import { type Store } from "redux";
// import { RootState } from "../redux/states";

const debug = debug_("readium-desktop:main#converter/publication");

// memory cache, to minimize filesystem access:
// maps a publication identifier to its serialized manifest / LCP license.
interface ICache {
    r2PublicationStr?: string;
    r2LCPStr?: string;
}
const _pubCache: Record<string, ICache> = {};

/**
 * Converts stored publication documents into view objects for the renderer,
 * unmarshalling the R2 manifest (and optional LCP license) from a per-process
 * memory cache, the on-disk manifest.json, or — as a last resort — by parsing
 * the publication file itself.
 */
@injectable()
export class PublicationViewConverter {

    @inject(diSymbolTable["publication-storage"])
    private readonly publicationStorage!: PublicationStorage;

    // @inject(diSymbolTable.store)
    // private readonly store!: Store<RootState>;

    /** Drops the cached manifest/LCP strings for a publication, if present. */
    public removeFromMemoryCache(identifier: string) {
        if (_pubCache[identifier]) {
            delete _pubCache[identifier];
        }
    }

    /**
     * Writes the LCP license to `license.lcpl` in the publication folder and
     * refreshes the memory cache — but only when a manifest is already cached
     * for this publication (the cache entry is keyed off the manifest).
     */
    public updateLcpCache(publicationDocument: PublicationDocumentWithoutTimestampable, r2LCP: LCP) {
        const pubFolder = this.publicationStorage.buildPublicationPath(
            publicationDocument.identifier,
        );
        debug("====> updateLcpCache: ", pubFolder);
        const lcpPath = path.join(pubFolder, "license.lcpl");
        // Prefer the original JSON source when available to avoid re-serialization drift.
        const r2LCPStr = r2LCP.JsonSource ? r2LCP.JsonSource : JSON.stringify(TaJsonSerialize(r2LCP));
        if (_pubCache[publicationDocument.identifier]?.r2PublicationStr) {
            _pubCache[publicationDocument.identifier].r2LCPStr = r2LCPStr;
        }
        fs.writeFileSync(lcpPath, r2LCPStr, { encoding: "utf-8"});
    }

    /**
     * Serializes the R2 publication to `manifest.json` in the publication
     * folder, resets and repopulates the memory cache entry, and cascades to
     * the LCP cache when the publication carries a license.
     */
    public updatePublicationCache(publicationDocument: PublicationDocumentWithoutTimestampable, r2Publication: R2Publication) {
        _pubCache[publicationDocument.identifier] = {};
        const pubFolder = this.publicationStorage.buildPublicationPath(
            publicationDocument.identifier,
        );
        debug("====> updatePublicationCache: ", pubFolder);
        const manifestPath = path.join(pubFolder, "manifest.json");
        const r2PublicationStr = JSON.stringify(TaJsonSerialize(r2Publication), null, 2);
        _pubCache[publicationDocument.identifier].r2PublicationStr = r2PublicationStr;
        fs.writeFileSync(manifestPath, r2PublicationStr, { encoding: "utf-8"});
        if (r2Publication.LCP) {
            this.updateLcpCache(publicationDocument, r2Publication.LCP);
        }
    }

    /**
     * Loads the R2 publication for a document, trying in order:
     * 1. the memory cache (manifest + optional LCP strings),
     * 2. `manifest.json` / `license.lcpl` on disk (updates the cache),
     * 3. parsing the publication file itself (fallback; updates the cache).
     * LCP loading is best-effort: parse/validation failures are swallowed and
     * the publication is returned without a license.
     */
    public async unmarshallR2Publication(
        publicationDocument: PublicationDocument,
    ): Promise<R2Publication> {

        const pubFolder = this.publicationStorage.buildPublicationPath(
            publicationDocument.identifier,
        );
        debug("====> unmarshallR2Publication: ", pubFolder);

        if (_pubCache[publicationDocument.identifier]?.r2PublicationStr) {
            const r2PublicationStr = _pubCache[publicationDocument.identifier].r2PublicationStr;
            debug("====> manifest (memory cache)");
            const r2PublicationJson = JSON.parse(r2PublicationStr);
            const r2Publication = TaJsonDeserialize(r2PublicationJson, R2Publication);

            const r2LCPStr = _pubCache[publicationDocument.identifier]?.r2LCPStr;
            if (r2LCPStr) {
                try {
                    debug("====> LCP (memory cache)");
                    const r2LCPJson = JSON.parse(r2LCPStr);
                    if (!lcpLicenseIsNotWellFormed(r2LCPJson)) {
                        const r2LCP = TaJsonDeserialize(r2LCPJson, LCP);
                        // ZipPath is a placeholder: the license did not come from a zip here.
                        r2LCP.ZipPath = "dummy/license.lcpl";
                        r2LCP.JsonSource = r2LCPStr;
                        r2LCP.init();
                        r2Publication.LCP = r2LCP;
                    } else {
                        debug("NOT WELL FORMED LCP?");
                    }
                } catch (_err) {} // best-effort: publication remains usable without LCP
            }

            return r2Publication;
        }

        try {
            const manifestPath = path.join(pubFolder, "manifest.json");
            const r2PublicationStr = fs.readFileSync(manifestPath, { encoding: "utf-8"});
            debug("====> manifest: ", manifestPath);
            const r2PublicationJson = JSON.parse(r2PublicationStr);
            const r2Publication = TaJsonDeserialize(r2PublicationJson, R2Publication);

            try {
                const lcpPath = path.join(pubFolder, "license.lcpl");
                const r2LCPStr = fs.readFileSync(lcpPath, { encoding: "utf-8"});
                debug("====> LCP: ", lcpPath);
                const r2LCPJson = JSON.parse(r2LCPStr);
                if (!lcpLicenseIsNotWellFormed(r2LCPJson)) {
                    const r2LCP = TaJsonDeserialize(r2LCPJson, LCP);
                    r2LCP.ZipPath = "dummy/license.lcpl";
                    r2LCP.JsonSource = r2LCPStr;
                    r2LCP.init();
                    r2Publication.LCP = r2LCP;
                } else {
                    debug("NOT WELL FORMED LCP?");
                }
            } catch (_err) {} // best-effort: missing/broken license.lcpl is not fatal

            this.updatePublicationCache(publicationDocument, r2Publication);
            return r2Publication;
        } catch (err) {
            debug(err, " FALLBACK: parsing publication from filesystem ...");

            const epubPath = this.publicationStorage.getPublicationEpubPath(
                publicationDocument.identifier,
            );
            const r2Publication = await PublicationParsePromise(epubPath);
            // just like when calling lsdLcpUpdateInject():
            // r2Publication.LCP.ZipPath is set to META-INF/license.lcpl
            // r2Publication.LCP.init(); is called to prepare for decryption (native NodeJS plugin)
            // r2Publication.LCP.JsonSource is set

            // after PublicationParsePromise, cleanup zip handler
            // (no need to fetch ZIP data beyond this point)
            r2Publication.freeDestroy();

            this.updatePublicationCache(publicationDocument, r2Publication);
            return r2Publication;
        }
    }

    // Note: PublicationDocument and PublicationView are both Identifiable, with identical `identifier`
    /**
     * Builds the renderer-facing PublicationView from a stored document:
     * unmarshalls the R2 publication, extracts metadata (authors, publishers,
     * dates, a11y fields, format flags), and merges in reader state (last
     * reading location / timestamp) read from the redux store.
     */
    public async convertDocumentToView(document: PublicationDocument): Promise<PublicationView> {

        // Legacy Base64 data blobs
        // const r2PublicationBase64 = document.resources.r2PublicationBase64;
        // const r2PublicationStr = Buffer.from(r2PublicationBase64, "base64").toString("utf-8");
        // const r2PublicationJson = JSON.parse(r2PublicationStr);
        // const r2PublicationJson = document.resources.r2PublicationJson;
        // const r2Publication = TaJsonDeserialize(r2PublicationJson, R2Publication);
        const r2Publication = await this.unmarshallR2Publication(document);
        const r2PublicationJson = TaJsonSerialize(r2Publication); // note: does not include r2Publication.LCP

        const publishers = convertContributorArrayToStringArray(
            r2Publication.Metadata.Publisher,
        );
        const authors = convertContributorArrayToStringArray(
            r2Publication.Metadata.Author,
        );

        let publishedAt: string | undefined;
        if (r2Publication.Metadata.PublicationDate) {
            publishedAt = moment(r2Publication.Metadata.PublicationDate).toISOString();
        }
        let modifiedAt: string | undefined;
        if (r2Publication.Metadata.Modified) {
            modifiedAt = moment(r2Publication.Metadata.Modified).toISOString();
        }

        let cover: CoverView | undefined;
        if (document.coverFile) {
            cover = {
                thumbnailUrl : document.coverFile.url,
                coverUrl: document.coverFile.url,
            };
        }

        // TODO become a side effect function : AIE !!
        // could be refactored when the publications documents will be in the state
        const store = diMainGet("store");
        const state = store.getState();
        // best-effort read of the reader's last known locator for this publication
        const readerStateLocator = tryCatchSync(() => state.win.registry.reader[document.identifier]?.reduxState.locator, "");

        const duration = typeof r2Publication.Metadata.Duration === "number" ? r2Publication.Metadata.Duration : undefined;
        const nbOfTracks = typeof r2Publication.Metadata.AdditionalJSON?.tracks === "number" ? r2Publication.Metadata.AdditionalJSON?.tracks : undefined;

        // audio detection: RDFType hint, manifest heuristic, or active audio playback state
        const isAudio = r2Publication.Metadata.RDFType?.toLowerCase().includes("audio")
            || isAudiobookFn(r2Publication.Metadata)
            || (
                readerStateLocator?.audioPlaybackInfo &&
                readerStateLocator?.audioPlaybackInfo.globalDuration &&
                typeof readerStateLocator?.locator.locations.position === "number");

        const isDivina = isDivinaFn(r2Publication);
        const isPDF = isPdfFn(r2Publication);
        // locatorExt.docInfo.isFixedLayout
        const isFXL = r2Publication.Metadata?.Rendition?.Layout === "fixed";
        // "DAISY_audioNCX" "DAISY_textNCX" "DAISY_audioFullText"
        const isDaisy = !!r2Publication.Metadata?.AdditionalJSON?.ReadiumWebPublicationConvertedFrom;

        let lastReadTimeStamp = undefined; // Timestampable document.createdAt (new Date()).getTime()
        const lastReadingQueue = state.publication?.lastReadingQueue; // this.store?.getState()?
        if (lastReadingQueue) {
            // queue items are [timestamp, publicationIdentifier] tuples
            for (const qItem of lastReadingQueue) {
                const timeStamp = qItem[0]; // (new Date()).getTime()
                const pubIdentifier = qItem[1];
                if (pubIdentifier === document.identifier) {
                    lastReadTimeStamp = timeStamp;
                    break;
                }
            }
        }

        return {
            isAudio,
            isDivina,
            isPDF,
            isDaisy,
            isFXL,
            lastReadTimeStamp,

            a11y_accessMode: r2Publication.Metadata.AccessMode, // string[]
            a11y_accessibilityFeature: r2Publication.Metadata.AccessibilityFeature, // string[]
            a11y_accessibilityHazard: r2Publication.Metadata.AccessibilityHazard, // string[]
            a11y_certifiedBy: r2Publication.Metadata.CertifiedBy, // string[]
            a11y_certifierCredential: r2Publication.Metadata.CertifierCredential, // string[]
            a11y_certifierReport: r2Publication.Metadata.CertifierReport, // string[]
            a11y_conformsTo: r2Publication.Metadata.ConformsTo, // string[]
            a11y_accessModeSufficient: r2Publication.Metadata.AccessModeSufficient, // (string[])[]
            // convertMultiLangStringToString
            a11y_accessibilitySummary: r2Publication.Metadata.AccessibilitySummary, // string | IStringMap

            identifier: document.identifier, // preserve Identifiable identifier
            title: document.title || "-", // default title
            authors,
            description: r2Publication.Metadata.Description,
            languages: r2Publication.Metadata.Language,
            publishers,
            workIdentifier: r2Publication.Metadata.Identifier,
            publishedAt,
            modifiedAt,
            tags: document.tags,
            cover,
            customCover: document.customCover,

            lcp: document.lcp,
            lcpRightsCopies: document.lcpRightsCopies,

            RDFType: r2Publication.Metadata.RDFType,
            duration,
            nbOfTracks,

            // doc: r2Publiction.Metadata,

            r2PublicationJson,
            // Legacy Base64 data blobs
            // r2PublicationBase64,

            lastReadingLocation: readerStateLocator,
        };
    }
}
the_stack
import { Injectable } from '@angular/core'; import { CoreSites } from '@services/sites'; import { CoreApp } from '@services/app'; import { CoreTextUtils } from '@services/utils/text'; import { AddonMessagesOfflineConversationMessagesDBRecord, AddonMessagesOfflineMessagesDBRecord, CONVERSATION_MESSAGES_TABLE, MESSAGES_TABLE, } from './database/messages'; import { makeSingleton } from '@singletons'; import { AddonMessagesConversation } from './messages'; /** * Service to handle Offline messages. */ @Injectable({ providedIn: 'root' }) export class AddonMessagesOfflineProvider { /** * Delete a message. * * @param conversationId Conversation ID. * @param message The message. * @param timeCreated The time the message was created. * @param siteId Site ID. If not defined, current site. * @return Promise resolved if stored, rejected if failure. */ async deleteConversationMessage(conversationId: number, message: string, timeCreated: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.getDb().deleteRecords(CONVERSATION_MESSAGES_TABLE, { conversationid: conversationId, text: message, timecreated: timeCreated, }); } /** * Delete all the messages in a conversation. * * @param conversationId Conversation ID. * @param siteId Site ID. If not defined, current site. * @return Promise resolved if stored, rejected if failure. */ async deleteConversationMessages(conversationId: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.getDb().deleteRecords(CONVERSATION_MESSAGES_TABLE, { conversationid: conversationId, }); } /** * Delete a message. * * @param toUserId User ID to send the message to. * @param message The message. * @param timeCreated The time the message was created. * @param siteId Site ID. If not defined, current site. * @return Promise resolved if stored, rejected if failure. 
*/ async deleteMessage(toUserId: number, message: string, timeCreated: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.getDb().deleteRecords(MESSAGES_TABLE, { touserid: toUserId, smallmessage: message, timecreated: timeCreated, }); } /** * Get all messages where deviceoffline is set to 1. * * @param siteId Site ID. If not defined, current site. * @return Promise resolved with messages. */ async getAllDeviceOfflineMessages( siteId?: string, ): Promise<AddonMessagesOfflineAnyMessagesFormatted[]> { const site = await CoreSites.getSite(siteId); const [ messages, conversations, ] = await Promise.all([ site.getDb().getRecords<AddonMessagesOfflineMessagesDBRecord>(MESSAGES_TABLE, { deviceoffline: 1 }), site.getDb().getRecords<AddonMessagesOfflineConversationMessagesDBRecord>( CONVERSATION_MESSAGES_TABLE, { deviceoffline: 1 }, ), ]); const messageResult: AddonMessagesOfflineAnyMessagesFormatted[] = this.parseMessages(messages); const formattedConv = this.parseConversationMessages(conversations); return messageResult.concat(formattedConv); } /** * Get all offline messages. * * @param siteId Site ID. If not defined, current site. * @return Promise resolved with messages. */ async getAllMessages( siteId?: string, ): Promise<AddonMessagesOfflineAnyMessagesFormatted[]> { const site = await CoreSites.getSite(siteId); const [ messages, conversations, ] = await Promise.all([ site.getDb().getAllRecords<AddonMessagesOfflineMessagesDBRecord>(MESSAGES_TABLE), site.getDb().getAllRecords<AddonMessagesOfflineConversationMessagesDBRecord>(CONVERSATION_MESSAGES_TABLE), ]); const messageResult: AddonMessagesOfflineAnyMessagesFormatted[] = this.parseMessages(messages); const formattedConv = this.parseConversationMessages(conversations); return messageResult.concat(formattedConv); } /** * Get offline messages to send to a certain user. * * @param conversationId Conversation ID. 
* @param userIdFrom To add to the conversation messages when parsing. * @param siteId Site ID. If not defined, current site. * @return Promise resolved with messages. */ async getConversationMessages( conversationId: number, userIdFrom?: number, siteId?: string, ): Promise<AddonMessagesOfflineConversationMessagesDBRecordFormatted[]> { const site = await CoreSites.getSite(siteId); const messages: AddonMessagesOfflineConversationMessagesDBRecord[] = await site.getDb().getRecords( CONVERSATION_MESSAGES_TABLE, { conversationid: conversationId }, ); return this.parseConversationMessages(messages, userIdFrom); } /** * Get offline messages to send to a certain user. * * @param toUserId User ID to get messages to. * @param siteId Site ID. If not defined, current site. * @return Promise resolved with messages. */ async getMessages(toUserId: number, siteId?: string): Promise<AddonMessagesOfflineMessagesDBRecordFormatted[]> { const site = await CoreSites.getSite(siteId); const messages: AddonMessagesOfflineMessagesDBRecord[] = await site.getDb().getRecords(MESSAGES_TABLE, { touserid: toUserId }); return this.parseMessages(messages); } /** * Check if there are offline messages to send to a conversation. * * @param conversationId Conversation ID. * @param siteId Site ID. If not defined, current site. * @return Promise resolved with boolean: true if has offline messages, false otherwise. */ async hasConversationMessages(conversationId: number, siteId?: string): Promise<boolean> { const messages = await this.getConversationMessages(conversationId, undefined, siteId); return !!messages.length; } /** * Check if there are offline messages to send to a certain user. * * @param toUserId User ID to check. * @param siteId Site ID. If not defined, current site. * @return Promise resolved with boolean: true if has offline messages, false otherwise. 
*/ async hasMessages(toUserId: number, siteId?: string): Promise<boolean> { const messages = await this.getMessages(toUserId, siteId); return !!messages.length; } /** * Parse some fields of each offline conversation messages. * * @param messages List of messages to parse. * @param userIdFrom To add to the conversation messages when parsin. * @return Parsed messages. */ protected parseConversationMessages( messages: AddonMessagesOfflineConversationMessagesDBRecord[], userIdFrom?: number, ): AddonMessagesOfflineConversationMessagesDBRecordFormatted[] { if (!messages) { return []; } return messages.map((message) => { const parsedMessage: AddonMessagesOfflineConversationMessagesDBRecordFormatted = { conversationid: message.conversationid, text: message.text, timecreated: message.timecreated, deviceoffline: message.deviceoffline, conversation: message.conversation ? CoreTextUtils.parseJSON(message.conversation, undefined) : undefined, pending: true, useridfrom: userIdFrom, }; return parsedMessage; }); } /** * Parse some fields of each offline messages. * * @param messages List of messages to parse. * @return Parsed messages. */ protected parseMessages( messages: AddonMessagesOfflineMessagesDBRecord[], ): AddonMessagesOfflineMessagesDBRecordFormatted[] { if (!messages) { return []; } return messages.map((message) => { const parsedMessage: AddonMessagesOfflineMessagesDBRecordFormatted = { touserid: message.touserid, useridfrom: message.useridfrom, smallmessage: message.smallmessage, timecreated: message.timecreated, deviceoffline: message.deviceoffline, pending: true, text: message.smallmessage, }; return parsedMessage; }); } /** * Save a conversation message to be sent later. * * @param conversation Conversation. * @param message The message to send. * @param siteId Site ID. If not defined, current site. * @return Promise resolved if stored, rejected if failure. 
*/ async saveConversationMessage( conversation: AddonMessagesConversation, message: string, siteId?: string, ): Promise<AddonMessagesOfflineConversationMessagesDBRecord> { const site = await CoreSites.getSite(siteId); const entry: AddonMessagesOfflineConversationMessagesDBRecord = { conversationid: conversation.id, text: message, timecreated: Date.now(), deviceoffline: CoreApp.isOnline() ? 0 : 1, conversation: JSON.stringify({ name: conversation.name || '', subname: conversation.subname || '', imageurl: conversation.imageurl || '', isfavourite: conversation.isfavourite ? 1 : 0, type: conversation.type, }), }; await site.getDb().insertRecord(CONVERSATION_MESSAGES_TABLE, entry); return entry; } /** * Save a message to be sent later. * * @param toUserId User ID recipient of the message. * @param message The message to send. * @param siteId Site ID. If not defined, current site. * @return Promise resolved if stored, rejected if failure. */ async saveMessage(toUserId: number, message: string, siteId?: string): Promise<AddonMessagesOfflineMessagesDBRecord> { const site = await CoreSites.getSite(siteId); const entry: AddonMessagesOfflineMessagesDBRecord = { touserid: toUserId, useridfrom: site.getUserId(), smallmessage: message, timecreated: new Date().getTime(), deviceoffline: CoreApp.isOnline() ? 0 : 1, }; await site.getDb().insertRecord(MESSAGES_TABLE, entry); return entry; } /** * Set deviceoffline for a group of messages. * * @param messages Messages to update. Should be the same entry as retrieved from the DB. * @param value Value to set. * @param siteId Site ID. If not defined, current site. * @return Promise resolved if stored, rejected if failure. */ async setMessagesDeviceOffline( messages: AddonMessagesOfflineAnyMessagesFormatted[], value: boolean, siteId?: string, ): Promise<void> { const site = await CoreSites.getSite(siteId); const db = site.getDb(); const promises: Promise<number>[] = []; const data = { deviceoffline: value ? 
1 : 0 }; messages.forEach((message) => { if ('conversationid' in message) { promises.push(db.updateRecords( CONVERSATION_MESSAGES_TABLE, data, { conversationid: message.conversationid, text: message.text, timecreated: message.timecreated }, )); } else { promises.push(db.updateRecords( MESSAGES_TABLE, data, { touserid: message.touserid, smallmessage: message.smallmessage, timecreated: message.timecreated }, )); } }); await Promise.all(promises); } } export const AddonMessagesOffline = makeSingleton(AddonMessagesOfflineProvider); export type AddonMessagesOfflineMessagesDBRecordFormatted = AddonMessagesOfflineMessagesDBRecord & { pending?: boolean; // Will be likely true. text?: string; // Copy of smallmessage. }; export type AddonMessagesOfflineConversationMessagesDBRecordFormatted = Omit<AddonMessagesOfflineConversationMessagesDBRecord, 'conversation'> & { conversation?: AddonMessagesConversation; // Data about the conversation. pending: boolean; // Will be always true. useridfrom?: number; // User Id who send the message, will be likely us. }; export type AddonMessagesOfflineAnyMessagesFormatted = AddonMessagesOfflineConversationMessagesDBRecordFormatted | AddonMessagesOfflineMessagesDBRecordFormatted;
the_stack
// Regression/feature tests for image injection in docx-templates:
// SVG rendering (with/without thumbnail), images in headers/footers/shapes,
// ArrayBuffer vs Buffer inputs, data-context injection, and image rotation.
// Fixtures live under ./fixtures; most tests snapshot the 'JS'/'XML' probe output.
import path from 'path';
import fs from 'fs';
import { createReport } from '../index';
import { Image, ImagePars } from '../types';
import { setDebugLogSink } from '../debug';
import JSZip from 'jszip';

if (process.env.DEBUG) setDebugLogSink(console.log);

it('001: Issue #61 Correctly renders an SVG image', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imagesSVG.docx')
  );

  // Use a random png file as a thumbnail
  const thumbnail: Image = {
    data: await fs.promises.readFile(
      path.join(__dirname, 'fixtures', 'sample.png')
    ),
    extension: '.png',
  };

  const opts = {
    template,
    data: {},
    additionalJsContext: {
      // SVG loaded from a fixture file
      svgImgFile: async () => {
        const data = await fs.promises.readFile(
          path.join(__dirname, 'fixtures', 'sample.svg')
        );
        return { width: 6, height: 6, data, extension: '.svg', thumbnail };
      },
      // SVG built from an inline string
      svgImgStr: () => {
        const data = Buffer.from(
          `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
          <rect x="10" y="10" height="100" width="100" style="stroke:#ff0000; fill: #0000ff"/>
          </svg>`,
          'utf-8'
        );
        return { width: 6, height: 6, data, extension: '.svg', thumbnail };
      },
    },
  };
  const result = await createReport(opts, 'JS');
  expect(result).toMatchSnapshot();
});

it('002: throws when thumbnail is incorrectly provided when inserting an SVG', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imagesSVG.docx')
  );

  // Thumbnail deliberately invalid: required 'extension' field is missing.
  const thumbnail = {
    data: await fs.promises.readFile(
      path.join(__dirname, 'fixtures', 'sample.png')
    ),
    // extension: '.png', extension is not given
  };

  const opts = {
    template,
    data: {},
    additionalJsContext: {
      svgImgFile: async () => {
        const data = await fs.promises.readFile(
          path.join(__dirname, 'fixtures', 'sample.svg')
        );
        return { width: 6, height: 6, data, extension: '.svg', thumbnail };
      },
      svgImgStr: () => {
        const data = Buffer.from(
          `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
          <rect x="10" y="10" height="100" width="100" style="stroke:#ff0000; fill: #0000ff"/>
          </svg>`,
          'utf-8'
        );
        return { width: 6, height: 6, data, extension: '.svg', thumbnail };
      },
    },
  };
  return expect(createReport(opts)).rejects.toMatchSnapshot();
});

it('003: can inject an svg without a thumbnail', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imagesSVG.docx')
  );

  const opts = {
    template,
    data: {},
    additionalJsContext: {
      svgImgFile: async () => {
        const data = await fs.promises.readFile(
          path.join(__dirname, 'fixtures', 'sample.svg')
        );
        return { width: 6, height: 6, data, extension: '.svg' };
      },
      svgImgStr: () => {
        const data = Buffer.from(
          `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
          <rect x="10" y="10" height="100" width="100" style="stroke:#ff0000; fill: #0000ff"/>
          </svg>`,
          'utf-8'
        );
        return { width: 6, height: 6, data, extension: '.svg' };
      },
    },
  };
  const result = await createReport(opts, 'JS');
  expect(result).toMatchSnapshot();
});

it('004: can inject an image in the document header (regression test for #113)', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imageHeader.docx')
  );

  const opts = {
    template,
    data: {},
    additionalJsContext: {
      image: async () => {
        const data = await fs.promises.readFile(
          path.join(__dirname, 'fixtures', 'sample.png')
        );
        return { width: 6, height: 6, data, extension: '.png' };
      },
    },
  };

  // NOTE: bug does not happen when using debug probe arguments ('JS' or 'XML'),
  // as these exit before the headers are parsed.
  // TODO: build a snapshot test once _probe === 'XML' properly includes all document XMLs, not just
  // the main document
  expect(await createReport(opts)).toBeInstanceOf(Uint8Array);
});

it('005: can inject PNG files using ArrayBuffers without errors (related to issue #166)', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imageSimple.docx')
  );
  const buff = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'sample.png')
  );

  // Copy a Node Buffer into a plain ArrayBuffer, byte by byte.
  function toArrayBuffer(buf: Buffer): ArrayBuffer {
    const ab = new ArrayBuffer(buf.length);
    const view = new Uint8Array(ab);
    for (let i = 0; i < buf.length; ++i) {
      view[i] = buf[i];
    }
    return ab;
  }

  const fromAB = await createReport({
    template,
    data: {},
    additionalJsContext: {
      injectImg: () => {
        return { width: 6, height: 6, data: toArrayBuffer(buff), extension: '.png' };
      },
    },
  });

  const fromB = await createReport({
    template,
    data: {},
    additionalJsContext: {
      injectImg: () => {
        return { width: 6, height: 6, data: buff, extension: '.png' };
      },
    },
  });

  expect(fromAB).toBeInstanceOf(Uint8Array);
  expect(fromB).toBeInstanceOf(Uint8Array);
  // Both input forms must produce byte-identical output.
  expect(fromAB).toStrictEqual(fromB);
});

it('006: can inject an image from the data instead of the additionalJsContext', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imageSimple.docx')
  );
  const buff = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'sample.png')
  );
  const reportA = await createReport({
    template,
    data: {
      injectImg: () => ({ width: 6, height: 6, data: buff, extension: '.png' }),
    },
  });

  const reportB = await createReport({
    template,
    data: {},
    additionalJsContext: {
      injectImg: () => ({ width: 6, height: 6, data: buff, extension: '.png' }),
    },
  });
  expect(reportA).toBeInstanceOf(Uint8Array);
  expect(reportB).toBeInstanceOf(Uint8Array);
  expect(reportA).toStrictEqual(reportB);

  // Ensure only one 'media' element (the image data as a png file) is added to the final docx file.
  // Regression test for #218
  const zip = await JSZip.loadAsync(reportA);
  expect(Object.keys(zip?.files ?? {})).toMatchInlineSnapshot(`
    Array [
      "[Content_Types].xml",
      "_rels/.rels",
      "word/_rels/document.xml.rels",
      "word/document.xml",
      "word/theme/theme1.xml",
      "word/settings.xml",
      "word/fontTable.xml",
      "word/webSettings.xml",
      "docProps/app.xml",
      "docProps/core.xml",
      "word/styles.xml",
      "word/",
      "word/media/",
      "word/media/template_document.xml_image1.png",
      "word/_rels/",
    ]
  `);
});

it('007: can inject an image in a document that already contains images (regression test for #144)', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imageExisting.docx')
  );
  const buff = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'sample.png')
  );
  expect(
    await createReport(
      {
        template,
        data: {
          cv: { ProfilePicture: { url: 'abc' } },
        },
        additionalJsContext: {
          getImage: () => ({ width: 6, height: 6, data: buff, extension: '.png' }),
        },
      },
      'XML'
    )
  ).toMatchSnapshot();
});

it('008: can inject an image in a shape in the doc footer (regression test for #217)', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imageInShapeInFooter.docx')
  );
  const thumbnail_data = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'sample.png')
  );
  const report = await createReport(
    {
      template,
      data: {},
      additionalJsContext: {
        injectSvg: () => {
          const svg_data = Buffer.from(
            `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
            <rect x="10" y="10" height="100" width="100" style="stroke:#ff0000; fill: #0000ff"/>
            </svg>`,
            'utf-8'
          );
          const thumbnail = {
            data: thumbnail_data,
            extension: '.png',
          };
          return { width: 6, height: 6, data: svg_data, extension: '.svg', thumbnail };
        },
      },
    },
    'XML'
  );
  expect(report).toMatchSnapshot();
});

it('009 correctly rotate image', async () => {
  const template = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'imageRotation.docx')
  );
  const buff = await fs.promises.readFile(
    path.join(__dirname, 'fixtures', 'sample.png')
  );
  const opts = {
    template,
    data: {},
    additionalJsContext: {
      // no rotation
      getImage: (): ImagePars => ({
        width: 6,
        height: 6,
        data: buff,
        extension: '.png',
      }),
      // 45-degree rotation
      getImage45: (): ImagePars => ({
        width: 6,
        height: 6,
        data: buff,
        extension: '.png',
        rotation: 45,
      }),
      // 180-degree rotation
      getImage180: (): ImagePars => ({
        width: 6,
        height: 6,
        data: buff,
        extension: '.png',
        rotation: 180,
      }),
    },
  };
  expect(await createReport(opts, 'XML')).toMatchSnapshot();
});
the_stack
import * as fs from 'fs'; import * as path from 'path'; import * as cdk from '@aws-cdk/core'; import * as codebuild from '@aws-cdk/aws-codebuild'; import * as codecommit from '@aws-cdk/aws-codecommit'; import * as codepipeline from '@aws-cdk/aws-codepipeline'; import * as actions from '@aws-cdk/aws-codepipeline-actions'; import * as iam from '@aws-cdk/aws-iam'; import * as kms from '@aws-cdk/aws-kms'; import * as lambda from '@aws-cdk/aws-lambda'; import * as s3 from '@aws-cdk/aws-s3'; process.on('unhandledRejection', (reason, _) => { console.error(reason); // eslint-disable-next-line no-process-exit process.exit(1); }); enum RepositorySources { GITHUB = 'github', CODECOMMIT = 'codecommit', } async function main() { const pkg = require('../package.json'); const acceleratorVersion = pkg.version; const app = new cdk.App(); const repoSources = app.node.tryGetContext('repo_source') ? [app.node.tryGetContext('repo_source')] : [RepositorySources.CODECOMMIT, RepositorySources.GITHUB]; for (const repoSource of repoSources) { if (repoSource !== RepositorySources.GITHUB && repoSource !== RepositorySources.CODECOMMIT) { throw new Error( `Invalid value for repo_source: ${repoSource} Must repo_source must be one of [github|codecommit]`, ); } new Installer(app, `InstallerStack-${repoSource}`, { stackName: `AcceleratorInstaller${repoSource === RepositorySources.CODECOMMIT ? 
'-CodeCommit' : ''}`, repoSource, acceleratorVersion, }); } } export namespace Installer { export interface Props extends cdk.StackProps { repoSource: string; acceleratorVersion: string; } } class Installer extends cdk.Stack { constructor(scope: cdk.Construct, id: string, props: Installer.Props) { super(scope, id, props); const { repoSource, acceleratorVersion } = props; const acceleratorPrefixParam = new cdk.CfnParameter(this, 'AcceleratorPrefix', { default: 'ASEA-', description: 'Accelerator prefix used for deployment.', allowedPattern: '[a-zA-Z][a-zA-Z0-9-]{0,8}-', }); const acceleratorNameParam = new cdk.CfnParameter(this, 'AcceleratorName', { default: 'ASEA', description: 'Accelerator Name used for deployment.', allowedPattern: '[a-zA-Z][a-zA-Z0-9]{0,3}', }); const acceleratorName = acceleratorNameParam.valueAsString; const acceleratorPrefix = acceleratorPrefixParam.valueAsString; const acceleratorConfigS3Bucket = new cdk.CfnParameter(this, 'ConfigS3Bucket', { default: 'AWSDOC-EXAMPLE-BUCKET', description: 'The S3 bucket name that contains the initial Accelerator configuration.', }); const configRepositoryName = new cdk.CfnParameter(this, 'ConfigRepositoryName', { default: 'ASEA-Config-Repo', description: 'The AWS CodeCommit repository name that contains the Accelerator configuration.', }); const configBranchName = new cdk.CfnParameter(this, 'ConfigBranchName', { default: 'main', description: 'The AWS CodeCommit branch name that contains the Accelerator configuration', }); const notificationEmail = new cdk.CfnParameter(this, 'Notification Email', { description: 'The notification email that will get Accelerator State Machine execution notifications.', }); const codebuildComputeType = new cdk.CfnParameter(this, 'CodeBuild Compute Type', { description: 'The compute type of the build server for the Accelerator deployments.', default: codebuild.ComputeType.LARGE, allowedValues: [codebuild.ComputeType.MEDIUM, codebuild.ComputeType.LARGE, 
codebuild.ComputeType.X2_LARGE], }); const stackDeployPageSize = new cdk.CfnParameter(this, 'Deployment Page Size', { description: 'The number of stacks to deploy in parallel. This value SHOULD NOT normally be changed.', default: 680, }); const stateMachineName = `${acceleratorPrefix}MainStateMachine_sm`; // The state machine name has to match the name of the state machine in initial setup const stateMachineArn = `arn:aws:states:${this.region}:${this.account}:stateMachine:${stateMachineName}`; // Use the `start-execution.js` script in the assets folder const stateMachineStartExecutionCode = fs.readFileSync(path.join(__dirname, '..', 'assets', 'start-execution.js')); // Use the `save-application-version.js` script in the assets folder const saveApplicationVersionCode = fs.readFileSync( path.join(__dirname, '..', 'assets', 'save-application-version.js'), ); // Use the `validate-parameters.js` script in the assets folder const validateParametersCode = fs.readFileSync(path.join(__dirname, '..', 'assets', 'validate-parameters.js')); // Role that is used by the CodeBuild project const installerProjectRole = new iam.Role(this, 'InstallerProjectRole', { roleName: `${acceleratorPrefix}CB-Installer`, assumedBy: new iam.ServicePrincipal('codebuild.amazonaws.com'), }); // Allow creation of ECR repositories installerProjectRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['ecr:*'], resources: [`arn:aws:ecr:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:repository/aws-cdk/*`], }), ); // Allow getting authorization tokens for ECR installerProjectRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['ecr:GetAuthorizationToken'], resources: [`*`], }), ); installerProjectRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['sts:AssumeRole'], resources: [`arn:aws:iam::${cdk.Aws.ACCOUNT_ID}:role/cdk-*`], }), ); // Allow all CloudFormation permissions installerProjectRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['cloudformation:*'], resources: 
[`arn:aws:cloudformation:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:stack/*`], }), ); // Allow the role to access the CDK asset bucket installerProjectRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['s3:*'], resources: [`arn:aws:s3:::cdk-*`], }), ); // Allow the role to create anything through CloudFormation installerProjectRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['*'], resources: ['*'], conditions: { 'ForAnyValue:StringEquals': { 'aws:CalledVia': ['cloudformation.amazonaws.com'], }, }, }), ); const cfnInstallerProjectRole = installerProjectRole.node.defaultChild as iam.CfnRole; cfnInstallerProjectRole.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W28', // Resource found with an explicit name, this disallows updates that require replacement of this resource reason: 'Using explicit name for installer', }, ], }, }; const cfnInstallerProjectRoleDefaultPolicy = installerProjectRole.node.findChild('DefaultPolicy').node .defaultChild as iam.CfnPolicy; cfnInstallerProjectRoleDefaultPolicy.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'F4', // IAM policy should not allow * action reason: 'Allows cloudformation to generate resources, needs full access', }, { id: 'F39', // IAM policy should not allow * resource with PassRole action reason: 'False error: assumeRole using cdk-*', }, { id: 'W12', // IAM policy should not allow * resource reason: 'Allows cloudformation to generate resources, needs full access', }, { id: 'W76', // SPCM for IAM policy document is higher than 25 reason: 'IAM policy is generated by CDK', }, ], }, }; // Create a CMK that can be used for the CodePipeline artifacts bucket const installerCmk = new kms.Key(this, 'ArtifactsBucketCmk', { enableKeyRotation: true, description: 'ArtifactsBucketCmk', alias: `alias/${acceleratorPrefix}Installer-Key`, }); installerCmk.grantEncryptDecrypt(new iam.AccountRootPrincipal()); // Define a build specification to build the initial setup templates const 
installerProject = new codebuild.PipelineProject(this, 'InstallerProject', { projectName: `${acceleratorPrefix}InstallerProject_pl`, role: installerProjectRole, buildSpec: codebuild.BuildSpec.fromObject({ version: '0.2', phases: { install: { 'runtime-versions': { nodejs: 14, }, // The flag '--unsafe-perm' is necessary to run pnpm scripts in Docker commands: [ 'npm install --global pnpm@6.2.3', 'pnpm install --unsafe-perm --frozen-lockfile', 'pnpm recursive run build --unsafe-perm', ], }, pre_build: { // The flag '--unsafe-perm' is necessary to run pnpm scripts in Docker commands: ['pnpm recursive run build --unsafe-perm'], }, build: { commands: [ 'cd src/core/cdk', 'export CDK_NEW_BOOTSTRAP=1', `pnpx cdk bootstrap aws://${cdk.Aws.ACCOUNT_ID}/${cdk.Aws.REGION} --require-approval never --toolkit-stack-name=${acceleratorPrefix}CDKToolkit --cloudformation-execution-policies=arn:${cdk.Aws.PARTITION}:iam::aws:policy/AdministratorAccess`, `pnpx cdk deploy --require-approval never --toolkit-stack-name=${acceleratorPrefix}CDKToolkit`, ], }, }, }), environment: { buildImage: codebuild.LinuxBuildImage.STANDARD_5_0, privileged: true, // Allow access to the Docker daemon computeType: codebuild.ComputeType.MEDIUM, environmentVariables: { ACCELERATOR_NAME: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: acceleratorName, }, ACCELERATOR_PREFIX: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: acceleratorPrefix, }, ACCELERATOR_STATE_MACHINE_NAME: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: stateMachineName, }, CONFIG_REPOSITORY_NAME: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: configRepositoryName.valueAsString, }, CONFIG_BRANCH_NAME: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: configBranchName.valueAsString, }, CONFIG_S3_BUCKET: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: acceleratorConfigS3Bucket.valueAsString, }, ENABLE_PREBUILT_PROJECT: { type: 
codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: 'true', // Enable Docker prebuilt project }, NOTIFICATION_EMAIL: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: notificationEmail.valueAsString, }, INSTALLER_CMK: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: `alias/${acceleratorPrefix}Installer-Key`, }, BUILD_COMPUTE_TYPE: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: codebuildComputeType.valueAsString, }, DEPLOY_STACK_PAGE_SIZE: { type: codebuild.BuildEnvironmentVariableType.PLAINTEXT, value: stackDeployPageSize.valueAsString, }, }, }, cache: codebuild.Cache.local(codebuild.LocalCacheMode.SOURCE), }); // This artifact is used as output for the Github code and as input for the build step const sourceArtifact = new codepipeline.Artifact(); const repoName = new cdk.CfnParameter(this, 'RepositoryName', { default: 'aws-secure-environment-accelerator', description: 'The name of the git repository containing the Accelerator code.', }); const repoBranch = new cdk.CfnParameter(this, 'RepositoryBranch', { // Github release action sets GITHUB_DEFAULT_BRANCH // Otherwise fall back to 'release' default: process.env.GITHUB_DEFAULT_BRANCH || 'release', description: 'The branch of the git repository containing the Accelerator code.', }); let sourceAction: actions.GitHubSourceAction | actions.CodeCommitSourceAction; // Generic action for Source let repoOwner: string; if (repoSource === RepositorySources.CODECOMMIT) { // Create the CodeCommit source action sourceAction = new actions.CodeCommitSourceAction({ actionName: 'CodeCommitSource', repository: codecommit.Repository.fromRepositoryName(this, 'CodeCommitRepo', repoName.valueAsString), branch: repoBranch.valueAsString, output: sourceArtifact, trigger: actions.CodeCommitTrigger.NONE, }); // Save off values for UpdateVersion action repoOwner = 'CodeCommit'; } else { // Default to GitHub // Additional parameter needed for the GitHub secret const githubOauthSecretId = 
new cdk.CfnParameter(this, 'GithubSecretId', { default: 'accelerator/github-token', description: 'The token to use to access the Github repository.', }); const githubOwner = new cdk.CfnParameter(this, 'GithubOwner', { default: 'aws-samples', description: 'The owner of the Github repository containing the Accelerator code.', }); // Create the GitHub source action sourceAction = new actions.GitHubSourceAction({ actionName: 'GithubSource', owner: githubOwner.valueAsString, repo: repoName.valueAsString, branch: repoBranch.valueAsString, oauthToken: cdk.SecretValue.secretsManager(githubOauthSecretId.valueAsString), output: sourceArtifact, trigger: actions.GitHubTrigger.NONE, }); // Save off values for UpdateVersion action repoOwner = githubOwner.valueAsString; } // The role that will be used to start the state machine const stateMachineExecutionRole = new iam.Role(this, 'ExecutionRoleName', { roleName: `${acceleratorPrefix}L-SFN-Execution`, assumedBy: new iam.ServicePrincipal('lambda.amazonaws.com'), }); // Grant permissions to write logs stateMachineExecutionRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents'], resources: ['*'], }), ); stateMachineExecutionRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['ssm:PutParameter', 'ssm:GetParameter', 'ssm:GetParameterHistory'], resources: ['*'], }), ); stateMachineExecutionRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['cloudformation:DescribeStacks'], resources: ['*'], }), ); // Grant permissions to start the state machine stateMachineExecutionRole.addToPrincipalPolicy( new iam.PolicyStatement({ actions: ['states:StartExecution'], resources: [stateMachineArn], }), ); const cfnStateMachineExecutionRole = stateMachineExecutionRole.node.defaultChild as iam.CfnRole; cfnStateMachineExecutionRole.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W28', // Resource found with an explicit name, this disallows 
updates that require replacement of this resource reason: 'Using explicit name for installer', }, ], }, }; const cfnStateMachineExecutionRoleDefaultPolicy = stateMachineExecutionRole.node.findChild('DefaultPolicy').node .defaultChild as iam.CfnPolicy; cfnStateMachineExecutionRoleDefaultPolicy.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W12', // IAM policy should not allow * resource reason: 'Allows stateMachine to generate resources, needs full access', }, { id: 'W76', // SPCM for IAM policy document is higher than 25 reason: 'IAM policy is generated by CDK', }, ], }, }; // Create the Lambda function that is responsible for launching the state machine const stateMachineStartExecutionLambda = new lambda.Function(this, 'ExecutionLambda', { functionName: `${acceleratorPrefix}Installer-StartExecution`, role: stateMachineExecutionRole, // Inline code is only allowed for Node.js version 12 runtime: lambda.Runtime.NODEJS_12_X, code: lambda.Code.fromInline(stateMachineStartExecutionCode.toString()), handler: 'index.handler', }); const cfnStateMachineStartExecutionLambda = stateMachineStartExecutionLambda.node .defaultChild as lambda.CfnFunction; cfnStateMachineStartExecutionLambda.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W58', // Lambda functions require permission to write CloudWatch Logs reason: 'CloudWatch Logs not required for installer', }, { id: 'W89', // Lambda functions should be deployed inside a VPC reason: 'Lambda inside VPC not required for installer', }, { id: 'W92', // Lambda functions should define ReservedConcurrentExecutions to reserve simultaneous executions reason: 'ReservedConcurrentExecutions not required for installer', }, ], }, }; // Create the Lambda function that is responsible for launching the state machine const saveApplicationVersionLambda = new lambda.Function(this, 'SaveApplicationVersionLambda', { functionName: `${acceleratorPrefix}Installer-SaveApplicationVersion`, role: stateMachineExecutionRole, // 
Inline code is only allowed for Node.js version 12 runtime: lambda.Runtime.NODEJS_12_X, code: lambda.Code.fromInline(saveApplicationVersionCode.toString()), handler: 'index.handler', }); const cfnSaveApplicationVersionLambda = saveApplicationVersionLambda.node.defaultChild as lambda.CfnFunction; cfnSaveApplicationVersionLambda.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W58', // Lambda functions require permission to write CloudWatch Logs reason: 'CloudWatch Logs not required for installer', }, { id: 'W89', // Lambda functions should be deployed inside a VPC reason: 'Lambda inside VPC not required for installer', }, { id: 'W92', // Lambda functions should define ReservedConcurrentExecutions to reserve simultaneous executions reason: 'ReservedConcurrentExecutions not required for installer', }, ], }, }; // Create the Lambda function that is responsible for validating previous parameters const validateParametersLambda = new lambda.Function(this, 'ValidateParametersLambda', { functionName: `${acceleratorPrefix}Installer-ValidateParameters`, role: stateMachineExecutionRole, // Inline code is only allowed for Node.js version 12 runtime: lambda.Runtime.NODEJS_12_X, code: lambda.Code.fromInline(validateParametersCode.toString()), handler: 'index.handler', }); const cfnValidateParametersLambda = validateParametersLambda.node.defaultChild as lambda.CfnFunction; cfnValidateParametersLambda.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W58', // Lambda functions require permission to write CloudWatch Logs reason: 'CloudWatch Logs not required for installer', }, { id: 'W89', // Lambda functions should be deployed inside a VPC reason: 'Lambda inside VPC not required for installer', }, { id: 'W92', // Lambda functions should define ReservedConcurrentExecutions to reserve simultaneous executions reason: 'ReservedConcurrentExecutions not required for installer', }, ], }, }; // Role that is used by the CodePipeline // Permissions for // - 
accessing the artifacts bucket // - publishing to the manual approval SNS topic // - running the CodeBuild project // - running the state machine execution Lambda function // will be added automatically by the CDK Pipeline construct const installerPipelineRole = new iam.Role(this, 'InstallerPipelineRole', { roleName: `${acceleratorPrefix}CP-Installer`, assumedBy: new iam.ServicePrincipal('codepipeline.amazonaws.com'), }); // This bucket will be used to store the CodePipeline source const installerArtifactsBucket = new s3.Bucket(this, 'ArtifactsBucket', { removalPolicy: cdk.RemovalPolicy.DESTROY, encryption: s3.BucketEncryption.KMS, encryptionKey: installerCmk, blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, versioned: true, objectOwnership: s3.ObjectOwnership.BUCKET_OWNER_PREFERRED, }); const cfnInstallerArtifactsBucket = installerArtifactsBucket.node.defaultChild as s3.CfnBucket; cfnInstallerArtifactsBucket.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W35', // S3 Bucket should have access logging configured reason: 'Access logs not required for installer', }, ], }, }; // Allow only https requests installerArtifactsBucket.addToResourcePolicy( new iam.PolicyStatement({ actions: ['s3:*'], resources: [installerArtifactsBucket.bucketArn, installerArtifactsBucket.arnForObjects('*')], principals: [new iam.AnyPrincipal()], conditions: { Bool: { 'aws:SecureTransport': 'false', }, }, effect: iam.Effect.DENY, }), ); const installerPipeline = new codepipeline.Pipeline(this, 'Pipeline', { role: installerPipelineRole, pipelineName: `${acceleratorPrefix}InstallerPipeline`, artifactBucket: installerArtifactsBucket, stages: [ { stageName: 'Source', actions: [sourceAction], }, { stageName: 'ValidateParameters', actions: [ new actions.LambdaInvokeAction({ actionName: 'ValidateParameters', lambda: validateParametersLambda, role: installerPipelineRole, userParameters: { acceleratorName, acceleratorPrefix, }, }), ], }, { stageName: 'Deploy', actions: [ new 
actions.CodeBuildAction({ actionName: 'DeployAccelerator', project: installerProject, input: sourceArtifact, role: installerPipelineRole, }), ], }, { stageName: 'UpdateVersion', actions: [ new actions.LambdaInvokeAction({ actionName: 'UpdateVersion', lambda: saveApplicationVersionLambda, role: installerPipelineRole, userParameters: { commitId: sourceAction.variables.commitId, repository: repoName, owner: repoOwner, branch: repoBranch, acceleratorVersion, acceleratorName, acceleratorPrefix, }, }), ], }, { stageName: 'Execute', actions: [ new actions.LambdaInvokeAction({ actionName: 'ExecuteAcceleratorStateMachine', lambda: stateMachineStartExecutionLambda, role: installerPipelineRole, userParameters: { stateMachineArn, }, }), ], }, ], }); cdk.Aspects.of(this).add(new cdk.Tag('Accelerator', `${acceleratorName}1`)); const cfnInstallerPipelineRole = installerPipelineRole.node.defaultChild as iam.CfnRole; cfnInstallerPipelineRole.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W28', // Resource found with an explicit name, this disallows updates that require replacement of this resource reason: 'Using explicit name for installer', }, ], }, }; const cfnInstallerPipelineRoleDefaultPolicy = installerPipeline.role.node.findChild('DefaultPolicy').node .defaultChild as iam.CfnPolicy; cfnInstallerPipelineRoleDefaultPolicy.cfnOptions.metadata = { cfn_nag: { rules_to_suppress: [ { id: 'W12', // IAM policy should not allow * resource reason: 'Allows CodePipeline to generate resources, needs full access', }, { id: 'W76', // SPCM for IAM policy document is higher than 25 reason: 'IAM policy is generated by CDK', }, ], }, }; } } // eslint-disable-next-line @typescript-eslint/no-floating-promises main();
the_stack
// Jest suite for the po-gettext catalog format: write/read round-trips, comment
// handling, and ICU <-> gettext plural conversion. Heavily snapshot-based.
import { mockConsole } from "@lingui/jest-mocks"
import fs from "fs"
import mockFs from "mock-fs"
import mockDate from "mockdate"
import path from "path"
import PO from "pofile"

import { CatalogType } from "../catalog"
import format from "./po-gettext"

describe("po-gettext format", () => {
  // Fixed header dates so the snapshot output is deterministic.
  const dateHeaders = {
    "pot-creation-date": "2018-08-09",
    "po-revision-date": "2018-08-09",
  }

  afterEach(() => {
    mockFs.restore()
    mockDate.reset()
  })

  it("should write catalog in pofile format", () => {
    mockFs({
      locale: {
        en: mockFs.directory(),
      },
    })
    // Freeze the clock so the PO revision date in the snapshot is stable.
    mockDate.set("2018-08-27T10:00Z")

    const filename = path.join("locale", "en", "messages.po")
    // One entry per feature being exercised: origins, descriptions, translator
    // comments, obsolete entries, flags and long-line wrapping.
    const catalog: CatalogType = {
      static: {
        translation: "Static message",
      },
      withOrigin: {
        translation: "Message with origin",
        origin: [["src/App.js", 4]],
      },
      withMultipleOrigins: {
        translation: "Message with multiple origin",
        origin: [
          ["src/App.js", 4],
          ["src/Component.js", 2],
        ],
      },
      withDescription: {
        translation: "Message with description",
        extractedComments: [
          "Description is comment from developers to translators",
        ],
      },
      withComments: {
        comments: ["Translator comment", "This one might come from developer"],
        translation: "Support translator comments separately",
      },
      obsolete: {
        translation: "Obsolete message",
        obsolete: true,
      },
      withFlags: {
        flags: ["fuzzy", "otherFlag"],
        translation: "Keeps any flags that are defined",
      },
      veryLongString: {
        translation:
          "One morning, when Gregor Samsa woke from troubled dreams, he found himself" +
          " transformed in his bed into a horrible vermin. He lay on his armour-like" +
          " back, and if he lifted his head a little he could see his brown belly," +
          " slightly domed and divided by arches into stiff sections. The bedding was" +
          " hardly able to cover it and seemed ready to slide off any moment. His many" +
          " legs, pitifully thin compared with the size of the rest of him, waved about" +
          " helplessly as he looked. \"What's happened to me?\" he thought. It wasn't" +
          " a dream. His room, a proper human",
      },
    }
    format.write(filename, catalog, {
      origins: true,
      locale: "en",
      ...dateHeaders,
    })

    const pofile = fs.readFileSync(filename).toString()
    mockFs.restore()
    expect(pofile).toMatchSnapshot()
  })

  it("should read catalog in pofile format", () => {
    const filename = path.join(
      path.resolve(__dirname),
      "fixtures",
      "messages.po"
    )

    const actual = format.read(filename)
    expect(actual).toMatchSnapshot()
  })

  it("should correct badly used comments", () => {
    const po = PO.parse(`
#. First description
#. Second comment
#. Third comment

msgid "withMultipleDescriptions"
msgstr "Extra comments are separated from the first description line"

# Translator comment
#. Single description only
#. Second description?
msgid "withDescriptionAndComments"
msgstr "Second description joins translator comments"
    `)

    mockFs({
      locale: {
        en: {
          "messages.po": po.toString(),
        },
      },
    })
    const filename = path.join("locale", "en", "messages.po")
    const actual = format.read(filename)
    mockFs.restore()
    expect(actual).toMatchSnapshot()
  })

  it("should throw away additional msgstr if present", () => {
    const po = PO.parse(`
msgid "withMultipleTranslations"
msgstr[0] "This is just fine"
msgstr[1] "Throw away that one"
    `)

    mockFs({
      locale: {
        en: {
          "messages.po": po.toString(),
        },
      },
    })
    const filename = path.join("locale", "en", "messages.po")
    mockConsole((console) => {
      const file = fs.readFileSync(filename).toString()
      const actual = format.parse(file)
      // A warning is emitted for the discarded extra msgstr entry.
      expect(console.warn).toHaveBeenCalledWith(
        expect.stringContaining("Multiple translations"),
        "withMultipleTranslations"
      )
      mockFs.restore()
      expect(actual).toMatchSnapshot()
    })
  })

  it("should write the same catalog as it was read", () => {
    const pofile = fs
      .readFileSync(
        path.join(path.resolve(__dirname), "fixtures", "messages.po")
      )
      .toString()

    const filename = path.join(
      path.resolve(__dirname),
      "fixtures",
      "messages.po"
    )
    const catalog = format.read(filename)

    mockFs({
      locale: {
        en: {
          "messages.po": pofile,
        },
      },
    })
    const mock_filename = path.join("locale", "en", "messages.po")
    format.write(mock_filename, catalog, { origins: true, locale: "en" })
    const actual = fs.readFileSync(mock_filename).toString()
    mockFs.restore()

    // on windows mockFs adds ··· to multiline string, so this strictly equal comparison can't be done
    // so we test that the content is the same with all newlines stripped
    expect(actual.replace(/(\r\n|\n|\r)/gm, "")).toEqual(
      pofile.replace(/(\r\n|\n|\r)/gm, "")
    )
  })

  it("should convert ICU plural messages to gettext plurals", function () {
    mockFs({
      locale: {
        en: mockFs.directory(),
      },
    })
    mockDate.set("2018-08-27T10:00Z")

    const filename = path.join("locale", "en", "messages.po")
    const catalog: CatalogType = {
      message_with_id_and_octothorpe: {
        message: "{count, plural, one {Singular} other {Number is #}}",
        translation: "{count, plural, one {Singular} other {Number is #}}",
      },
      message_with_id: {
        message:
          "{someCount, plural, one {Singular case with id\
 and linebreak} other {Case number {someCount} with id}}",
        translation:
          "{someCount, plural, one {Singular case with id} other {Case number {someCount} with id}}",
        extractedComments: [
          "This is a comment by the developers about how the content must be localized.",
        ],
      },
      "{anotherCount, plural, one {Singular case} other {Case number {anotherCount}}}":
        {
          translation:
            "{anotherCount, plural, one {Singular case} other {Case number {anotherCount}}}",
        },
      // Entry with developer-defined ID that generates empty msgstr[] lines
      message_with_id_but_without_translation: {
        message:
          "{count, plural, one {Singular with id but no translation} other {Plural {count} with empty id but no translation}}",
        translation: "",
      },
      // Entry with automatic ID that generates empty msgstr[] lines
      "{count, plural, one {Singular automatic id no translation} other {Plural {count} automatic id no translation}}":
        {
          translation: "",
        },
    }

    format.write(filename, catalog, {
      locale: "en",
    })
    const pofile = fs.readFileSync(filename).toString()
    mockFs.restore()
    expect(pofile).toMatchSnapshot()
  })
it("should convert gettext plurals to ICU plural messages", function () { const pofile = fs .readFileSync( path.join(path.resolve(__dirname), "fixtures", "messages_plural.po") ) .toString() const catalog = format.parse(pofile) expect(catalog).toMatchSnapshot() }) it("should warn when using nested plurals that cannot be represented with gettext plurals", () => { const catalog = { nested_plural_message: { message: `{count, plural, one {{numArticles, plural, one {1 book and 1 article} other {1 book and {numArticles} articles} }} other {{numArticles, plural, one {{numBooks} books and 1 article} other {{numBooks} books and {numArticles} articles} }} }`, translation: `{count, plural, one {{numArticles, plural, one {1 book and 1 article} other {1 book and {numArticles} articles} }} other {{numArticles, plural, one {{numBooks} books and 1 article} other {{numBooks} books and {numArticles} articles} }} }`, }, } mockConsole((console) => { format.serialize(catalog, {}) expect(console.warn).toHaveBeenCalledWith( expect.stringContaining("Nested plurals"), "nested_plural_message" ) }) }) it("should use correct ICU plural cases for languages having an additional plural case for fractions", () => { // This tests the edge case described in https://github.com/lingui/js-lingui/pull/677#issuecomment-737152022 const po = ` msgid "" msgstr "" "Language: cs\n" #. js-lingui:icu=%7B#%2C+plural%2C+one+%7Bday%7D+other+%7Bdays%7D%7D&pluralize_on=# msgid "# day" msgid_plural "# days" msgstr[0] "# den" msgstr[1] "# dny" msgstr[2] "# dní" ` const parsed = format.parse(po) expect(parsed).toEqual({ "{#, plural, one {day} other {days}}": { // Note that the last case must be `other` (the 4th CLDR case name) instead of `many` (the 3rd CLDR case name). 
translation: "{#, plural, one {# den} few {# dny} other {# dní}}", extractedComments: [], context: null, comments: [], obsolete: false, origin: [], flags: [], }, }) }) describe("when using 'select' format", () => { const catalog = { select_message: { message: `{gender, select, male {he} female {she} other {they}`, translation: "", }, } it("should warn", () => { mockConsole((console) => { format.serialize(catalog, {}) expect(console.warn).toHaveBeenCalledWith( expect.stringContaining("select"), "select_message" ) }) }) it("should not warn when disabling the warning in config", () => { mockConsole((console) => { format.serialize(catalog, { disableSelectWarning: true }) expect(console.warn).not.toHaveBeenCalled() }) }) }) describe("when using 'selectOrdinal' format", () => { const catalog = { select_ordinal_message: { message: `{count, selectOrdinal, one {1st} two {2nd} few {3rd} other {#th}}`, translation: "", }, } it("should warn", () => { mockConsole((console) => { format.serialize(catalog, {}) expect(console.warn).toHaveBeenCalledWith( expect.stringContaining("selectOrdinal"), "select_ordinal_message" ) }) }) it("should not warn when disabling the warning in config", () => { mockConsole((console) => { format.serialize(catalog, { disableSelectWarning: true }) expect(console.warn).not.toHaveBeenCalled() }) }) }) describe("convertPluralsToIco handle correctly locales with 4-letter", () => { const pofile = fs .readFileSync( path.join(path.resolve(__dirname), "fixtures", "messages_plural-4-letter.po") ) .toString() const catalog = format.parse(pofile) expect(catalog).toMatchSnapshot() }) })
the_stack
import { Injectable } from '@angular/core'; import { MatDialog } from '@angular/material'; import { Actions, Effect, ofType } from '@ngrx/effects'; import { Action, select, Store } from '@ngrx/store'; import { Observable, of } from 'rxjs'; import { catchError, exhaustMap, map, mergeMap, switchMap, tap, withLatestFrom, } from 'rxjs/operators'; import { SnackBarService } from '../../../core/services/snack-bar.service'; import { DialogService } from '../../../dialog/dialog-service/dialog.service'; import * as fromSO from '../../../reducers/service-offerings/redux/service-offerings.reducers'; import * as fromVM from '../../../reducers/vm/redux/vm.reducers'; import * as fromVolumes from '../../../reducers/volumes/redux/volumes.reducers'; // tslint:disable-next-line import { VolumeSnapshotFromVmSnapshotDialogComponent } from '../../../shared/components/volume-snapshot-from-vm-snapshot-dialog/volume-snapshot-from-vm-snapshot-dialog.component'; import { JobsNotificationService } from '../../../shared/services/jobs-notification.service'; import { TagService } from '../../../shared/services/tags/tag.service'; import { vmSnapshotEntityName, VmSnapshotService, } from '../../../shared/services/vm-snapshot.service'; import { VmSnapshotCreationDialogComponent } from '../../../vm/vm-sidebar/vm-detail/vm-snapshot-creation-dialog/vm-snapshot-creation-dialog.component'; import { VmSnapshotBuilder, vmSnapshotOfferingTagKey } from '../../models'; import { State } from '../../state'; import { Create, CreateCanceled, CreateConfirmed, CreateError, CreateSuccess, CreateVolumeSnapshot, CreateVolumeSnapshotCanceled, CreateVolumeSnapshotConfirmed, CreateVolumeSnapshotError, CreateVolumeSnapshotSuccess, Delete, DeleteCanceled, DeleteConfirmed, DeleteError, DeleteSuccess, Load, LoadError, LoadSuccess, Revert, RevertAllowed, RevertCanceled, RevertConfirmed, RevertError, RevertNotAllowed, RevertSuccess, VmSnapshotActionTypes, } from './vm-snapshots.actions'; import * as vmSnapshotSelectors from 
'./vm-snapshots.selectors'; import { VmState } from '../../../vm'; import { configSelectors } from '../../config'; @Injectable() export class VmSnapshotsEffects { @Effect() loadVmSnapshots$: Observable<Action> = this.actions$.pipe( ofType<Load>(VmSnapshotActionTypes.Load), switchMap(() => this.vmSnapshotsService.getList().pipe( map(snapshots => new LoadSuccess({ snapshots })), catchError(error => of(new LoadError({ error }))), ), ), ); @Effect() createVmSnapshotDialog$: Observable<Action> = this.actions$.pipe( ofType<Create>(VmSnapshotActionTypes.Create), withLatestFrom( this.store.pipe(select(fromVM.getSelectedVM)), this.store.pipe(select(configSelectors.get('vmSnapLimit'))), this.store.pipe(select(vmSnapshotSelectors.getVmSnapshotsNumberForSelectedVm)), ), exhaustMap(([action, selectedVM, vmSnapLimit, snapshotsNumber]) => { if (vmSnapLimit.enable) { if (vmSnapLimit.snapshotsLimit <= snapshotsNumber) { const message = 'ERRORS.VM_SNAPSHOT.LIMIT_EXCEEDED'; this.dialogService.showNotificationsOnFail({ message }, message); return of(new CreateCanceled()); } } if (selectedVM.state === VmState.Stopped) { const message = 'ERRORS.SNAPSHOT.CREATION_UNAVAILABLE_FOR_STOPPED'; this.dialogService.showNotificationsOnFail({ message }, message); return of(new CreateCanceled()); } if (selectedVM.state === VmState.Destroyed) { const message = 'ERRORS.SNAPSHOT.CREATION_UNAVAILABLE_FOR_DELETED'; this.dialogService.showNotificationsOnFail({ message }, message); return of(new CreateCanceled()); } return this.matDialog .open(VmSnapshotCreationDialogComponent, { width: '400px', disableClose: true }) .afterClosed() .pipe( map(result => { if (result) { return new CreateConfirmed({ vmId: action.payload.vmId, name: result.name, description: result.description, snapshotMemory: result.snapshotMemory, }); } return new CreateCanceled(); }), ); }), ); @Effect() createVmSnapshot$: Observable<Action> = this.actions$.pipe( ofType<CreateConfirmed>(VmSnapshotActionTypes.CreateConfirmed), map(action => 
action.payload), mergeMap(payload => { const notificationId = this.jobsNotificationService.add( 'NOTIFICATIONS.VM_SNAPSHOTS.TAKE_VM_SNAP_IN_PROGRESS', ); const params = { virtualmachineid: payload.vmId, description: payload.description, name: payload.name, snapshotmemory: payload.snapshotMemory, }; return this.vmSnapshotsService.create(params).pipe( tap(() => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.TAKE_VM_SNAP_DONE'; this.showNotificationsOnFinish(message, notificationId); }), map(vmSnapshot => new CreateSuccess({ vmSnapshot })), catchError(error => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.TAKE_VM_SNAP_FAILED'; this.dialogService.showNotificationsOnFail(error, message, notificationId); return of(new CreateError({ error })); }), ); }), ); /** * Save compute offering that the VM has at time of the VM snapshot creation */ @Effect({ dispatch: false }) createVmSnapshotTag$ = this.actions$.pipe( ofType<CreateSuccess>(VmSnapshotActionTypes.CreateSuccess), withLatestFrom( this.store.pipe(select(fromVM.selectEntities)), this.store.pipe(select(fromSO.selectEntities)), ), map(([action, vmEntities, soEntities]) => { const vm = vmEntities[action.payload.vmSnapshot.virtualmachineid]; const so = soEntities[vm.serviceofferingid]; return { vm, vmSnapshotId: action.payload.vmSnapshot.id, isCustomizedOffering: so == null ? 
        undefined : so.iscustomized,
      };
    }),
    tap(({ vm, vmSnapshotId, isCustomizedOffering }) => {
      // Serialize the VM's current offering into a tag on the snapshot so a
      // later revert can verify offering compatibility (the tag is read back
      // by the revert-possibility check elsewhere in this class).
      const vmSnapshotOffering = VmSnapshotBuilder.create(vm, isCustomizedOffering);
      const params = {
        resourceids: vmSnapshotId,
        resourcetype: vmSnapshotEntityName,
        'tags[0].key': vmSnapshotOfferingTagKey,
        'tags[0].value': vmSnapshotOffering.toString(),
      };
      // NOTE(review): catchError(of) re-emits the error object as a normal
      // value, so tag-creation failures are silently swallowed. Tagging looks
      // deliberately best-effort here — confirm before changing.
      this.tagService
        .create(params)
        .pipe(catchError(of))
        .subscribe();
    }),
  );

  // Opens a dialog listing the VM's volumes so the user can pick which volume
  // to snapshot from the VM snapshot; dispatches Confirmed or Canceled.
  @Effect()
  createVolumeSnapshotFromVmSnapshotDialog$: Observable<Action> = this.actions$.pipe(
    ofType<CreateVolumeSnapshot>(VmSnapshotActionTypes.CreateVolumeSnapshot),
    withLatestFrom(
      this.store.pipe(select(vmSnapshotSelectors.selectEntities)),
      this.store.pipe(select(fromVolumes.selectAll)),
    ),
    map(([action, vmSnapshotEntities, volumes]) => {
      const snapshotId = action.payload.snapshotId;
      // Only offer volumes attached to the VM the snapshot belongs to.
      const vmId = vmSnapshotEntities[snapshotId].virtualmachineid;
      const vmVolumes = volumes.filter(volume => volume.virtualmachineid === vmId);
      return {
        snapshotId,
        volumes: vmVolumes,
      };
    }),
    // exhaustMap: ignore further CreateVolumeSnapshot actions while a dialog is open.
    exhaustMap(({ snapshotId, volumes }) => {
      return this.matDialog
        .open(VolumeSnapshotFromVmSnapshotDialogComponent, { width: '350px', data: { volumes } })
        .afterClosed()
        .pipe(
          map(result => {
            if (result) {
              return new CreateVolumeSnapshotConfirmed({
                vmsnapshotid: snapshotId,
                volumeid: result.volumeId,
                name: result.name,
              });
            }
            // Dialog dismissed without a result.
            return new CreateVolumeSnapshotCanceled();
          }),
        );
    }),
  );

  // Performs the actual volume-snapshot creation after user confirmation,
  // with progress/success/failure job notifications.
  @Effect()
  createVolumeSnapshotFromVmSnapshot$: Observable<Action> = this.actions$.pipe(
    ofType<CreateVolumeSnapshotConfirmed>(VmSnapshotActionTypes.CreateVolumeSnapshotConfirmed),
    mergeMap(action => {
      const notificationId = this.jobsNotificationService.add(
        'NOTIFICATIONS.VM_SNAPSHOTS.TAKE_SNAP_FROM_VM_SNAP_IN_PROGRESS',
      );
      return this.vmSnapshotsService.createSnapshotFromVMSnapshot(action.payload).pipe(
        tap(() => {
          const message = 'NOTIFICATIONS.VM_SNAPSHOTS.TAKE_SNAP_FROM_VM_SNAP_DONE';
          this.showNotificationsOnFinish(message, notificationId);
        }),
        // todo it returns snapshot need to update
        map(() => new
CreateVolumeSnapshotSuccess({ todo: '' })), catchError(error => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.TAKE_SNAP_FROM_VM_SNAP_FAILED'; this.dialogService.showNotificationsOnFail(error, message, notificationId); return of(new CreateVolumeSnapshotError({ error })); }), ); }), ); @Effect() deleteConfirmation$: Observable<Action> = this.actions$.pipe( ofType<Delete>(VmSnapshotActionTypes.Delete), exhaustMap(action => { const message = 'DIALOG_MESSAGES.VM_SNAPSHOT.CONFIRM_DELETION'; return this.dialogService.confirm({ message }).pipe( map(confirmed => { if (confirmed) { return new DeleteConfirmed({ id: action.payload.id }); } return new DeleteCanceled(); }), ); }), ); @Effect() deleteVmSnapshot$: Observable<Action> = this.actions$.pipe( ofType<DeleteConfirmed>(VmSnapshotActionTypes.DeleteConfirmed), mergeMap(action => { const notificationId = this.jobsNotificationService.add( 'NOTIFICATIONS.VM_SNAPSHOTS.DELETION_IN_PROGRESS', ); return this.vmSnapshotsService.delete(action.payload.id).pipe( tap(() => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.DELETION_DONE'; this.showNotificationsOnFinish(message, notificationId); }), map(id => new DeleteSuccess({ id })), catchError(error => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.DELETION_FAILED'; this.dialogService.showNotificationsOnFail(error, message, notificationId); return of(new DeleteError({ error })); }), ); }), ); @Effect() checkRevertPossibility$: Observable<Action> = this.actions$.pipe( ofType<Revert>(VmSnapshotActionTypes.Revert), withLatestFrom( this.store.pipe(select(vmSnapshotSelectors.selectEntities)), this.store.pipe(select(fromVM.selectEntities)), ), map(([action, vmSnapshotsEntities, vmEntities]) => { const vmSnapshot = vmSnapshotsEntities[action.payload.id]; const vm = vmEntities[vmSnapshot.virtualmachineid]; return { vm, vmSnapshotId: vmSnapshot.id, }; }), exhaustMap(({ vmSnapshotId, vm }) => { const params = { resourceid: vmSnapshotId, resourcetype: vmSnapshotEntityName, key: 
vmSnapshotOfferingTagKey, }; return this.tagService.getList(params).pipe( /** * If we are unable to verify if revert is allowed then we dispatch RevertAllowed. * In this case, the user will get a server error if the revert is not allowed. */ map(tags => { const tag = tags[0]; if (tag == null) { return new RevertAllowed({ id: vmSnapshotId }); } const vmSnapshotOffering = VmSnapshotBuilder.createFromTagValue(tag.value); if (vmSnapshotOffering.isValidForVm(vm)) { return new RevertAllowed({ id: vmSnapshotId }); } return new RevertNotAllowed(); }), catchError(() => of(new RevertAllowed({ id: vmSnapshotId }))), ); }), ); @Effect({ dispatch: false }) revertNotAllowedMessage$ = this.actions$.pipe( ofType<RevertNotAllowed>(VmSnapshotActionTypes.RevertNotAllowed), tap(() => { this.dialogService.alert({ message: 'DIALOG_MESSAGES.VM_SNAPSHOT.REVERT_NOT_ALLOWED' }); }), ); @Effect() revertConfirmation$: Observable<Action> = this.actions$.pipe( ofType<Revert>(VmSnapshotActionTypes.RevertAllowed), exhaustMap(action => { const message = 'DIALOG_MESSAGES.VM_SNAPSHOT.CONFIRM_REVERTING'; return this.dialogService.confirm({ message }).pipe( map(confirmed => { if (confirmed) { return new RevertConfirmed({ id: action.payload.id }); } return new RevertCanceled(); }), ); }), ); @Effect() revert$: Observable<Action> = this.actions$.pipe( ofType<RevertConfirmed>(VmSnapshotActionTypes.RevertConfirmed), mergeMap(action => { const notificationId = this.jobsNotificationService.add( 'NOTIFICATIONS.VM_SNAPSHOTS.REVERT_IN_PROGRESS', ); return this.vmSnapshotsService.revert(action.payload.id).pipe( tap(() => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.REVERT_DONE'; this.showNotificationsOnFinish(message, notificationId); }), map(vm => new RevertSuccess({ vm })), catchError(error => { const message = 'NOTIFICATIONS.VM_SNAPSHOTS.REVERT_FAILED'; this.dialogService.showNotificationsOnFail(error, message, notificationId); return of(new RevertError({ error })); }), ); }), ); constructor( private 
actions$: Actions, private vmSnapshotsService: VmSnapshotService, private dialogService: DialogService, private jobsNotificationService: JobsNotificationService, private snackBarService: SnackBarService, private matDialog: MatDialog, private store: Store<State>, private tagService: TagService, ) {} private showNotificationsOnFinish(message: string, jobNotificationId?: string) { if (jobNotificationId) { this.jobsNotificationService.finish({ message, id: jobNotificationId, }); } this.snackBarService.open(message).subscribe(); } }
the_stack
import { assert } from 'chai'; import { Article, Category, Comment, CommentSummaryScore, MODERATION_ACTION_ACCEPT, MODERATION_ACTION_DEFER, MODERATION_ACTION_HIGHLIGHT, MODERATION_ACTION_REJECT, ModerationRule, Tag, } from '../../models'; import { compileScores, processRulesForComment, resolveComment, } from '../../pipeline/rules'; import { createArticle, createCategory, createComment, createCommentSummaryScore, createModerationRule, createTag, getCommentSummaryScoreData, getTagData, } from '../domain/comments/fixture'; describe('Pipeline Rules Tests', () => { beforeEach(async () => { await CommentSummaryScore.destroy({where: {}}); await Comment.destroy({where: {}}); await ModerationRule.destroy({where: {}}); await Tag.destroy({where: {}}); await Article.destroy({where: {}}); await Category.destroy({where: {}}); }); describe('compileScores', () => { it('should return an object of scores keyed by tag id', () => { const tag1 = Tag.build(getTagData()); tag1.id = 1; const tag2 = Tag.build(getTagData()); tag2.id = 2; const score1 = CommentSummaryScore.build(getCommentSummaryScoreData({tagId: 1, score: 0.57})); const score2 = CommentSummaryScore.build(getCommentSummaryScoreData({tagId: 2, score: 0.75})); const scores = [score1, score2]; const expected = { 1: 0.57, 2: 0.75, }; assert.deepEqual(compileScores(scores), expected); }); it('should get the max scores with the same tag', () => { const tag1 = Tag.build(getTagData()); tag1.id = 1; const tag2 = Tag.build(getTagData()); tag2.id = 2 ; const score1 = CommentSummaryScore.build(getCommentSummaryScoreData({tagId: 1, score: 0.5})); const score2 = CommentSummaryScore.build(getCommentSummaryScoreData({tagId: 2, score: 0.6})); const score3 = CommentSummaryScore.build(getCommentSummaryScoreData({tagId: 2, score: 0.8})); const scores = [score1, score2, score3]; const expected = { 1: 0.5, 2: 0.8, }; assert.deepEqual(compileScores(scores), expected); }); }); describe('resolveComment', () => { let comment: any; beforeEach(async () 
=> { const category = await createCategory(); const article = await createArticle({ categoryId: category.id }); comment = await createComment({ articleId: article.id, maxSummaryScore: 0.8, }); }); it('should accept a comment when a single "accept" action is ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 1, score: 0.5, }), ]; const rules = [ ModerationRule.build({ tagId: 1, lowerThreshold: 0.4, upperThreshold: 0.6, action: MODERATION_ACTION_ACCEPT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isTrue(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); // Rules shouldn't updated lastModeratedAt const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should accept a comment when a single "accept" rule for Summary Score', async () => { const summaryTag = await Tag.build({ label: 'Summary Score', key: 'SUMMARY_SCORE', }); const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: summaryTag.id, score: comment.maxSummaryScore, }), ]; const rules = [ ModerationRule.build({ tagId: summaryTag.id, lowerThreshold: 0, upperThreshold: 1, action: MODERATION_ACTION_ACCEPT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isTrue(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should accept a comment when unanimous "accept" actions are ruled', 
async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 3, score: 0.8, }), ]; const rules = [ ModerationRule.build({ tagId: 3, lowerThreshold: 0.7, upperThreshold: 0.9, action: MODERATION_ACTION_ACCEPT, }), ModerationRule.build({ tagId: 3, lowerThreshold: 0.7, upperThreshold: 0.8, action: MODERATION_ACTION_ACCEPT, }), // This should be ignored ModerationRule.build({ tagId: 3, lowerThreshold: 0.5, upperThreshold: 0.7, action: MODERATION_ACTION_REJECT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isTrue(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should accept and highlight a comment when both "accept" and "highlight" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 1, score: 0.95, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 3, score: 0.8, }), ]; const rules = [ ModerationRule.build({ tagId: 1, lowerThreshold: 0.7, upperThreshold: 1, action: MODERATION_ACTION_HIGHLIGHT, }), ModerationRule.build({ tagId: 3, lowerThreshold: 0.7, upperThreshold: 0.8, action: MODERATION_ACTION_ACCEPT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isTrue(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isTrue(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should defer a comment when both "accept" 
and "reject" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 1, score: 0.9, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 2, score: 0.8, }), ]; const rules = [ ModerationRule.build({ tagId: 1, lowerThreshold: 0.8, upperThreshold: 0.9, action: MODERATION_ACTION_REJECT, }), ModerationRule.build({ tagId: 2, lowerThreshold: 0.7, upperThreshold: 0.8, action: MODERATION_ACTION_ACCEPT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isTrue(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should reject when a "reject" action is ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 29, score: 0.64, }), ]; const rules = [ ModerationRule.build({ tagId: 29, lowerThreshold: 0.5, upperThreshold: 1, action: MODERATION_ACTION_REJECT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isFalse(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should reject when multiple "reject" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 46, score: 0.98, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 83, score: 0.87, }), ]; const rules = [ ModerationRule.build({ tagId: 46, 
lowerThreshold: 0.9, upperThreshold: 1, action: MODERATION_ACTION_REJECT, }), ModerationRule.build({ tagId: 83, lowerThreshold: 0.5, upperThreshold: 1, action: MODERATION_ACTION_REJECT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isFalse(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should defer when a "defer" action is ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 15, score: 0.64, }), ]; const rules = [ ModerationRule.build({ tagId: 15, lowerThreshold: 0.5, upperThreshold: 1, action: MODERATION_ACTION_DEFER, }), ]; await resolveComment(comment, scores, rules); const updated = (await Comment.findByPk(comment.id)); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isTrue(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should defer when both "accept" and "defer" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 217, score: 0.45, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 415, score: 0.67, }), ]; const rules = [ ModerationRule.build({ tagId: 217, lowerThreshold: 0.4, upperThreshold: 0.9, action: MODERATION_ACTION_ACCEPT, }), ModerationRule.build({ tagId: 415, lowerThreshold: 0.5, upperThreshold: 0.7, action: MODERATION_ACTION_DEFER, }), // Should be ignored... 
ModerationRule.build({ tagId: 415, lowerThreshold: 0.7, upperThreshold: 0.9, action: MODERATION_ACTION_REJECT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isTrue(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should defer when "accept", "reject", and "defer" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 91, score: 0.31, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 294, score: 0.64, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 19, score: 0.85, }), ]; const rules = [ ModerationRule.build({ tagId: 91, lowerThreshold: 0.8, upperThreshold: 0.9, action: MODERATION_ACTION_ACCEPT, }), ModerationRule.build({ tagId: 294, lowerThreshold: 0.7, upperThreshold: 0.8, action: MODERATION_ACTION_REJECT, }), ModerationRule.build({ tagId: 19, lowerThreshold: 0.8, upperThreshold: 1, action: MODERATION_ACTION_DEFER, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isTrue(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should highlight a comment if both "accept" and "highlight" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 81, score: 0.31, }), CommentSummaryScore.build({ commentId: comment.id, 
tagId: 901, score: 0.64, }), ]; const rules = [ ModerationRule.build({ tagId: 901, lowerThreshold: 0.1, upperThreshold: 0.9, action: MODERATION_ACTION_ACCEPT, }), ModerationRule.build({ tagId: 81, lowerThreshold: 0.3, upperThreshold: 0.4, action: MODERATION_ACTION_HIGHLIGHT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isTrue(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isTrue(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should defer and not highlight a comment if both "reject" and "highlight" actions are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 2, score: 0.87, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 4, score: 0.43, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 6, score: 0.91, }), ]; const rules = [ ModerationRule.build({ tagId: 2, lowerThreshold: 0.8, upperThreshold: 0.9, action: MODERATION_ACTION_REJECT, }), ModerationRule.build({ tagId: 4, lowerThreshold: 0.3, upperThreshold: 0.5, action: MODERATION_ACTION_HIGHLIGHT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted, 'isAccepted'); assert.isTrue(updated.isAutoResolved, 'isAutoResolved'); assert.isFalse(updated.isHighlighted, 'isHighlighted'); assert.isTrue(updated.isDeferred, 'isDeferred'); assert.isTrue(updated.isModerated, 'isModerated'); assert.isFalse(updated.isScored, 'isScored'); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should defer and not highlight a comment if both "defer" and "highlight" actions 
are ruled', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 5, score: 0.16, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 10, score: 0.92, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 15, score: 0.27, }), ]; const rules = [ ModerationRule.build({ tagId: 15, lowerThreshold: 0.2, upperThreshold: 0.3, action: MODERATION_ACTION_DEFER, }), ModerationRule.build({ tagId: 10, lowerThreshold: 0.9, upperThreshold: 1, action: MODERATION_ACTION_HIGHLIGHT, }), // Should be ignored ModerationRule.build({ tagId: 5, lowerThreshold: 0.3, upperThreshold: 0.5, action: MODERATION_ACTION_ACCEPT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted, 'isAccepted'); assert.isTrue(updated.isAutoResolved, 'isAutoResolved'); assert.isFalse(updated.isHighlighted, 'isHighlighted'); assert.isTrue(updated.isDeferred, 'isDeferred'); assert.isTrue(updated.isModerated, 'isModerated'); assert.isFalse(updated.isScored, 'isScored'); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should do nothing to the comment if no rules match', async () => { const scores = [ CommentSummaryScore.build({ commentId: comment.id, tagId: 4, score: 0.16, }), CommentSummaryScore.build({ commentId: comment.id, tagId: 12, score: 0.92, }), ]; const rules = [ ModerationRule.build({ tagId: 12, lowerThreshold: 0.2, upperThreshold: 0.3, action: MODERATION_ACTION_REJECT, }), ModerationRule.build({ tagId: 4, lowerThreshold: 0.9, upperThreshold: 1, action: MODERATION_ACTION_HIGHLIGHT, }), ]; await resolveComment(comment, scores, rules); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted, 'isAccepted'); assert.isFalse(updated.isAutoResolved, 'isAutoResolved'); assert.isFalse(updated.isHighlighted, 
'isHighlighted'); assert.isFalse(updated.isDeferred, 'isDeferred'); assert.isFalse(updated.isModerated, 'isModerated'); assert.isFalse(updated.isScored, 'isScored'); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); }); describe('processRulesForComment', () => { it('should do nothing if no matching rules are found', async () => { const category = await createCategory(); const article = await createArticle({ categoryId: category.id }); const comment = await createComment({ articleId: article.id }); await processRulesForComment(comment); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); assert.isFalse(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isFalse(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should do nothing if no comment scores are found', async () => { const category = await createCategory(); const article = await createArticle({ categoryId: category.id }); const comment = await createComment({ articleId: article.id }); const [tag1, tag2] = await Promise.all([ createTag(), createTag(), ]); await Promise.all([ createModerationRule({ tagId: tag1.id }), createModerationRule({ tagId: tag2.id }), ]); await processRulesForComment(comment); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); assert.isFalse(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isFalse(updated.isModerated); assert.isFalse(updated.isScored); } }); it('should mark a comment accepted for matching rules', async () => { const category = await createCategory(); const article = await createArticle({ categoryId: category.id }); const comment = await 
createComment({ articleId: article.id }); const [tag1, tag2] = await Promise.all([ createTag(), createTag(), ]); await Promise.all([ createModerationRule({ tagId: tag1.id, lowerThreshold: 0.5, upperThreshold: 1, action: MODERATION_ACTION_ACCEPT, }), createModerationRule({ tagId: tag2.id, lowerThreshold: 0.25, upperThreshold: 0.75, action: MODERATION_ACTION_ACCEPT, }), ]); await Promise.all([ createCommentSummaryScore({ commentId: comment.id, tagId: tag1.id, score: 0.75, }), createCommentSummaryScore({ commentId: comment.id, tagId: tag2.id, score: 0.5, }), ]); await processRulesForComment(comment); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isTrue(updated.isAccepted); assert.isTrue(updated.isAutoResolved); assert.isFalse(updated.isHighlighted); assert.isFalse(updated.isDeferred); assert.isTrue(updated.isModerated); assert.isFalse(updated.isScored); const updatedArticle = await updated.getArticle(); assert.isNull(updatedArticle!.lastModeratedAt); } }); it('should do nothing if article has disabled rule processing', async () => { const category = await createCategory(); const article = await createArticle({ categoryId: category.id, isAutoModerated: false }); const comment = await createComment({ articleId: article.id }); const [tag1, tag2] = await Promise.all([ createTag(), createTag(), ]); await Promise.all([ createModerationRule({ tagId: tag1.id, lowerThreshold: 0.5, upperThreshold: 1, action: MODERATION_ACTION_ACCEPT, }), createModerationRule({ tagId: tag2.id, lowerThreshold: 0.25, upperThreshold: 0.75, action: MODERATION_ACTION_ACCEPT, }), ]); await Promise.all([ createCommentSummaryScore({ commentId: comment.id, tagId: tag1.id, score: 0.75, }), createCommentSummaryScore({ commentId: comment.id, tagId: tag2.id, score: 0.5, }), ]); await processRulesForComment(comment); const updated = await Comment.findByPk(comment.id); assert.isNotNull(updated); if (updated) { assert.isNull(updated.isAccepted); } }); }); 
});
the_stack
import fs from "fs"; import https from "https"; import fetch from "node-fetch"; import path from "path"; import ProgressBar from "progress"; import shell from "shelljs"; import tar from "tar"; import { promisify } from "util"; import { getModelType, ModelType } from "./models/model"; import { ROOT_DIR } from "./qa-options"; import { LocalRuntime, RuntimeType } from "./runtimes/runtime"; export const exists = promisify(fs.exists); /** * Ensures a directory exists, creates as needed. */ export async function ensureDir(dirPath: string, recursive = true): Promise<void> { if (!(await exists(dirPath))) { recursive ? shell.mkdir("-p", dirPath) : shell.mkdir(dirPath); } } export function isOneDimensional(arr: number[] | number[][]): arr is number[] { return !Array.isArray(arr[0]); } export interface DownloadOptions { model: string; dir: string; format?: RuntimeType; force?: boolean; fullDir?: boolean; } /** * Download a model with associated vocabulary files * @param options Download options */ export async function downloadModelWithVocab(options: DownloadOptions): Promise<void> { const modelFormat = options.format ?? RuntimeType.SavedModel; const assetsDir = getAbsolutePath(options.dir); const modelDir = path.join(assetsDir, options.model); if (options.force) { shell.rm("-rf", modelDir); } if (modelFormat !== RuntimeType.Remote) { await downloadModel({ dir: modelDir, format: modelFormat, name: options.model, verbose: true }); } await getVocab({ dir: modelDir, modelName: options.model, verbose: true }); shell.echo("\nModel successfully downloaded!"); } export interface ModelDownloadOptions { /** * Absolute path to the directory under which download model */ dir: string; format: LocalRuntime; name: string; verbose?: boolean; } export async function downloadModel(model: ModelDownloadOptions): Promise<void> { const modelDir = path.join( model.dir, model.format === RuntimeType.TFJS ? 
RuntimeType.TFJS : "" ); if (await exists(modelDir)) { const exit = (): void => void model.verbose && shell.echo( `Model ${model.name} (format: ${model.format}) already exists, doing nothing...` ); if (model.format === RuntimeType.TFJS) { return exit(); } else if (model.format === RuntimeType.SavedModel) { if (await exists(path.join(modelDir, "saved_model.pb"))) { return exit(); } } } await ensureDir(modelDir); shell.echo("Downloading model..."); let url: string; // eslint-disable-next-line @typescript-eslint/no-use-before-define if (HF_MODELS_MAPPING[model.name]) { // eslint-disable-next-line @typescript-eslint/no-use-before-define const defaultUrl = HF_MODELS_MAPPING[model.name][model.format]; if (!defaultUrl) { throw new Error( `This model does not appear to be available in ${model.format} format` ); } url = defaultUrl; } else { url = getHfUrl(model.name, `${model.format}.tar.gz`); } await new Promise((resolve, reject) => { // eslint-disable-next-line @typescript-eslint/no-non-null-assertion https.get(url, res => { const bar = new ProgressBar("[:bar] :percent :etas", { width: 30, total: parseInt(res.headers["content-length"] ?? 
"0", 10) }); res .on("data", chunk => bar.tick(chunk.length)) .pipe(tar.x({ cwd: modelDir })) .on("close", resolve) .on("error", reject); }); }); } export interface VocabDownloadOptions extends VocabFiles { /** * Absolute path to the directory under which download vocab files */ dir: string; modelName: string; verbose?: boolean; } interface VocabFiles { /** * Name of the merges file (if applicable to the tokenizer) * @default "merges.txt" */ mergesFile?: string; /** * Name of the vocab file (if applicable to the tokenizer) * @default "vocab.txt" | "vocab.json" */ vocabFile?: string; } type VocabFilesKey = keyof VocabFiles; type ConfigFilesKey = "tokenizer_config.json" | "special_tokens_map.json"; const VOCAB_CONFIG_KEYS: ConfigFilesKey[] = [ "tokenizer_config.json", "special_tokens_map.json" ]; export interface VocabConfiguration { tokenizer: { do_lower_case?: boolean; }; tokensMapping: Partial<Record<TokenMappingKey, string>>; } export type TokenMappingKey = | "cls_token" | "eos_token" | "mask_token" | "pad_token" | "sep_token" | "unk_token"; const VOCAB_MAPPING: Partial<Record<ModelType, VocabFiles>> = { [ModelType.Roberta]: { mergesFile: "merges.txt", vocabFile: "vocab.json" } }; const DEFAULT_VOCAB = { vocabFile: { name: "vocab.txt" } }; type VocabReturn<TReturnConfig> = TReturnConfig extends true ? 
VocabConfiguration : void; export async function getVocab<TReturnConfig extends boolean>( options: VocabDownloadOptions, returnConfig?: TReturnConfig ): Promise<VocabReturn<TReturnConfig>> { await ensureDir(options.dir); const modelType = getModelType(options.modelName); if (!modelType) { throw new Error( "The model name does not allow to infer the associated tokenizer and thus which vocab files to download" ); } let vocabFiles: Partial<Record< VocabFilesKey | ConfigFilesKey, { name: string; url?: string; optional?: boolean } >>; // eslint-disable-next-line @typescript-eslint/no-use-before-define const hfVocabUrl: string | undefined = HF_VOCAB_FILES_MAPPING[options.modelName]; if (hfVocabUrl) { vocabFiles = { vocabFile: { name: "vocab.txt", url: hfVocabUrl } }; } else { const mapping = VOCAB_MAPPING[modelType]; if (mapping) { vocabFiles = {}; for (const key of Object.keys(mapping) as VocabFilesKey[]) { // eslint-disable-next-line @typescript-eslint/no-non-null-assertion vocabFiles[key] = { name: mapping[key]! }; } } else { vocabFiles = DEFAULT_VOCAB; } for (const file of ["mergesFile", "vocabFile"] as VocabFilesKey[]) { if (options[file]) { // eslint-disable-next-line @typescript-eslint/no-non-null-assertion vocabFiles[file] = { name: options[file]! }; } } } const vocabConfig: VocabConfiguration = { tokenizer: {}, tokensMapping: {} }; for (const file of VOCAB_CONFIG_KEYS) { vocabFiles[file] = { name: file, optional: true }; } for (const vocabFile of Object.values(vocabFiles)) { if (!vocabFile) { continue; } const file = path.join(options.dir, vocabFile.name); if (!(await exists(file))) { shell.echo(`Downloading ${vocabFile.name}...`); const url = vocabFile.url ?? 
getHfUrl(options.modelName, vocabFile.name); const response = await fetch(url); if (!response.ok) { if (vocabFile.optional !== true) { throw new Error(`Unable to download ${vocabFile.name} at ${url}`); } else { continue; } } const rawValue = await response.text(); await fs.promises.writeFile(file, rawValue); if (returnConfig && VOCAB_CONFIG_KEYS.includes(vocabFile.name as ConfigFilesKey)) { const configKey = getVocabConfigKey(vocabFile.name as ConfigFilesKey); vocabConfig[configKey] = JSON.parse(rawValue); } } else { options.verbose && shell.echo(`${vocabFile.name} already exists, doing nothing...`); if (returnConfig && VOCAB_CONFIG_KEYS.includes(vocabFile.name as ConfigFilesKey)) { try { const configFile = await fs.promises.readFile(file, { encoding: "utf-8" }); const configKey = getVocabConfigKey(vocabFile.name as ConfigFilesKey); vocabConfig[configKey] = JSON.parse(configFile); } catch (error) { // Nothing } } } } if (returnConfig === true) { return vocabConfig as VocabReturn<TReturnConfig>; } return void 0 as VocabReturn<TReturnConfig>; } function getHfUrl(model: string, file: string): string { return `https://cdn.huggingface.co/${model}/${file}`; } function getVocabConfigKey(filename: ConfigFilesKey): keyof VocabConfiguration { return filename === "tokenizer_config.json" ? "tokenizer" : "tokensMapping"; } export function getAbsolutePath(pathToCheck?: string, rootDir = ROOT_DIR): string { if (!pathToCheck) { return rootDir; } return path.isAbsolute(pathToCheck) ? 
pathToCheck : path.join(rootDir, pathToCheck); } const HF_VOCAB_FILES_MAPPING: Record<string, string> = { /** DistilBERT */ "distilbert-base-uncased-distilled-squad": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-vocab.txt", "distilbert-base-cased-distilled-squad": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-vocab.txt", /** BERT */ "bert-large-uncased-whole-word-masking-finetuned-squad": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-finetuned-squad-vocab.txt", "bert-large-cased-whole-word-masking-finetuned-squad": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-finetuned-squad-vocab.txt" }; interface DefaultModel { [RuntimeType.SavedModel]: string; [RuntimeType.TFJS]?: string; } const HF_MODELS_MAPPING: Record<string, DefaultModel> = { /** BERT */ "bert-large-cased-whole-word-masking-finetuned-squad": { [RuntimeType.SavedModel]: "https://cdn.huggingface.co/bert-large-cased-whole-word-masking-finetuned-squad-saved_model.tar.gz" }, "bert-large-uncased-whole-word-masking-finetuned-squad": { [RuntimeType.SavedModel]: "https://cdn.huggingface.co/bert-large-uncased-whole-word-masking-finetuned-squad-saved_model.tar.gz" }, /** DistilBERT */ "distilbert-base-cased-distilled-squad": { [RuntimeType.SavedModel]: "https://cdn.huggingface.co/distilbert-base-cased-distilled-squad-saved_model.tar.gz", [RuntimeType.TFJS]: "https://cdn.huggingface.co/distilbert-base-cased-distilled-squad-tfjs.tar.gz" }, "distilbert-base-uncased-distilled-squad": { [RuntimeType.SavedModel]: "https://cdn.huggingface.co/distilbert-base-uncased-distilled-squad-saved_model.tar.gz" } };
the_stack
import isEqual from 'react-fast-compare' import arrayMove from 'array-move' import isPromise from 'is-promise' import { getIn, setIn, cloneDeep } from './utils' import { FieldNode } from './types/field' import { FormSchema } from './types/form' import { NodeOptions, SetNodeFunction, ValidatorOptions } from './types/types' import { Fomir } from './Fomir' import { Node } from '.' function travelNodes(nodes: any[] = [], fn: (n: any) => any, travelParent = false) { function travel(nodes: any[]) { for (const item of nodes) { if (Array.isArray(item.children)) { travel(item.children) if (travelParent) fn(item) continue } fn(item) } } travel(nodes) } export function normalizeNode(node: any) { const falsyProps = ['required', 'focused', 'touched', 'loading', 'disabled'] const truthyProps = ['showLabel', 'visible', 'display'] falsyProps.forEach(k => (node[k] = node[k] ?? false)) truthyProps.forEach(k => (node[k] = node[k] ?? true)) node.status = node.status ?? 'editable' node.label = node.label ?? null node.data = node.data ?? null node.options = node.options ?? [] node.validators = node.validators ?? {} node.componentProps = node.componentProps ?? {} node.description = node.description ?? 
null return node } export type Form<T = any> = Omit<ReturnType<typeof createForm>, 'setValues'> & { setValues(values: T): T } export function createForm<T = any>(schema: FormSchema<T>) { const formUpdaters: any[] = [] const NODE_TO_UPDATER = new WeakMap() const NODE_TO_INDEX = new WeakMap() const NODE_TO_PARENT = new WeakMap() const NAME_TO_NODE = new Map() const NODE_TO_NAME = new WeakMap() // For form node schema.dirty = false schema.valid = true schema.submitCount = 0 schema.submitting = false schema.submitted = false schema.validating = false schema.status = 'editable' schema.components = { ...Fomir.compenents, ...schema.components, } travelNodes( schema.children, item => { normalizeNode(item) }, true, ) const initialSchema = cloneDeep(schema) function runFormUpdaters() { for (const updater of formUpdaters) { updater({}) // rerender form } } function getFormState(): FormSchema { return schema } function getFieldState(name: string, schema?: FormSchema): FieldNode { return getNode({ schema, match: n => n.name === name }) } function setSchema(fn: (shema: FormSchema) => any) { fn(schema) runFormUpdaters() } function setFormState(formPartialState: Partial<FormSchema>) { const prevSchema = cloneDeep(schema) const { watch = {} } = form.schema for (const key in formPartialState) { ;(schema as any)[key] = formPartialState[key as keyof FormSchema] } /** on form change */ for (const key of Object.keys(watch)) { if (!key.startsWith('$.')) continue let k = key.replace(/^\$\./, '') as keyof FormSchema if (Reflect.has(schema, k)) { if (isEqual(prevSchema[k], schema[k])) continue watch[key](schema[k] as any, prevSchema[k]) } } runFormUpdaters() } function setFieldState(namePath: string, fieldState: Partial<FieldNode>) { const { watch = {} } = form.schema let fieldNode = form.NAME_TO_NODE.get(namePath) /** Put values,errors... 
to a map */ const prevMap = Object.keys(watch) .filter(k => k.startsWith('*.')) .map(k => k.replace(/^\*\./, '')) .reduce((acc, cur) => { acc[cur] = getFieldCollection(cur as keyof FieldNode, [schema]) return acc }, {} as any) const prevSchema = cloneDeep(schema) // TODO: need refactor const matchedNode = setNode(fieldState, { rerender: false, match: n => n === fieldNode, }) /** on field change */ for (const key of Object.keys(watch)) { if (key.startsWith('*.')) { const type = key.replace(/^\*\./, '') as keyof FieldNode const prev = prevMap[type] const next = getFieldCollection(type, [schema]) if (!isEqual(prev, next)) { watch[key](next, prev) } continue } /** for single field */ const arr = key.split('.') const type = arr[arr.length - 1] const name = arr.slice(0, -1).join('.') const prev = getIn(getFieldState(name, prevSchema), type) const next = getIn(getFieldState(name, schema), type) if (!isEqual(prev, next)) { watch[key](next, prev) } } for (const fn of Fomir.onFieldChangeCallbacks) { fn(namePath, form) } rerenderNode(matchedNode) } function rerenderNode(node: any) { if (node) NODE_TO_UPDATER.get(node)?.({}) } function getFieldCollection( type: keyof FieldNode, arr: any[], ignoreInvisible = true, result: any = {}, ): any { return arr.reduce<any>((acc, cur) => { if (Array.isArray(cur.children)) { return { ...acc, ...getFieldCollection(type, cur.children, ignoreInvisible, acc), } } // must have a name if (!Reflect.has(cur, 'name')) return acc // skip invisible field if (ignoreInvisible && !cur.visible) return acc let v: any const k = NODE_TO_NAME.get(cur) if (type === 'value') { const { value, transform } = cur v = transform && typeof transform === 'function' ? 
transform(value) : value } else { v: (cur as any)[k] } setIn(acc, k, v) return acc }, result) } async function validateField(options: ValidatorOptions): Promise<any> { let error: any = undefined const { validators = {} } = options.fieldState const { value } = options.fieldState for (const v in validators) { let result: any if (typeof validators[v] === 'function') { result = validators[v](value, options) } else { if (!Fomir.validators[v]) continue result = Fomir.validators[v](value, validators[v], options) } error = isPromise(result) ? await result : result if (error) break } return error } async function validateForm() { return await validateAllFields() } async function validateAllFields(): Promise<any> { let errors: any = {} async function getErrors(arr: any[]) { for (const item of arr) { if (item.children) { await getErrors(item.children) } else { if (Reflect.has(item, 'name')) { if (!item.visible) continue const error = await validateField({ fieldState: item, form }) // if (error && error !== state.error) { if (error) { setIn(errors, item.name, error) setFieldState(item.name, { error }) } } } } } await getErrors([schema]) return errors } async function change(namePath: string, value: any) { let fieldNode = form.NAME_TO_NODE.get(namePath) let nextValue = value if (typeof fieldNode?.intercept === 'function') { nextValue = fieldNode.intercept(value, fieldNode) } setFieldState(namePath, { value: nextValue }) // sync value fieldNode = { ...fieldNode, value: nextValue } const fieldError = await validateField({ fieldState: fieldNode, form }) const prevError = fieldNode.error const error = fieldError || undefined if (prevError !== error) setFieldState(namePath, { error }) /** field change callback, for Dependent fields */ fieldNode?.onValueChange?.(fieldNode) } async function blur(namePath: string) { let fieldNode = form.NAME_TO_NODE.get(namePath) if (schema.validationMode !== 'onSubmit') { const error = await validateField({ fieldState: fieldNode, form }) if (error) 
setFieldState(namePath, { touched: true, error }) } } async function submitForm(e?: any) { e && e?.preventDefault() let valid: boolean = true const values = getValues() touchAll() // make all fields touched const errors = await validateForm() valid = isValid(errors) if (valid) { setSubmitting(true) schema?.onSubmit?.(values) } else { setFieldErrors(errors) schema?.onError?.(errors) } setFormState({ valid, dirty: true, submitCount: schema.submitCount! + 1, }) } function getValues<V = T>(name?: string): V { const values = getFieldCollection('value', [schema]) return name ? getIn(values, name) : values } function getErrors() { return getFieldCollection('error', [schema]) } // TODO: handle nested function setFieldErrors(errors: any) { for (const key in errors) { setFieldState(key, { error: errors[key] }) } } function touchAll() { travelNodes(schema.children, item => { if (Reflect.has(item, 'name')) { item.touched = true } }) } function setValues(values: T) { const changedNodeNames: string[] = [] travelNodes(schema.children, item => { const nodeName = NODE_TO_NAME.get(item) if (nodeName) { const nextValue = getIn(values, nodeName) if (item.value !== nextValue) { item.value = nextValue changedNodeNames.push(nodeName) } } }) /** handle form watch */ const watch = schema.watch || {} for (const key of Object.keys(watch || {})) { if (key.startsWith('*.')) continue const arr = key.split('.') const type = arr[arr.length - 1] const name = arr.slice(0, -1).join('.') if (type === 'value' && changedNodeNames.includes(name)) { watch[key](getIn(values, name)) } } rerenderNode(form) return getValues() } function setSubmitting(submitting: boolean) { setFormState({ submitting }) } function resetForm() { schema = cloneDeep(initialSchema) form.schema = schema rerenderNode(form) form.schema?.onReset?.() } function onFieldInit(namePath: string, form: any) { const fieldNode = getFieldState(namePath) fieldNode?.onFieldInit?.(fieldNode, form) } function getNode<T = any>(opt: NodeOptions) { 
let node: T = undefined as any const nodes = [opt.schema || schema] function travel(nodes: any[]) { for (const item of nodes) { if (Array.isArray(item.children)) { if (opt?.match?.(item)) { node = item break } travel(item.children) continue } if (opt?.match?.(item)) { node = item break } } } travel(nodes) return node } function setNode<T = any>(propertiesOrSetter: T | SetNodeFunction<T>, opt: NodeOptions) { let matchedNode: any const nodes = [opt.schema || schema] const updateNode = (node: any) => { matchedNode = node const rerender = typeof opt?.rerender === 'boolean' ? opt?.rerender : true if (typeof propertiesOrSetter === 'function') { if (rerender) rerenderNode(matchedNode) return (propertiesOrSetter as any)(node) } for (const k in propertiesOrSetter) { node[k] = propertiesOrSetter[k] } // rerender form if (rerender) rerenderNode(matchedNode) } function travel(nodes: any[]) { for (const item of nodes) { if (Array.isArray(item.children)) { if (opt?.match?.(item)) { updateNode(item) break } travel(item.children) continue } if (opt?.match?.(item)) { updateNode(item) break } } } travel(nodes) return matchedNode } // TODO: function getArrayHelpers(name: string, arrayNode?: any) { const arrayField = arrayNode || getNode({ match: n => n.name === name, }) const fields = arrayField.children const isValidIndex = (...args: number[]) => { return !args.some(i => i < 0 || i > fields.length) } function move(from: number, to: number) { if (!isValidIndex(from, to)) return arrayField.children = arrayMove(fields, from, to) rerenderNode(arrayField) } return { isFirst(index: number) { return index === 0 }, isLast(index: number) { return index + 1 === arrayField?.children?.length }, push<T = any>(value: T) { if (arrayField.children[0]) { const item = cloneDeep(arrayField.children[0]) for (const c of item.children) { delete c.value } arrayField.children.push(item) } rerenderNode(arrayField) if (value) { // TODO: } }, // unshift, remove(index: number) { 
arrayField.children.splice(index, 1) rerenderNode(arrayField) }, move, swap: move, // insert, } } function getNodeName(node: any) { let name: string = node.name || '' let child = node while (true) { const parent = NODE_TO_PARENT.get(child) if (parent == null) { break } const i = NODE_TO_INDEX.get(child) if (parent.name) { if (parent.isArrayField && name) { name = parent.name + `[${i}].` + name } else { name = parent.name + name } } child = parent } return name } function findPath(node: any) { const path: number[] = [] let child = node while (true) { const parent = NODE_TO_PARENT.get(child) if (parent == null) { break } const i = NODE_TO_INDEX.get(child) if (i == null) { break } path.unshift(i) child = parent } return path } function getParent(node: any) { return NODE_TO_PARENT.get(node) } function getNodeIndex(node: any) { return NODE_TO_INDEX.get(node) } function getNodeComponent(node: Node) { const { component } = node const { components } = schema if (components?.[component]) return components[component] return null } /** * check form is valid * @param errors errors object */ function isValid(errors: any = {}): boolean { let valid = true function loopErrors(errors: any) { for (const key of Object.keys(errors)) { let error = errors[key] if (!error) continue // skip it if (typeof error === 'object') { loopErrors(error) } else { error = String(error) as string valid = error.length ? 
false : true } } } loopErrors(errors) return valid } const form = { schema, setSchema, data: {} as any, formUpdaters, NODE_TO_UPDATER, NODE_TO_INDEX, NODE_TO_PARENT, NAME_TO_NODE, NODE_TO_NAME, normalizeNode, getParent, getNodeIndex, findPath, getNodeName, getNodeComponent, /** getter */ getFieldState, getFormState, getValues, getErrors, getNode, getFieldCollection, getArrayHelpers, /** setter */ setFormState, setFieldState, setNode, setFieldErrors, setSubmitting, touchAll, setValues, /** handle form */ resetForm, submitForm, /** validate */ validateForm, validateField, isValid, isSchema: (node: any) => { return node === schema }, onFieldInit, rerenderNode, blur, change, } return form }
the_stack
import { writeFileSync, mkdirSync } from 'fs'; import { mean } from 'mathjs'; import PQueue from 'p-queue'; import { logger, TraderWorker as TraderWorkerBase, TraderConfig } from '../../../../src/exports'; import { deepFind } from '../../../_core/helpers'; import { Status } from '@src/_core/exports'; class TraderWorker extends TraderWorkerBase { public fitnesses: Array<{ currentProfit: number; percentTradeWin: number; tradeFreqency: number; total: number; [name: string]: number; }>; public hasRunned: boolean = false; } interface GeneticOpts { silent: boolean; threads: number; generation: number; popSize: number; elitism: number; mutationRate: number; envs: Array<{ start: string; stop: string }>; genes: Gene[]; } interface Gene { key: string; min: number; max: number; integer?: boolean; list?: string[]; } function randomBetween(min: number, max: number, integer?: boolean): number { if (integer === true) return Math.floor(Math.random() * (max - min + 1) + min); return Math.random() * (max - min) + min; } function createTraderWorker( traderConfig: TraderConfig, name: string, stratOpts: TraderConfig['stratOpts'], silent: boolean = true ) { return new TraderWorker( { ...traderConfig, stratOpts, name, }, { silent } ); } function randomIndiv(traderConfig: TraderConfig, opts: GeneticOpts, gen: number, ind: number): TraderWorker { const newOpts = { ...traderConfig.stratOpts }; opts.genes.forEach(g => { if (g.list) { newOpts[g.key] = g.list[randomBetween(0, g.list.length - 1, true)]; } else { newOpts[g.key] = randomBetween(g.min, g.max, g.integer); } }); return createTraderWorker(traderConfig, `${traderConfig.name}-gen${gen}-ind${ind}`, newOpts, opts.silent); } function tournamentSelection(generation: TraderWorker[], participant: number = 4): TraderWorker[] { const checkSameFitness = (indivs: TraderWorker[], fitness: number) => { for (const trader of indivs) { if (getFitness(trader) === fitness) return true; } return false; }; const traders: TraderWorker[] = []; // Select X 
random participant for (let i = 0; i < participant; i++) { let trader = generation[randomBetween(0, generation.length - 1, true)]; let j = 0; while (j++ < generation.length && checkSameFitness(traders, getFitness(trader))) { trader = generation[randomBetween(0, generation.length - 1, true)]; } traders.push(trader); } // return 2 best traders from tournament return traders.sort((a: any, b: any) => getFitness(b) - getFitness(a)).slice(0, 2); } function getFitness(trader: TraderWorker, key: string = 'total'): number { let sum = 0; for (const fitness of trader.fitnesses) { sum += fitness[key]; } const score = sum / trader.fitnesses.length; // Add 0.5 bonus points to total let bonus = 0; if (key === 'total') { if (trader.fitnesses.filter(f => f.currentProfit > 0.05).length === trader.fitnesses.length) bonus += 0.25; if (trader.fitnesses.filter(f => f.percentTradeWin > 0.6).length === trader.fitnesses.length) bonus += 0.25; } return score + bonus; } function calcFitness( trader: TraderWorker ): { currentProfit: number; percentTradeWin: number; tradeFreqency: number; total: number } { let currentProfit = deepFind(trader, 'trader.portfolio.indicators.currentProfit'); currentProfit = currentProfit === undefined || currentProfit === 0 ? -1 : currentProfit; const tradeHistory = deepFind(trader, 'trader.portfolio.tradeHistory') || []; const percentTradeWin = tradeHistory.length === 0 ? 0 : tradeHistory.filter((trade: any) => trade.orderProfit > 0.001).length / tradeHistory.length; const { start, stop } = trader.config.env.backtest!; const limit = Math.floor(daysBetween(new Date(start), new Date(stop)) / 3); let tradeFreqency = tradeHistory.length / limit === 0 ? 1 : limit; tradeFreqency = tradeFreqency > 1 ? 
1 : tradeFreqency; return { currentProfit, percentTradeWin, tradeFreqency, total: currentProfit + percentTradeWin /* + tradeFreqency*/, }; } function mutate( traderConfig: TraderConfig, trader: TraderWorker, opts: GeneticOpts, gen: number, ind: number ): TraderWorker { const oldOpts = trader.config.stratOpts; const newOpts = { ...oldOpts }; opts.genes.forEach(g => { // If gene should mutate if (randomBetween(0, 1) <= opts.mutationRate) { // Mutate value from list if (g.list) { newOpts[g.key] = g.list[randomBetween(0, g.list.length - 1, true)]; } // Mutate numeric value // Mutation move value between 0.5% to 50% else { const direction = randomBetween(0, 1, true) === 0 ? -1 : 1; const range = g.max - g.min; const diff = range * randomBetween(0.005, 0.5) * direction; let newVal = oldOpts[g.key] + diff; newVal = newVal < g.min ? g.min : newVal > g.max ? g.max : newVal; newOpts[g.key] = g.integer ? Math.floor(newVal) : newVal; } } }); return createTraderWorker(traderConfig, `${traderConfig.name}-gen${gen}-ind${ind}`, newOpts, opts.silent); } function crossover(name: string, traderA: TraderWorker, traderB: TraderWorker, opts: GeneticOpts): TraderWorker { // Set gene as traderB const newOpts = { ...traderB.config.stratOpts }; // Take some gene of traderA if mutation prob OK opts.genes.forEach(g => { if (randomBetween(0, 1) < 0.5) { if (g.list) newOpts[g.key] = g.list[randomBetween(0, g.list.length - 1, true)]; else newOpts[g.key] = traderA.config.stratOpts[g.key]; } }); // mutate new indiv (25% chance) if (randomBetween(0, 1) < 0.25) { opts.genes.forEach(g => { if (randomBetween(0, 1) <= opts.mutationRate) { if (g.list) newOpts[g.key] = g.list[randomBetween(0, g.list.length - 1, true)]; else newOpts[g.key] = randomBetween(g.min, g.max, g.integer); } }); } return createTraderWorker(traderA.trader.config, name, newOpts, opts.silent); } function breedNewGeneration( traderConfig: TraderConfig, generation: TraderWorker[], opts: GeneticOpts, gen: number ): TraderWorker[] { // 
sort by fitness (but keep only different fitness at the top => try to avoid same indiv convergence) generation = generation.sort((a: any, b: any) => getFitness(b) - getFitness(a)); const generationResort: TraderWorker[] = []; let currentIdx = 1; // Sort indiv by fitness (take care of keeping only one version of each individu) generation.forEach((indiv, idx) => { // keep best indiv (first one) if (idx === 0) generationResort.push(indiv); else { // If same fitness push back if (getFitness(indiv) - getFitness(generation[idx - 1]) === 0) generationResort.push(indiv); // Else push front else generationResort.splice(currentIdx++, 0, indiv); } }); generation = generationResort; /* CREATE NEW GENERATION */ const newGeneration: TraderWorker[] = []; // keep best indiv const bestIndivs = generation.slice(0, opts.elitism); for (const bestIndiv of bestIndivs) { // just rename best indiv with new name (will not rerun) bestIndiv.config.name = `${traderConfig.name}-gen${gen}-ind${newGeneration.length}`; // keep best unchanged if (newGeneration.length < opts.elitism) {// Math.floor(opts.elitism / 2) + 1) { newGeneration.push(bestIndiv); } else { // Mutate indiv or keep it unmutate const indiv = randomBetween(0, 1) < 0.5 ? mutate(traderConfig, bestIndiv, opts, gen, newGeneration.length) : bestIndiv; newGeneration.push(indiv); } } // Mutate or breed new indiv while (newGeneration.length < opts.popSize) { // Breed indiv using crossover (66%) if (randomBetween(0, 1) > 0.33) { // other impl: // let t2 = generation[randomBetween(0, generation.length - 1, true)]; /*const t1 = generation[randomBetween(0, Math.floor(gen >= 10 ? 
generation.length - 1 : generation.length / 2), true)]; let t2 = generation[randomBetween(0, Math.floor(generation.length / 2), true)]; while (getFitness(t2) === getFitness(t1)) t2 = generation[randomBetween(0, generation.length - 1, true)];*/ // Get parent1 and 2 randomly (Make sure parent1 and 2 are different) const [t1, t2] = tournamentSelection(generation, 4); // create children newGeneration.push(crossover(`${traderConfig.name}-gen${gen}-ind${newGeneration.length}`, t1, t2, opts)); } // Breed indiv using mutation (33%) else { const t = generation[randomBetween(0, generation.length - 1, true)]; newGeneration.push(mutate(traderConfig, t, opts, gen, newGeneration.length)); } } return newGeneration; } function makeGeneration(traderConfig: TraderConfig, opts: GeneticOpts, gen: number): TraderWorker[] { let ind = 0; const generation = []; while (ind < opts.popSize) { // Add best indiv (no mutation copy of config) if (ind === 0) { generation.push( createTraderWorker( traderConfig, `${traderConfig.name}-gen${gen}-ind${ind}`, traderConfig.stratOpts, opts.silent ) ); } else { generation.push(randomIndiv(traderConfig, opts, gen, ind)); } ind++; } return generation; } /* tslint:disable */ export class Optimizer { public static runningTraders: TraderWorker[] = []; public static pqueue: PQueue; public static getQueue(concurrency: number): PQueue { if (Optimizer.pqueue) Optimizer.pqueue.clear(); Optimizer.pqueue = new PQueue({ concurrency, autoStart: true }); return Optimizer.pqueue; } public static async genetic(trader: TraderConfig, opts: GeneticOpts) { let gen = 0; const traderConfig = { ...trader }; let generation; while (gen < opts.generation) { try { generation = !generation ? 
makeGeneration(traderConfig, opts, gen) : breedNewGeneration(traderConfig, generation, opts, gen); // Clear promise queue const pqueue = Optimizer.getQueue(opts.threads); pqueue.clear(); // Add promise to execute inside queue (start executing it) generation.forEach((t: TraderWorker) => { pqueue .add( // Exec trader task () => new Promise(async (resolve, reject) => { try { // avoid resimulating elite individual if (t.hasRunned !== true) { for (let i = 0; i < opts.envs.length; i++) { t.config.env.backtest = opts.envs[i]; t.config.flush = i === 0 ? true : false; await t.init(); // flush only first envs await t.start(); await t.stop(); if (!t.fitnesses) t.fitnesses = []; t.fitnesses.push(calcFitness(t)); } } t.hasRunned = true; resolve(); } catch (error) { // set fitness to -1 on error if (!t.fitnesses) t.fitnesses = []; t.fitnesses.push({ currentProfit: -1, percentTradeWin: -1, tradeFreqency: -1, total: -1 }); if (t.trader.status !== Status.STOP) await t.stop().catch(error => logger.error(error)); reject(error); } }) ) .catch(error => { logger.error(error); // logger.error(new Error(`Problem while running ${t.config.name}`)); }); }); // Execute traders with batchSize = Optimize.threadsSize // Wait end of runnings trader await pqueue.onIdle(); // LOGGING // sort by fitness const g = generation.sort((a: any, b: any) => getFitness(b) - getFitness(a)); logger.info('RESULT GEN ' + gen); const fitnesses = g.map((t: any) => getFitness(t)); logger.info( g .map((t: TraderWorker) => { const total = getFitness(t); const currentProfit = getFitness(t, 'currentProfit'); const percentTradeWin = getFitness(t, 'percentTradeWin'); const tradeFreqency = getFitness(t, 'tradeFreqency'); return `[${ t.config.name }] total: ${total}, currentProfit: ${currentProfit}, percentTradeWin: ${percentTradeWin}, tradeFreqency: ${tradeFreqency} `; }) .join('\n') ); logger.info( 'mean: ' + mean(...fitnesses) + ' min: ' + Math.min(...fitnesses) + ' max: ' + Math.max(...fitnesses) ); // Flush config of 
the generation mkdirSync(`optimizer/genetic/${traderConfig.name}`, { recursive: true }); writeFileSync( `optimizer/genetic/${traderConfig.name}/gen${gen}`, `${JSON.stringify({ result: { mean: mean(...fitnesses), min: Math.min(...fitnesses), max: Math.max(...fitnesses), }, gen: g.map(t => ({ fitness: t.fitnesses, config: t.config.stratOpts })), })}` ); gen++; } catch (error) { if (generation) generation.forEach(t => t.stop().catch(error => logger.error(error))); logger.error(error); throw Error('Problem during genetic optimization'); } } } } // helper function daysBetween(date1: Date, date2: Date) { // Get 1 day in milliseconds const oneDay = 1000 * 60 * 60 * 24; // Calculate the difference in milliseconds const diffms = date2.getTime() - date1.getTime(); // Convert back to days and return return Math.round(diffms / oneDay); }
the_stack
import { Global } from '../global'
import { ID } from './common/id'
import * as Util from '../util/dom'
import { Adopter } from './common/adopter'
import { Registry } from './common/registry'
import { Primer } from './primer'
import { Transform } from './transform/transform'
import type { AttributesMap } from './attributes'
import type { ElementMap } from '../types'

@Dom.register('Dom')
@Dom.mixin(Transform)
export class Dom<TElement extends Element = Element> extends Primer<TElement> {
  /**
   * Returns the first child of the element, or `null` when there is none.
   */
  firstChild<T extends Dom = Dom>(): T | null {
    return Dom.adopt<T>(this.node.firstChild)
  }

  /**
   * Returns the last child of the element, or `null` when there is none.
   */
  lastChild<T extends Dom = Dom>(): T | null {
    return Dom.adopt<T>(this.node.lastChild)
  }

  /**
   * Returns the child at the given position in the element's children array,
   * or `null` when the index is out of range.
   */
  get<T extends Dom = Dom>(index: number): T | null {
    const child = this.node.childNodes[index]
    return Dom.adopt<T>(child)
  }

  /**
   * Returns an array of descendant elements matching the given selector.
   */
  find<T extends Dom = Dom>(selector: string): T[] {
    return Dom.find<T>(selector, this.node)
  }

  /**
   * Returns the first descendant element matching the given selector,
   * or `null` when nothing matches.
   */
  findOne<T extends Dom = Dom>(selector: string): T | null {
    return Dom.findOne<T>(selector, this.node)
  }

  /**
   * Returns `true` if the element matches the given selector, falling back
   * to the vendor-prefixed variants of `Element.matches` on old engines.
   */
  matches(selector: string): boolean {
    const elem = this.node
    const anyNode = this.node as any
    const matcher =
      elem.matches ||
      anyNode.matchesSelector ||
      anyNode.webkitMatchesSelector ||
      anyNode.msMatchesSelector ||
      anyNode.mozMatchesSelector ||
      anyNode.oMatchesSelector ||
      null
    return matcher ? matcher.call(elem, selector) : false
  }

  /**
   * Returns an array of the element's children, each adopted as a `Dom`.
   */
  children<T extends Dom>(): T[] {
    return Array.from(this.node.childNodes, (child) => Dom.adopt<T>(child))
  }

  /**
   * Removes all elements from the element.
*/ clear() { while (this.node.lastChild) { this.node.lastChild.remove() } return this } /** * Returns an exact copy of the element. */ clone(deep = true) { // write dom data to the dom so the clone can pickup the data this.storeAffix(deep) // clone element and assign new id const Ctor = this.constructor as new (node: Element) => ElementMap<TElement> const cloned = this.node.cloneNode(deep) as Element ID.overwrite(cloned, true) return new Ctor(cloned) } /** * Iterates over all the children of the element. * Deep traversing is also possible by passing `true` as the second argument. * @returns */ eachChild<T extends Dom>( iterator: (this: T, child: T, index: number, children: T[]) => void, deep?: boolean, ) { const children = this.children() for (let i = 0, l = children.length; i < l; i += 1) { const child = children[i] iterator.call(child, child, i, children) if (deep) { child.eachChild(iterator, deep) } } return this } /** * Returns the index of given node. * Returns `-1` when it is not a child. */ indexOf(node: Node): number /** * Returns the index of given element. * Returns `-1` when it is not a child. */ indexOf(element: Dom): number /** * Returns the index of given element or node. * Returns `-1` when it is not a child. */ indexOf(element: Dom | Node): number indexOf(element: Dom | Node): number { const children = Array.prototype.slice.call(this.node.childNodes) as Node[] return children.indexOf(element instanceof Node ? element : element.node) } /** * Returns `true` when the given node is a child of the element. */ has(node: Node): boolean /** * Returns `true` when the given element is a child of the element. */ has(element: Dom): boolean /** * Returns `true` when the given element or node is a child of the element. */ has(element: Dom | Node): boolean has(element: Dom | Node): boolean { return this.indexOf(element) !== -1 } /** * Returns the index of the element in it's parent. * Returns `-1` when the element do not have a parent. 
*/ index(): number { const parent: Dom | null = this.parent() return parent ? parent.indexOf(this) : -1 } contains(node: Node): boolean contains(element: Dom): boolean contains(element: Dom | Node): boolean { return Util.isAncestorOf( this.node, element instanceof Node ? element : element.node, ) } /** * Returns the element's id, generate new id if no id set. */ id(): string /** * Set id of the element. */ id(id: string | null): this id(id?: string | null) { // generate new id if no id set if (typeof id === 'undefined' && !this.node.id) { this.node.id = ID.generate() } // dont't set directly with this.node.id to make `null` work correctly return typeof id === 'undefined' ? this.attr('id') : this.attr('id', id) } /** * Returns the parent element if exist. */ parent<T extends Dom = Dom>(): T | null /** * Iterates over the ancestors and returns the ancestor mathcing the selector. */ parent<T extends Dom = Dom>(selector: string): T | null /** * Iterates over the ancestors and returns the ancestor mathcing the type. */ parent<T extends Dom = Dom>(parentType: Registry.Definition): T | null parent<T extends Dom = Dom>(selector?: string | Registry.Definition): T | null parent<T extends Dom = Dom>( selector?: string | Registry.Definition, ): T | null { if (this.node.parentNode == null) { return null } let parent: T | null = Dom.adopt<T>(this.node.parentNode) if (selector == null) { return parent } // loop trough ancestors if type is given do { if ( typeof selector === 'string' ? 
parent.matches(selector) : parent instanceof selector ) { return parent } } while ((parent = Dom.adopt<T>(parent.node.parentNode))) return null } parents<T extends Dom = Dom>(): T[] parents<T extends Dom = Dom>(until: null): T[] parents<T extends Dom = Dom>(untilSelector: string): T[] parents<T extends Dom = Dom>(untilType: Registry.Definition): T[] parents<T extends Dom = Dom>(untilNode: Node): T[] parents<T extends Dom = Dom>(untilElement: Dom): T[] parents<T extends Dom = Dom>( until: string | Registry.Definition | Node | Dom | null, ): T[] parents<T extends Dom = Dom>( until?: string | Registry.Definition | Node | Dom | null, ) { if (until == null && this.node instanceof Global.window.SVGElement) { until = Registry.getClass('Svg') // eslint-disable-line } const match = (elem: Dom) => { if (until == null) { return true } if (typeof until === 'string') { return !elem.matches(until) } if (until instanceof Primer) { return elem !== until } if (until instanceof Global.window.Node) { return elem.node !== until } return !(elem instanceof until) } const parents: T[] = [] let parent = this.parent() while ( parent && !parent.isDocument() && !parent.isDocumentFragment() && match(parent) ) { parents.push(parent as T) parent = parent.parent() } return parents } add<T extends Dom>(element: T, index?: number): this add<T extends Node>(node: T, index?: number): this add(selector: string, index?: number): this add<T extends Dom>(element: Adopter.Target<T>, index?: number): this add<T extends Dom>(element: Adopter.Target<T>, index?: number): this { const instance = Adopter.makeInstance<T>(element) // If non-root svg nodes are added we have to remove their namespaces if (instance.isSVGSVGElement()) { const svg = Dom.adopt(instance.node as SVGSVGElement) svg.removeNamespace() } if (index == null) { this.node.appendChild(instance.node) } else if (instance.node !== this.node.childNodes[index]) { this.node.insertBefore(instance.node, this.node.childNodes[index]) } return this } 
  append<T extends Dom>(element: T): this
  append<T extends Node>(node: T): this
  append(selector: string): this
  append<T extends Dom>(element: Adopter.Target<T>): this
  /**
   * Appends the given element (or node, or the element created from the
   * selector/tag) as the last child of this element. Returns `this`.
   */
  append<T extends Dom>(element: Adopter.Target<T>): this {
    return this.add(element)
  }

  prepend<T extends Dom>(element: T): this
  prepend<T extends Node>(node: T): this
  prepend(selector: string): this
  prepend<T extends Dom>(element: Adopter.Target<T>): this
  /**
   * Inserts the given element as the first child of this element.
   * Returns `this`.
   */
  prepend<T extends Dom>(element: Adopter.Target<T>): this {
    return this.add(element, 0)
  }

  addTo<T extends Dom>(parentElement: T, index?: number): this
  addTo<T extends Node>(parentNode: T, index?: number): this
  addTo(selector: string, index?: number): this
  addTo<T extends Dom>(parent: Adopter.Target<T>, index?: number): this
  /**
   * Adds this element to the given parent (at the optional child position)
   * and returns `this` — the inverse of `add`.
   */
  addTo<T extends Dom>(parent: Adopter.Target<T>, index?: number): this {
    return Adopter.makeInstance<T>(parent).put(this, index)
  }

  appendTo<T extends Dom>(parentElement: T): this
  appendTo<T extends Node>(parentNode: T): this
  appendTo(selector: string): this
  appendTo<T extends Dom>(parent: Adopter.Target<T>): this
  /**
   * Appends this element to the end of the given parent's child list and
   * returns `this`.
   */
  appendTo<T extends Dom>(parent: Adopter.Target<T>): this {
    return this.addTo(parent)
  }

  /**
   * Adds the given element to the end of the child list or the optional child
   * position and returns the added element.
   */
  put<T extends Dom>(element: T, index?: number): T
  /**
   * Adds the given node to the end of the child list or the optional child
   * position and returns the added element.
   */
  put<T extends Node>(node: T, index?: number): ElementMap<T>
  /**
   * Adds the node matching the selector to the end of the child list or the
   * optional child position and returns the added element.
*/ put<T extends Dom>(selector: string, index?: number): T put<T extends Dom>(element: Adopter.Target<T>, index?: number): T put<T extends Dom>(element: Adopter.Target<T>, index?: number): T { const instance = Adopter.makeInstance<T>(element) this.add(instance, index) return instance } putIn<T extends Dom>(parentElement: T, index?: number): T putIn<T extends Node>(parentNode: T, index?: number): ElementMap<T> putIn<T extends Dom>(selector: string, index?: number): T putIn<T extends Dom>(parent: Adopter.Target<T>, index?: number): T putIn<T extends Dom>(parent: Adopter.Target<T>, index?: number): T { return Adopter.makeInstance<T>(parent).add(this, index) } replace<T extends Dom>(element: T, index?: number): T replace<T extends Node>(node: T, index?: number): ElementMap<T> replace<T extends Dom>(selector: string, index?: number): T replace<T extends Dom>(element: Adopter.Target<T>, index?: number): T replace<T extends Dom>(element: Adopter.Target<T>): T { const instance = Adopter.makeInstance<T>(element) if (this.node.parentNode) { this.node.parentNode.replaceChild(instance.node, this.node) } return instance } /** * Creates an element of given tagName and appends it to the current element. */ element<T extends keyof SVGElementTagNameMap>( tagName: T, attrs?: AttributesMap<SVGElementTagNameMap[T]> | null, ): ElementMap<T> element<T extends keyof HTMLElementTagNameMap>( tagName: T, attrs?: AttributesMap<HTMLElementTagNameMap[T]> | null, ): ElementMap<T> element<T extends Dom>( tagName: string, attrs?: AttributesMap<any> | null, ): T { const registed = tagName.toLowerCase() !== 'dom' && Registry.isRegisted(tagName) const elem = Adopter.makeInstance<T>(tagName, !registed) if (attrs) { elem.attr(attrs) } return this.put(elem) } remove() { const parent = this.parent() if (parent) { parent.removeChild(this) } return this } removeChild(node: Node): this removeChild(element: Dom): this removeChild(element: Dom | Node) { this.node.removeChild(element instanceof Node ? 
element : element.node)
    return this
  }

  before<T extends Dom>(element: T): this
  before<T extends Node>(node: T): this
  before(tagName: string): this
  before<T extends Dom>(element: Adopter.Target<T>): this
  /**
   * Inserts the given element before this element (as the previous sibling).
   * Returns `this`.
   */
  before<T extends Dom>(element: Adopter.Target<T>) {
    const parent = this.parent()
    if (parent) {
      const index = this.index()
      const instance = Adopter.makeInstance(element)
      instance.remove()
      parent.add(instance, index)
    }
    return this
  }

  after<T extends Dom>(element: T): this
  after<T extends Node>(node: T): this
  after(tagName: string): this
  after<T extends Dom>(element: Adopter.Target<T>): this
  /**
   * Inserts the given element after this element (as the next sibling).
   * Returns `this`.
   */
  after<T extends Dom>(element: Adopter.Target<T>) {
    const parent = this.parent()
    if (parent) {
      const index = this.index()
      const instance = Adopter.makeInstance(element)
      instance.remove()
      // Fix: insert the adopted, detached `instance`, mirroring `before()`.
      // Previously the raw `element` was passed, so `add()` ran
      // `makeInstance` again — for a selector/tag argument that inserted a
      // second, freshly created element while the detached one was dropped.
      parent.add(instance, index + 1)
    }
    return this
  }

  insertBefore<T extends Dom>(element: T): this
  insertBefore<T extends Node>(node: T): this
  insertBefore<T extends Dom>(element: Adopter.Target<T>): this
  /**
   * Inserts this element before the given target element. Returns `this`.
   */
  insertBefore<T extends Dom>(element: Adopter.Target<T>) {
    Adopter.makeInstance(element).before(this)
    return this
  }

  insertAfter<T extends Dom>(element: T): this
  insertAfter<T extends Node>(node: T): this
  insertAfter<T extends Dom>(element: Adopter.Target<T>): this
  /**
   * Inserts this element after the given target element. Returns `this`.
   */
  insertAfter<T extends Dom>(element: Adopter.Target<T>) {
    Adopter.makeInstance(element).after(this)
    return this
  }

  siblings<T extends Dom>(): T[]
  siblings<T extends Dom>(selfInclued?: boolean): T[]
  siblings<T extends Dom>(selector: string, selfInclued?: boolean): T[]
  /**
   * Returns the element's siblings, optionally filtered by selector;
   * a boolean (or `selfInclued`) controls whether the element itself
   * is included in the result.
   */
  siblings(selector?: string | boolean, selfInclued?: boolean) {
    const parent = this.parent()
    const children = parent ? parent.children() : []
    if (selector == null) {
      return children.filter((child) => child !== this)
    }
    if (typeof selector === 'boolean') {
      return selector ? 
children : children.filter((child) => child !== this) } return children.filter( (child) => child.matches(selector) && (selfInclued || child !== this), ) } next<T extends Dom>(): T | null next<T extends Dom>(selector: string): T | null next<T extends Dom>(selector?: string): T | null { const parent = this.parent() if (parent) { const index = this.index() const children = parent.children<T>() for (let i = index + 1, l = children.length; i < l; i += 1) { const next = children[i] if (selector == null || next.matches(selector)) { return next } } } return null } nextAll<T extends Dom>(): T[] nextAll<T extends Dom>(selector: string): T[] nextAll<T extends Dom>(selector?: string): T[] { const result: T[] = [] const parent = this.parent() if (parent) { const index = this.index() const children = parent.children<T>() for (let i = index + 1, l = children.length; i < l; i += 1) { const next = children[i] if (selector == null || next.matches(selector)) { result.push(next) } } } return result } prev<T extends Dom>(): T | null prev<T extends Dom>(selector: string): T | null prev<T extends Dom>(selector?: string): T | null { const parent = this.parent() if (parent) { const index = this.index() const children = parent.children<T>() for (let i = index - 1; i >= 0; i -= 1) { const previous = children[i] if (selector == null || previous.matches(selector)) { return previous } } } return null } prevAll<T extends Dom>(): T[] prevAll<T extends Dom>(selector: string): T[] prevAll<T extends Dom>(selector?: string): T[] { const result: T[] = [] const parent = this.parent() if (parent) { const index = this.index() const children = parent.children<T>() for (let i = index - 1; i >= 0; i -= 1) { const previous = children[i] if (selector == null || previous.matches(selector)) { result.push(previous) } } } return result } forward() { const parent = this.parent() if (parent) { const index = this.index() parent.add(this.remove(), index + 1) } return this } backward() { const parent = this.parent() 
if (parent) { const index = this.index() parent.add(this.remove(), index ? index - 1 : 0) } return this } front() { const parent = this.parent() if (parent) { parent.add(this.remove()) } return this } back() { const parent = this.parent() if (parent) { parent.add(this.remove(), 0) } return this } wrap<T extends Dom>(element: T): this wrap<T extends Node>(node: T): this wrap(selector: string): this wrap<T extends Dom>(element: Adopter.Target<T>): this wrap<T extends Dom>(node: Adopter.Target<T>): this { const parent = this.parent() if (!parent) { return this.addTo<T>(node) } const index = parent.indexOf(this) return parent.put<T>(node, index).put(this) } words(text: string) { this.node.textContent = text return this } /** * Returns the is of the node. */ toString() { return this.id() } html(): string html(outerHTML: boolean): string html( process: (dom: Dom) => boolean | undefined | Dom, outerHTML?: boolean, ): string html(content: string, outerHTML?: boolean): string html( arg1?: boolean | string | ((dom: Dom) => boolean | undefined | Dom), arg2?: boolean, ) { return this.xml(arg1, arg2, Util.namespaces.html) } xml(): string xml(outerXML: boolean): string xml( process: (dom: Dom) => boolean | undefined | Dom, outerXML?: boolean, ): string xml(content: string, outerXML?: boolean, ns?: string): this xml<T extends Dom>(content: string, outerXML: true, ns?: string): T xml( arg1?: boolean | string | ((dom: Dom) => boolean | undefined | Dom), arg2?: boolean, arg3?: string, ): string xml( arg1?: boolean | string | ((dom: Dom) => boolean | undefined | Dom), arg2?: boolean, arg3?: string, ) { const content = typeof arg1 === 'boolean' ? null : arg1 let isOuterXML = typeof arg1 === 'boolean' ? arg1 : arg2 const ns = arg3 // getter // ------ if (content == null || typeof content === 'function') { // The default for exports is, that the outerNode is included isOuterXML = isOuterXML == null ? 
true : isOuterXML this.storeAffix(true) let current: Dom = this // eslint-disable-line // An export modifier was passed if (typeof content === 'function') { current = Dom.adopt<Dom>(current.node.cloneNode(true)) // If the user wants outerHTML we need to process this node, too if (isOuterXML) { const result = content(current) if (result && typeof result !== 'boolean') { current = result } // The user does not want this node? Well, then he gets nothing if (result === false) { return '' } } // Deep loop through all children and apply modifier current.eachChild((child) => { const result = content(child) const next = result && typeof result !== 'boolean' ? result : child if (result === false) { // If modifier returns false, discard node child.remove() } else if (child !== next) { // If modifier returns new node, use it child.replace(next) } }, true) } const element = current.node as Element return isOuterXML ? element.outerHTML : element.innerHTML } // setter // ------ { // The default for import is, that the current node is not replaced isOuterXML = isOuterXML == null ? false : isOuterXML const wrapper = Util.createNode('wrapper', ns || Util.namespaces.html) const fragment = Global.document.createDocumentFragment() wrapper.innerHTML = content for (let i = wrapper.children.length; i > 0; i -= 1) { fragment.appendChild(wrapper.firstElementChild!) 
}

    if (isOuterXML) {
      // Outer import: replace this element itself with the imported content
      // and hand the (former) parent back to the caller.
      // NOTE(review): this returns `parent` (Dom | null) even though the
      // setter overloads declare `this` — confirm the declared return types
      // match the intended behavior here.
      const parent = this.parent()
      this.replace(fragment)
      return parent
    }

    // Inner import (default): append the imported nodes as children.
    return this.add(fragment)
    }
  }
}

export interface Dom<TElement extends Element = Element>
  extends Transform<TElement> {}

export namespace Dom {
  export const adopt = Adopter.adopt

  /**
   * Returns all elements under `parent` (the global document by default)
   * matching the given selector, each adopted as a `Dom` instance.
   */
  export function find<T extends Dom = Dom>(
    selectors: string,
    parent: Element | Document = Global.document,
  ) {
    const elems: T[] = []
    parent
      .querySelectorAll(selectors)
      .forEach((node) => elems.push(adopt<T>(node)))
    return elems
  }

  /**
   * Returns the first element under `parent` (the global document by
   * default) matching the given selector, or `null` when nothing matches.
   */
  export function findOne<T extends Dom = Dom>(
    selectors: string,
    parent: Element | Document = Global.document,
  ) {
    return adopt<T>(parent.querySelector(selectors))
  }
}
the_stack
import { IndexTransformer, PrimaryKeyTransformer } from '@aws-amplify/graphql-index-transformer'; import { ModelTransformer } from '@aws-amplify/graphql-model-transformer'; import { BelongsToTransformer, HasManyTransformer, HasOneTransformer, ManyToManyTransformer, } from '@aws-amplify/graphql-relational-transformer'; import { AuthTransformer } from '@aws-amplify/graphql-auth-transformer'; import { GraphQLTransform } from '@aws-amplify/graphql-transformer-core'; import { ResourceConstants } from 'graphql-transformer-common'; import { CloudFormationClient } from '../CloudFormationClient'; import { Output } from 'aws-sdk/clients/cloudformation'; import { GraphQLClient } from '../GraphQLClient'; import { cleanupStackAfterTest, deploy } from '../deployNestedStacks'; import { S3Client } from '../S3Client'; import { S3, CognitoIdentityServiceProvider as CognitoClient } from 'aws-sdk'; import { default as moment } from 'moment'; import { addUserToGroup, authenticateUser, configureAmplify, createGroup, createUserPool, createUserPoolClient, signupUser, } from '../cognitoUtils'; // to deal with bug in cognito-identity-js (global as any).fetch = require('node-fetch'); jest.setTimeout(2000000); const cf = new CloudFormationClient('us-west-2'); const customS3Client = new S3Client('us-west-2'); const awsS3Client = new S3({ region: 'us-west-2' }); const cognitoClient = new CognitoClient({ apiVersion: '2016-04-19', region: 'us-west-2' }); const BUILD_TIMESTAMP = moment().format('YYYYMMDDHHmmss'); const STACK_NAME = `RelationalAuthV2TransformersTest-${BUILD_TIMESTAMP}`; const BUCKET_NAME = `appsync-relational-auth-transformer-test-${BUILD_TIMESTAMP}`; const LOCAL_FS_BUILD_DIR = '/tmp/relational_auth_transformer_tests/'; const S3_ROOT_DIR_KEY = 'deployments'; let GRAPHQL_ENDPOINT = undefined; /** * Client 1 is logged in and is a member of the Admin group. */ let GRAPHQL_CLIENT_1 = undefined; /** * Client 2 is logged in and is a member of the Devs group. 
*/ let GRAPHQL_CLIENT_2 = undefined; /** * Client 3 is logged in and has no group memberships. */ let GRAPHQL_CLIENT_3 = undefined; let USER_POOL_ID = undefined; const USERNAME1 = 'user1@test.com'; const USERNAME2 = 'user2@test.com'; const USERNAME3 = 'user3@test.com'; const TMP_PASSWORD = 'Password123!'; const REAL_PASSWORD = 'Password1234!'; const ADMIN_GROUP_NAME = 'Admin'; const DEVS_GROUP_NAME = 'Devs'; const PARTICIPANT_GROUP_NAME = 'Participant'; const WATCHER_GROUP_NAME = 'Watcher'; function outputValueSelector(key: string) { return (outputs: Output[]) => { const output = outputs.find((o: Output) => o.OutputKey === key); return output ? output.OutputValue : null; }; } beforeAll(async () => { const validSchema = ` type Post @model @auth(rules: [{allow: owner}]) { id: ID! title: String! author: User @belongsTo(fields: ["owner"]) owner: ID! @index(name: "byOwner", sortKeyFields: ["id"]) } type User @model @auth(rules: [{ allow: owner }]) { id: ID! posts: [Post] @hasMany(indexName: "byOwner", fields: ["id"]) } type FieldProtected @model @auth(rules: [{ allow: private }, { allow: owner, operations: [read] }]) { id: ID! owner: String ownerOnly: String @auth(rules: [{allow: owner}]) } type OpenTopLevel @model @auth(rules: [{allow: private}]) { id: ID! name: String owner: String protected: [ConnectionProtected] @hasMany(indexName: "byTopLevel", fields: ["id"]) } type ConnectionProtected @model(queries: null) @auth(rules: [{allow: owner}]) { id: ID! name: String owner: String topLevelID: ID! 
@index(name: "byTopLevel", sortKeyFields: ["id"]) topLevel: OpenTopLevel @belongsTo(fields: ["topLevelID"]) }`; let out; try { const modelTransformer = new ModelTransformer(); const indexTransformer = new IndexTransformer(); const hasOneTransformer = new HasOneTransformer(); const authTransformer = new AuthTransformer(); const transformer = new GraphQLTransform({ authConfig: { defaultAuthentication: { authenticationType: 'AMAZON_COGNITO_USER_POOLS', }, additionalAuthenticationProviders: [], }, transformers: [ modelTransformer, new PrimaryKeyTransformer(), indexTransformer, hasOneTransformer, new HasManyTransformer(), new BelongsToTransformer(), new ManyToManyTransformer(modelTransformer, indexTransformer, hasOneTransformer, authTransformer), authTransformer, ], }); out = transformer.transform(validSchema); } catch (e) { console.error(`Failed to transform schema: ${e}`); expect(true).toEqual(false); } try { await awsS3Client .createBucket({ Bucket: BUCKET_NAME, }) .promise(); } catch (e) { console.error(`Failed to create S3 bucket: ${e}`); expect(true).toEqual(false); } const userPoolResponse = await createUserPool(cognitoClient, `UserPool${STACK_NAME}`); USER_POOL_ID = userPoolResponse.UserPool.Id; const userPoolClientResponse = await createUserPoolClient(cognitoClient, USER_POOL_ID, `UserPool${STACK_NAME}`); const userPoolClientId = userPoolClientResponse.UserPoolClient.ClientId; try { const finishedStack = await deploy( customS3Client, cf, STACK_NAME, out, { AuthCognitoUserPoolId: USER_POOL_ID }, LOCAL_FS_BUILD_DIR, BUCKET_NAME, S3_ROOT_DIR_KEY, BUILD_TIMESTAMP, ); expect(finishedStack).toBeDefined(); const getApiEndpoint = outputValueSelector(ResourceConstants.OUTPUTS.GraphQLAPIEndpointOutput); const getApiKey = outputValueSelector(ResourceConstants.OUTPUTS.GraphQLAPIApiKeyOutput); const apiKey = getApiKey(finishedStack.Outputs); GRAPHQL_ENDPOINT = getApiEndpoint(finishedStack.Outputs); expect(apiKey).not.toBeTruthy(); // Verify we have all the details 
expect(GRAPHQL_ENDPOINT).toBeTruthy(); expect(USER_POOL_ID).toBeTruthy(); expect(userPoolClientId).toBeTruthy(); // Configure Amplify, create users, and sign in. configureAmplify(USER_POOL_ID, userPoolClientId); await signupUser(USER_POOL_ID, USERNAME1, TMP_PASSWORD); await signupUser(USER_POOL_ID, USERNAME2, TMP_PASSWORD); await signupUser(USER_POOL_ID, USERNAME3, TMP_PASSWORD); await createGroup(USER_POOL_ID, ADMIN_GROUP_NAME); await createGroup(USER_POOL_ID, PARTICIPANT_GROUP_NAME); await createGroup(USER_POOL_ID, WATCHER_GROUP_NAME); await createGroup(USER_POOL_ID, DEVS_GROUP_NAME); await addUserToGroup(ADMIN_GROUP_NAME, USERNAME1, USER_POOL_ID); await addUserToGroup(PARTICIPANT_GROUP_NAME, USERNAME1, USER_POOL_ID); await addUserToGroup(WATCHER_GROUP_NAME, USERNAME1, USER_POOL_ID); await addUserToGroup(DEVS_GROUP_NAME, USERNAME2, USER_POOL_ID); const authResAfterGroup: any = await authenticateUser(USERNAME1, TMP_PASSWORD, REAL_PASSWORD); const idToken = authResAfterGroup.getIdToken().getJwtToken(); GRAPHQL_CLIENT_1 = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: idToken }); const authRes2AfterGroup: any = await authenticateUser(USERNAME2, TMP_PASSWORD, REAL_PASSWORD); const idToken2 = authRes2AfterGroup.getIdToken().getJwtToken(); GRAPHQL_CLIENT_2 = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: idToken2 }); const authRes3: any = await authenticateUser(USERNAME3, TMP_PASSWORD, REAL_PASSWORD); const idToken3 = authRes3.getIdToken().getJwtToken(); GRAPHQL_CLIENT_3 = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: idToken3 }); // Wait for any propagation to avoid random // "The security token included in the request is invalid" errors await new Promise<void>(res => setTimeout(() => res(), 5000)); } catch (e) { console.error(e); expect(true).toEqual(false); } }); afterAll(async () => { await cleanupStackAfterTest(BUCKET_NAME, STACK_NAME, cf, { cognitoClient, userPoolId: USER_POOL_ID }); }); /** * Test queries below */ test('Test creating a post 
and immediately view it via the User.posts connection.', async () => { const createUser1 = await GRAPHQL_CLIENT_1.query( `mutation { createUser(input: { id: "user1@test.com" }) { id } }`, {}, ); expect(createUser1.data.createUser.id).toEqual('user1@test.com'); const response = await GRAPHQL_CLIENT_1.query( `mutation { createPost(input: { title: "Hello, World!", owner: "user1@test.com" }) { id title owner } }`, {}, ); expect(response.data.createPost.id).toBeDefined(); expect(response.data.createPost.title).toEqual('Hello, World!'); expect(response.data.createPost.owner).toBeDefined(); const getResponse = await GRAPHQL_CLIENT_1.query( `query { getUser(id: "user1@test.com") { posts { items { id title owner author { id } } } } }`, {}, ); expect(getResponse.data.getUser.posts.items[0].id).toBeDefined(); expect(getResponse.data.getUser.posts.items[0].title).toEqual('Hello, World!'); expect(getResponse.data.getUser.posts.items[0].owner).toEqual('user1@test.com'); expect(getResponse.data.getUser.posts.items[0].author.id).toEqual('user1@test.com'); }); test('Testing reading an owner protected field as a non owner', async () => { const response1 = await GRAPHQL_CLIENT_1.query( `mutation { createFieldProtected(input: { id: "1", owner: "${USERNAME1}", ownerOnly: "owner-protected" }) { id owner ownerOnly } }`, {}, ); expect(response1.data.createFieldProtected.id).toEqual('1'); expect(response1.data.createFieldProtected.owner).toEqual(USERNAME1); expect(response1.data.createFieldProtected.ownerOnly).toEqual(null); const response2 = await GRAPHQL_CLIENT_2.query( `query { getFieldProtected(id: "1") { id owner ownerOnly } }`, {}, ); expect(response2.data.getFieldProtected.ownerOnly).toBeNull(); expect(response2.errors).toHaveLength(1); const response3 = await GRAPHQL_CLIENT_1.query( `query { getFieldProtected(id: "1") { id owner ownerOnly } }`, {}, ); expect(response3.data.getFieldProtected.id).toEqual('1'); expect(response3.data.getFieldProtected.owner).toEqual(USERNAME1); 
expect(response3.data.getFieldProtected.ownerOnly).toEqual('owner-protected'); }); test('Test that @connection resolvers respect @model read operations.', async () => { const response1 = await GRAPHQL_CLIENT_1.query( `mutation { createOpenTopLevel(input: { id: "1", owner: "${USERNAME1}", name: "open" }) { id owner name } }`, {}, ); expect(response1.data.createOpenTopLevel.id).toEqual('1'); expect(response1.data.createOpenTopLevel.owner).toEqual(USERNAME1); expect(response1.data.createOpenTopLevel.name).toEqual('open'); const response2 = await GRAPHQL_CLIENT_2.query( `mutation { createConnectionProtected(input: { id: "1", owner: "${USERNAME2}", name: "closed", topLevelID: "1" }) { id owner name topLevelID } }`, {}, ); expect(response2.data.createConnectionProtected.id).toEqual('1'); expect(response2.data.createConnectionProtected.owner).toEqual(USERNAME2); expect(response2.data.createConnectionProtected.name).toEqual('closed'); const response3 = await GRAPHQL_CLIENT_1.query( `query { getOpenTopLevel(id: "1") { id protected { items { id name owner } } } }`, {}, ); expect(response3.data.getOpenTopLevel.id).toEqual('1'); expect(response3.data.getOpenTopLevel.protected.items).toHaveLength(0); const response4 = await GRAPHQL_CLIENT_2.query( `query { getOpenTopLevel(id: "1") { id protected { items { id name owner } } } }`, {}, ); expect(response4.data.getOpenTopLevel.id).toEqual('1'); expect(response4.data.getOpenTopLevel.protected.items).toHaveLength(1); }); // Per field auth in mutations test('Test that owners cannot set the field of a FieldProtected object unless authorized.', async () => { const response1 = await GRAPHQL_CLIENT_1.query( `mutation { createFieldProtected(input: { id: "2", owner: "${USERNAME1}", ownerOnly: "owner-protected" }) { id owner ownerOnly } }`, {}, ); expect(response1.data.createFieldProtected.id).toEqual('2'); expect(response1.data.createFieldProtected.owner).toEqual(USERNAME1); 
expect(response1.data.createFieldProtected.ownerOnly).toEqual(null); const response2 = await GRAPHQL_CLIENT_1.query( `mutation { createFieldProtected(input: { id: "3", owner: "${USERNAME2}", ownerOnly: "owner-protected" }) { id owner ownerOnly } }`, {}, ); expect(response2.data.createFieldProtected).toBeNull(); expect(response2.errors).toHaveLength(1); // The owner auth rule is on ownerOnly. Omitting the "ownerOnly" field will // not trigger the @auth check const response3 = await GRAPHQL_CLIENT_1.query( `mutation { createFieldProtected(input: { id: "4", owner: "${USERNAME2}" }) { id owner ownerOnly } }`, {}, ); expect(response3.data.createFieldProtected.id).toEqual('4'); expect(response3.data.createFieldProtected.owner).toEqual(USERNAME2); // The length is one because the 'ownerOnly' field is protected on reads. // Since the caller is not the owner this will throw after the mutation succeeds // and return partial results. expect(response3.errors).toHaveLength(1); }); test('Test that owners cannot update the field of a FieldProtected object unless authorized.', async () => { const response1 = await GRAPHQL_CLIENT_1.query( `mutation { createFieldProtected(input: { owner: "${USERNAME1}", ownerOnly: "owner-protected" }) { id owner ownerOnly } }`, {}, ); expect(response1.data.createFieldProtected.id).not.toBeNull(); expect(response1.data.createFieldProtected.owner).toEqual(USERNAME1); expect(response1.data.createFieldProtected.ownerOnly).toEqual(null); const response2 = await GRAPHQL_CLIENT_2.query( `mutation { updateFieldProtected(input: { id: "${response1.data.createFieldProtected.id}", ownerOnly: "owner2-protected" }) { id owner ownerOnly } }`, {}, ); expect(response2.data.updateFieldProtected).toBeNull(); expect(response2.errors).toHaveLength(1); // The auth rule is on ownerOnly. 
Omitting the "ownerOnly" field will // not trigger the @auth check const response3 = await GRAPHQL_CLIENT_1.query( `mutation { updateFieldProtected(input: { id: "${response1.data.createFieldProtected.id}", ownerOnly: "updated" }) { id owner ownerOnly } }`, {}, ); const resposne3ID = response3.data.updateFieldProtected.id; expect(resposne3ID).toEqual(response1.data.createFieldProtected.id); expect(response3.data.updateFieldProtected.owner).toEqual(USERNAME1); const response3query = await GRAPHQL_CLIENT_1.query(`query getMake1 { getFieldProtected(id: "${resposne3ID}"){ id owner ownerOnly } }`); expect(response3query.data.getFieldProtected.ownerOnly).toEqual('updated'); // This request should succeed since we are not updating the protected field. const response4 = await GRAPHQL_CLIENT_3.query( `mutation { updateFieldProtected(input: { id: "${response1.data.createFieldProtected.id}", owner: "${USERNAME3}" }) { id owner ownerOnly } }`, {}, ); expect(response4.data.updateFieldProtected.id).toEqual(response1.data.createFieldProtected.id); expect(response4.data.updateFieldProtected.owner).toEqual(USERNAME3); expect(response4.data.updateFieldProtected.ownerOnly).toBeNull(); const response5 = await GRAPHQL_CLIENT_3.query( `query { getFieldProtected( id: "${response1.data.createFieldProtected.id}" ) { id owner ownerOnly } }`, {}, ); expect(response5.data.getFieldProtected.ownerOnly).toEqual('updated'); });
the_stack
import { ControlsType } from "../enums/ControlsType"; import { DisplayMode } from "../enums/DisplayMode"; import { Material } from "../enums/Material"; import { Orientation } from "../enums/Orientation"; import { Units } from "../enums/Units"; import { AlCamera, AlEdge } from "../interfaces"; import { AlAngle } from "../interfaces/AlAngle"; import { AlNode } from "../interfaces/AlNode"; export interface NullAction { type: TypeKeys.NULL; } // Keep this type updated with each known action export type ActionTypes = | NullAction | AppClearAnglesAction | AppClearEdgesAction | AppClearNodesAction | AppDeleteAngleAction | AppDeleteEdgeAction | AppDeleteNodeAction | AppSelectAngleAction | AppSelectEdgeAction | AppSelectNodeAction | AppSetAngleAction | AppSetBoundingBoxEnabledAction | AppSetCameraAction | AppSetControlsEnabledAction | AppSetControlsTypeAction | AppSetDisplayModeAction | AppSetEdgeAction | AppSetGraphEnabledAction | AppSetMaterialAction | AppSetNodeAction | AppSetOrientationAction | AppSetSceneDistanceAction | AppSetSlicesIndexAction | AppSetSlicesMaxIndexAction | AppSetSrcAction | AppSetSrcLoadedAction | AppSetUnitsAction | AppSetVolumeStepsAction | AppSetVolumeWindowCenterAction | AppSetVolumeWindowWidthAction; export enum TypeKeys { NULL = "NULL", ERROR = "ERROR", APP_CLEAR_ANGLES = "APP_LOAD_ANGLES", APP_CLEAR_EDGES = "APP_LOAD_EDGES", APP_CLEAR_NODES = "APP_LOAD_NODES", APP_DELETE_ANGLE = "APP_DELETE_ANGLE", APP_DELETE_EDGE = "APP_DELETE_EDGE", APP_DELETE_NODE = "APP_DELETE_NODE", APP_SELECT_ANGLE = "APP_SELECT_ANGLE", APP_SELECT_EDGE = "APP_SELECT_EDGE", APP_SELECT_NODE = "APP_SELECT_NODE", APP_SET_ANGLE = "APP_SET_ANGLE", APP_SET_BOUNDINGBOX_ENABLED = "APP_SET_BOUNDINGBOX_ENABLED", APP_SET_CAMERA = "APP_SET_CAMERA", APP_SET_CONTROLS_ENABLED = "APP_SET_CONTROLS_ENABLED", APP_SET_CONTROLS_TYPE = "APP_SET_CONTROLS_TYPE", APP_SET_DISPLAY_MODE = "APP_SET_DISPLAY_MODE", APP_SET_EDGE = "APP_SET_EDGE", APP_SET_MATERIAL = "APP_SET_MATERIAL", APP_SET_NODE = 
"APP_SET_NODE", APP_SET_NODES_ENABLED = "APP_SET_NODES_ENABLED", APP_SET_ORIENTATION = "APP_SET_ORIENTATION", APP_SET_SCENE_DISTANCE = "APP_SET_SCENE_DISTANCE", APP_SET_SLICES_INDEX = "APP_SET_SLICES_INDEX", APP_SET_SLICES_MAX_INDEX = "APP_SET_SLICES_MAX_INDEX", APP_SET_SLICES_WINDOW_CENTER = "APP_SET_SLICES_WINDOW_CENTER", APP_SET_SLICES_WINDOW_WIDTH = "APP_SET_SLICES_WINDOW_WIDTH", APP_SET_SRC = "APP_SET_SRC", APP_SET_SRC_LOADED = "APP_SET_SRC_LOADED", APP_SET_UNITS = "APP_SET_UNITS", APP_SET_VOLUME_STEPS = "APP_SET_VOLUME_STEPS", APP_SET_VOLUME_WINDOW_CENTER = "APP_SET_VOLUME_WINDOW_CENTER", APP_SET_VOLUME_WINDOW_WIDTH = "APP_SET_VOLUME_WINDOW_WIDTH" } //#region src export interface AppSetSrcAction { type: TypeKeys.APP_SET_SRC; payload: [string, DisplayMode | undefined]; } export const appSetSrc = (payload: [string, DisplayMode | undefined]) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_SRC, payload }); }; export interface AppSetSrcLoadedAction { type: TypeKeys.APP_SET_SRC_LOADED; payload: boolean; } export const appSetSrcLoaded = (payload: boolean) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_SRC_LOADED, payload }); }; //#endregion //#region nodes export interface AppSetNodeAction { type: TypeKeys.APP_SET_NODE; payload: [string, AlNode]; } export const appSetNode = (payload: [string, AlNode]) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_NODE, payload }); }; export interface AppDeleteNodeAction { type: TypeKeys.APP_DELETE_NODE; payload: string; } export const appDeleteNode = (payload: string) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_DELETE_NODE, payload }); }; export interface AppSelectNodeAction { type: TypeKeys.APP_SELECT_NODE; payload: string; } export const appSelectNode = (payload: string | null) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SELECT_NODE, payload }); }; export interface 
AppClearNodesAction { type: TypeKeys.APP_CLEAR_NODES; payload: void; } export const appClearNodes = (payload: void) => async (dispatch, _getState) => { return dispatch({ type: TypeKeys.APP_CLEAR_NODES, payload }); }; //#endregion //#region edges export interface AppSetEdgeAction { type: TypeKeys.APP_SET_EDGE; payload: [string, AlEdge]; } export const appSetEdge = (payload: [string, AlEdge]) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_EDGE, payload }); }; export interface AppDeleteEdgeAction { type: TypeKeys.APP_DELETE_EDGE; payload: string; } export const appDeleteEdge = (payload: string) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_DELETE_EDGE, payload }); }; export interface AppSelectEdgeAction { type: TypeKeys.APP_SELECT_EDGE; payload: string; } export const appSelectEdge = (payload: string | null) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SELECT_EDGE, payload }); }; export interface AppClearEdgesAction { type: TypeKeys.APP_CLEAR_EDGES; payload: void; } export const appClearEdges = (payload: void) => async (dispatch, _getState) => { return dispatch({ type: TypeKeys.APP_CLEAR_EDGES, payload }); }; //#endregion //#region angles export interface AppSetAngleAction { type: TypeKeys.APP_SET_ANGLE; payload: [string, AlAngle]; } export const appSetAngle = (payload: [string, AlAngle]) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_ANGLE, payload }); }; export interface AppDeleteAngleAction { type: TypeKeys.APP_DELETE_ANGLE; payload: string; } export const appDeleteAngle = (payload: string) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_DELETE_ANGLE, payload }); }; export interface AppSelectAngleAction { type: TypeKeys.APP_SELECT_ANGLE; payload: string; } export const appSelectAngle = (payload: string | null) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SELECT_ANGLE, payload }); }; 
export interface AppClearAnglesAction { type: TypeKeys.APP_CLEAR_ANGLES; payload: void; } export const appClearAngles = (payload: void) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_CLEAR_ANGLES, payload }); }; //#endregion //#region control panel export interface AppSetBoundingBoxEnabledAction { type: TypeKeys.APP_SET_BOUNDINGBOX_ENABLED; payload: boolean; } export const appSetBoundingBoxEnabled = (payload: boolean) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_BOUNDINGBOX_ENABLED, payload }); }; export interface AppSetDisplayModeAction { type: TypeKeys.APP_SET_DISPLAY_MODE; payload: DisplayMode; } export const appSetDisplayMode = (payload: DisplayMode) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_DISPLAY_MODE, payload }); }; export interface AppSetGraphEnabledAction { type: TypeKeys.APP_SET_NODES_ENABLED; payload: boolean; } export const appSetGraphEnabled = (payload: boolean) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_NODES_ENABLED, payload }); }; export interface AppSetMaterialAction { type: TypeKeys.APP_SET_MATERIAL; payload: Material; } export const appSetMaterial = (payload: Material) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_MATERIAL, payload }); }; export interface AppSetUnitsAction { type: TypeKeys.APP_SET_UNITS; payload: Units; } export const appSetUnits = (payload: Units) => async (dispatch, _getState) => { return dispatch({ type: TypeKeys.APP_SET_UNITS, payload }); }; //#endregion //#region volumes export interface AppSetOrientationAction { type: TypeKeys.APP_SET_ORIENTATION; payload: Orientation; } export const appSetOrientation = (payload: Orientation) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_ORIENTATION, payload }); }; export interface AppSetSceneDistanceAction { type: TypeKeys.APP_SET_SCENE_DISTANCE; payload: number; } export const 
appSetSceneDistance = (payload: number) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_SCENE_DISTANCE, payload }); }; export interface AppSetSlicesIndexAction { type: TypeKeys.APP_SET_SLICES_INDEX; payload: number; } export const appSetSlicesIndex = (payload: number) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_SLICES_INDEX, payload }); }; export interface AppSetSlicesMaxIndexAction { type: TypeKeys.APP_SET_SLICES_MAX_INDEX; payload: number; } export const appSetSlicesMaxIndex = (payload: number) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_SLICES_MAX_INDEX, payload }); }; export interface AppSetVolumeStepsAction { type: TypeKeys.APP_SET_VOLUME_STEPS; payload: number; } export const appSetVolumeSteps = (payload: number) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_VOLUME_STEPS, payload }); }; export interface AppSetVolumeWindowWidthAction { type: TypeKeys.APP_SET_VOLUME_WINDOW_WIDTH; payload: number; } export const appSetVolumeWindowWidth = (payload: number) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_VOLUME_WINDOW_WIDTH, payload }); }; export interface AppSetVolumeWindowCenterAction { type: TypeKeys.APP_SET_VOLUME_WINDOW_CENTER; payload: number; } export const appSetVolumeWindowCenter = (payload: number) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_VOLUME_WINDOW_CENTER, payload }); }; //#endregion //#region camera export interface AppSetCameraAction { type: TypeKeys.APP_SET_CAMERA; payload: AlCamera; } export const appSetCamera = (payload: AlCamera) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_CAMERA, payload }); }; export interface AppSetControlsEnabledAction { type: TypeKeys.APP_SET_CONTROLS_ENABLED; payload: boolean; } export const appSetControlsEnabled = (payload: boolean) => async ( dispatch, _getState ) => { return dispatch({ 
type: TypeKeys.APP_SET_CONTROLS_ENABLED, payload }); }; export interface AppSetControlsTypeAction { type: TypeKeys.APP_SET_CONTROLS_TYPE; payload: ControlsType; } export const appSetControlsType = (payload: ControlsType) => async ( dispatch, _getState ) => { return dispatch({ type: TypeKeys.APP_SET_CONTROLS_TYPE, payload }); }; //#endregion
the_stack
module core { /** * 声音控制 * 本类为游戏音乐及音效管理类,并依赖于音效配置表的配置,使用本类先应先熟悉音效配置表的相关字段说明。 */ export class SoundUtils { /** * 声音字典 */ private m_sounds: Dictionary<egret.Sound>; /** * 加载列表 */ private m_loadings: Dictionary<any> /** * 音频字典 */ private m_channels: Dictionary<egret.SoundChannel>; /** * 当前播放音效字典 */ private m_curEffect: Dictionary<egret.Sound>; /** * 当前播放背景音乐字典 */ private m_curBGM: Dictionary<egret.Sound>; /** * 回调字典 */ private m_callbacks: Dictionary<() => void>; /** * 播放通道 同一通道音乐覆盖 */ private m_playChannel: Dictionary<egret.Sound>; private m_BGMVolume: number = 1; private m_effectVolume: number = 1; /** * 音效开关 */ private m_effectEnable: boolean = false; /** * 音乐开关 */ private m_musicEnable: boolean = false; private static s_instance: SoundUtils; public static getInstance(): SoundUtils { if (SoundUtils.s_instance == null) { SoundUtils.s_instance = new SoundUtils(); } return SoundUtils.s_instance; } public constructor() { this.m_sounds = new Dictionary<egret.Sound>(); this.m_loadings = new Dictionary<any>(); this.m_channels = new Dictionary<egret.SoundChannel>(); this.m_curBGM = new Dictionary<egret.Sound>(); this.m_curEffect = new Dictionary<egret.Sound>(); this.m_callbacks = new Dictionary<() => void>(); this.m_playChannel = new Dictionary<egret.Sound>(); let value: string = egret.localStorage.getItem('soundEffect'); if (value) { this.m_effectVolume = parseInt(value); } value = egret.localStorage.getItem('soundBGM'); if (value) { this.m_BGMVolume = parseInt(value); } } /** * 音乐播放完成 * @param {egret.Event} event */ private onPlayComplete(event: egret.Event): void { let channel: egret.SoundChannel = event.currentTarget; let callback: () => void = this.m_callbacks.get(channel.hashCode); if (callback) { callback(); } channel['count']++; if (channel['count'] >= channel['maxCount']) { this.stop(channel['owner']); } } /** * 停止音乐通道播放 * @param {number} coverChannel */ private stopSound(coverChannel: number): void { let sound: egret.Sound = this.m_playChannel.get(coverChannel); 
if (sound) { this.stop(sound); } } /** * 停止播放音乐 * @param {egret.Sound} sound */ private stop(sound: egret.Sound): void { let channel: egret.SoundChannel = this.m_channels.get(sound.hashCode); if (channel) { egret.setTimeout(() => { if (channel) { channel.stop(); if (channel.hasEventListener(egret.Event.SOUND_COMPLETE)) { channel.removeEventListener(egret.Event.SOUND_COMPLETE, this.onPlayComplete, this); } this.m_callbacks.remove(channel.hashCode); this.m_channels.remove(sound.hashCode); } }, this, 500); } this.m_playChannel.remove(sound['cover']); if (sound.type == egret.Sound.EFFECT) { this.m_curEffect.remove(this.m_sounds.getKeyByValue(sound)); } else { this.m_curBGM.remove(this.m_sounds.getKeyByValue(sound)); } } /** * 停止所有音乐 */ private stopAllMusic(): void { let sounds: egret.Sound[] = this.m_curBGM.values; for (let i: number = 0, iLen: number = sounds.length; i < iLen; i++) { let sound: egret.Sound = sounds[i]; if (sound) { this.stop(sound); } } } /** * 停止所有音效 */ private stopAllEffect(): void { let sounds: egret.Sound[] = this.m_curEffect.values; for (let i: number = 0, iLen: number = sounds.length; i < iLen; i++) { let sound: egret.Sound = sounds[i]; if (sound) { this.stop(sound); } } } /** * 播放音乐或音效 * @param {number} id 声音配置ID * @param {number=1} loop 播放次数,默认值是 1,循环播放。 大于 0 为播放次数,如 1 为播放 1 次;小于等于 0,为循环播放。 * @param {()=>void} onPlayComplete? 播放完毕回调 */ public playSound(id: number, loop: number = 1, onPlayComplete?: () => void): void { let config: SoundConfig = Config.getConfig(SoundConfig).get(id); if (config) { if (this.isSoundPlaying(id) && config.soundType == 1) { return; } this.stopSound(config.coverKey); } else { egret.warn(`ID为${id}的音效在SoundConfig中不存在`); return; } if ((config.soundType == 0 && !this.m_effectEnable) || (config.soundType != 0 && !this.m_musicEnable)) { return; } let sound: egret.Sound = this.m_sounds.get(id); if (!sound) { sound = RES.getRes(config.soundName); if (sound) { sound.type = config.soundType == 0 ? 
egret.Sound.EFFECT : egret.Sound.MUSIC; this.m_sounds.add(id, sound); } else { let data = { config: config, loop: loop, callback: onPlayComplete } if (config.soundType != 0) { this.m_loadings.add(id, data); } RES.getResAsync(config.soundName, function (): RES.GetResAsyncCallback { return function (value, key): void { egret.log(`名称为${config.soundName}的音效资源加载完成`); if (this.m_loadings.remove(id)) { this.playSound(id, loop, onPlayComplete); } } } (), this); } } if (sound) { if (sound.type == egret.Sound.EFFECT) { this.m_curEffect.add(id, sound); } else { this.m_curBGM.add(id, sound); } this.m_playChannel.add(config.coverKey, sound); sound['cover'] = config.coverKey; let channel: egret.SoundChannel = sound.play(0, loop); if (channel) { channel['owner'] = sound; channel['maxCount'] = loop > 0 ? loop : Number.MAX_VALUE; channel['count'] = 0; if (sound.type == egret.Sound.EFFECT) { channel.volume = this.m_effectVolume; } else { channel.volume = this.m_BGMVolume; } if (onPlayComplete) { this.m_callbacks.add(channel.hashCode, onPlayComplete); } channel.addEventListener(egret.Event.SOUND_COMPLETE, this.onPlayComplete, this); this.m_channels.add(sound.hashCode, channel); } } else { egret.warn(`名称为${config.soundName}的音效资源不存在`); return; } } /** * 停止播放音乐 * @param id 声音ID */ public stopSoundByID(id: number): void { let sound: egret.Sound = this.m_sounds.get(id); if (sound) { this.stop(sound); this.m_loadings.remove(id); } } /** * 停止播放所有音乐及音效 */ public stopAllSound(): void { //停止所有音效 this.stopAllEffect(); //停止所有音乐 this.stopAllMusic(); if (this.m_loadings) { this.m_loadings.clear(); } } /** * 设置背景音乐音量 * @param {number} volume */ public setBGMValume(volume: number): void { egret.localStorage.setItem('soundBGM', volume.toString()); this.m_BGMVolume = volume; let sounds: egret.Sound[] = this.m_curBGM.values; for (let i: number = 0, iLen: number = sounds.length; i < iLen; i++) { let sound: egret.Sound = sounds[i]; if (sound) { let channel: egret.SoundChannel = 
this.m_channels.get(sound.hashCode); if (channel) { channel.volume = this.m_musicEnable ? volume : 0; } } } } /** * 得到背景音乐音量 * @return number */ public getBGMValue(): number { return this.m_BGMVolume; } /** * 设置音效音量 * @param {number} volume */ public setEffectValume(volume: number): void { egret.localStorage.setItem('soundEffect', volume.toString()); this.m_effectVolume = volume; let sounds: egret.Sound[] = this.m_curEffect.values; for (let i: number = 0, iLen: number = sounds.length; i < iLen; i++) { let sound: egret.Sound = sounds[i]; if (sound) { let channel: egret.SoundChannel = this.m_channels.get(sound.hashCode); if (channel) { channel.volume = this.m_effectEnable ? volume : 0; } } } } /** * 得到音效音量 * @returns number */ public getEffectValue(): number { return this.m_effectVolume; } /** * 开关音效 * @param {boolean} bool */ public setEffectEnable(bool: boolean): void { this.m_effectEnable = bool; this.setEffectValume(this.m_effectVolume); } /** * 开关音乐 * @param {boolean} bool */ public setMusicEnable(bool: boolean): void { this.m_musicEnable = bool; this.setBGMValume(this.m_BGMVolume); } /** * 检测ID是否播放 * @param {number} id */ public isSoundPlaying(id: number): boolean { let sound: egret.Sound = this.m_sounds.get(id); if (sound) { let channel: egret.SoundChannel = this.m_channels.get(sound.hashCode); if (channel) { return true; } } return false; } } }
the_stack
import { createBoardInfo } from "./boardinfobase";
import { IBoard } from "../boards";
import { hvqfs } from "../fs/hvqfs";
import { BoardType } from "../types";

// Board metadata for the (U) Mario Party 3 ROM: file indices, image indices,
// and byte offsets used to locate each board's data. The values here are
// ROM-layout facts; do not alter them without re-verifying against the ROM.

// Chilly Waters - (U) ROM
const MP3_CHILLY = createBoardInfo("MP3_CHILLY", {
  name: "Chilly Waters",
  canOverwrite: true,
  boardDefFile: 570,
  bgDir: 3,
  pauseBgDir: 5,
  str: {
    boardSelect: [
      [21, 30],
      [26, 17],
    ],
    boardGreeting: [24, 12],
    boardGreetingDuel: [24, 0],
    boardNames: [
      [49, 39],
      [83, 43],
      // [93, 23], This is the songs list, so we can leave it
    ],
  },
  img: {
    boardSelectImg: 72,
    splashLogoImg: 22,
    splashLogoTextImg: 28,
    pauseLogoImg: 125,
    gateImg: 354, // dir 19
  },
  sceneIndex: 0x48,
  mainfsEventFile: [19, 618],
  mainfsBoardFile: [19, 619],
  eventASMStart: 0x14AF0, // 0x00330000 // ballpark, but this is wrong -> // 0x0031E814, // is this 0x8011A490 ?
  eventASMEnd: 0x16BEC, // 0x003320FC, 0x8011C58C
  // spaceEventsStartAddr: 0x0011E718,
  // spaceEventsStartOffset: 0x00334288,
  // spaceEventsEndOffset: 0x18F18, // 0x00334428,
  spaceEventTables: [
    { upper: 0x26A8, lower: 0x26B0 }, // 0x80108048, 0x80108050, table 0x8011E2CC
    { upper: 0x26B4, lower: 0x26BC }, // 0x80108054, 0x8010805C, table 0x8011E718
    // { upper: 0x31DBD0, lower: 0x31DBD8 }, // 0x80108060, 0x80108068
    // This is not a table actually, it is related to the happening spaces
    { upper: 0x26CC, lower: 0x26D4 }, // 0x8010806C, 0x80108074, table 0x8011E344
    // A table, but if we remove it Poison Shrooms break and probably other things
    // { upper: 0x31DBE8, lower: 0x31DBF0 }, // 0x80108078, 0x80108080, table 0x8011E4D8
  ],
  starSpaceArrOffset: [0x17910, 0x17980], // [0x00332E20, 0x00332E90] // 0x8011D2B0, 0x8011D320
  starSpaceCount: 8,
  toadSpaceArrOffset: [0x17920, 0x179DC], // [0x00332E30, 0x00332EEC] // 0x8011D2C0, 0x8011D37C
  bankArrOffset: [0x17B64], // [0x00333074] // 0x8011D504
  bankCoinArrOffset: [0x17A00], // [0x00332F10] // 0x8011D3A0
  bankCount: 2,
  itemShopArrOffset: [0x17B68], // [0x00333078] // 0x8011D508
  itemShopCount: 2,
  booArrOffset: [0x179FC], // [0x00332F0C] // 0x8011D39C
  booCount: 1,
  gateNeighborsOffset: [0x179D4], // [0x00332EE4] // 0x8011D374
  gateArrOffset: [0x17A7C], // [0x00332F8C] // 0x8011D41C
  gateCount: 2,
  arrowRotStartOffset: 0x2398, // 0x0031D8A8 // 0x80107D38
  arrowRotEndOffset: 0x2440, // 0x0031D950 // 0x80107DDC
  audioIndexOffset: 0x2682, // 0x0031DB92 // 0x80108022
  onLoad: function(board: IBoard) {
    // Also capture the large scene background (the directory after bgDir).
    board.otherbg.largescene = hvqfs.readBackground(MP3_CHILLY.bgDir + 1).src;
  },
});

// Deep Bloober Sea - (U) ROM
const MP3_BLOOBER = createBoardInfo("MP3_BLOOBER", {
  name: "Deep Bloober Sea",
  canOverwrite: true,
  boardDefFile: 571,
  bgDir: 6,
  pauseBgDir: 8,
  str: {
    boardSelect: [
      [21, 31],
      [26, 18],
    ],
    boardGreeting: [24, 13],
    boardGreetingDuel: [24, 2],
    boardNames: [
      [49, 40],
      [83, 44],
      // [93, 24], This is the songs list, so we can leave it
    ],
  },
  img: {
    boardSelectImg: 73,
    splashLogoImg: 23,
    splashLogoTextImg: 29,
    pauseLogoImg: 126,
    gateImg: 359, // dir 19
  },
  sceneIndex: 0x49,
  mainfsEventFile: [19, 620],
  mainfsBoardFile: [19, 621],
  onLoad: function(board: IBoard) {
    board.otherbg.largescene = hvqfs.readBackground(MP3_BLOOBER.bgDir + 1).src;
  },
});
// Works, but needs other values to parse right:
// spaceEventTables = [
//   { upper: 0x003377C0, lower: 0x003377C8 }, // 0x80107B80, 0x80107B88, table 0x8011D688
//   { upper: 0x003377CC, lower: 0x003377D4 }, // 0x80107B8C, 0x80107B94, table 0x8011D9CC
//   // 0x800F8D48 call in between
//   { upper: 0x003377E4, lower: 0x003377EC }, // 0x80107BA4, 0x80107BAC, table 0x8011D700
//   { upper: 0x003377F0, lower: 0x003377F8 }, // 0x80107BB0, 0x80107BB8, table 0x8011D894
// ];

// Spiny Desert - (U) ROM
const MP3_SPINY = createBoardInfo("MP3_SPINY", {
  name: "Spiny Desert",
  canOverwrite: true,
  boardDefFile: 572,
  bgDir: 9,
  pauseBgDir: 11,
  str: {
    boardSelect: [
      [21, 32],
      [26, 19],
    ],
    boardGreeting: [24, 14],
    boardGreetingDuel: [24, 4],
    boardNames: [
      [49, 41],
      [83, 45],
      // [93, 25], This is the songs list, so we can leave it
    ],
  },
  img: {
    boardSelectImg: 74,
    splashLogoImg: 24,
    splashLogoTextImg: 30,
    pauseLogoImg: 127,
    gateImg: 366, // dir 19
  },
  sceneIndex: 0x4A,
  mainfsEventFile: [19, 622],
  mainfsBoardFile: [19, 623],
  onLoad: function(board: IBoard) {
    board.otherbg.largescene = hvqfs.readBackground(MP3_SPINY.bgDir + 1).src;
  },
});

// Woody Woods - (U) ROM
const MP3_WOODY = createBoardInfo("MP3_WOODY", {
  name: "Woody Woods",
  canOverwrite: true,
  boardDefFile: 573,
  bgDir: 12,
  pauseBgDir: 14,
  str: {
    boardSelect: [
      [21, 33],
      [26, 20],
    ],
    boardGreeting: [24, 15],
    boardGreetingDuel: [24, 6],
    boardNames: [
      [49, 42],
      [83, 46],
      // [93, 26], This is the songs list, so we can leave it
    ],
  },
  img: {
    boardSelectImg: 75,
    splashLogoImg: 25,
    splashLogoTextImg: 31,
    pauseLogoImg: 128,
    gateImg: 373, // dir 19
  },
  sceneIndex: 0x4B,
  mainfsEventFile: [19, 624],
  mainfsBoardFile: [19, 625],
  onLoad: function(board: IBoard) {
    board.otherbg.largescene = hvqfs.readBackground(MP3_WOODY.bgDir + 1).src;
  },
});

// Creepy Cavern - (U) ROM
const MP3_CAVERN = createBoardInfo("MP3_CAVERN", {
  name: "Creepy Cavern",
  canOverwrite: true,
  boardDefFile: 574,
  bgDir: 15,
  pauseBgDir: 17,
  str: {
    boardSelect: [
      [21, 34],
      [26, 21],
    ],
    boardGreeting: [24, 16],
    boardGreetingDuel: [24, 8],
    boardNames: [
      [49, 43],
      [83, 47],
      // [93, 27], This is the songs list, so we can leave it
    ],
  },
  img: {
    boardSelectImg: 76,
    splashLogoImg: 26,
    splashLogoTextImg: 32,
    pauseLogoImg: 129,
    gateImg: 383, // dir 19
  },
  sceneIndex: 0x4C,
  mainfsEventFile: [19, 626],
  mainfsBoardFile: [19, 627],
  onLoad: function(board: IBoard) {
    board.otherbg.largescene = hvqfs.readBackground(MP3_CAVERN.bgDir + 1).src;
  },
});

// Waluigi's Land - (U) ROM
const MP3_WALUIGI = createBoardInfo("MP3_WALUIGI", {
  name: "Waluigi's Land",
  canOverwrite: true,
  boardDefFile: 575,
  bgDir: 18,
  pauseBgDir: 20,
  str: {
    boardSelect: [
      [21, 35],
      [26, 22],
    ],
    boardGreeting: [24, 17],
    boardGreetingDuel: [24, 10],
    boardNames: [
      [49, 44],
      [83, 48],
      // [93, 28], This is the songs list, so we can leave it
    ],
  },
  img: {
    boardSelectImg: 77,
    splashLogoImg: 27,
    splashLogoTextImg: 33,
    pauseLogoImg: 130,
    gateImg: 387, // dir 19
  },
  sceneIndex: 0x4D,
  mainfsEventFile: [19, 628],
  mainfsBoardFile: [19, 629],
  onLoad: function(board: IBoard) {
    board.otherbg.largescene = hvqfs.readBackground(MP3_WALUIGI.bgDir + 1).src;
  },
});

// Duel boards below are configured by property assignment rather than via the
// createBoardInfo options object.

// Gate Guy - (U) ROM
const MP3U_GATEGUY = createBoardInfo("MP3U_GATEGUY");
MP3U_GATEGUY.name = "Gate Guy";
MP3U_GATEGUY.type = BoardType.DUEL;
MP3U_GATEGUY.boardDefFile = 577;
MP3U_GATEGUY.bgDir = 24;
// MP3U_GATEGUY.str = {
//   boardSelect: [
//     [36, 21],
//     [41, 27],
//   ],
//   boardNames: [
//     [49, 45],
//     [39, 1],
//     [83, 49],
//   ],
// };
MP3U_GATEGUY.img = {
  boardSelectImg: 81,
  splashLogoImg: 34,
  splashLogoTextImg: 40,
  miniMapWithBg: 279, // dir 19
  miniMapDots: 280,
};
MP3U_GATEGUY.sceneIndex = 0x5B;
// MP3U_GATEGUY.spaceEventsStartAddr = 0x00118914;
// MP3U_GATEGUY.spaceEventsStartOffset = 0x003EBA04;
MP3U_GATEGUY.spaceEventTables = [
  // JAL 800EA46C
  { upper: 0x811C, lower: 0x8124 }, // 0x8010DABC, 0x8010DAC4, table 0x80118914
  { upper: 0x8128, lower: 0x8130 }, // 0x8010DAC8, 0x8010DAD0, table 0x80118DEC
];
MP3U_GATEGUY.onAfterOverwrite = function(board: IBoard) {
  // TODO Need this for duels?
  // This code (right inbetween 800EBA60 calls) sets up a function pointer for happening spaces.
  // Since we don't use any default events, we can overwrite it.
  // romView.setUint32(, 0);
  // romView.setUint32(, 0);
  // romView.setUint32(, 0);
  // TODO: Probably some stuff to NOP around 0x8010DA9C
};

// Arrowhead - (U) ROM
const MP3U_ARROWHEAD = createBoardInfo("MP3U_ARROWHEAD");
MP3U_ARROWHEAD.name = "Arrowhead";
MP3U_ARROWHEAD.type = BoardType.DUEL;
MP3U_ARROWHEAD.boardDefFile = 578;
MP3U_ARROWHEAD.bgDir = 25;
MP3U_ARROWHEAD.img = {
  boardSelectImg: 82,
  splashLogoImg: 35,
  splashLogoTextImg: 41,
  miniMapWithBg: 310, // dir 19
  miniMapDots: 311,
};

// Pipesqueak - (U) ROM
const MP3U_PIPESQUEAK = createBoardInfo("MP3U_PIPESQUEAK");
MP3U_PIPESQUEAK.name = "Pipesqueak";
MP3U_PIPESQUEAK.type = BoardType.DUEL;
MP3U_PIPESQUEAK.boardDefFile = 579;
MP3U_PIPESQUEAK.bgDir = 26;
MP3U_PIPESQUEAK.img = {
  boardSelectImg: 83,
  splashLogoImg: 36,
  splashLogoTextImg: 42,
  miniMapWithBg: 312, // dir 19
  miniMapDots: 313,
};

// Blowhard - (U) ROM
const MP3U_BLOWHARD = createBoardInfo("MP3U_BLOWHARD");
MP3U_BLOWHARD.name = "Blowhard";
MP3U_BLOWHARD.type = BoardType.DUEL;
MP3U_BLOWHARD.boardDefFile = 580;
MP3U_BLOWHARD.bgDir = 27;
MP3U_BLOWHARD.img = {
  boardSelectImg: 84,
  splashLogoImg: 37,
  splashLogoTextImg: 43,
  miniMapWithBg: 315, // dir 19
  miniMapDots: 316,
};

// Mr. Mover - (U) ROM
const MP3U_MRMOVER = createBoardInfo("MP3U_MRMOVER");
MP3U_MRMOVER.name = "Mr. Mover";
MP3U_MRMOVER.type = BoardType.DUEL;
MP3U_MRMOVER.boardDefFile = 581;
MP3U_MRMOVER.bgDir = 28;
MP3U_MRMOVER.img = {
  boardSelectImg: 85,
  splashLogoImg: 38,
  splashLogoTextImg: 44,
  miniMapWithBg: 317, // dir 19
  miniMapDots: 318,
};

// Backtrack - (U) ROM
const MP3U_BACKTRACK = createBoardInfo("MP3U_BACKTRACK");
MP3U_BACKTRACK.name = "Backtrack";
MP3U_BACKTRACK.type = BoardType.DUEL;
MP3U_BACKTRACK.boardDefFile = 582;
MP3U_BACKTRACK.bgDir = 29;
MP3U_BACKTRACK.img = {
  boardSelectImg: 86,
  splashLogoImg: 39,
  splashLogoTextImg: 45,
  miniMapWithBg: 319, // dir 19
  miniMapDots: 320,
};

/** Returns the board info entries for every known MP3 (U) board. */
export function getBoardInfos() {
  return [
    MP3_CHILLY,
    MP3_BLOOBER,
    MP3_SPINY,
    MP3_WOODY,
    MP3_CAVERN,
    MP3_WALUIGI,
    MP3U_GATEGUY,
    MP3U_ARROWHEAD,
    MP3U_PIPESQUEAK,
    MP3U_BLOWHARD,
    MP3U_MRMOVER,
    MP3U_BACKTRACK,
  ];
}

/*
{
  "name": "Training?",
  "fileNum": 576,
  "bgNum": 21,
}
{
  "name": "mystery",
  "fileNum": 583,
  "bgNum": 30,
},
];
*/
the_stack
import { Injectable, ɵConsole } from '@angular/core'; import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http'; import { BehaviorSubject } from 'rxjs'; import { environment } from '../../../environments/environment'; // firebase import * as firebase from 'firebase/app'; import 'firebase/messaging'; import 'firebase/database'; import 'firebase/auth'; // services // import { EventsService } from '../events-service'; import { MessagingAuthService } from '../abstract/messagingAuth.service'; import { Chat21Service } from './chat-service'; // models import { UserModel } from '../../models/user'; import { avatarPlaceholder, getColorBck } from '../../utils/utils-user'; import { AppStorageService } from '../abstract/app-storage.service'; // @Injectable({ providedIn: 'root' }) @Injectable() export class MQTTAuthService extends MessagingAuthService { // authStateChanged: BehaviorSubject<any>; // = new BehaviorSubject<any>([]); // authStateChanged: BehaviorSubject<any>; // = new BehaviorSubject<any>([]); // BehaviorSubject BSAuthStateChanged: BehaviorSubject<any>; BSSignOut: BehaviorSubject<any>; // private persistence: string; SERVER_BASE_URL: string; public token: any; public tiledeskToken: any; public user: any; private currentUser: any; private URL_TILEDESK_SIGNIN: string; private URL_TILEDESK_CREATE_CUSTOM_TOKEN: string; private URL_TILEDESK_SIGNIN_ANONYMOUSLY: string; private URL_TILEDESK_SIGNIN_WITH_CUSTOM_TOKEN: string; constructor( public http: HttpClient, public chat21Service: Chat21Service, public appStorage: AppStorageService ) { super(); console.log("chat21Service:", chat21Service) } /** * */ initialize() { this.SERVER_BASE_URL = this.getBaseUrl(); // this.URL_TILEDESK_SIGNIN = this.SERVER_BASE_URL + 'auth/signin'; // this.URL_TILEDESK_SIGNIN_ANONYMOUSLY = this.SERVER_BASE_URL + 'auth/signinAnonymously'; this.URL_TILEDESK_CREATE_CUSTOM_TOKEN = this.SERVER_BASE_URL + 'chat21/native/auth/createCustomToken'; // 
this.URL_TILEDESK_SIGNIN_WITH_CUSTOM_TOKEN = this.SERVER_BASE_URL + 'auth/signinWithCustomToken'; console.log(' ---------------- login con token url ---------------- '); // this.checkIsAuth(); this.onAuthStateChanged(); } // logout(callback) { logout() { console.log("closing mqtt connection..."); this.chat21Service.chatClient.close(() => { console.log("mqtt connection closed. OK"); // remove // this.appStorage.removeItem('tiledeskToken'); // this.appStorage.removeItem('currentUser'); this.currentUser = null; console.log("user removed."); this.BSSignOut.next(true); this.BSAuthStateChanged.next('offline'); // if (callback) { // callback(); // } }); } /** * */ getUser(): any { return this.currentUser; } z /** */ getToken(): string { console.log('UserService::getToken'); return this.token; } /** */ onAuthStateChanged() { console.log('UserService::onAuthStateChanged'); // if (this.appStorage.getItem('tiledeskToken') == null) { // this.currentUser = null; this.BSAuthStateChanged.next('offline'); // } // const that = this; console.log("STORAGE CHANGED: added listner") // window.addEventListener('storage', (e) => { // console.log('STORAGE CHANGED:', e.key); // if (this.appStorage.getItem('tiledeskToken') == null && this.appStorage.getItem('currentUser') == null) { // console.log('STORAGE CHANGED: CASO TOKEN NULL'); // this.currentUser = null; // // that.logout(() => { // // that.BSAuthStateChanged.next('offline'); // // }); // this.logout(); // } // else if (this.currentUser == null && this.appStorage.getItem('tiledeskToken') != null && this.appStorage.getItem('currentUser') != null) { // console.log('STORAGE CHANGED: CASO LOGGED OUTSIDE'); // this.currentUser = JSON.parse(this.appStorage.getItem('currentUser')); // const tiledeskToken = this.appStorage.getItem('tiledeskToken'); // this.connectWithCustomToken(tiledeskToken); // } // }, false); } createCustomToken(tiledeskToken: any): void { this.connectWithCustomToken(tiledeskToken) } // createCustomToken(tiledeskToken: 
any): void { // const headers = new HttpHeaders({ // 'Content-type': 'application/json', // Authorization: tiledeskToken // }); // const responseType = 'text'; // const postData = {}; // const that = this; // this.http.post(this.URL_TILEDESK_CREATE_CUSTOM_TOKEN, postData, { headers, responseType}) // .subscribe(data => { // that.getCustomToken(data); // }, error => { // console.log(error); // }); // } // ********************* NATIVE AUTH (NO TILEDESK) ********************* // // private signinMQTT(url: string, username: string, password: string) { // console.log("signinMQTT...") // const httpHeaders = new HttpHeaders(); // httpHeaders.append('Accept', 'application/json'); // httpHeaders.append('Content-Type', 'application/json' ); // const requestOptions = { headers: httpHeaders }; // const postData = { // username: username, // password: password // }; // const that = this; // this.http.post(url, postData, requestOptions) // .subscribe(data => { // console.log("native auth data:", JSON.stringify(data)); // if (data['token'] && data['userid']) { // this.appStorage.setItem('tiledeskToken', data['token']); // this.tiledeskToken = data['token']; // data['_id'] = data['userid']; // this.createCompleteUser(data); // that.connectMQTT(data); // // that.firebaseCreateCustomToken(tiledeskToken); // } // }, error => { // console.log(error); // }); // } /** * @param tiledeskToken */ // signInWithCustomToken(tiledeskToken: string): Promise<any>{ // const headers = new HttpHeaders({ // 'Content-type': 'application/json', // Authorization: tiledeskToken // }); // const requestOptions = { headers: headers }; // // const that = this; // return new Promise((resolve, reject)=> { // this.http.post(this.URL_TILEDESK_SIGNIN_WITH_CUSTOM_TOKEN, null, requestOptions).subscribe((data) => { // if (data['success'] && data['token']) { // this.tiledeskToken = data['token']; // // this.createCompleteUser(data['user']); // this.appStorage.setItem('tiledeskToken', this.tiledeskToken); // 
this.connectWithCustomToken(this.tiledeskToken); // resolve(this.currentUser) // } // }, (error) => { // console.log(error); // reject(error) // }); // }); // } // private signIn(url: string, emailVal: string, pswVal: string) { // const httpHeaders = new HttpHeaders(); // httpHeaders.append('Accept', 'application/json'); // httpHeaders.append('Content-Type', 'application/json' ); // const requestOptions = { headers: httpHeaders }; // const postData = { // email: emailVal, // password: pswVal // }; // // const that = this; // this.http.post(url, postData, requestOptions) // .subscribe(data => { // console.log("data:", JSON.stringify(data)); // if (data['success'] && data['token']) { // this.tiledeskToken = data['token']; // this.createCompleteUser(data['user']); // this.appStorage.setItem('tiledeskToken', this.tiledeskToken); // this.connectWithCustomToken(this.tiledeskToken); // // that.firebaseCreateCustomToken(tiledeskToken); // } // }, error => { // console.log(error); // }); // } // private createCustomToken(tiledeskToken: string) { // const headers = new HttpHeaders({ // 'Content-type': 'application/json', // Authorization: tiledeskToken // }); // const responseType = 'text'; // const postData = {}; // const that = this; // this.http.post(this.URL_TILEDESK_CREATE_CUSTOM_TOKEN, postData, { headers, responseType}) // .subscribe(data => { // that.getCustomToken(data); // }, error => { // console.log(error); // }); // } private connectWithCustomToken(tiledeskToken: string): any { const headers = new HttpHeaders({ 'Content-type': 'application/json', Authorization: tiledeskToken }); const responseType = 'text'; const postData = {}; // const that = this; this.http.post(this.URL_TILEDESK_CREATE_CUSTOM_TOKEN, postData, { headers, responseType}) .subscribe(data => { console.log("**** data", data) const result = JSON.parse(data); this.connectMQTT(result); }, error => { console.log(error); }); } connectMQTT(credentials: any): any { console.log('**** credentials:', 
credentials); const userid = credentials.userid; this.chat21Service.chatClient.connect(userid, credentials.token, () => { console.log('Chat connected.'); this.BSAuthStateChanged.next('online'); }); } // /** // * createCompleteUser // * @param user // */ // private createCompleteUser(user: any) { // const member = new UserModel(user._id); // try { // const uid = user._id; // const firstname = user.firstname ? user.firstname : ''; // const lastname = user.lastname ? user.lastname : ''; // const email = user.email ? user.email : ''; // const fullname = ( firstname + ' ' + lastname ).trim(); // const avatar = avatarPlaceholder(fullname); // const color = getColorBck(fullname); // //TODO-GAB // // const imageurl = this.imageRepo.getImageThumb(uid); // member.uid = uid; // member.email = email; // member.firstname = firstname; // member.lastname = lastname; // member.fullname = fullname; // //TODO-GAB // // member.imageurl = imageurl; // member.avatar = avatar; // member.color = color; // console.log('createCompleteUser: ', member); // } catch (err) { // console.log('createCompleteUser error:' + err); // } // console.log('createCompleteUser: ', member); // this.currentUser = member; // // salvo nel local storage e sollevo l'evento // this.appStorage.setItem('currentUser', JSON.stringify(this.currentUser)); // } }
the_stack
import * as llvm from "llvm-node"; import * as ts from "typescript"; import {CodeGenerationDiagnostics} from "../../code-generation-diagnostic"; import {CodeGenerationContext} from "../code-generation-context"; import {SyntaxCodeGenerator} from "../syntax-code-generator"; import {isMaybeObjectType} from "../util/types"; import {MathObjectReference} from "../value/math-object-reference"; import {Primitive} from "../value/primitive"; import {Value} from "../value/value"; function isAssignment(operatorToken: ts.BinaryOperatorToken) { return operatorToken.kind === ts.SyntaxKind.EqualsToken || operatorToken.kind === ts.SyntaxKind.PlusEqualsToken || operatorToken.kind === ts.SyntaxKind.MinusEqualsToken || operatorToken.kind === ts.SyntaxKind.AsteriskAsteriskEqualsToken || operatorToken.kind === ts.SyntaxKind.AsteriskEqualsToken || operatorToken.kind === ts.SyntaxKind.SlashEqualsToken || operatorToken.kind === ts.SyntaxKind.PercentEqualsToken || operatorToken.kind === ts.SyntaxKind.AmpersandEqualsToken || operatorToken.kind === ts.SyntaxKind.BarEqualsToken || operatorToken.kind === ts.SyntaxKind.CaretEqualsToken || operatorToken.kind === ts.SyntaxKind.LessThanLessThanEqualsToken || operatorToken.kind === ts.SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken || operatorToken.kind === ts.SyntaxKind.GreaterThanGreaterThanEqualsToken; } /** * Code Generator for binary expressions, e.g. 
5+3 but also x = 3, or x += 3 */ class BinaryExpressionCodeGenerator implements SyntaxCodeGenerator<ts.BinaryExpression, Value> { syntaxKind = ts.SyntaxKind.BinaryExpression; generate(binaryExpression: ts.BinaryExpression, context: CodeGenerationContext): Value { const leftType = context.typeChecker.getTypeAtLocation(binaryExpression.left); const rightType = context.typeChecker.getTypeAtLocation(binaryExpression.right); const resultType = context.typeChecker.getTypeAtLocation(binaryExpression); let result: llvm.Value | undefined; let resultValue: Value | undefined; switch (binaryExpression.operatorToken.kind) { // 10.12 & 0 case ts.SyntaxKind.AmpersandToken: case ts.SyntaxKind.AmpersandEqualsToken: { const leftInt = Primitive.toInt32(context.generateValue(binaryExpression.left), leftType, resultType, context).generateIR(); const rightInt = Primitive.toInt32(context.generateValue(binaryExpression.right), rightType, resultType, context).generateIR(); result = context.builder.createAnd(leftInt, rightInt, "and"); break; } // a && b case ts.SyntaxKind.AmpersandAmpersandToken: { const lhs = context.generateValue(binaryExpression.left).generateIR(context); const lhsAsBool = Primitive.toBoolean(lhs, leftType, context); const lhsBlock = context.builder.getInsertBlock(); let rhsBlock = llvm.BasicBlock.create(context.llvmContext, "land.lhs.true"); const end = llvm.BasicBlock.create(context.llvmContext, "land.end"); context.builder.createCondBr(lhsAsBool, rhsBlock, end); context.scope.enclosingFunction.addBasicBlock(rhsBlock); context.builder.setInsertionPoint(rhsBlock); const right = context.generateValue(binaryExpression.right).generateIR(context); context.builder.createBr(end); rhsBlock = context.builder.getInsertBlock(); context.scope.enclosingFunction.addBasicBlock(end); context.builder.setInsertionPoint(end); const phi = context.builder.createPhi(context.toLLVMType(resultType), 2, "land"); phi.addIncoming(lhs, lhsBlock); phi.addIncoming(right, rhsBlock); result = phi; 
break; } // a * b case ts.SyntaxKind.AsteriskToken: case ts.SyntaxKind.AsteriskEqualsToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createMul(leftIr, rightIr, "mul"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFMul(leftIr, rightIr, "mul"); } break; } // a ** b case ts.SyntaxKind.AsteriskAsteriskToken: case ts.SyntaxKind.AsteriskAsteriskEqualsToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const left = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context); const right = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context); if (unifiedType.flags & (ts.TypeFlags.IntLike | ts.TypeFlags.NumberLike)) { result = MathObjectReference.pow(left, leftType, right, rightType, resultType, context).generateIR(context); } break; } // a | b case ts.SyntaxKind.BarToken: case ts.SyntaxKind.BarEqualsToken: { const intType = resultType; const left = context.generateValue(binaryExpression.left); const lhsIntValue = Primitive.toInt32(left, leftType, intType, context).generateIR(); const right = context.generateValue(binaryExpression.right); const rhsIntValue = Primitive.toInt32(right, rightType, intType, context).generateIR(); result = context.builder.createOr(lhsIntValue, rhsIntValue, "or"); break; } // a || b case ts.SyntaxKind.BarBarToken: { const lhs = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const lhsBlock = context.builder.getInsertBlock(); const lhsAsBool = 
Primitive.toBoolean(lhs, leftType, context); let rhsBlock = llvm.BasicBlock.create(context.llvmContext, "lor.lhs.false"); const lorEnd = llvm.BasicBlock.create(context.llvmContext, "lor.end"); context.builder.createCondBr(lhsAsBool, lorEnd, rhsBlock); context.scope.enclosingFunction.addBasicBlock(rhsBlock); context.builder.setInsertionPoint(rhsBlock); const rhs = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); context.builder.createBr(lorEnd); rhsBlock = context.builder.getInsertBlock(); context.scope.enclosingFunction.addBasicBlock(lorEnd); context.builder.setInsertionPoint(lorEnd); const phi = context.builder.createPhi(context.toLLVMType(resultType), 2, "lor"); phi.addIncoming(lhs, lhsBlock); phi.addIncoming(rhs, rhsBlock); result = phi; break; } // a ^ b case ts.SyntaxKind.CaretEqualsToken: case ts.SyntaxKind.CaretToken: { const leftInt = Primitive.toInt32(context.generateValue(binaryExpression.left), leftType, resultType, context).generateIR(); const rightInt = Primitive.toInt32(context.generateValue(binaryExpression.right), rightType, resultType, context).generateIR(); result = context.builder.createXor(leftInt, rightInt, "xor"); break; } // a, b, c case ts.SyntaxKind.CommaToken: { context.generateValue(binaryExpression.left).generateIR(context); result = context.generateValue(binaryExpression.right).generateIR(context); break; } // a === b, a == b case ts.SyntaxKind.EqualsEqualsToken: case ts.SyntaxKind.EqualsEqualsEqualsToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); // tslint:disable-next-line: max-line-length if (unifiedType.flags & (ts.TypeFlags.IntLike | ts.TypeFlags.BooleanLike | 
ts.TypeFlags.Object | ts.TypeFlags.Undefined) || isMaybeObjectType(unifiedType)) { result = context.builder.createICmpEQ(leftIr, rightIr, "cmpEQ"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFCmpOEQ(leftIr, rightIr, "cmpEQ"); } break; } // a != b, a !== b case ts.SyntaxKind.ExclamationEqualsToken: case ts.SyntaxKind.ExclamationEqualsEqualsToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); // tslint:disable-next-line: max-line-length if (unifiedType.flags & (ts.TypeFlags.IntLike | ts.TypeFlags.BooleanLike | ts.TypeFlags.Object | ts.TypeFlags.Undefined) || isMaybeObjectType(unifiedType)) { result = context.builder.createICmpNE(leftIr, rightIr, "cmpNE"); } else if (leftType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFCmpONE(leftIr, rightIr, "cmpNE"); } break; } // a > b case ts.SyntaxKind.GreaterThanToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.BooleanLike) { const leftInt = context.builder.createZExt(leftIr, llvm.Type.getInt32Ty(context.llvmContext)); const rightInt = context.builder.createZExt(rightIr, llvm.Type.getInt32Ty(context.llvmContext)); result = context.builder.createICmpSGT(leftInt, rightInt, "cmpGT"); } else if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createICmpSGT(leftIr, rightIr, "cmpGT"); } else if (unifiedType.flags & 
ts.TypeFlags.NumberLike) { result = context.builder.createFCmpOGT(leftIr, rightIr, "cmpGT"); } break; } // a >= b case ts.SyntaxKind.GreaterThanEqualsToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.BooleanLike) { const leftInt = context.builder.createZExt(leftIr, llvm.Type.getInt32Ty(context.llvmContext)); const rightInt = context.builder.createZExt(rightIr, llvm.Type.getInt32Ty(context.llvmContext)); result = context.builder.createICmpSGE(leftInt, rightInt, "cmpGE"); } else if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createICmpSGE(leftIr, rightIr, "cmpGE"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFCmpOGE(leftIr, rightIr, "cmpGE"); } break; } // a >> b case ts.SyntaxKind.GreaterThanGreaterThanToken: case ts.SyntaxKind.GreaterThanGreaterThanEqualsToken: { const leftInt = Primitive.toInt32(context.generateValue(binaryExpression.left), leftType, resultType, context).generateIR(); const rightInt = Primitive.toInt32(context.generateValue(binaryExpression.right), rightType, resultType, context).generateIR(); // mask not needed, less than 32 guaranteed by wasm result = context.builder.createAShr(leftInt, rightInt, "ashr"); break; } // a >>> b case ts.SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken: case ts.SyntaxKind.GreaterThanGreaterThanGreaterThanToken: { const leftInt = Primitive.toInt32(context.generateValue(binaryExpression.left), leftType, resultType, context).generateIR(); const rightInt = Primitive.toInt32(context.generateValue(binaryExpression.right), rightType, resultType, context).generateIR(); // mask not needed, less than 32 guaranteed by 
wasm result = context.builder.createLShr(leftInt, rightInt, "lshr"); break; } // a < b case ts.SyntaxKind.LessThanToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.BooleanLike) { const leftInt = context.builder.createZExt(leftIr, llvm.Type.getInt32Ty(context.llvmContext)); const rightInt = context.builder.createZExt(rightIr, llvm.Type.getInt32Ty(context.llvmContext)); result = context.builder.createICmpSLT(leftInt, rightInt, "cmpLT"); } else if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createICmpSLT(leftIr, rightIr, "cmpLT"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFCmpOLT(leftIr, rightIr, "cmpLT"); } break; } // a <= b case ts.SyntaxKind.LessThanEqualsToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.BooleanLike) { const leftInt = context.builder.createZExt(leftIr, llvm.Type.getInt32Ty(context.llvmContext)); const rightInt = context.builder.createZExt(rightIr, llvm.Type.getInt32Ty(context.llvmContext)); result = context.builder.createICmpSLE(leftInt, rightInt, "cmpLE"); } else if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createICmpSLE(leftIr, rightIr, "cmpLE"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFCmpOLE(leftIr, rightIr, "cmpLE"); } break; } // 
a << b case ts.SyntaxKind.LessThanLessThanToken: case ts.SyntaxKind.LessThanLessThanEqualsToken: { const leftInt = Primitive.toInt32(context.generateValue(binaryExpression.left), leftType, resultType, context).generateIR(); const rightInt = Primitive.toInt32(context.generateValue(binaryExpression.right), rightType, resultType, context).generateIR(); // mask not needed, less than 32 guaranteed by wasm result = context.builder.createShl(leftInt, rightInt, "shl"); break; } // a - b case ts.SyntaxKind.MinusEqualsToken: case ts.SyntaxKind.MinusToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createSub(leftIr, rightIr, "sub"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFSub(leftIr, rightIr, "fsub"); } break; } // a % b case ts.SyntaxKind.PercentToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createSRem(leftIr, rightIr, "srem"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFRem(leftIr, rightIr, "frem"); } break; } // a + b case ts.SyntaxKind.PlusEqualsToken: case ts.SyntaxKind.PlusToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = 
BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createAdd(leftIr, rightIr, "add"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFAdd(leftIr, rightIr, "fadd"); } break; } // a / b case ts.SyntaxKind.SlashEqualsToken: case ts.SyntaxKind.SlashToken: { const unifiedType = BinaryExpressionCodeGenerator.unifyTypes(binaryExpression, context); const leftIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.left, binaryExpression, context).generateIR(context); const rightIr = BinaryExpressionCodeGenerator.toUnified(binaryExpression.right, binaryExpression, context).generateIR(context); if (unifiedType.flags & ts.TypeFlags.IntLike) { result = context.builder.createSDiv(leftIr, rightIr, "sdiv"); } else if (unifiedType.flags & ts.TypeFlags.NumberLike) { result = context.builder.createFDiv(leftIr, rightIr, "div"); } break; } // a = b case ts.SyntaxKind.FirstAssignment: resultValue = context.generateValue(binaryExpression.right).castImplicit(leftType, context); if (!resultValue) { BinaryExpressionCodeGenerator.throwUnsupportedImplicitCast(binaryExpression, context); } } if (result) { resultValue = context.value(result, resultType); } if (!resultValue) { throw CodeGenerationDiagnostics.unsupportedBinaryOperation( binaryExpression, context.typeChecker.typeToString(leftType), context.typeChecker.typeToString(leftType) ); } if (isAssignment(binaryExpression.operatorToken)) { context.assignValue(context.generateValue(binaryExpression.left), resultValue); } return resultValue; } private static toUnified(node: ts.Node, binaryExpression: ts.BinaryExpression, context: CodeGenerationContext): Value { const unifiedType = this.unifyTypes(binaryExpression, context); const casted = 
context.generateValue(node).castImplicit(unifiedType, context); if (!casted) { return this.throwUnsupportedImplicitCast(binaryExpression, context); } return casted; } private static unifyTypes(binaryExpression: ts.BinaryExpression, context: CodeGenerationContext): ts.Type { function unify(left: ts.Type, right: ts.Type) { if (left === right) { return left; } // cast int to number if (left.flags & ts.TypeFlags.IntLike && right.flags & (ts.TypeFlags.NumberLike ^ ts.TypeFlags.IntLike)) { return right; } if ((left.flags & ts.TypeFlags.Object || isMaybeObjectType(left)) && right.flags & ts.TypeFlags.Undefined) { return left; } return undefined; } const leftType = context.typeChecker.getTypeAtLocation(binaryExpression.left); const rightType = context.typeChecker.getTypeAtLocation(binaryExpression.right); const unified = unify(leftType, rightType) || unify(rightType, leftType); if (unified) { return unified; } return this.throwUnsupportedImplicitCast(binaryExpression, context); } private static throwUnsupportedImplicitCast(binaryExpression: ts.BinaryExpression, context: CodeGenerationContext): never { const leftType = context.typeChecker.getTypeAtLocation(binaryExpression.left); const rightType = context.typeChecker.getTypeAtLocation(binaryExpression.right); throw CodeGenerationDiagnostics.unsupportedImplicitCastOfBinaryExpressionOperands( binaryExpression, context.typeChecker.typeToString(leftType), context.typeChecker.typeToString(rightType) ); } } export default BinaryExpressionCodeGenerator;
the_stack
/* eslint-disable @typescript-eslint/class-name-casing */ /* eslint-disable @typescript-eslint/no-unused-vars */ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/no-namespace */ /* eslint-disable no-irregular-whitespace */ import { OAuth2Client, JWT, Compute, UserRefreshClient, BaseExternalAccountClient, GaxiosPromise, GoogleConfigurable, createAPIRequest, MethodOptions, StreamMethodOptions, GlobalOptions, GoogleAuth, BodyResponseCallback, APIRequestContext, } from 'googleapis-common'; import {Readable} from 'stream'; export namespace playintegrity_v1 { export interface Options extends GlobalOptions { version: 'v1'; } interface StandardParameters { /** * Auth client or API Key for the request */ auth?: | string | OAuth2Client | JWT | Compute | UserRefreshClient | BaseExternalAccountClient | GoogleAuth; /** * V1 error format. */ '$.xgafv'?: string; /** * OAuth access token. */ access_token?: string; /** * Data format for response. */ alt?: string; /** * JSONP */ callback?: string; /** * Selector specifying which fields to include in a partial response. */ fields?: string; /** * API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */ key?: string; /** * OAuth 2.0 token for the current user. */ oauth_token?: string; /** * Returns response with indentations and line breaks. */ prettyPrint?: boolean; /** * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */ quotaUser?: string; /** * Legacy upload protocol for media (e.g. "media", "multipart"). */ uploadType?: string; /** * Upload protocol for media (e.g. "raw", "multipart"). 
*/
  upload_protocol?: string;
}

/**
 * Google Play Integrity API
 *
 * Play Integrity
 *
 * @example
 * ```js
 * const {google} = require('googleapis');
 * const playintegrity = google.playintegrity('v1');
 * ```
 */
// NOTE(review): this appears to be auto-generated googleapis client code
// (discovery-based, see the `createAPIRequest` plumbing below) — prefer
// regenerating over hand-editing. TODO confirm against the generator.
export class Playintegrity {
  context: APIRequestContext;
  v1: Resource$V1;

  // The constructor only wires the shared request context; all request
  // behavior lives in Resource$V1.
  constructor(options: GlobalOptions, google?: GoogleConfigurable) {
    this.context = {
      _options: options || {},
      google,
    };
    this.v1 = new Resource$V1(this.context);
  }
}

/**
 * Contains the account information such as the licensing status for the user in the scope.
 */
export interface Schema$AccountDetails {
  /**
   * Required. Details about the licensing status of the user for the app in the scope.
   */
  appLicensingVerdict?: string | null;
}

/**
 * Contains the application integrity information.
 */
export interface Schema$AppIntegrity {
  /**
   * Required. Details about the app recognition verdict
   */
  appRecognitionVerdict?: string | null;
  /**
   * Hex fingerprint of the application signing certificate. e.g. “ABCE1F....” Set iff app_recognition_verdict != UNEVALUATED.
   */
  certificateSha256Digest?: string[] | null;
  /**
   * Package name of the application under attestation. Set iff app_recognition_verdict != UNEVALUATED.
   */
  packageName?: string | null;
  /**
   * Version code of the application. Set iff app_recognition_verdict != UNEVALUATED.
   */
  versionCode?: string | null;
}

/**
 * Request to decode the integrity token.
 */
export interface Schema$DecodeIntegrityTokenRequest {
  /**
   * Encoded integrity token.
   */
  integrityToken?: string | null;
}

/**
 * Response containing the decoded integrity payload.
 */
export interface Schema$DecodeIntegrityTokenResponse {
  /**
   * Plain token payload generated from the decoded integrity token.
   */
  tokenPayloadExternal?: Schema$TokenPayloadExternal;
}

/**
 * Contains the device attestation information.
 */
export interface Schema$DeviceIntegrity {
  /**
   * Details about the integrity of the device the app is running on
   */
  deviceRecognitionVerdict?: string[] | null;
}

/**
 * Contains the integrity request information.
 */
export interface Schema$RequestDetails {
  /**
   * Required. Nonce that was provided in the request (which is base64 web-safe no-wrap).
   */
  nonce?: string | null;
  /**
   * Required. Application package name this attestation was requested for. Note: This field makes no guarantees or promises on the caller integrity. For details on application integrity, check application_integrity.
   */
  requestPackageName?: string | null;
  /**
   * Required. Timestamp, in milliseconds, of the integrity application request.
   */
  timestampMillis?: string | null;
}

/**
 * Contains additional information generated for testing responses.
 */
export interface Schema$TestingDetails {
  /**
   * Required. Indicates that the information contained in this payload is a testing response that is statically overridden for a tester.
   */
  isTestingResponse?: boolean | null;
}

/**
 * Contains basic app information and integrity signals like device attestation and licensing details.
 */
export interface Schema$TokenPayloadExternal {
  /**
   * Required. Details about the Play Store account.
   */
  accountDetails?: Schema$AccountDetails;
  /**
   * Required. Details about the application integrity.
   */
  appIntegrity?: Schema$AppIntegrity;
  /**
   * Required. Details about the device integrity.
   */
  deviceIntegrity?: Schema$DeviceIntegrity;
  /**
   * Required. Details about the integrity request.
   */
  requestDetails?: Schema$RequestDetails;
  /**
   * Indicates that this payload is generated for testing purposes and contains any additional data that is linked with testing status.
   */
  testingDetails?: Schema$TestingDetails;
}

export class Resource$V1 {
  context: APIRequestContext;
  constructor(context: APIRequestContext) {
    this.context = context;
  }

  /**
   * Decodes the integrity token and returns the token payload.
   * @example
   * ```js
   * // Before running the sample:
   * // - Enable the API at:
   * //   https://console.developers.google.com/apis/api/playintegrity.googleapis.com
   * // - Login into gcloud by running:
   * //   `$ gcloud auth application-default login`
   * // - Install the npm module by running:
   * //   `$ npm install googleapis`
   *
   * const {google} = require('googleapis');
   * const playintegrity = google.playintegrity('v1');
   *
   * async function main() {
   *   const auth = new google.auth.GoogleAuth({
   *     // Scopes can be specified either as an array or as a single, space-delimited string.
   *     scopes: ['https://www.googleapis.com/auth/playintegrity'],
   *   });
   *
   *   // Acquire an auth client, and bind it to all future calls
   *   const authClient = await auth.getClient();
   *   google.options({auth: authClient});
   *
   *   // Do the magic
   *   const res = await playintegrity.decodeIntegrityToken({
   *     // Package name of the app the attached integrity token belongs to.
   *     packageName: '[^/]+',
   *
   *     // Request body metadata
   *     requestBody: {
   *       // request body parameters
   *       // {
   *       //   "integrityToken": "my_integrityToken"
   *       // }
   *     },
   *   });
   *   console.log(res.data);
   *
   *   // Example response
   *   // {
   *   //   "tokenPayloadExternal": {}
   *   // }
   * }
   *
   * main().catch(e => {
   *   console.error(e);
   *   throw e;
   * });
   *
   * ```
   *
   * @param params - Parameters for request
   * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
   * @param callback - Optional callback that handles the response.
   * @returns A promise if used with async/await, or void if used with a callback.
   */
  // Overloads: stream variant, promise variant, and three callback variants.
  // The single implementation signature below disambiguates at runtime by
  // checking which positional arguments are functions.
  decodeIntegrityToken(
    params: Params$Resource$V1$Decodeintegritytoken,
    options: StreamMethodOptions
  ): GaxiosPromise<Readable>;
  decodeIntegrityToken(
    params?: Params$Resource$V1$Decodeintegritytoken,
    options?: MethodOptions
  ): GaxiosPromise<Schema$DecodeIntegrityTokenResponse>;
  decodeIntegrityToken(
    params: Params$Resource$V1$Decodeintegritytoken,
    options: StreamMethodOptions | BodyResponseCallback<Readable>,
    callback: BodyResponseCallback<Readable>
  ): void;
  decodeIntegrityToken(
    params: Params$Resource$V1$Decodeintegritytoken,
    options:
      | MethodOptions
      | BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>,
    callback: BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>
  ): void;
  decodeIntegrityToken(
    params: Params$Resource$V1$Decodeintegritytoken,
    callback: BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>
  ): void;
  decodeIntegrityToken(
    callback: BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>
  ): void;
  decodeIntegrityToken(
    paramsOrCallback?:
      | Params$Resource$V1$Decodeintegritytoken
      | BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>
      | BodyResponseCallback<Readable>,
    optionsOrCallback?:
      | MethodOptions
      | StreamMethodOptions
      | BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>
      | BodyResponseCallback<Readable>,
    callback?:
      | BodyResponseCallback<Schema$DecodeIntegrityTokenResponse>
      | BodyResponseCallback<Readable>
  ):
    | void
    | GaxiosPromise<Schema$DecodeIntegrityTokenResponse>
    | GaxiosPromise<Readable> {
    let params =
      (paramsOrCallback || {}) as Params$Resource$V1$Decodeintegritytoken;
    let options = (optionsOrCallback || {}) as MethodOptions;

    // Shift arguments left when the caller omitted `params` and/or `options`
    // and passed the callback in their place.
    if (typeof paramsOrCallback === 'function') {
      callback = paramsOrCallback;
      params = {} as Params$Resource$V1$Decodeintegritytoken;
      options = {};
    }

    if (typeof optionsOrCallback === 'function') {
      callback = optionsOrCallback;
      options = {};
    }

    const rootUrl =
      options.rootUrl || 'https://playintegrity.googleapis.com/';
    const parameters = {
      options: Object.assign(
        {
          // The regex collapses accidental double slashes in the joined URL
          // without touching the '://' of the scheme.
          url: (rootUrl + '/v1/{+packageName}:decodeIntegrityToken').replace(
            /([^:]\/)\/+/g,
            '$1'
          ),
          method: 'POST',
        },
        options
      ),
      params,
      requiredParams: ['packageName'],
      pathParams: ['packageName'],
      context: this.context,
    };
    if (callback) {
      createAPIRequest<Schema$DecodeIntegrityTokenResponse>(
        parameters,
        callback as BodyResponseCallback<unknown>
      );
    } else {
      return createAPIRequest<Schema$DecodeIntegrityTokenResponse>(
        parameters
      );
    }
  }
}

export interface Params$Resource$V1$Decodeintegritytoken
  extends StandardParameters {
  /**
   * Package name of the app the attached integrity token belongs to.
   */
  packageName?: string;
  /**
   * Request body metadata
   */
  requestBody?: Schema$DecodeIntegrityTokenRequest;
}
}
the_stack
import * as etch from 'etch'
import { Disposable, CompositeDisposable } from 'atom'
import { IBtnDesc, OutputPanelButtons } from './views/output-panel-buttons'
import { OutputPanelCheckbox } from './views/output-panel-checkbox'
import { OutputPanelItems } from './views/output-panel-items'
import { ResultsDB, ResultItem } from '../results-db'
import { isDock, isSimpleControlDef, handlePromise } from '../utils'
import * as UPI from 'atom-haskell-upi'
import { BackendStatusController } from '../backend-status'

// Serializable panel state: whether the current-file filter is on, and which
// severity tab is active. Restored via deserialize/serialize.
export interface IState {
  fileFilter: boolean
  activeTab: string
}

/**
 * The ide-haskell output panel: an etch-rendered Atom workspace item that
 * shows build/lint results grouped into severity tabs, with an optional
 * per-file filter and plugin-provided extra controls.
 */
export class OutputPanel {
  // Tabs that always exist and can never be removed via removeTab().
  private static readonly defaultTabs: ReadonlyArray<string> = [
    'error',
    'warning',
    'lint',
  ]
  // Populated by etch from the `ref="items"` JSX attribute in render().
  private readonly refs!: { items?: OutputPanelItems }
  // Extra JSX elements contributed by plugins via addPanelControl().
  private readonly elements: Set<JSX.Element> = new Set()
  private readonly disposables: CompositeDisposable = new CompositeDisposable()
  // Tab name -> button descriptor (count, click handler, filter flags).
  private readonly tabs: Map<string, IBtnDesc> = new Map()
  // Reference count per non-default tab, so createTab/removeTab pair up.
  private readonly tabUsers: Map<string, number> = new Map()
  private itemFilter?: (item: ResultItem) => boolean
  // Index into the filtered result list used by showNextError/showPrevError.
  private currentResult: number = 0
  private results?: ResultsDB
  private buttonsClass!: 'buttons-top' | 'buttons-left'
  private bsc?: BackendStatusController
  constructor(
    private state: IState = { fileFilter: false, activeTab: 'error' },
  ) {
    // buttonsClass must be set before the first etch render below.
    this.setButtonsClass(atom.config.get('ide-haskell.buttonsPosition'))
    etch.initialize(this)
    atom.config.onDidChange('ide-haskell.buttonsPosition', ({ newValue }) => {
      this.setButtonsClass(newValue)
      handlePromise(this.update())
    })
    if (atom.config.get('ide-haskell.messageDisplayFrontend') === 'builtin') {
      for (const name of OutputPanel.defaultTabs) {
        this.tabs.set(name, {
          name,
          count: 0,
          onClick: () => this.activateTab(name),
          uriFilter: true,
          autoScroll: false,
        })
      }
    }
    handlePromise(this.update())
    this.disposables.add(
      atom.workspace.onDidChangeActivePaneItem(() => {
        if (this.state.fileFilter) handlePromise(this.updateItems())
      }),
    )
    // Open once so the dock item exists, then immediately hide when the user
    // prefers the panel auto-hidden.
    setImmediate(async () => {
      await this.show()
      if (atom.config.get('ide-haskell.autoHideOutput')) {
        this.hide()
      }
    })
  }
  // Attaches the backend status controller whose icon/progress bar render()
  // embeds. May be connected at most once.
  public connectBsc(bsc: BackendStatusController) {
    if (this.bsc) throw new Error('BackendStatusController already connected!')
    this.bsc = bsc
    this.disposables.add(
      this.bsc.onDidUpdate(() => handlePromise(this.update())),
    )
    handlePromise(this.update())
  }
  // Attaches the results database and subscribes to its updates. May be
  // connected at most once.
  public connectResults(results: ResultsDB) {
    if (this.results) throw new Error('Results already connected!')
    this.results = results
    let lastUpdateTime = Date.now()
    let collectedSeverities = new Set<UPI.TSeverity>()
    const didUpdate = (severities: UPI.TSeverity[]) => {
      this.currentResult = 0
      handlePromise(this.updateItems())
      const newUpdateTime = Date.now()
      // Updates arriving within switchTabOnCheckInterval are treated as one
      // burst: their severities accumulate; otherwise start a fresh set.
      if (
        newUpdateTime - lastUpdateTime <
        atom.config.get('ide-haskell.switchTabOnCheckInterval')
      ) {
        for (const s of severities) {
          collectedSeverities.add(s)
        }
      } else {
        collectedSeverities = new Set(severities)
      }
      if (
        atom.config.get('ide-haskell.autoHideOutput') &&
        (!this.results || this.results.isEmpty(severities))
      ) {
        this.hide()
      } else if (atom.config.get('ide-haskell.switchTabOnCheck')) {
        this.activateFirstNonEmptyTab(collectedSeverities)
      }
      lastUpdateTime = newUpdateTime
    }
    this.disposables.add(this.results.onDidUpdate(didUpdate))
    handlePromise(this.update())
  }
  // etch render callback; returns an empty panel until results are connected.
  public render() {
    if (!this.results) {
      return <ide-haskell-panel />
    }
    // tslint:disable: strict-boolean-expressions no-null-keyword
    return (
      <ide-haskell-panel class={this.buttonsClass}>
        <ide-haskell-panel-heading>
          {this.bsc?.renderStatusIcon() || null}
          <OutputPanelButtons
            buttons={Array.from(this.tabs.values())}
            activeBtn={this.state.activeTab}
          />
          <OutputPanelCheckbox
            class="ide-haskell-checkbox--uri-filter"
            state={this.state.fileFilter || false}
            onSwitched={this.switchFileFilter}
            enabledHint="Show current file messages"
            disabledHint="Show all project messages"
          />
          {Array.from(this.elements.values())}
          {this.bsc?.renderProgressBar() || null}
        </ide-haskell-panel-heading>
        <OutputPanelItems
          model={this.results}
          filter={this.itemFilter}
          ref="items"
        />
      </ide-haskell-panel>
    )
    // tslint:enable: strict-boolean-expressions no-null-keyword
  }
  public async update() {
    return etch.update(this)
  }
  // Called by Atom when the pane item is closed; only hides — full teardown
  // happens in reallyDestroy().
  public destroy() {
    this.hide()
  }
  public async reallyDestroy() {
    await etch.destroy(this)
    this.disposables.dispose()
  }
  public async toggle() {
    const pane = atom.workspace.paneContainerForItem(this)
    if (!pane || (isDock(pane) && !pane.isVisible())) {
      return this.show()
    } else {
      return this.hide()
    }
  }
  public async show() {
    await atom.workspace.open(this, {
      searchAllPanes: true,
      activatePane: false,
    })
    const pane = atom.workspace.paneContainerForItem(this)
    if (pane && isDock(pane)) {
      pane.show()
    }
  }
  public hide() {
    const pane = atom.workspace.paneContainerForItem(this)
    if (pane && isDock(pane)) {
      atom.workspace.hide(this)
    }
  }
  // --- Atom workspace-item protocol ---
  public getTitle() {
    return 'IDE-Haskell'
  }
  public getURI() {
    return `ide-haskell://output-panel/`
  }
  public getDefaultLocation() {
    return atom.config.get('ide-haskell.panelPosition')
  }
  // Adds a plugin-supplied control to the panel heading; the returned
  // Disposable removes it again.
  public addPanelControl<T>(def: UPI.TControlDefinition<T>) {
    let newElement: JSX.Element
    if (isSimpleControlDef(def)) {
      const { events, classes, style, attrs } = def.opts
      const props: { [key: string]: Object } = {}
      if (classes) {
        props.class = classes.join(' ')
      }
      if (style) {
        props.style = style
      }
      if (attrs) {
        props.attributes = attrs
      }
      if (events) {
        props.on = events
      }
      newElement = etch.dom(def.element, props)
    } else {
      newElement = etch.dom(def.element, def.opts)
    }
    this.elements.add(newElement)
    handlePromise(this.update())
    return new Disposable(() => {
      this.elements.delete(newElement)
      handlePromise(this.update())
    })
  }
  // Recomputes the item filter, per-tab counts, and auto-scroll, then
  // re-renders.
  public async updateItems() {
    const activeTab = this.getActiveTab()
    let currentUri: string | undefined
    if (this.state.fileFilter) {
      const ed = atom.workspace.getActiveTextEditor()
      currentUri = ed ? ed.getPath() : undefined
    }
    let scroll: boolean = false
    if (activeTab) {
      const ato = this.tabs.get(activeTab)
      if (currentUri !== undefined && ato && ato.uriFilter) {
        this.itemFilter = ({ uri, severity }) =>
          severity === activeTab && uri === currentUri
      } else {
        this.itemFilter = ({ severity }) => severity === activeTab
      }
      // Only auto-scroll when the view was already scrolled to the end.
      scroll =
        (ato && ato.autoScroll && this.refs.items && this.refs.items.atEnd()) ||
        false
    }
    if (this.results) {
      for (const [btn, ato] of this.tabs.entries()) {
        ato.count = this.results.filter(
          ({ severity }) => severity === btn,
        ).length
      }
    }
    await this.update()
    if (scroll && this.refs.items) await this.refs.items.scrollToEnd()
  }
  public activateTab(tab: string) {
    this.state.activeTab = tab
    handlePromise(this.updateItems())
  }
  // Shows the panel and switches to the first tab (in insertion order) that
  // both matched an updated severity and has a non-zero count.
  public activateFirstNonEmptyTab(severities: Set<UPI.TSeverity>) {
    for (const tab of this.tabs.values()) {
      if (!severities.has(tab.name)) continue
      const count = tab.count
      if (count && count > 0) {
        handlePromise(this.show())
        this.activateTab(tab.name)
        break
      }
    }
  }
  public showItem(item: ResultItem) {
    this.activateTab(item.severity)
    if (this.refs.items) handlePromise(this.refs.items.showItem(item))
  }
  public getActiveTab() {
    return this.state.activeTab
  }
  // Registers (or reference-counts) a plugin tab. Default tabs are ignored.
  public async createTab(
    name: string,
    { uriFilter = true, autoScroll = false }: UPI.ISeverityTabDefinition,
  ) {
    if (OutputPanel.defaultTabs.includes(name)) return
    if (this.tabs.has(name)) {
      // tslint:disable-next-line: no-non-null-assertion
      this.tabUsers.set(name, this.tabUsers.get(name)! + 1)
    } else {
      this.tabUsers.set(name, 1)
      this.tabs.set(name, {
        name,
        count: 0,
        onClick: () => this.activateTab(name),
        uriFilter,
        autoScroll,
      })
      if (this.state.activeTab) this.activateTab(this.state.activeTab)
    }
    return this.update()
  }
  // Drops one reference to a plugin tab; removes it when the count hits zero.
  public async removeTab(name: string) {
    if (OutputPanel.defaultTabs.includes(name)) return
    if (this.tabUsers.has(name)) {
      // tslint:disable-next-line: no-non-null-assertion
      let n = this.tabUsers.get(name)!
      n -= 1
      if (n === 0) {
        this.tabUsers.delete(name)
        this.tabs.delete(name)
        if (this.state.activeTab === name) {
          this.state.activeTab = OutputPanel.defaultTabs[0]
        }
        return this.update()
      } else {
        this.tabUsers.set(name, n)
      }
    } else {
      throw new Error(
        `Ide-Haskell: Removing nonexistent output panel tab ${name}`,
      )
    }
  }
  public serialize(): IState & { deserializer: 'ide-haskell/OutputPanel' } {
    return {
      ...this.state,
      deserializer: 'ide-haskell/OutputPanel',
    }
  }
  // Cycles forward through results that have a URI, wrapping at the end.
  public showNextError() {
    if (!this.results) return
    const rs = this.results.filter(({ uri }) => uri !== undefined)
    if (rs.length === 0) {
      return
    }
    this.currentResult++
    if (this.currentResult >= rs.length) {
      this.currentResult = 0
    }
    this.showItem(rs[this.currentResult])
  }
  // Cycles backward through results that have a URI, wrapping at the start.
  public showPrevError() {
    if (!this.results) return
    const rs = this.results.filter(({ uri }) => uri !== undefined)
    if (rs.length === 0) {
      return
    }
    this.currentResult--
    if (this.currentResult < 0) {
      this.currentResult = rs.length - 1
    }
    this.showItem(rs[this.currentResult])
  }
  // Arrow property so it can be passed directly as the checkbox callback.
  private switchFileFilter = () => {
    this.state.fileFilter = !this.state.fileFilter
    handlePromise(this.updateItems())
  }
  private setButtonsClass(buttonsPos: 'top' | 'left') {
    switch (buttonsPos) {
      case 'top':
        this.buttonsClass = 'buttons-top'
        break
      case 'left':
        this.buttonsClass = 'buttons-left'
        break
    }
  }
}
the_stack
import {Destination, DestinationConnectionStatus, DestinationOrigin, DestinationType, NativeLayerCrosImpl, NativeLayerImpl, PrinterStatusReason, PrinterStatusSeverity, PrintPreviewDestinationDropdownCrosElement, PrintPreviewDestinationSelectCrosElement} from 'chrome://print/print_preview.js'; import {assert} from 'chrome://resources/js/assert.m.js'; import {assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js'; import {waitBeforeNextRender} from 'chrome://webui-test/test_util.js'; import {NativeLayerCrosStub} from './native_layer_cros_stub.js'; import {NativeLayerStub} from './native_layer_stub.js'; import {getGoogleDriveDestination, getSaveAsPdfDestination} from './print_preview_test_utils.js'; const printer_status_test_cros = { suiteName: 'PrinterStatusTestCros', TestNames: { PrinterStatusUpdatesColor: 'printer status updates color', SendStatusRequestOnce: 'send status request once', HiddenStatusText: 'hidden status text', ChangeIcon: 'change icon', SuccessfulPrinterStatusAfterRetry: 'successful printer status after retry', }, }; Object.assign(window, {printer_status_test_cros: printer_status_test_cros}); suite(printer_status_test_cros.suiteName, function() { let destinationSelect: PrintPreviewDestinationSelectCrosElement; const account: string = 'foo@chromium.org'; let nativeLayerCros: NativeLayerCrosStub; function setNativeLayerPrinterStatusMap() { [{ printerId: 'ID1', statusReasons: [], timestamp: 0, }, { printerId: 'ID2', statusReasons: [{ reason: PrinterStatusReason.LOW_ON_PAPER, severity: PrinterStatusSeverity.UNKNOWN_SEVERITY }], timestamp: 0, }, { printerId: 'ID3', statusReasons: [{ reason: PrinterStatusReason.LOW_ON_PAPER, severity: PrinterStatusSeverity.REPORT }], timestamp: 0, }, { printerId: 'ID4', statusReasons: [{ reason: PrinterStatusReason.LOW_ON_PAPER, severity: PrinterStatusSeverity.WARNING }], timestamp: 0, }, { printerId: 'ID5', statusReasons: [{ reason: PrinterStatusReason.LOW_ON_PAPER, severity: 
PrinterStatusSeverity.ERROR }], timestamp: 0, }, { printerId: 'ID6', statusReasons: [ { reason: PrinterStatusReason.DEVICE_ERROR, severity: PrinterStatusSeverity.UNKNOWN_SEVERITY }, { reason: PrinterStatusReason.PRINTER_QUEUE_FULL, severity: PrinterStatusSeverity.ERROR } ], timestamp: 0, }, { printerId: 'ID7', statusReasons: [ { reason: PrinterStatusReason.DEVICE_ERROR, severity: PrinterStatusSeverity.REPORT }, { reason: PrinterStatusReason.PRINTER_QUEUE_FULL, severity: PrinterStatusSeverity.UNKNOWN_SEVERITY } ], timestamp: 0, }, { printerId: 'ID8', statusReasons: [{ reason: PrinterStatusReason.UNKNOWN_REASON, severity: PrinterStatusSeverity.ERROR }], timestamp: 0, }, { printerId: 'ID9', statusReasons: [{ reason: PrinterStatusReason.UNKNOWN_REASON, severity: PrinterStatusSeverity.UNKNOWN_SEVERITY }], timestamp: 0, }, { printerId: 'ID10', statusReasons: [{ reason: PrinterStatusReason.PRINTER_UNREACHABLE, severity: PrinterStatusSeverity.ERROR }], timestamp: 0, }] .forEach( status => nativeLayerCros.addPrinterStatusToMap( status.printerId, status)); } function createDestination( id: string, displayName: string, destinationOrigin: DestinationOrigin): Destination { return new Destination( id, DestinationType.LOCAL, destinationOrigin, displayName, DestinationConnectionStatus.ONLINE); } function escapeForwardSlahes(value: string): string { return value.replace(/\//g, '\\/'); } function getIconString( dropdown: PrintPreviewDestinationDropdownCrosElement, key: string): string { return dropdown.shadowRoot!.querySelector(`#${ escapeForwardSlahes(key)}`)!.querySelector('iron-icon')!.icon!; } setup(function() { document.body.innerHTML = ''; // Stub out native layer. 
NativeLayerImpl.setInstance(new NativeLayerStub()); nativeLayerCros = new NativeLayerCrosStub(); NativeLayerCrosImpl.setInstance(nativeLayerCros); setNativeLayerPrinterStatusMap(); destinationSelect = document.createElement('print-preview-destination-select-cros'); document.body.appendChild(destinationSelect); }); test( assert(printer_status_test_cros.TestNames.PrinterStatusUpdatesColor), function() { const destination1 = createDestination('ID1', 'One', DestinationOrigin.CROS); const destination2 = createDestination('ID2', 'Two', DestinationOrigin.CROS); const destination3 = createDestination('ID3', 'Three', DestinationOrigin.CROS); const destination4 = createDestination('ID4', 'Four', DestinationOrigin.CROS); const destination5 = createDestination('ID5', 'Five', DestinationOrigin.CROS); const destination6 = createDestination('ID6', 'Six', DestinationOrigin.CROS); const destination7 = createDestination('ID7', 'Seven', DestinationOrigin.CROS); const destination8 = createDestination('ID8', 'Eight', DestinationOrigin.CROS); const destination9 = createDestination('ID9', 'Nine', DestinationOrigin.CROS); return waitBeforeNextRender(destinationSelect) .then(() => { const whenStatusRequestsDone = nativeLayerCros.waitForMultiplePrinterStatusRequests(7); destinationSelect.recentDestinationList = [ destination1, destination2, destination3, destination4, destination5, destination6, destination7, destination8, destination9, ]; return whenStatusRequestsDone; }) .then(() => { return waitBeforeNextRender(destinationSelect); }) .then(() => { const dropdown = destinationSelect.$.dropdown; // Empty printer status. assertEquals( 'print-preview:printer-status-green', getIconString(dropdown, destination1.key)); // Error printer status with unknown severity. assertEquals( 'print-preview:printer-status-green', getIconString(dropdown, destination2.key)); // Error printer status with report severity. 
assertEquals( 'print-preview:printer-status-green', getIconString(dropdown, destination3.key)); // Error printer status with warning severity. assertEquals( 'print-preview:printer-status-red', getIconString(dropdown, destination4.key)); // Error printer status with error severity. assertEquals( 'print-preview:printer-status-red', getIconString(dropdown, destination5.key)); // Error printer status with unknown severity + error printer // status with error severity. assertEquals( 'print-preview:printer-status-red', getIconString(dropdown, destination6.key)); // Error printer status with unknown severity + error printer // status with report severity. assertEquals( 'print-preview:printer-status-green', getIconString(dropdown, destination7.key)); // Unknown reason printer status with error severity. assertEquals( 'print-preview:printer-status-grey', getIconString(dropdown, destination8.key)); // Unknown reason printer status with unknown severity. assertEquals( 'print-preview:printer-status-green', getIconString(dropdown, destination9.key)); }); }); test( assert(printer_status_test_cros.TestNames.SendStatusRequestOnce), function() { return waitBeforeNextRender(destinationSelect).then(() => { const destination1 = createDestination('ID1', 'One', DestinationOrigin.CROS); const destination2 = createDestination('ID2', 'Two', DestinationOrigin.CROS); destinationSelect.recentDestinationList = [ destination1, destination2, createDestination('ID3', 'Three', DestinationOrigin.EXTENSION), createDestination('ID4', 'Four', DestinationOrigin.EXTENSION), ]; assertEquals( 2, nativeLayerCros.getCallCount('requestPrinterStatusUpdate')); // Update list with 2 existing destinations and one new destination. // Make sure the requestPrinterStatusUpdate only gets called for the // new destination. 
destinationSelect.recentDestinationList = [ destination1, destination2, createDestination('ID5', 'Five', DestinationOrigin.CROS), ]; assertEquals( 3, nativeLayerCros.getCallCount('requestPrinterStatusUpdate')); }); }); test(assert(printer_status_test_cros.TestNames.HiddenStatusText), function() { const destinationStatus = destinationSelect.shadowRoot!.querySelector<HTMLElement>( '.destination-additional-info')!; return waitBeforeNextRender(destinationSelect) .then(() => { const destinationWithoutErrorStatus = createDestination('ID1', 'One', DestinationOrigin.CROS); // Destination with ID4 will return an error printer status that will // trigger the error text being populated. const destinationWithErrorStatus = createDestination('ID4', 'Four', DestinationOrigin.CROS); const cloudPrintDestination = new Destination( 'ID2', DestinationType.GOOGLE, DestinationOrigin.COOKIES, 'Two', DestinationConnectionStatus.OFFLINE, {account: account}); destinationSelect.recentDestinationList = [ destinationWithoutErrorStatus, destinationWithErrorStatus, cloudPrintDestination, ]; const destinationEulaWrapper = destinationSelect.$.destinationEulaWrapper; destinationSelect.destination = cloudPrintDestination; assertFalse(destinationStatus.hidden); assertTrue(destinationEulaWrapper.hidden); destinationSelect.destination = destinationWithoutErrorStatus; assertTrue(destinationStatus.hidden); assertTrue(destinationEulaWrapper.hidden); destinationSelect.set( 'destination.eulaUrl', 'chrome://os-credits/eula'); assertFalse(destinationEulaWrapper.hidden); destinationSelect.destination = destinationWithErrorStatus; return nativeLayerCros.whenCalled('requestPrinterStatusUpdate'); }) .then(() => { return waitBeforeNextRender(destinationSelect); }) .then(() => { assertFalse(destinationStatus.hidden); }); }); test(assert(printer_status_test_cros.TestNames.ChangeIcon), function() { return waitBeforeNextRender(destinationSelect).then(() => { const localCrosPrinter = createDestination('ID1', 'One', 
DestinationOrigin.CROS); const localNonCrosPrinter = createDestination('ID2', 'Two', DestinationOrigin.LOCAL); const cloudPrintDestination = new Destination( 'ID3', DestinationType.GOOGLE, DestinationOrigin.COOKIES, 'Three', DestinationConnectionStatus.ONLINE, {account: account}); const ownedCloudPrintDestination = new Destination( 'ID4', DestinationType.GOOGLE, DestinationOrigin.COOKIES, 'Four', DestinationConnectionStatus.ONLINE, {account: account, isOwned: true}); const crosEnterprisePrinter = new Destination( 'ID5', DestinationType.LOCAL, DestinationOrigin.CROS, 'Five', DestinationConnectionStatus.ONLINE, {isEnterprisePrinter: true}); const mobilePrinter = new Destination( 'ID7', DestinationType.MOBILE, DestinationOrigin.COOKIES, 'Seven', DestinationConnectionStatus.ONLINE); const saveToDrive = getGoogleDriveDestination('account'); const saveAsPdf = getSaveAsPdfDestination(); destinationSelect.recentDestinationList = [ localCrosPrinter, saveToDrive, saveAsPdf, ]; const dropdown = destinationSelect.$.dropdown; destinationSelect.destination = localCrosPrinter; destinationSelect.updateDestination(); assertEquals( 'print-preview:printer-status-grey', dropdown.destinationIcon); destinationSelect.destination = localNonCrosPrinter; destinationSelect.updateDestination(); assertEquals('print-preview:print', dropdown.destinationIcon); destinationSelect.destination = cloudPrintDestination; destinationSelect.updateDestination(); assertEquals('print-preview:printer-shared', dropdown.destinationIcon); destinationSelect.destination = ownedCloudPrintDestination; destinationSelect.updateDestination(); assertEquals('print-preview:print', dropdown.destinationIcon); destinationSelect.destination = crosEnterprisePrinter; destinationSelect.updateDestination(); assertEquals( 'print-preview:business-printer-status-grey', dropdown.destinationIcon); destinationSelect.destination = mobilePrinter; destinationSelect.updateDestination(); assertEquals('print-preview:smartphone', 
dropdown.destinationIcon); destinationSelect.destination = saveToDrive; destinationSelect.updateDestination(); assertEquals('print-preview:save-to-drive', dropdown.destinationIcon); destinationSelect.destination = saveAsPdf; destinationSelect.updateDestination(); assertEquals('cr:insert-drive-file', dropdown.destinationIcon); }); }); test( assert( printer_status_test_cros.TestNames.SuccessfulPrinterStatusAfterRetry), function() { nativeLayerCros.simulateStatusRetrySuccesful(); const destination = createDestination('ID10', 'Ten', DestinationOrigin.CROS); destination.setPrinterStatusRetryTimeoutForTesting(100); const whenStatusRequestsDonePromise = nativeLayerCros.waitForMultiplePrinterStatusRequests(2); destinationSelect.recentDestinationList = [ destination, ]; const dropdown = destinationSelect.$.dropdown; return whenStatusRequestsDonePromise .then(() => { assertEquals( 'print-preview:printer-status-grey', getIconString(dropdown, destination.key)); assertEquals( 0, nativeLayerCros.getCallCount( 'recordPrinterStatusRetrySuccessHistogram')); return waitBeforeNextRender(destinationSelect); }) .then(() => { // The printer status is requested twice because of the retry. assertEquals( 2, nativeLayerCros.getCallCount('requestPrinterStatusUpdate')); assertEquals( 'print-preview:printer-status-green', getIconString(dropdown, destination.key)); assertEquals( 1, nativeLayerCros.getCallCount( 'recordPrinterStatusRetrySuccessHistogram')); assertEquals( true, nativeLayerCros.getArgs( 'recordPrinterStatusRetrySuccessHistogram')[0]); }); }); });
the_stack
export const parseBoolean = (value: string): boolean => { switch (value) { case "true": return true; case "false": return false; default: throw new Error(`Unable to parse boolean value "${value}"`); } }; /* * Asserts a value is a boolean and returns it. * * @param value A value that is expected to be a boolean. * @returns The value if it's a boolean, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectBoolean = (value: any): boolean | undefined => { if (value === null || value === undefined) { return undefined; } if (typeof value === "boolean") { return value; } throw new TypeError(`Expected boolean, got ${typeof value}`); }; /** * Asserts a value is a number and returns it. * * @param value A value that is expected to be a number. * @returns The value if it's a number, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectNumber = (value: any): number | undefined => { if (value === null || value === undefined) { return undefined; } if (typeof value === "number") { return value; } throw new TypeError(`Expected number, got ${typeof value}`); }; const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); /** * Asserts a value is a 32-bit float and returns it. * * @param value A value that is expected to be a 32-bit float. * @returns The value if it's a float, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectFloat32 = (value: any): number | undefined => { const expected = expectNumber(value); if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { // IEEE-754 is an imperfect representation for floats. Consider the simple // value `0.1`. The representation in a 32-bit float would look like: // // 0 01111011 10011001100110011001101 // Actual value: 0.100000001490116119384765625 // // Note the repeating pattern of `1001` in the fraction part. 
The 64-bit // representation is similar: // // 0 01111111011 1001100110011001100110011001100110011001100110011010 // Actual value: 0.100000000000000005551115123126 // // So even for what we consider simple numbers, the representation differs // between the two formats. And it's non-obvious how one might look at the // 64-bit value (which is how JS represents numbers) and determine if it // can be represented reasonably in the 32-bit form. Primarily because you // can't know whether the intent was to represent `0.1` or the actual // value in memory. But even if you have both the decimal value and the // double value, that still doesn't communicate the intended precision. // // So rather than attempting to divine the intent of the caller, we instead // do some simple bounds checking to make sure the value is passingly // representable in a 32-bit float. It's not perfect, but it's good enough. // Perfect, even if possible to achieve, would likely be too costly to // be worth it. // // The maximum value of a 32-bit float. Since the 64-bit representation // could be more or less, we just round it up to the nearest whole number. // This further reduces our ability to be certain of the value, but it's // an acceptable tradeoff. // // Compare against the absolute value to simplify things. if (Math.abs(expected) > MAX_FLOAT) { throw new TypeError(`Expected 32-bit float, got ${value}`); } } return expected; }; /** * Asserts a value is an integer and returns it. * * @param value A value that is expected to be an integer. * @returns The value if it's an integer, undefined if it's null/undefined, * otherwise an error is thrown. 
*/ export const expectLong = (value: any): number | undefined => { if (value === null || value === undefined) { return undefined; } if (Number.isInteger(value) && !Number.isNaN(value)) { return value; } throw new TypeError(`Expected integer, got ${typeof value}`); }; /** * @deprecated Use expectLong */ export const expectInt = expectLong; /** * Asserts a value is a 32-bit integer and returns it. * * @param value A value that is expected to be an integer. * @returns The value if it's an integer, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectInt32 = (value: any): number | undefined => expectSizedInt(value, 32); /** * Asserts a value is a 16-bit integer and returns it. * * @param value A value that is expected to be an integer. * @returns The value if it's an integer, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectShort = (value: any): number | undefined => expectSizedInt(value, 16); /** * Asserts a value is an 8-bit integer and returns it. * * @param value A value that is expected to be an integer. * @returns The value if it's an integer, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectByte = (value: any): number | undefined => expectSizedInt(value, 8); type IntSize = 32 | 16 | 8; const expectSizedInt = (value: any, size: IntSize): number | undefined => { const expected = expectLong(value); if (expected !== undefined && castInt(expected, size) !== expected) { throw new TypeError(`Expected ${size}-bit integer, got ${value}`); } return expected; }; const castInt = (value: number, size: IntSize) => { switch (size) { case 32: return Int32Array.of(value)[0]; case 16: return Int16Array.of(value)[0]; case 8: return Int8Array.of(value)[0]; } }; /** * Asserts a value is not null or undefined and returns it, or throws an error. 
* * @param value A value that is expected to be defined * @param location The location where we're expecting to find a defined object (optional) * @returns The value if it's not undefined, otherwise throws an error */ export const expectNonNull = <T>(value: T | null | undefined, location?: string): T => { if (value === null || value === undefined) { if (location) { throw new TypeError(`Expected a non-null value for ${location}`); } throw new TypeError("Expected a non-null value"); } return value; }; /** * Asserts a value is an JSON-like object and returns it. This is expected to be used * with values parsed from JSON (arrays, objects, numbers, strings, booleans). * * @param value A value that is expected to be an object * @returns The value if it's an object, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectObject = (value: any): { [key: string]: any } | undefined => { if (value === null || value === undefined) { return undefined; } if (typeof value === "object" && !Array.isArray(value)) { return value; } throw new TypeError(`Expected object, got ${typeof value}`); }; /** * Asserts a value is a string and returns it. * * @param value A value that is expected to be a string. * @returns The value if it's a string, undefined if it's null/undefined, * otherwise an error is thrown. */ export const expectString = (value: any): string | undefined => { if (value === null || value === undefined) { return undefined; } if (typeof value === "string") { return value; } throw new TypeError(`Expected string, got ${typeof value}`); }; /** * Asserts a value is a JSON-like object with only one non-null/non-undefined key and * returns it. * * @param value A value that is expected to be an object with exactly one non-null, * non-undefined key. * @return the value if it's a union, undefined if it's null/undefined, otherwise * an error is thrown. 
*/ export const expectUnion = (value: unknown): { [key: string]: any } | undefined => { if (value === null || value === undefined) { return undefined; } const asObject = expectObject(value)!; const setKeys = Object.entries(asObject) .filter(([_, v]) => v !== null && v !== undefined) .map(([k, _]) => k); if (setKeys.length === 0) { throw new TypeError(`Unions must have exactly one non-null member`); } if (setKeys.length > 1) { throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); } return asObject; }; /** * Parses a value into a double. If the value is null or undefined, undefined * will be returned. If the value is a string, it will be parsed by the standard * parseFloat with one exception: NaN may only be explicitly set as the string * "NaN", any implicit Nan values will result in an error being thrown. If any * other type is provided, an exception will be thrown. * * @param value A number or string representation of a double. * @returns The value as a number, or undefined if it's null/undefined. */ export const strictParseDouble = (value: string | number): number | undefined => { if (typeof value == "string") { return expectNumber(parseNumber(value)); } return expectNumber(value); }; /** * @deprecated Use strictParseDouble */ export const strictParseFloat = strictParseDouble; /** * Parses a value into a float. If the value is null or undefined, undefined * will be returned. If the value is a string, it will be parsed by the standard * parseFloat with one exception: NaN may only be explicitly set as the string * "NaN", any implicit Nan values will result in an error being thrown. If any * other type is provided, an exception will be thrown. * * @param value A number or string representation of a float. * @returns The value as a number, or undefined if it's null/undefined. 
*/ export const strictParseFloat32 = (value: string | number): number | undefined => { if (typeof value == "string") { return expectFloat32(parseNumber(value)); } return expectFloat32(value); }; // This regex matches JSON-style numbers. In short: // * The integral may start with a negative sign, but not a positive one // * No leading 0 on the integral unless it's immediately followed by a '.' // * Exponent indicated by a case-insensitive 'E' optionally followed by a // positive/negative sign and some number of digits. // It also matches both positive and negative infinity as well and explicit NaN. const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; const parseNumber = (value: string): number => { const matches = value.match(NUMBER_REGEX); if (matches === null || matches[0].length !== value.length) { throw new TypeError(`Expected real number, got implicit NaN`); } return parseFloat(value); }; /** * Asserts a value is a number and returns it. If the value is a string * representation of a non-numeric number type (NaN, Infinity, -Infinity), * the value will be parsed. Any other string value will result in an exception * being thrown. Null or undefined will be returned as undefined. Any other * type will result in an exception being thrown. * * @param value A number or string representation of a non-numeric float. * @returns The value as a number, or undefined if it's null/undefined. */ export const limitedParseDouble = (value: string | number): number | undefined => { if (typeof value == "string") { return parseFloatString(value); } return expectNumber(value); }; /** * @deprecated Use limitedParseDouble */ export const handleFloat = limitedParseDouble; /** * @deprecated Use limitedParseDouble */ export const limitedParseFloat = limitedParseDouble; /** * Asserts a value is a 32-bit float and returns it. If the value is a string * representation of a non-numeric number type (NaN, Infinity, -Infinity), * the value will be parsed. 
Any other string value will result in an exception * being thrown. Null or undefined will be returned as undefined. Any other * type will result in an exception being thrown. * * @param value A number or string representation of a non-numeric float. * @returns The value as a number, or undefined if it's null/undefined. */ export const limitedParseFloat32 = (value: string | number): number | undefined => { if (typeof value == "string") { return parseFloatString(value); } return expectFloat32(value); }; const parseFloatString = (value: string): number => { switch (value) { case "NaN": return NaN; case "Infinity": return Infinity; case "-Infinity": return -Infinity; default: throw new Error(`Unable to parse float value: ${value}`); } }; /** * Parses a value into an integer. If the value is null or undefined, undefined * will be returned. If the value is a string, it will be parsed by parseFloat * and the result will be asserted to be an integer. If the parsed value is not * an integer, or the raw value is any type other than a string or number, an * exception will be thrown. * * @param value A number or string representation of an integer. * @returns The value as a number, or undefined if it's null/undefined. */ export const strictParseLong = (value: string | number): number | undefined => { if (typeof value === "string") { // parseInt can't be used here, because it will silently discard any // existing decimals. We want to instead throw an error if there are any. return expectLong(parseNumber(value)); } return expectLong(value); }; /** * @deprecated Use strictParseLong */ export const strictParseInt = strictParseLong; /** * Parses a value into a 32-bit integer. If the value is null or undefined, undefined * will be returned. If the value is a string, it will be parsed by parseFloat * and the result will be asserted to be an integer. If the parsed value is not * an integer, or the raw value is any type other than a string or number, an * exception will be thrown. 
* * @param value A number or string representation of a 32-bit integer. * @returns The value as a number, or undefined if it's null/undefined. */ export const strictParseInt32 = (value: string | number): number | undefined => { if (typeof value === "string") { // parseInt can't be used here, because it will silently discard any // existing decimals. We want to instead throw an error if there are any. return expectInt32(parseNumber(value)); } return expectInt32(value); }; /** * Parses a value into a 16-bit integer. If the value is null or undefined, undefined * will be returned. If the value is a string, it will be parsed by parseFloat * and the result will be asserted to be an integer. If the parsed value is not * an integer, or the raw value is any type other than a string or number, an * exception will be thrown. * * @param value A number or string representation of a 16-bit integer. * @returns The value as a number, or undefined if it's null/undefined. */ export const strictParseShort = (value: string | number): number | undefined => { if (typeof value === "string") { // parseInt can't be used here, because it will silently discard any // existing decimals. We want to instead throw an error if there are any. return expectShort(parseNumber(value)); } return expectShort(value); }; /** * Parses a value into an 8-bit integer. If the value is null or undefined, undefined * will be returned. If the value is a string, it will be parsed by parseFloat * and the result will be asserted to be an integer. If the parsed value is not * an integer, or the raw value is any type other than a string or number, an * exception will be thrown. * * @param value A number or string representation of an 8-bit integer. * @returns The value as a number, or undefined if it's null/undefined. 
*/ export const strictParseByte = (value: string | number): number | undefined => { if (typeof value === "string") { // parseInt can't be used here, because it will silently discard any // existing decimals. We want to instead throw an error if there are any. return expectByte(parseNumber(value)); } return expectByte(value); };
the_stack
import {makePropertyWrapper} from './khronos-webgl-error';

/**
 * Resets a context to the initial state.
 *
 * Walks every piece of mutable WebGL state (vertex attributes, texture
 * units, bound buffers/framebuffers, blend/depth/stencil/raster state,
 * pixel-store parameters) and puts it back to its spec-default value.
 * WebGL2-only state is reset only when the context exposes WebGL2 entry
 * points.
 *
 * @param {!WebGLRenderingContext} ctx The webgl context to
 *     reset.
 */
function resetToInitialState(ctx) {
  // Feature-detect WebGL2 by the presence of a WebGL2-only method rather
  // than instanceof, so this also works for wrapped/proxied contexts.
  const isWebGL2RenderingContext = !!ctx.createTransformFeedback;

  if (isWebGL2RenderingContext) {
    ctx.bindVertexArray(null);
  }

  // Reset every vertex attribute. A scratch buffer is bound while calling
  // vertexAttribPointer (which requires a bound ARRAY_BUFFER), then deleted.
  const numAttribs = ctx.getParameter(ctx.MAX_VERTEX_ATTRIBS);
  const tmp = ctx.createBuffer();
  ctx.bindBuffer(ctx.ARRAY_BUFFER, tmp);
  for (var ii = 0; ii < numAttribs; ++ii) {
    ctx.disableVertexAttribArray(ii);
    ctx.vertexAttribPointer(ii, 4, ctx.FLOAT, false, 0, 0);
    ctx.vertexAttrib1f(ii, 0);
    if (isWebGL2RenderingContext) {
      ctx.vertexAttribDivisor(ii, 0);
    }
  }
  ctx.deleteBuffer(tmp);

  // Unbind all texture targets on every texture unit.
  const numTextureUnits = ctx.getParameter(ctx.MAX_TEXTURE_IMAGE_UNITS);
  for (let ii = 0; ii < numTextureUnits; ++ii) {
    ctx.activeTexture(ctx.TEXTURE0 + ii);
    ctx.bindTexture(ctx.TEXTURE_CUBE_MAP, null);
    ctx.bindTexture(ctx.TEXTURE_2D, null);
    if (isWebGL2RenderingContext) {
      ctx.bindTexture(ctx.TEXTURE_2D_ARRAY, null);
      ctx.bindTexture(ctx.TEXTURE_3D, null);
      ctx.bindSampler(ii, null);
    }
  }

  // Restore global bindings and fixed-function-style state to spec defaults.
  ctx.activeTexture(ctx.TEXTURE0);
  ctx.useProgram(null);
  ctx.bindBuffer(ctx.ARRAY_BUFFER, null);
  ctx.bindBuffer(ctx.ELEMENT_ARRAY_BUFFER, null);
  ctx.bindFramebuffer(ctx.FRAMEBUFFER, null);
  ctx.bindRenderbuffer(ctx.RENDERBUFFER, null);
  ctx.disable(ctx.BLEND);
  ctx.disable(ctx.CULL_FACE);
  ctx.disable(ctx.DEPTH_TEST);
  ctx.disable(ctx.DITHER);
  ctx.disable(ctx.SCISSOR_TEST);
  ctx.blendColor(0, 0, 0, 0);
  ctx.blendEquation(ctx.FUNC_ADD);
  ctx.blendFunc(ctx.ONE, ctx.ZERO);
  ctx.clearColor(0, 0, 0, 0);
  ctx.clearDepth(1);
  ctx.clearStencil(-1);
  ctx.colorMask(true, true, true, true);
  ctx.cullFace(ctx.BACK);
  ctx.depthFunc(ctx.LESS);
  ctx.depthMask(true);
  ctx.depthRange(0, 1);
  ctx.frontFace(ctx.CCW);
  ctx.hint(ctx.GENERATE_MIPMAP_HINT, ctx.DONT_CARE);
  ctx.lineWidth(1);
  ctx.pixelStorei(ctx.PACK_ALIGNMENT, 4);
  ctx.pixelStorei(ctx.UNPACK_ALIGNMENT, 4);
  ctx.pixelStorei(ctx.UNPACK_FLIP_Y_WEBGL, false);
  ctx.pixelStorei(ctx.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
  // TODO: Delete this IF.
  if (ctx.UNPACK_COLORSPACE_CONVERSION_WEBGL) {
    ctx.pixelStorei(ctx.UNPACK_COLORSPACE_CONVERSION_WEBGL, ctx.BROWSER_DEFAULT_WEBGL);
  }
  ctx.polygonOffset(0, 0);
  ctx.sampleCoverage(1, false);
  ctx.scissor(0, 0, ctx.canvas.width, ctx.canvas.height);
  ctx.stencilFunc(ctx.ALWAYS, 0, 0xFFFFFFFF);
  ctx.stencilMask(0xFFFFFFFF);
  ctx.stencilOp(ctx.KEEP, ctx.KEEP, ctx.KEEP);
  ctx.viewport(0, 0, ctx.canvas.width, ctx.canvas.height);
  ctx.clear(ctx.COLOR_BUFFER_BIT | ctx.DEPTH_BUFFER_BIT | ctx.STENCIL_BUFFER_BIT);

  // WebGL2-only state: draw/read buffers, extra buffer binding points,
  // indexed bindings, and the extra pixel-store parameters.
  if (isWebGL2RenderingContext) {
    ctx.drawBuffers([ctx.BACK]);
    ctx.readBuffer(ctx.BACK);
    ctx.bindBuffer(ctx.COPY_READ_BUFFER, null);
    ctx.bindBuffer(ctx.COPY_WRITE_BUFFER, null);
    ctx.bindBuffer(ctx.PIXEL_PACK_BUFFER, null);
    ctx.bindBuffer(ctx.PIXEL_UNPACK_BUFFER, null);
    var numTransformFeedbacks = ctx.getParameter(ctx.MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS);
    for (var ii = 0; ii < numTransformFeedbacks; ++ii) {
      ctx.bindBufferBase(ctx.TRANSFORM_FEEDBACK_BUFFER, ii, null);
    }
    var numUBOs = ctx.getParameter(ctx.MAX_UNIFORM_BUFFER_BINDINGS);
    for (var ii = 0; ii < numUBOs; ++ii) {
      ctx.bindBufferBase(ctx.UNIFORM_BUFFER, ii, null);
    }
    ctx.disable(ctx.RASTERIZER_DISCARD);
    ctx.pixelStorei(ctx.UNPACK_IMAGE_HEIGHT, 0);
    ctx.pixelStorei(ctx.UNPACK_SKIP_IMAGES, 0);
    ctx.pixelStorei(ctx.UNPACK_ROW_LENGTH, 0);
    ctx.pixelStorei(ctx.UNPACK_SKIP_ROWS, 0);
    ctx.pixelStorei(ctx.UNPACK_SKIP_PIXELS, 0);
    ctx.pixelStorei(ctx.PACK_ROW_LENGTH, 0);
    ctx.pixelStorei(ctx.PACK_SKIP_ROWS, 0);
    ctx.pixelStorei(ctx.PACK_SKIP_PIXELS, 0);
    ctx.hint(ctx.FRAGMENT_SHADER_DERIVATIVE_HINT, ctx.DONT_CARE);
  }

  // TODO: This should NOT be needed but Firefox fails with 'hint'
  // Drain any pending errors so the caller starts with a clean error queue.
  while(ctx.getError());
}

/**
 * Given a canvas element returns a wrapped canvas element that will
 * simulate lost context. The canvas returned adds the following functions.
 *
 * loseContext:
 *   simulates a lost context event.
 *
 * restoreContext:
 *   simulates the context being restored.
 *
 * lostContextInNCalls:
 *   loses the context after N gl calls.
 *
 * getNumCalls:
 *   tells you how many gl calls there have been so far.
 *
 * setRestoreTimeout:
 *   sets the number of milliseconds until the context is restored
 *   after it has been lost. Defaults to 0. Pass -1 to prevent
 *   automatic restoring.
 *
 * @param {!Canvas} canvas The canvas element to wrap.
 */
export function makeLostContextSimulatingCanvas(canvas) {
  // Shared mutable state for the whole simulation; the wrapper functions
  // below all close over these variables.
  let unwrappedContext_;                 // the real WebGL context
  let onLost_ = [];                      // 'webglcontextlost' listeners
  let onRestored_ = [];                  // 'webglcontextrestored' listeners
  let wrappedContext_: Partial<Record<keyof WebGL2RenderingContext, any>> = {};
  let contextId_ = 1;                    // bumped on every simulated loss
  let contextLost_ = false;
  let resourceId_ = 0;                   // NOTE(review): appears unused here — confirm before removing
  let resourceDb_ = [];                  // every GL object created via the wrapper
  let numCallsToLoseContext_ = 0;        // call count at which to simulate loss (0 = never)
  let numCalls_ = 0;                     // total wrapped GL calls so far
  let canRestore_ = false;               // set by event.preventDefault() in a lost listener
  let restoreTimeout_ = 0;               // ms until auto-restore; -1 disables
  let isWebGL2RenderingContext;
  // Holds booleans for each GL error so can simulate errors.
  let glErrorShadow_ = { };

  // Intercept getContext so the first WebGL context handed out is replaced
  // by the simulating wrapper. Non-WebGL contexts pass through untouched.
  canvas.getContext = function(f) {
    return function() {
      var ctx = f.apply(canvas, arguments);
      // Did we get a context and is it a WebGL context?
      if ((ctx instanceof WebGLRenderingContext) || (window.WebGL2RenderingContext && (ctx instanceof WebGL2RenderingContext))) {
        if (ctx != unwrappedContext_) {
          if (unwrappedContext_) {
            // Only a single underlying context is supported per canvas.
            throw "got different context"
          }
          isWebGL2RenderingContext = window.WebGL2RenderingContext && (ctx instanceof WebGL2RenderingContext);
          unwrappedContext_ = ctx;
          wrappedContext_ = makeLostContextSimulatingContext(unwrappedContext_);
        }
        return wrappedContext_;
      }
      return ctx;
    }
  }(canvas.getContext);

  // Normalizes an EventListener (function or {handleEvent}) to a function.
  function wrapEvent(listener) {
    if (typeof(listener) == "function") {
      return listener;
    } else {
      return function(info) {
        listener.handleEvent(info);
      }
    }
  }

  function addOnContextLostListener(listener) {
    onLost_.push(wrapEvent(listener));
  };

  function addOnContextRestoredListener(listener) {
    onRestored_.push(wrapEvent(listener));
  };

  // Routes the two webglcontext* event types into our own listener lists;
  // every other event type goes to the real addEventListener.
  function wrapAddEventListener(canvas) {
    var f = canvas.addEventListener;
    canvas.addEventListener = function(type, listener, bubble) {
      switch (type) {
        case 'webglcontextlost':
          addOnContextLostListener(listener);
          break;
        case 'webglcontextrestored':
          addOnContextRestoredListener(listener);
          break;
        default:
          f.apply(canvas, arguments);
      }
    };
  }

  wrapAddEventListener(canvas);

  // Simulates a context-lost event: flips the lost flag, shadows
  // CONTEXT_LOST_WEBGL as the pending error, and fires listeners async
  // (matching real browser event timing). Optionally schedules auto-restore.
  canvas.loseContext = function() {
    if (!contextLost_) {
      contextLost_ = true;
      numCallsToLoseContext_ = 0;
      ++contextId_;
      // Drain real errors first so only the simulated error is reported.
      while (unwrappedContext_.getError());
      clearErrors();
      glErrorShadow_[unwrappedContext_.CONTEXT_LOST_WEBGL] = true;
      var event = makeWebGLContextEvent("context lost");
      var callbacks = onLost_.slice();
      setTimeout(function() {
        //log("numCallbacks:" + callbacks.length);
        for (var ii = 0; ii < callbacks.length; ++ii) {
          //log("calling callback:" + ii);
          callbacks[ii](event);
        }
        if (restoreTimeout_ >= 0) {
          setTimeout(function() {
            canvas.restoreContext();
          }, restoreTimeout_);
        }
      }, 0);
    }
  };

  // Simulates restoration: only allowed if some lost-listener called
  // preventDefault (mirrors the real WebGL context restoration contract).
  canvas.restoreContext = function() {
    if (contextLost_) {
      if (onRestored_.length) {
        setTimeout(function() {
          if (!canRestore_) {
            throw "can not restore. webglcontestlost listener did not call event.preventDefault";
          }
          freeResources();
          resetToInitialState(unwrappedContext_);
          contextLost_ = false;
          numCalls_ = 0;
          canRestore_ = false;
          var callbacks = onRestored_.slice();
          var event = makeWebGLContextEvent("context restored");
          for (var ii = 0; ii < callbacks.length; ++ii) {
            callbacks[ii](event);
          }
        }, 0);
      }
    }
  };

  // Schedules a loss after N more wrapped GL calls.
  canvas.loseContextInNCalls = function(numCalls) {
    if (contextLost_) {
      throw "You can not ask a lost contet to be lost";
    }
    numCallsToLoseContext_ = numCalls_ + numCalls;
  };

  canvas.getNumCalls = function() {
    return numCalls_;
  };

  canvas.setRestoreTimeout = function(timeout) {
    restoreTimeout_ = timeout;
  };

  function isWebGLObject(obj) {
    //return false;
    return (obj instanceof WebGLBuffer ||
            obj instanceof WebGLFramebuffer ||
            obj instanceof WebGLProgram ||
            obj instanceof WebGLRenderbuffer ||
            obj instanceof WebGLShader ||
            obj instanceof WebGLTexture);
  }

  // True if every GL-object argument was created under the current context
  // generation (objects from a lost generation are invalid).
  function checkResources(args) {
    for (var ii = 0; ii < args.length; ++ii) {
      var arg = args[ii];
      if (isWebGLObject(arg)) {
        return arg.__webglDebugContextLostId__ == contextId_;
      }
    }
    return true;
  }

  function clearErrors() {
    var k = Object.keys(glErrorShadow_);
    for (var ii = 0; ii < k.length; ++ii) {
      delete glErrorShadow_[k[ii]];
    }
  }

  // Called on every wrapped GL call; triggers the scheduled loss.
  function loseContextIfTime() {
    ++numCalls_;
    if (!contextLost_) {
      if (numCallsToLoseContext_ == numCalls_) {
        canvas.loseContext();
      }
    }
  }

  // Makes a function that simulates WebGL when out of context.
  function makeLostContextFunctionWrapper(ctx, functionName) {
    var f = ctx[functionName];
    return function() {
      // log("calling:" + functionName);
      // Only call the functions if the context is not lost.
      loseContextIfTime();
      if (!contextLost_) {
        //if (!checkResources(arguments)) {
        //  glErrorShadow_[wrappedContext_.INVALID_OPERATION] = true;
        //  return;
        //}
        var result = f.apply(ctx, arguments);
        return result;
      }
    };
  }

  // Deletes every GL object created through the wrapper, dispatching on the
  // object's concrete type (WebGL2 resource types only when available).
  function freeResources() {
    for (var ii = 0; ii < resourceDb_.length; ++ii) {
      var resource = resourceDb_[ii];
      if (resource instanceof WebGLBuffer) {
        unwrappedContext_.deleteBuffer(resource);
      } else if (resource instanceof WebGLFramebuffer) {
        unwrappedContext_.deleteFramebuffer(resource);
      } else if (resource instanceof WebGLProgram) {
        unwrappedContext_.deleteProgram(resource);
      } else if (resource instanceof WebGLRenderbuffer) {
        unwrappedContext_.deleteRenderbuffer(resource);
      } else if (resource instanceof WebGLShader) {
        unwrappedContext_.deleteShader(resource);
      } else if (resource instanceof WebGLTexture) {
        unwrappedContext_.deleteTexture(resource);
      } else if (isWebGL2RenderingContext) {
        if (resource instanceof WebGLQuery) {
          unwrappedContext_.deleteQuery(resource);
        } else if (resource instanceof WebGLSampler) {
          unwrappedContext_.deleteSampler(resource);
        } else if (resource instanceof WebGLSync) {
          unwrappedContext_.deleteSync(resource);
        } else if (resource instanceof WebGLTransformFeedback) {
          unwrappedContext_.deleteTransformFeedback(resource);
        } else if (resource instanceof WebGLVertexArrayObject) {
          unwrappedContext_.deleteVertexArray(resource);
        }
      }
    }
  }

  // Minimal stand-in for a WebGLContextEvent; preventDefault arms restoring.
  function makeWebGLContextEvent(statusMessage) {
    return {
      statusMessage: statusMessage,
      preventDefault: function() {
        canRestore_ = true;
      }
    };
  }

  return canvas;

  // Builds the wrapped context: copies every property, wraps every function,
  // then specializes the handful of entry points whose lost-context return
  // values are defined by the WebGL spec.
  function makeLostContextSimulatingContext(ctx) {
    // copy all functions and properties to wrapper
    for (var propertyName in ctx) {
      if (typeof ctx[propertyName] == 'function') {
        wrappedContext_[propertyName] = makeLostContextFunctionWrapper(
            ctx, propertyName);
      } else {
        makePropertyWrapper(wrappedContext_, ctx, propertyName);
      }
    }

    // Wrap a few functions specially.
    // getError reports shadowed (simulated) errors first; real errors are
    // folded into the shadow map so each is reported exactly once.
    wrappedContext_.getError = function() {
      loseContextIfTime();
      if (!contextLost_) {
        let err;
        while (err = unwrappedContext_.getError()) {
          glErrorShadow_[err] = true;
        }
      }
      for (var err in glErrorShadow_) {
        if (glErrorShadow_[err]) {
          delete glErrorShadow_[err];
          return err;
        }
      }
      return wrappedContext_.NO_ERROR;
    };

    // Creation functions: tag each created object with the context
    // generation and record it for freeResources(); return null when lost.
    var creationFunctions = [
      "createBuffer", "createFramebuffer", "createProgram", "createRenderbuffer", "createShader", "createTexture"
    ];
    if (isWebGL2RenderingContext) {
      creationFunctions.push(
        "createQuery", "createSampler", "fenceSync", "createTransformFeedback", "createVertexArray"
      );
    }
    for (var ii = 0; ii < creationFunctions.length; ++ii) {
      var functionName = creationFunctions[ii];
      wrappedContext_[functionName] = function(f) {
        return function() {
          loseContextIfTime();
          if (contextLost_) {
            return null;
          }
          var obj = f.apply(ctx, arguments);
          obj.__webglDebugContextLostId__ = contextId_;
          resourceDb_.push(obj);
          return obj;
        };
      }(ctx[functionName]);
    }

    // Getters return null when the context is lost (per spec).
    var functionsThatShouldReturnNull = [
      "getActiveAttrib", "getActiveUniform", "getBufferParameter", "getContextAttributes", "getAttachedShaders",
      "getFramebufferAttachmentParameter", "getParameter", "getProgramParameter", "getProgramInfoLog",
      "getRenderbufferParameter", "getShaderParameter", "getShaderInfoLog", "getShaderSource", "getTexParameter",
      "getUniform", "getUniformLocation", "getVertexAttrib"
    ];
    if (isWebGL2RenderingContext) {
      functionsThatShouldReturnNull.push(
        "getInternalformatParameter", "getQuery", "getQueryParameter", "getSamplerParameter", "getSyncParameter",
        "getTransformFeedbackVarying", "getIndexedParameter", "getUniformIndices", "getActiveUniforms",
        "getActiveUniformBlockParameter", "getActiveUniformBlockName"
      );
    }
    // Note: these re-wrap the already-wrapped function (wrappedContext_),
    // not ctx[functionName], layering the null-on-lost behavior on top.
    for (var ii = 0; ii < functionsThatShouldReturnNull.length; ++ii) {
      var functionName = functionsThatShouldReturnNull[ii];
      wrappedContext_[functionName] = function(f) {
        return function() {
          loseContextIfTime();
          if (contextLost_) {
            return null;
          }
          return f.apply(ctx, arguments);
        }
      }(wrappedContext_[functionName]);
    }

    // is* queries return false when the context is lost.
    var isFunctions = [
      "isBuffer", "isEnabled", "isFramebuffer", "isProgram", "isRenderbuffer", "isShader", "isTexture"
    ];
    if (isWebGL2RenderingContext) {
      isFunctions.push(
        "isQuery", "isSampler", "isSync", "isTransformFeedback", "isVertexArray"
      );
    }
    for (var ii = 0; ii < isFunctions.length; ++ii) {
      var functionName = isFunctions[ii];
      wrappedContext_[functionName] = function(f) {
        return function() {
          loseContextIfTime();
          if (contextLost_) {
            return false;
          }
          return f.apply(ctx, arguments);
        }
      }(wrappedContext_[functionName]);
    }

    wrappedContext_.checkFramebufferStatus = function(f) {
      return function() {
        loseContextIfTime();
        if (contextLost_) {
          return wrappedContext_.FRAMEBUFFER_UNSUPPORTED;
        }
        return f.apply(ctx, arguments);
      };
    }(wrappedContext_.checkFramebufferStatus);

    wrappedContext_.getAttribLocation = function(f) {
      return function() {
        loseContextIfTime();
        if (contextLost_) {
          return -1;
        }
        return f.apply(ctx, arguments);
      };
    }(wrappedContext_.getAttribLocation);

    wrappedContext_.getVertexAttribOffset = function(f) {
      return function() {
        loseContextIfTime();
        if (contextLost_) {
          return 0;
        }
        return f.apply(ctx, arguments);
      };
    }(wrappedContext_.getVertexAttribOffset);

    wrappedContext_.isContextLost = function() {
      return contextLost_;
    };

    if (isWebGL2RenderingContext) {
      wrappedContext_.getFragDataLocation = function(f) {
        return function() {
          loseContextIfTime();
          if (contextLost_) {
            return -1;
          }
          return f.apply(ctx, arguments);
        };
      }(wrappedContext_.getFragDataLocation);

      wrappedContext_.clientWaitSync = function(f) {
        return function() {
          loseContextIfTime();
          if (contextLost_) {
            return wrappedContext_.WAIT_FAILED;
          }
          return f.apply(ctx, arguments);
        };
      }(wrappedContext_.clientWaitSync);

      wrappedContext_.getUniformBlockIndex = function(f) {
        return function() {
          loseContextIfTime();
          if (contextLost_) {
            return wrappedContext_.INVALID_INDEX;
          }
          return f.apply(ctx, arguments);
        };
      }(wrappedContext_.getUniformBlockIndex);
    }

    return wrappedContext_;
  }
}
the_stack
// Tests for ObjectObserver's array mutation tracking: each array method
// (push/pop/unshift/shift/reverse/sort/fill/splice) must emit the expected
// change events ({type, path, value}) through the observer callback, and
// objects/arrays inserted into an observed array must themselves become
// observed (while removed items must stop emitting events).
import ObjectObserver from '../lib/ObjectObserver';

test('array push - primitives', () => {
  const events = [];
  let callBacks = 0;
  const pa = ObjectObserver.create([1, 2, 3, 4], changes => {
    events.push(...changes);
    callBacks += 1;
  });
  // One callback per push() call, one insert event per pushed element.
  pa.push(5);
  pa.push(6, 7);
  expect(events).toHaveLength(3);
  expect(callBacks).toBe(2);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [4],
    value: 5,
  });
  expect(events[1]).toEqual({
    type: 'insert',
    path: [5],
    value: 6,
  });
  expect(events[2]).toEqual({
    type: 'insert',
    path: [6],
    value: 7,
  });
});

test('array push - objects', () => {
  const events = [];
  const pa = ObjectObserver.create([], changes => events.push(...changes));
  pa.push({ text: 'initial' }, { text: 'secondary' });
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [0],
    value: { text: 'initial' },
  });
  expect(events[1]).toEqual({
    type: 'insert',
    path: [1],
    value: { text: 'secondary' },
  });
  // Pushed objects become observed: mutating them emits update events.
  pa[0].text = 'name';
  expect(events).toHaveLength(3);
  expect(events[2]).toEqual({
    type: 'update',
    path: [0, 'text'],
    value: 'name',
  });
  pa[1].text = 'more';
  expect(events).toHaveLength(4);
  expect(events[3]).toEqual({
    type: 'update',
    path: [1, 'text'],
    value: 'more',
  });
});

test('array push - arrays', () => {
  const events = [];
  const pa = ObjectObserver.create([], changes => events.push(...changes));
  pa.push([], [{}]);
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [0],
    value: [],
  });
  expect(events[1]).toEqual({
    type: 'insert',
    path: [1],
    value: [{}],
  });
  // Nested arrays (and objects inside them) are observed too.
  pa[0].push('name');
  expect(events).toHaveLength(3);
  expect(events[2]).toEqual({
    type: 'insert',
    path: [0, 0],
    value: 'name',
  });
  pa[1][0].prop = 'more';
  expect(events).toHaveLength(4);
  expect(events[3]).toEqual({
    type: 'insert',
    path: [1, 0, 'prop'],
    value: 'more',
  });
});

test('array pop - primitives', () => {
  const events = [];
  const pa = ObjectObserver.create(['some'], changes => events.push(...changes));
  const popped = pa.pop();
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'delete',
    path: [0],
  });
  expect(popped).toBe('some');
});

test('array pop - objects', () => {
  const events = [];
  const pa = ObjectObserver.create([{ test: 'text' }], changes => events.push(...changes));
  const pad: any = pa[0];
  pa[0].test = 'test';
  pad.test = 'more';
  expect(events).toHaveLength(2);
  // Popped object keeps its last observed state but is detached: further
  // mutations on it must NOT produce events on the source observer.
  const popped: any = pa.pop();
  expect(popped.test).toBe('more');
  expect(events).toHaveLength(3);
  popped.new = 'value';
  expect(events).toHaveLength(3);
  // A detached proxy can still be wrapped in a fresh observer.
  const eventsA = [];
  const newPad = ObjectObserver.create(pad, changes => eventsA.push(...changes));
  newPad.test = 'change';
  expect(eventsA).toHaveLength(1);
});

test('array unshift - primitives', () => {
  const events = [];
  let callbacks = 0;
  const pa = ObjectObserver.create([], changes => {
    events.push(...changes);
    callbacks += 1;
  });
  pa.unshift('a');
  pa.unshift('b', 'c');
  expect(events).toHaveLength(3);
  expect(callbacks).toBe(2);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [0],
    value: 'a',
  });
  expect(events[1]).toEqual({
    type: 'insert',
    path: [0],
    value: 'b',
  });
  expect(events[2]).toEqual({
    type: 'insert',
    path: [1],
    value: 'c',
  });
});

test('array unshift - objects', () => {
  const events = [];
  const pa = ObjectObserver.create([{ text: 'original' }], changes => {
    events.push(...changes);
  });
  pa.unshift({ text: 'initial' });
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [0],
    value: { text: 'initial' },
  });
  events.splice(0);
  // Both the unshifted object and the shifted-along original stay observed
  // at their new indices.
  pa[0].text = 'name';
  pa[1].text = 'other';
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0, 'text'],
    value: 'name',
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [1, 'text'],
    value: 'other',
  });
});

test('array unshift - arrays', () => {
  const events = [];
  const pa = ObjectObserver.create([{ text: 'original' }], changes => {
    events.push(...changes);
  });
  pa.unshift([{}] as any);
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [0],
    value: [{}],
  });
  events.splice(0);
  pa[0][0].text = 'name';
  pa[1].text = 'other';
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'insert',
    path: [0, 0, 'text'],
    value: 'name',
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [1, 'text'],
    value: 'other',
  });
});

test('array shift - primitives', () => {
  const events = [];
  const pa = ObjectObserver.create(['some'], changes => {
    events.push(...changes);
  });
  const shifted = pa.shift();
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'delete',
    path: [0],
  });
  expect(shifted).toBe('some');
});

test('array shift - objects', () => {
  const events = [];
  const pa = ObjectObserver.create(
    [{ text: 'a', inner: { test: 'more' } }, { text: 'b' }],
    changes => events.push(...changes),
  );
  const pa0 = pa[0];
  const pa0i: any = pa0.inner;
  pa[0].text = 'b';
  pa0i.test = 'test';
  expect(events).toHaveLength(2);
  events.splice(0);
  // Shifted-out object (and its nested object) keep their mutated state...
  const shifted = pa.shift();
  expect(shifted.text).toBe('b');
  expect(shifted.inner.test).toBe('test');
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'delete',
    path: [0],
  });
  events.splice(0);
  // ...the remaining element is re-indexed and still observed...
  pa[0].text = 'c';
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0, 'text'],
    value: 'c',
  });
  events.splice(0);
  // ...and the detached element no longer emits events.
  shifted.text = 'd';
  expect(events).toHaveLength(0);
});

test('array reverse - primitives (flat array)', () => {
  const events = [];
  const pa = ObjectObserver.create([1, 2, 3], changes => events.push(...changes));
  const reversed = pa.reverse();
  expect(reversed).toEqual(pa);
  // reverse emits a single 'reorder' event whose value maps new->old indices.
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'reorder',
    path: [],
    value: [2, 1, 0],
  });
});

test('array reverse - primitives (nested array)', () => {
  const events = [];
  const pa = ObjectObserver.create({ a1: { a2: [1, 2, 3] } }, changes => events.push(...changes));
  const reversed = pa.a1.a2.reverse();
  expect(reversed).toEqual(pa.a1.a2);
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'reorder',
    path: ['a1', 'a2'],
    value: [2, 1, 0],
  });
});

test('array reverse - objects', () => {
  const events = [];
  const pa = ObjectObserver.create([{ name: 'a' }, { name: 'b' }, { name: 'c' }], changes =>
    events.push(...changes),
  );
  pa[0].name = 'A';
  const reversed = pa.reverse();
  // After reversal, index 0 refers to the former last element; its update
  // must be reported under the new index.
  pa[0].name = 'C';
  expect(reversed).toEqual(pa);
  expect(events).toHaveLength(3);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0, 'name'],
    value: 'A',
  });
  expect(events[1]).toEqual({
    type: 'reorder',
    path: [],
    value: [2, 1, 0],
  });
  expect(events[2]).toEqual({
    type: 'update',
    path: [0, 'name'],
    value: 'C',
  });
});

test('array sort - primitives (flat array)', () => {
  const events = [];
  const pa = ObjectObserver.create([3, 2, 1], changes => events.push(...changes));
  let sorted = pa.sort();
  expect(sorted).toEqual(pa);
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'reorder',
    path: [],
    value: [2, 1, 0],
  });
  expect(pa).toEqual([1, 2, 3]);
  // Sorting with a custom (descending) comparator emits another reorder.
  sorted = pa.sort((a, b) => {
    return a < b ? 1 : -1;
  });
  expect(sorted).toEqual(pa);
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'reorder',
    path: [],
    value: [2, 1, 0],
  });
  expect(pa).toEqual([3, 2, 1]);
});

test('array sort - primitives (flat array with duplicates)', () => {
  const events = [];
  const pa = ObjectObserver.create([3, 2, 1, 2, 1], changes => events.push(...changes));
  const sorted = pa.sort();
  expect(sorted).toEqual(pa);
  expect(events).toHaveLength(1);
  // Duplicates keep a stable mapping in the reorder index list.
  expect(events[0]).toEqual({
    type: 'reorder',
    path: [],
    value: [2, 4, 1, 3, 0],
  });
  expect(pa).toEqual([1, 1, 2, 2, 3]);
});

test('array sort - objects', () => {
  const events = [];
  const pa = ObjectObserver.create([{ name: 'a' }, { name: 'b' }, { name: 'c' }], changes =>
    events.push(...changes),
  );
  pa[0].name = 'A';
  const sorted = pa.sort((a, b) => {
    return a.name < b.name ? 1 : -1;
  });
  pa[0].name = 'C';
  if (sorted !== pa) throw new Error('sort base functionality broken');
  expect(events).toHaveLength(3);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0, 'name'],
    value: 'A',
  });
  expect(events[1]).toEqual({
    type: 'reorder',
    path: [],
    value: [2, 1, 0],
  });
  expect(events[2]).toEqual({
    type: 'update',
    path: [0, 'name'],
    value: 'C',
  });
});

test('array fill - primitives', () => {
  const events = [];
  const pa: any[] = ObjectObserver.create([1, 2, 3], changes => events.push(...changes));
  const filled = pa.fill('a');
  if (filled !== pa) throw new Error('fill base functionality broken');
  expect(events).toHaveLength(3);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0],
    value: 'a',
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [1],
    value: 'a',
  });
  expect(events[2]).toEqual({
    type: 'update',
    path: [2],
    value: 'a',
  });
  events.splice(0);
  // Explicit start/end range.
  pa.fill('b', 1, 3);
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'update',
    path: [1],
    value: 'b',
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [2],
    value: 'b',
  });
  events.splice(0);
  // Negative start index counts from the end.
  pa.fill('c', -1, 3);
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'update',
    path: [2],
    value: 'c',
  });
  events.splice(0);
  // simulating insertion of a new item into array (fill does not extend an array, so we may do it only on internal items)
  delete pa[1];
  pa.fill('d', 1, 2);
  expect(events).toHaveLength(2);
  expect(events[0]).toEqual({
    type: 'delete',
    path: [1],
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [1],
    value: 'd',
  });
});

test('array fill - objects', () => {
  const events = [];
  const pa: any = ObjectObserver.create(
    [{ some: 'text' }, { some: 'else' }, { some: 'more' }],
    changes => events.push(...changes),
  );
  const filled = pa.fill({ name: 'Niv' });
  if (filled !== pa) throw new Error('fill base functionality broken');
  expect(events).toHaveLength(3);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0],
    value: { name: 'Niv' },
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [1],
    value: { name: 'Niv' },
  });
  expect(events[2]).toEqual({
    type: 'update',
    path: [2],
    value: { name: 'Niv' },
  });
  events.length = 0;
  // Fill values become observed objects.
  pa[1].name = 'David';
  expect(events[0]).toEqual({
    type: 'update',
    path: [1, 'name'],
    value: 'David',
  });
  expect(events).toHaveLength(1);
});

test('array fill - arrays', () => {
  const events = [];
  const pa: any = ObjectObserver.create(
    [{ some: 'text' }, { some: 'else' }, { some: 'more' }],
    changes => events.push(...changes),
  );
  const filled = pa.fill([{ name: 'Niv' }]);
  expect(filled).toEqual(pa);
  expect(events).toHaveLength(3);
  expect(events[0]).toEqual({
    type: 'update',
    path: [0],
    value: [{ name: 'Niv' }],
  });
  expect(events[1]).toEqual({
    type: 'update',
    path: [1],
    value: [{ name: 'Niv' }],
  });
  expect(events[2]).toEqual({
    type: 'update',
    path: [2],
    value: [{ name: 'Niv' }],
  });
  events.length = 0;
  pa[1][0].name = 'David';
  expect(events).toHaveLength(1);
  expect(events[0]).toEqual({
    type: 'update',
    path: [1, 0, 'name'],
    value: 'David',
  });
});

test('array splice - primitives', () => {
  const events = [];
  let callbacks = 0;
  const pa: any = ObjectObserver.create([1, 2, 3, 4, 5, 6], changes => {
    events.push(...changes);
    callbacks += 1;
  });
  // Replace 2 elements with 1: one update + one delete, single callback.
  const spliced = pa.splice(2, 2, 'a');
  expect(spliced).toEqual([3, 4]);
  expect(events).toHaveLength(2);
  expect(callbacks).toBe(1);
  expect(events[0]).toEqual({
    type: 'update',
    path: [2],
    value: 'a',
  });
  expect(events[1]).toEqual({
    type: 'delete',
    path: [3],
  });
  events.splice(0);
  callbacks = 0;
  // pa = [1,2,'a',5,6]
  pa.splice(-3);
  expect(events).toHaveLength(3);
  expect(callbacks).toBe(1);
  expect(events[0]).toEqual({
    type: 'delete',
    path: [2],
  });
  expect(events[1]).toEqual({
    type: 'delete',
    path: [3],
  });
  expect(events[2]).toEqual({
    type: 'delete',
    path: [4],
  });
  expect(pa).toHaveLength(2);
  events.length = 0;
  callbacks = 0;
  // pa = [1,2]
  pa.splice(0);
  expect(events).toHaveLength(2);
  expect(callbacks).toBe(1);
  expect(events[0]).toEqual({
    type: 'delete',
    path: [0],
  });
expect(events[1]).toEqual({ type: 'delete', path: [1], }); }); test('array splice - objects', () => { const events = []; const pa = ObjectObserver.create( [{ text: 'a' }, { text: 'b' }, { text: 'c' }, { text: 'd' }], changes => events.push(...changes), ); pa.splice(1, 2, { text: '1' }); expect(events).toHaveLength(2); expect(events[0]).toEqual({ type: 'update', path: [1], value: { text: '1' }, }); expect(events[1]).toEqual({ type: 'delete', path: [2], }); expect(pa).toHaveLength(3); events.splice(0); pa[1].text = 'B'; pa[2].text = 'D'; expect(events).toHaveLength(2); expect(events[0]).toEqual({ type: 'update', path: [1, 'text'], value: 'B', }); expect(events[1]).toEqual({ type: 'update', path: [2, 'text'], value: 'D', }); events.splice(0); pa.splice(1, 1, { text: 'A' }, { text: 'B' }); expect(events).toHaveLength(2); expect(events[0]).toEqual({ type: 'update', path: [1], value: { text: 'A' }, }); expect(events[1]).toEqual({ type: 'insert', path: [2], value: { text: 'B' }, }); events.splice(0); pa[3].text = 'C'; expect(events).toHaveLength(1); expect(events[0]).toEqual({ type: 'update', path: [3, 'text'], value: 'C', }); }); describe('copyWithin', () => { test('array copyWithin - primitives', () => { const events = []; let callbacks = 0; const pa = ObjectObserver.create([1, 2, 3, 4, 5, 6], changes => { events.push(...changes); callbacks += 1; }); let copied = pa.copyWithin(2, 0, 3); expect(pa).toEqual(copied); expect(events).toHaveLength(3); expect(callbacks).toBe(1); expect(events[0]).toEqual({ type: 'update', path: [2], value: 1, }); expect(events[1]).toEqual({ type: 'update', path: [3], value: 2, }); expect(events[2]).toEqual({ type: 'update', path: [4], value: 3, }); events.splice(0); callbacks = 0; // pa = [1,2,1,2,3,6] copied = pa.copyWithin(-3, 0); expect(pa).toEqual(copied); expect(events).toHaveLength(3); expect(callbacks).toBe(1); expect(events[0]).toEqual({ type: 'update', path: [3], value: 1, }); expect(events[1]).toEqual({ type: 'update', path: [4], 
value: 2, }); expect(events[2]).toEqual({ type: 'update', path: [5], value: 1, }); events.splice(0); callbacks = 0; // pa = [1,2,1,1,2,1] copied = pa.copyWithin(1, -3, 9); expect(pa).toEqual(copied); expect(events).toHaveLength(2); expect(callbacks).toBe(1); expect(events[0]).toEqual({ type: 'update', path: [1], value: 1, }); expect(events[1]).toEqual({ type: 'update', path: [2], value: 2, }); // update at index 4 should not be evented, since 1 === 1 events.splice(0); callbacks = 0; }); test('array copyWithin - objects', () => { const events = []; const pa = ObjectObserver.create( [{ text: 'a' }, { text: 'b' }, { text: 'c' }, { text: 'd' }], changes => { events.push(...changes); }, ); const copied = pa.copyWithin(1, 2, 3); expect(pa).toEqual(copied); expect(events).toHaveLength(1); expect(events[0]).toEqual({ type: 'update', path: [1], value: { text: 'c' }, }); events.length = 0; pa[1].text = 'B'; pa[2].text = 'D'; expect(events).toHaveLength(2); expect(events[0]).toEqual({ type: 'update', path: [1, 'text'], value: 'B', }); expect(events[1]).toEqual({ type: 'update', path: [2, 'text'], value: 'D', }); }); });
the_stack
// Ambient type declarations (.d.ts) for the LiteMol Viewer and its bundled
// extensions. Declarations only — there is no runtime code in this file;
// presumably it is generated from the compiled LiteMol sources (TODO confirm).

/** Viewer build metadata. */
declare namespace LiteMol.Viewer {
    var VERSION: {
        number: string;
        date: string;
    };
}
/** Transformers and parameter types for obtaining molecule data from the supported sources. */
declare namespace LiteMol.Viewer.DataSources {
    import Bootstrap = LiteMol.Bootstrap;
    import Entity = Bootstrap.Entity;
    const DownloadMolecule: Bootstrap.Tree.Transformer<Entity.Root, Entity.Action, Entity.Transformer.Molecule.DownloadMoleculeSourceParams>;
    /** Discriminated union (on `kind`) of the supported molecule sources. */
    type ObtainDownloadSource = {
        kind: 'CoordinateServer';
        id: string;
        type: 'Cartoon' | 'Full';
        lowPrecisionCoords: boolean;
        serverUrl: string;
    } | {
        kind: 'PDBe Updated mmCIF';
        id: string;
    } | {
        kind: 'URL';
        format: Core.Formats.FormatInfo;
        url: string;
    } | {
        kind: 'File on Disk';
        file?: File;
    };
    const ObtainDownloadSources: ObtainDownloadSource['kind'][];
    interface MoleculeDownloadParams {
        sourceKind: ObtainDownloadSource['kind'];
        sources: {
            [kind: string]: ObtainDownloadSource;
        };
    }
    const ObtainMolecule: Bootstrap.Tree.Transformer<Entity.Root, Entity.Action, MoleculeDownloadParams>;
}
/** Integration with the ValidatorDB residue-validation reports. */
declare namespace LiteMol.Viewer.ValidatorDB {
    import Entity = Bootstrap.Entity;
    interface Report extends Entity<Entity.Behaviour.Props<Interactivity.Behaviour>> {
    }
    const Report: Entity.Type<Entity.Behaviour.Props<Interactivity.Behaviour>>;
    namespace Api {
        /** Two-level lookup: chain (authAsymId) -> residue (authSeqNumber) -> validation info. */
        type Report = {
            get(authAsymId: string): undefined | {
                get(authSeqNumber: number): undefined | {
                    flags: string[];
                    isRed: boolean;
                    chiralityMismatches: {
                        has(atomName: string): boolean;
                    };
                };
            };
        };
        function createReport(data: any): Report;
    }
    namespace Interactivity {
        class Behaviour implements Bootstrap.Behaviour.Dynamic {
            context: Bootstrap.Context;
            report: Api.Report;
            private provider;
            dispose(): void;
            register(behaviour: any): void;
            private getChainId;
            private processInfo;
            constructor(context: Bootstrap.Context, report: Api.Report);
        }
    }
    const DownloadAndCreate: Bootstrap.Tree.Transformer<Entity.Molecule.Molecule, Entity.Action, {
        reportRef?: string | undefined;
    }>;
    const ApplyTheme: Bootstrap.Tree.Transformer<Report, Entity.Action, {}>;
}
/** Density-streaming extension: parameter types and the server wire format. */
declare namespace LiteMol.Extensions.DensityStreaming {
    type FieldSource = 'X-ray' | 'EM';
    type DataType = 'EM' | '2FO-FC' | 'FO-FC';
    type FieldType = '2Fo-Fc' | 'Fo-Fc(-ve)' | 'Fo-Fc(+ve)' | 'EM';
    const FieldSources: FieldSource[];
    interface SetupParams {
        server: string;
        id: string;
        source: FieldSource;
        initialStreamingParams?: Partial<CreateStreamingParams>;
        streamingEntityRef?: string;
    }
    type CreateStreamingParams = {
        readonly maxRadius: number;
        readonly server: string;
        readonly source: FieldSource;
        readonly id: string;
        readonly header: ServerDataFormat.Header;
        displayType: CreateStreamingParams.DisplayTypeKind;
        detailLevel: number;
        radius: number;
        isoValueType: Bootstrap.Visualization.Density.IsoValueType;
        isoValues: {
            [F in FieldType]?: number;
        };
        showEverythingExtent: number;
        forceBox?: boolean;
    } & {
        [F in FieldType]?: Bootstrap.Visualization.Density.Style;
    };
    namespace CreateStreamingParams {
        type DisplayTypeKind = 'Everything' | 'Around Selection';
    }
    /** Shape of the density server's response header/data description. */
    namespace ServerDataFormat {
        type ValueType = 'float32' | 'int8';
        namespace ValueType {
            const Float32: ValueType;
            const Int8: ValueType;
        }
        type ValueArray = Float32Array | Int8Array;
        type DetailLevel = {
            precision: number;
            maxVoxels: number;
        };
        interface Spacegroup {
            number: number;
            size: number[];
            angles: number[];
            /** Determine if the data should be treated as periodic or not. (e.g. X-ray = periodic, EM = not periodic) */
            isPeriodic: boolean;
        }
        interface ValuesInfo {
            mean: number;
            sigma: number;
            min: number;
            max: number;
        }
        interface Sampling {
            byteOffset: number;
            /** How many values along each axis were collapsed into 1 */
            rate: number;
            valuesInfo: ValuesInfo[];
            /** Number of samples along each axis, in axisOrder */
            sampleCount: number[];
        }
        interface Header {
            /** Format version number */
            formatVersion: string;
            /** Axis order from the slowest to fastest moving, same as in CCP4 */
            axisOrder: number[];
            /** Origin in fractional coordinates, in axisOrder */
            origin: number[];
            /** Dimensions in fractional coordinates, in axisOrder */
            dimensions: number[];
            spacegroup: Spacegroup;
            channels: string[];
            /** Determines the data type of the values */
            valueType: ValueType;
            /** The value are stored in blockSize^3 cubes */
            blockSize: number;
            sampling: Sampling[];
            /** Precision data the server can show. */
            availablePrecisions: DetailLevel[];
            isAvailable: boolean;
        }
    }
}
declare namespace LiteMol.Extensions.DensityStreaming {
    import Entity = Bootstrap.Entity;
    import Tree = Bootstrap.Tree;
    interface Streaming extends Entity<Entity.Behaviour.Props<Behaviour>> {
    }
    const Streaming: Entity.Type<Entity.Behaviour.Props<Behaviour>>;
    const CreateStreaming: Tree.Transformer<Entity.Molecule.Molecule, Streaming, CreateStreamingParams>;
    const Setup: Tree.Transformer<Entity.Molecule.Molecule, Entity.Action, SetupParams>;
}
declare namespace LiteMol.Extensions.DensityStreaming {
    import Entity = Bootstrap.Entity;
    /** Behaviour driving a streaming session; the `private` members are implementation details. */
    class Behaviour implements Bootstrap.Behaviour.Dynamic {
        context: Bootstrap.Context;
        params: CreateStreamingParams;
        private obs;
        private server;
        private behaviour;
        private groups;
        private download;
        private selectionBox;
        private modelBoundingBox;
        private channels;
        private cache;
        private performance;
        private wasCached;
        private types;
        private areBoxesSame;
        private getModelBoundingBox;
        private stop;
        private remove;
        private clear;
        private groupDone;
        private checkResult;
        private apply;
        private finish;
        private createXray;
        private createEm;
        private extendSelectionBox;
        private isSameMolecule;
        private static getChannel;
        private noChannels;
        private parseChannels;
        private query;
        private tryUpdateSelectionDataBox;
        private update;
        private toSigma;
        private syncStyles;
        private updateVisual;
        private invalidateStyles;
        invalidateParams(newParams: CreateStreamingParams): Promise<void>;
        dispose(): void;
        register(behaviour: Entity.Behaviour.Any): void;
        constructor(context: Bootstrap.Context, params: CreateStreamingParams);
    }
}
declare namespace LiteMol.Extensions.DensityStreaming {
    class CreateView extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<SetupParams>> {
        protected renderControls(): JSX.Element;
    }
    /** Per-field iso-value slider bounds and the data channel each field reads. */
    const IsoInfo: {
        [F in FieldType]: {
            min: number;
            max: number;
            dataKey: DataType;
        };
    };
    class StreamingView extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.DensityVisual<CreateStreamingParams, FieldType>> {
        private updateIso;
        private iso;
        private style;
        private details;
        private updateValueType;
        private displayType;
        protected renderControls(): JSX.Element;
    }
}
/**
 * Pre-built surface geometries and transforms used for carbohydrate glyphs.
 * NOTE(review): 'ComplexReprensetation' (sic) looks like a typo for
 * 'ComplexRepresentation', but it is the declared public namespace name —
 * a rename must happen in the implementation, not in this declaration file.
 */
declare namespace LiteMol.Extensions.ComplexReprensetation.Carbohydrates.Shapes {
    import Geom = LiteMol.Core.Geometry;
    import Model = Core.Structure.Molecule.Model;
    const Sphere: Geom.Surface;
    const Cube: Geom.Surface;
    const Diamond: Geom.Surface;
    const Cone: Geom.Surface;
    const ConeLeft: Geom.Surface;
    const ConeRight: Geom.Surface;
    const Star: Geom.Surface;
    const FlatRectangle: Geom.Surface;
    const FlatDiamond: Geom.Surface;
    const FlatPentagon: Geom.Surface;
    const FlatHexagon: Geom.Surface;
    function stripe(s: Geom.Surface): Geom.Surface[];
    function split(s: Geom.Surface): Geom.Surface[];
    function makeTransform(model: Model, entry: Entry, radiusFactor: number, type: Params['type']): {
        scale: number[];
        rotation: number[];
        translation: number[];
    };
}
/** Maps residue names to their glyph representation (shape, colors, orientation axes). */
declare namespace LiteMol.Extensions.ComplexReprensetation.Carbohydrates.Mapping {
    interface RepresentationEntry {
        instanceName: string;
        name: string;
        shape: Core.Geometry.Surface[];
        color: Visualization.Color[];
        axisUp: number[];
        axisSide: number[];
    }
    const RingNames: {
        __len: number;
        [n: string]: number;
    }[];
    function isResidueRepresentable(name: string): boolean;
    function getResidueRepresentation(name: string): RepresentationEntry | undefined;
}
/** Carbohydrate representation data model and builders. */
declare namespace LiteMol.Extensions.ComplexReprensetation.Carbohydrates {
    import Struct = Core.Structure;
    import Model = Struct.Molecule.Model;
    import LA = Core.Geometry.LinearAlgebra;
    import Entity = Bootstrap.Entity;
    import Tree = Bootstrap.Tree;
    // NOTE(review): 'Hydbrid' looks like a typo for 'Hybrid', but it is part
    // of the declared contract — fix at the source, not in this .d.ts.
    type RepresentationType = 'Icons' | 'Reduced' | 'Hydbrid';
    interface Link {
        type: 'Carbohydrate' | 'Terminal';
        rA: number;
        rB: number;
        atomA: number;
        atomB: number;
        centerA: LA.Vector3;
        centerB: LA.Vector3;
        bondType: Struct.BondType;
    }
    interface Entry {
        representation: Mapping.RepresentationEntry;
        ringCenter: LA.Vector3;
        ringRadius: number;
        ringAtoms: number[];
        links: Link[];
        terminalLinks: Link[];
    }
    interface Info {
        links: Link[];
        map: Core.Utils.FastMap<number, number>;
        entries: Entry[];
        carbohydrateIndices: number[];
        terminalIndices: number[];
        warnings: string[];
    }
    type FullParams = {
        type: 'Full';
        fullSize: 'Small' | 'Medium' | 'Large';
        showTerminalLinks: boolean;
        showTerminalAtoms: boolean;
        linkColor: Visualization.Color;
    };
    type IconsParams = {
        type: 'Icons';
        iconScale: number;
    };
    type Params = FullParams | IconsParams;
    const Types: Params['type'][];
    const FullSizes: FullParams['fullSize'][];
    const DefaultIconsParams: Params;
    const DefaultFullParams: Params;
    type Tags = {
        type: 'CarbohydrateRepresentation';
        colors: Core.Utils.FastMap<number, Visualization.Color>;
    };
    type Tag = {
        type: 'Link';
        link: Link;
    } | {
        type: 'Residue';
        instanceName: string;
        residueIndex: number;
        model: Model;
    } | {
        type: 'Terminal';
        residueIndex: number;
        model: Model;
    };
    function isRepresentable(model: Model, residueIndices: number[]): boolean;
    namespace Transforms {
        interface CarbohydratesInfo extends Entity<{
            info: Info;
        }> {
        }
        const CarbohydratesInfo: Entity.Type<{
            info: Info;
        }>;
        const CreateInfo: Tree.Transformer<Entity.Molecule.Model, CarbohydratesInfo, {
            info: Info;
        }>;
        const CreateVisual: Tree.Transformer<CarbohydratesInfo, Entity.Molecule.Visual, Params>;
    }
    function EmptyInfo(warnings: string[]): Info;
    function getInfo(params: {
        model: Model;
        fragment: Struct.Query.Fragment;
        atomMask: Core.Utils.Mask;
        bonds: Struct.BondTable;
    }): Info;
    function getRepresentation(model: Model, info: Info, params: Params): {
        surface: Core.Computation<Core.Geometry.Surface>;
        mapper: (pickId: number) => number[] | undefined;
        tags: Tags;
        theme: Visualization.Theme;
    };
}
declare namespace LiteMol.Extensions.ComplexReprensetation.Carbohydrates {
    function formatResidueName(model: Core.Structure.Molecule.Model, r: number): string;
    function HighlightCustomElementsBehaviour(context: Bootstrap.Context): void;
}
declare namespace LiteMol.Extensions.ComplexReprensetation {
    import Model = Core.Structure.Molecule.Model;
    import S = Core.Structure;
    import Q = S.Query;
    /** Partition of a model's atoms/residues used to build the complex representation. */
    interface Info {
        sequence: {
            all: number[];
            interacting: number[];
            modified: number[];
        };
        het: {
            carbohydrates: Carbohydrates.Info;
            other: number[];
        };
        freeWaterAtoms: number[];
    }
    function createComplexRepresentation(computation: Core.Computation.Context, model: Model, queryCtx: Q.Context): Promise<Info>;
}
declare namespace LiteMol.Extensions.ComplexReprensetation.Transforms {
    import Entity = Bootstrap.Entity;
    import Tree = Bootstrap.Tree;
    interface ComplexInfo extends Entity<{
        info: Info;
    }> {
    }
    const ComplexInfo: Entity.Type<{
        info: Info;
    }>;
    const CreateComplexInfo: Tree.Transformer<Bootstrap.Visualization.Molecule.Source, ComplexInfo, {}>;
    const CreateVisual: Tree.Transformer<ComplexInfo, Entity.Action, {}>;
    let SuppressCreateVisualWhenModelIsAdded: boolean;
    function CreateRepresentationWhenModelIsAddedBehaviour(context: Bootstrap.Context): void;
}
declare namespace LiteMol.Extensions.ComplexReprensetation.Carbohydrates.UI {
    class CreateVisual extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<Params>> {
        private updateVisual;
        renderControls(): JSX.Element;
    }
}
/** Particle coloring extension: distance data plus a rainbow palette helper. */
declare namespace LiteMol.Extensions.ParticleColoring {
    import Tree = Bootstrap.Tree;
    import Entity = Bootstrap.Entity;
    interface Params {
        min: number;
        max: number;
        steps: number;
        opacity: number;
    }
    interface DistanceInfo {
        min: number;
        max: number;
        distances: Float32Array;
    }
    interface Coloring extends Entity<{
        info: DistanceInfo;
    }> {
    }
    const Coloring: Entity.Type<{
        info: DistanceInfo;
    }>;
    const Apply: Tree.Transformer<Entity.Molecule.Visual, Coloring, Params>;
    function makeRainbow(steps: number): Visualization.Color[];
}
declare namespace LiteMol.Extensions.ParticleColoring.UI {
    class Apply extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<Params>> {
        private rainbow;
        renderControls(): JSX.Element;
    }
}
declare namespace LiteMol.Extensions.RNALoops {
    class CreateLoopAnnotationView extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<DownloadAndCreateProps>> {
        protected renderControls(): JSX.Element;
    }
}
/** RNA loop annotations ('IL'/'HL'/'J3' entries parsed from CSV). */
declare namespace LiteMol.Extensions.RNALoops {
    import Entity = Bootstrap.Entity;
    interface LoopAnnotation extends Entity<Entity.Behaviour.Props<Interactivity.Behaviour>> {
    }
    const LoopAnnotation: Entity.Type<Entity.Behaviour.Props<Interactivity.Behaviour>>;
    namespace Api {
        interface ResidueRef {
            modelId: string;
            authAsymId: string;
            authSeqNumber: number;
            insCode: string;
        }
        interface Entry {
            id: string;
            type: 'IL' | 'HL' | 'J3';
            residues: ResidueRef[];
        }
        /** Nested lookup: model -> chain -> residue number -> insertion code -> entries. */
        interface Annotation {
            [modelId: string]: {
                [chainId: string]: {
                    [resSeqNumber: number]: {
                        [insCode: string]: Entry[];
                    };
                };
            };
        }
        function parseCSV(data: string): Entry[];
        function create(entries: Entry[]): Annotation;
        function getEntries(annotation: Annotation, modelId: string, asymId: string, seqNumber: number, insCode: string): Entry[] | undefined;
    }
    namespace Interactivity {
        class Behaviour implements Bootstrap.Behaviour.Dynamic {
            context: Bootstrap.Context;
            annotation: Api.Annotation;
            private provider;
            dispose(): void;
            register(behaviour: any): void;
            private processInfo;
            constructor(context: Bootstrap.Context, annotation: Api.Annotation);
        }
    }
    interface DownloadAndCreateProps {
        server: string;
        reportRef?: string;
    }
    const DownloadAndCreate: Bootstrap.Tree.Transformer<Entity.Molecule.Molecule, Entity.Action, DownloadAndCreateProps>;
    const ApplyTheme: Bootstrap.Tree.Transformer<LoopAnnotation, Entity.Action, {}>;
}
/** PDBe data sources (molecule downloads via CoordinateServer/BinaryCIF). */
declare namespace LiteMol.Viewer.PDBe.Data {
    import Bootstrap = LiteMol.Bootstrap;
    import Entity = Bootstrap.Entity;
    import Transformer = Bootstrap.Entity.Transformer;
    const DownloadMolecule: Bootstrap.Tree.Transformer<Entity.Root, Entity.Action, Transformer.Molecule.DownloadMoleculeSourceParams>;
    interface DownloadBinaryCIFFromCoordinateServerParams {
        id?: string;
        type?: 'Cartoon' | 'Full';
        lowPrecisionCoords?: boolean;
        serverUrl?: string;
    }
    const DownloadBinaryCIFFromCoordinateServer: Bootstrap.Tree.Transformer<Entity.Root, Entity.Action, DownloadBinaryCIFFromCoordinateServerParams>;
}
/** PDBe density downloads (X-ray electron density and EMDB maps). */
declare namespace LiteMol.Viewer.PDBe.Data {
    import Bootstrap = LiteMol.Bootstrap;
    import Entity = Bootstrap.Entity;
    import Tree = Bootstrap.Tree;
    const DensitySourceLabels: {
        'electron-density': string;
        'emdb-pdbid': string;
        'emdb-id': string;
    };
    const DensitySources: (keyof typeof DensitySourceLabels)[];
    interface DownloadDensityParams {
        /**
         * Default source is 'electron-density'
         */
        sourceId?: keyof typeof DensitySourceLabels;
        id?: string | {
            [sourceId: string]: string;
        };
    }
    interface DensityActionContext {
        id: string;
        refs: string[];
        groupRef?: string;
    }
    const DownloadDensity: Tree.Transformer<Entity.Root, Entity.Action, DownloadDensityParams>;
}
/** PDBe validation reports and the theme that visualizes them. */
declare namespace LiteMol.Viewer.PDBe.Validation {
    import Entity = Bootstrap.Entity;
    interface Report extends Entity<Entity.Behaviour.Props<Interactivity.Behaviour>> {
    }
    const Report: Entity.Type<Entity.Behaviour.Props<Interactivity.Behaviour>>;
    namespace Api {
        function getResidueId(seqNumber: number, insCode: string | null): string;
        function getEntry(report: any, modelId: string, entity: string, asymId: string, residueId: string): any;
        function createReport(data: any): any;
    }
    namespace Interactivity {
        class Behaviour implements Bootstrap.Behaviour.Dynamic {
            context: Bootstrap.Context;
            report: any;
            private provider;
            dispose(): void;
            register(behaviour: any): void;
            private processInfo;
            constructor(context: Bootstrap.Context, report: any);
        }
    }
    const DownloadAndCreate: Bootstrap.Tree.Transformer<Entity.Molecule.Molecule, Entity.Action, {
        reportRef?: string | undefined;
    }>;
    const ApplyTheme: Bootstrap.Tree.Transformer<Report, Entity.Action, {}>;
}
/** PDBe sequence annotations and the interactivity behaviour that highlights them. */
declare namespace LiteMol.Viewer.PDBe.SequenceAnnotation {
    import Entity = Bootstrap.Entity;
    import Query = LiteMol.Core.Structure.Query;
    interface Annotations extends Entity<{
        data: any;
    }> {
    }
    const Annotations: Entity.Type<{
        data: any;
    }>;
    interface Annotation extends Entity<{
        query: Query.Source;
        color: Visualization.Color;
    }> {
    }
    const Annotation: Entity.Type<{
        query: Query.Source;
        color: Visualization.Color;
    }>;
    interface Behaviour extends Entity<Entity.Behaviour.Props<Interactivity.Behaviour>> {
    }
    const Behaviour: Entity.Type<Entity.Behaviour.Props<Interactivity.Behaviour>>;
    namespace Interactivity {
        class Behaviour implements Bootstrap.Behaviour.Dynamic {
            context: Bootstrap.Context;
            private node;
            private current;
            private subs;
            private toHighlight;
            private isHighlightOn;
            dispose(): void;
            register(behaviour: Entity.Behaviour.Any): void;
            private __highlight;
            readonly molecule: Entity.Molecule.Molecule | undefined;
            private resetTheme;
            private getCached;
            private setCached;
            private highlight;
            private focus;
            private apply;
            private update;
            constructor(context: Bootstrap.Context);
        }
    }
    interface CreateSingleProps {
        id?: string;
        data?: any;
        color?: Visualization.Color;
    }
    const CreateSingle: Bootstrap.Tree.Transformer<Entity.Group, Annotation, CreateSingleProps>;
    const DownloadAndCreate: Bootstrap.Tree.Transformer<Entity.Molecule.Molecule, Entity.Action, {}>;
}
/** React views for the PDBe transforms declared above. */
declare namespace LiteMol.Viewer.PDBe.Views {
    class CreateSequenceAnnotationView extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<SequenceAnnotation.CreateSingleProps>> {
        protected renderControls(): JSX.Element;
    }
    class DownloadBinaryCIFFromCoordinateServerView extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<Data.DownloadBinaryCIFFromCoordinateServerParams>> {
        protected renderControls(): JSX.Element;
    }
    class DownloadDensityView extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<Data.DownloadDensityParams>> {
        private getId;
        private updateId;
        protected renderControls(): JSX.Element;
    }
}
declare namespace LiteMol.Viewer.Views {
    class LoadExample extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<Viewer.Examples.LoadExampleParams>> {
        protected renderControls(): JSX.Element;
    }
    class ObtainDownload extends LiteMol.Plugin.Views.Transform.ControllerBase<Bootstrap.Components.Transform.Controller<DataSources.MoleculeDownloadParams>> {
        private updateSourceParams;
        private coordServer;
        private PDBe;
        private url;
        private file;
        protected renderControls(): JSX.Element;
    }
}
/** Built-in example datasets selectable in the viewer UI. */
declare namespace LiteMol.Viewer.Examples {
    const ExampleMap: {
        [name: string]: {
            name: string;
            provider: (plugin: Plugin.Controller) => void;
        };
    };
    const ExampleIds: string[];
    interface LoadExampleParams {
        exampleId: string;
    }
    const LoadExample: Bootstrap.Tree.Transformer<Bootstrap.Entity.Root, Bootstrap.Entity.Action, LoadExampleParams>;
}
declare namespace LiteMol.Viewer {
    const PluginSpec: Plugin.Specification;
}
/** Entry point: creates a viewer instance attached to the given DOM element. */
declare namespace LiteMol.Viewer {
    function createInstance(target: HTMLElement, layoutState: Bootstrap.Components.LayoutState, ignoreUrlParams?: boolean): Plugin.Controller | undefined;
}
the_stack
import { ServiceClient, ServiceClientOptions, ServiceCallback, HttpOperationResponse, ServiceClientCredentials } from 'ms-rest'; import * as models from "./models"; export default class TextAnalyticsClient extends ServiceClient { /** * @class * Initializes a new instance of the TextAnalyticsClient class. * @constructor * * @param {credentials} credentials - Subscription credentials which uniquely identify client subscription. * * @param {string} endpoint - Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). * * @param {object} [options] - The parameter options * * @param {Array} [options.filters] - Filters to be added to the request pipeline * * @param {object} [options.requestOptions] - Options for the underlying request object * {@link https://github.com/request/request#requestoptions-callback Options doc} * * @param {boolean} [options.noRetryPolicy] - If set to true, turn off default retry policy * */ constructor(credentials: ServiceClientCredentials, endpoint: string, options?: ServiceClientOptions); credentials: ServiceClientCredentials; endpoint: string; /** * @summary The API returns the detected language and a numeric score between 0 * and 1. * * Scores close to 1 indicate 100% certainty that the identified language is * true. A total of 120 languages are supported. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.languageBatchInput] Collection of documents to * analyze. * * @param {array} [options.languageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<LanguageBatchResult>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. 
*/ detectLanguageWithHttpOperationResponse(options?: { showStats? : boolean, languageBatchInput? : models.LanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.LanguageBatchResult>>; /** * @summary The API returns the detected language and a numeric score between 0 * and 1. * * Scores close to 1 indicate 100% certainty that the identified language is * true. A total of 120 languages are supported. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.languageBatchInput] Collection of documents to * analyze. * * @param {array} [options.languageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {LanguageBatchResult} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {LanguageBatchResult} [result] - The deserialized result object if an error did not occur. * See {@link LanguageBatchResult} for more information. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ detectLanguage(options?: { showStats? : boolean, languageBatchInput? : models.LanguageBatchInput, customHeaders? 
: { [headerName: string]: string; } }): Promise<models.LanguageBatchResult>; detectLanguage(callback: ServiceCallback<models.LanguageBatchResult>): void; detectLanguage(options: { showStats? : boolean, languageBatchInput? : models.LanguageBatchInput, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.LanguageBatchResult>): void; /** * @summary The API returns a list of recognized entities in a given document. * * To get even more information on each recognized entity we recommend using * the Bing Entity Search API by querying for the recognized entities names. * See the <a * href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/text-analytics-supported-languages">Supported * languages in Text Analytics API</a> for the list of enabled languages. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.multiLanguageBatchInput] Collection of documents to * analyze. * * @param {array} [options.multiLanguageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<EntitiesBatchResult>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ entitiesWithHttpOperationResponse(options?: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.EntitiesBatchResult>>; /** * @summary The API returns a list of recognized entities in a given document. * * To get even more information on each recognized entity we recommend using * the Bing Entity Search API by querying for the recognized entities names. 
* See the <a * href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/text-analytics-supported-languages">Supported * languages in Text Analytics API</a> for the list of enabled languages. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.multiLanguageBatchInput] Collection of documents to * analyze. * * @param {array} [options.multiLanguageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {EntitiesBatchResult} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {EntitiesBatchResult} [result] - The deserialized result object if an error did not occur. * See {@link EntitiesBatchResult} for more information. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ entities(options?: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<models.EntitiesBatchResult>; entities(callback: ServiceCallback<models.EntitiesBatchResult>): void; entities(options: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? 
: { [headerName: string]: string; } }, callback: ServiceCallback<models.EntitiesBatchResult>): void; /** * @summary The API returns a list of strings denoting the key talking points * in the input text. * * See the <a * href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview#supported-languages">Text * Analytics Documentation</a> for details about the languages that are * supported by key phrase extraction. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.multiLanguageBatchInput] Collection of documents to * analyze. Documents can now contain a language field to indicate the text * language * * @param {array} [options.multiLanguageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<KeyPhraseBatchResult>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ keyPhrasesWithHttpOperationResponse(options?: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.KeyPhraseBatchResult>>; /** * @summary The API returns a list of strings denoting the key talking points * in the input text. * * See the <a * href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview#supported-languages">Text * Analytics Documentation</a> for details about the languages that are * supported by key phrase extraction. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.multiLanguageBatchInput] Collection of documents to * analyze. 
Documents can now contain a language field to indicate the text * language * * @param {array} [options.multiLanguageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {KeyPhraseBatchResult} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {KeyPhraseBatchResult} [result] - The deserialized result object if an error did not occur. * See {@link KeyPhraseBatchResult} for more information. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ keyPhrases(options?: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<models.KeyPhraseBatchResult>; keyPhrases(callback: ServiceCallback<models.KeyPhraseBatchResult>): void; keyPhrases(options: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.KeyPhraseBatchResult>): void; /** * @summary The API returns a numeric score between 0 and 1. * * Scores close to 1 indicate positive sentiment, while scores close to 0 * indicate negative sentiment. A score of 0.5 indicates the lack of sentiment * (e.g. a factoid statement). 
See the <a * href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview#supported-languages">Text * Analytics Documentation</a> for details about the languages that are * supported by sentiment analysis. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.multiLanguageBatchInput] Collection of documents to * analyze. * * @param {array} [options.multiLanguageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<Object>} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. */ sentimentWithHttpOperationResponse(options?: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<any>>; /** * @summary The API returns a numeric score between 0 and 1. * * Scores close to 1 indicate positive sentiment, while scores close to 0 * indicate negative sentiment. A score of 0.5 indicates the lack of sentiment * (e.g. a factoid statement). See the <a * href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview#supported-languages">Text * Analytics Documentation</a> for details about the languages that are * supported by sentiment analysis. * * @param {object} [options] Optional Parameters. * * @param {boolean} [options.showStats] (optional) if set to true, response * will contain input and document level statistics. * * @param {object} [options.multiLanguageBatchInput] Collection of documents to * analyze. 
* * @param {array} [options.multiLanguageBatchInput.documents] * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {ServiceCallback} [optionalCallback] - The optional callback. * * @returns {ServiceCallback|Promise} If a callback was passed as the last * parameter then it returns the callback else returns a Promise. * * {Promise} A promise is returned. * * @resolve {Object} - The deserialized result object. * * @reject {Error|ServiceError} - The error object. * * {ServiceCallback} optionalCallback(err, result, request, response) * * {Error|ServiceError} err - The Error object if an error occurred, null otherwise. * * {Object} [result] - The deserialized result object if an error did not occur. * * {WebResource} [request] - The HTTP Request object if an error did not occur. * * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur. */ sentiment(options?: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }): Promise<any>; sentiment(callback: ServiceCallback<any>): void; sentiment(options: { showStats? : boolean, multiLanguageBatchInput? : models.MultiLanguageBatchInput, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<any>): void; } export { TextAnalyticsClient, models as TextAnalyticsModels };
the_stack
// ---------------------------------------------------------------------------
// doppioh: command-line tool that generates native-method stub templates
// (JavaScript or TypeScript) for JVM classes, plus a JVMTypes.d.ts header
// file. NOTE(review): the original file's line breaks were flattened; the
// layout below restores conventional formatting without changing any token.
// ---------------------------------------------------------------------------
import {OptionParser, ParseType} from '../src/option_parser';
import * as path from 'path';
import * as fs from 'fs';
import {descriptor2typestr, int_classname, is_primitive_type} from '../src/util';
import {IClasspathItem, ClasspathFactory, IndexedClasspathJar, UnindexedClasspathJar} from '../src/classpath';
import {ReferenceClassData, ClassData, ArrayClassData, PrimitiveClassData} from '../src/ClassData';
import {ClassReference} from '../src/ConstantPool';
import {Method, Field} from '../src/methods';
import * as JVMTypes from '../includes/JVMTypes';
import * as JDKInfo from '../vendor/java_home/jdk.json';
import {TriState} from '../src/enums';
import * as async from 'async';

// Makes our stack traces point to the TypeScript source code lines.
require('source-map-support').install({
  handleUncaughtExceptions: true
});

// `classpath` is populated asynchronously by the ClasspathFactory call at the
// bottom of this file; the helpers below assume it is set before being called.
let classpath: IClasspathItem[] = null,
  // Command-line option definitions, all in the 'default' option group.
  parser = new OptionParser(
    {
      default: {
        classpath: {
          type: ParseType.NORMAL_VALUE_SYNTAX,
          alias: 'cp',
          optDesc: ' <class search path of directories and zip/jar files>',
          desc: 'A : separated list of directories, JAR archives, and ZIP archives to search for class files.',
        },
        help: {
          alias: '?',
          desc: 'print this help message'
        },
        directory: {
          type: ParseType.NORMAL_VALUE_SYNTAX,
          alias: 'd',
          optDesc: ' <directory>',
          desc: 'Output directory'
        },
        javascript: {
          alias: 'js',
          desc: 'Generate JavaScript templates (Default is true)'
        },
        typescript: {
          alias: 'ts',
          desc: 'Generate TypeScript templates'
        },
        "doppiojvm-path": {
          type: ParseType.NORMAL_VALUE_SYNTAX,
          optDesc: ' <path to doppiojvm module>',
          alias: 'dpath',
          desc: "Path to the doppiojvm module. Defaults to 'doppiojvm', referring to the NPM module."
        },
        "force_headers": {
          type: ParseType.NORMAL_VALUE_SYNTAX,
          optDesc: ':[<classname>:]',
          alias: 'f',
          desc: '[TypeScript only] Forces doppioh to generate TypeScript headers for specified JVM classes',
        },
        "headers_only": {
          alias: 'ho',
          desc: '[TypeScript only] Only generate header file.'
        }
      }
    }
  );

/**
 * Overwrites the current terminal line with `line` when stdout supports the
 * TTY cursor helpers; silently does nothing otherwise (e.g. piped output).
 */
function printEraseableLine(line: string): void {
  // Undocumented functions.
  if ((<any> process.stdout)['clearLine']) {
    (<any> process.stdout).clearLine();
    (<any> process.stdout).cursorTo(0);
    process.stdout.write(line);
  }
}

// Prints CLI usage for the 'default' option group.
function printHelp(): void {
  process.stdout.write("Usage: doppioh [flags] class_or_package_names\n" + parser.help('default') + "\n");
}

// Remove "node" and "path/to/doppioh.js".
let parseResults = parser.parse(process.argv.slice(2)),
  args = parseResults['default'];

if (args.flag('help', false) || process.argv.length === 2) {
  printHelp();
  process.exit(1);
}

let outputDirectory = args.stringOption('directory', '.');

/**
 * java/lang/String.class => Ljava/lang/String;
 */
function file2desc(fname: string): string {
  // Drops the 6-character ".class" suffix and normalizes Windows separators.
  return `L${fname.slice(0, fname.length - 6).replace(/\\/g, '/')};`;
}

// Memoization cache for findClass(), keyed by type descriptor.
let cache: {[desc: string]: ClassData} = {};

/**
 * Returns the classes in the given directory in descriptor format.
 */
function getClasses(item: string): string[] {
  let rv: string[] = [];
  // Find classpath items that contains this item as a directory.
  let cpItems: IClasspathItem[] = [];
  for (let i = 0; i < classpath.length; i++) {
    let searchedItem = item;
    let stat = classpath[i].tryStatSync(searchedItem);
    if (!stat) {
      // Not found as-is; retry with a ".class" suffix.
      searchedItem = `${item}.class`;
      stat = classpath[i].tryStatSync(searchedItem);
    }
    if (!stat) {
      continue;
    } else {
      if (!stat.isDirectory()) {
        // Files only counts if it is a class file.
        // Prevents an issue with the `doppio` shortcut counting as the `doppio` executable.
        if (path.extname(searchedItem) === '.class') {
          rv.push(file2desc(searchedItem));
        }
      } else {
        cpItems.push(classpath[i]);
      }
    }
  }

  if (rv.length === 0 && cpItems.length === 0) {
    throw new Error(`Unable to find resource ${item}.`);
  }

  if (cpItems.length > 0) {
    // Recursively process.
    // Iterative directory walk (explicit stack) over every matching classpath
    // entry: collect .class files, queue subdirectories for further walking.
    let dirStack: string[] = [item];
    while (dirStack.length > 0) {
      let dir = dirStack.pop();
      for (let i = 0; i < cpItems.length; i++) {
        let dirListing = cpItems[i].tryReaddirSync(dir);
        if (dirListing === null) {
          continue;
        } else {
          // NOTE(review): the inner `i` and `item` shadow the outer loop
          // variable and the function parameter; legal with `let`, but easy
          // to misread.
          for (let i = 0; i < dirListing.length; i++) {
            let item = dirListing[i];
            let itemPath = path.join(dir, item);
            if (path.extname(itemPath) === ".class") {
              rv.push(file2desc(itemPath));
            } else {
              dirStack.push(itemPath);
            }
          }
        }
      }
    }
  }
  return rv;
}

/**
 * Loads the raw classfile bytes for the given internal type name from the
 * first classpath entry that has (or may have) the class. Throws when no
 * entry can supply it.
 */
function loadClass(type: string): Buffer {
  for (let i = 0; i < classpath.length; i++) {
    let item = classpath[i];
    switch(item.hasClass(type)) {
      // Intentional fall-through: attempt the load in both cases.
      case TriState.INDETERMINATE:
      case TriState.TRUE:
        let buff = item.tryLoadClassSync(type);
        if (buff !== null) {
          return buff;
        }
        break;
    }
  }
  throw new Error(`Unable to find class ${type}`);
}

/**
 * Parses (and memoizes) the ClassData for the given descriptor, e.g.
 * 'Ljava/lang/String;', '[I', or 'I'.
 */
function findClass(descriptor: string): ClassData {
  if (cache[descriptor] !== undefined) {
    return cache[descriptor];
  }

  var rv: ClassData;
  try {
    switch(descriptor[0]) {
      case 'L':
        rv = new ReferenceClassData(loadClass(descriptor2typestr(descriptor)));
        // Resolve the class.
// (continuation of findClass: link the freshly-parsed reference class to its
// superclass and interfaces, recursing through findClass for each.)
        var superClassRef = (<ReferenceClassData<JVMTypes.java_lang_Object>> rv).getSuperClassReference(),
          interfaceClassRefs = (<ReferenceClassData<JVMTypes.java_lang_Object>> rv).getInterfaceClassReferences(),
          superClass: ReferenceClassData<JVMTypes.java_lang_Object> = null,
          interfaceClasses: ReferenceClassData<JVMTypes.java_lang_Object>[] = [];
        if (superClassRef !== null) {
          superClass = <ReferenceClassData<JVMTypes.java_lang_Object>> findClass(superClassRef.name);
        }
        if (interfaceClassRefs.length > 0) {
          interfaceClasses = interfaceClassRefs.map((iface: ClassReference) => <ReferenceClassData<JVMTypes.java_lang_Object>> findClass(iface.name));
        }
        (<ReferenceClassData<JVMTypes.java_lang_Object>> rv).setResolved(superClass, interfaceClasses);
        break;
      case '[':
        rv = new ArrayClassData(descriptor.slice(1), null);
        break;
      default:
        rv = new PrimitiveClassData(descriptor, null);
        break;
    }
    cache[descriptor] = rv;
    return rv;
  } catch (e) {
    throw new Error(`Unable to read class file for ${descriptor}: ${e}\n${e.stack}`);
  }
}

/**
 * Writes native-method stubs for every native method in `classData` to
 * `stream` using the given output template. Emits nothing when the class has
 * no native methods.
 */
function processClassData(stream: NodeJS.WritableStream, template: ITemplate, classData: ReferenceClassData<JVMTypes.java_lang_Object>) {
  var fixedClassName: string = classData.getInternalName().replace(/\//g, '_'),
    nativeFound: boolean = false;
  // Shave off L and ;
  fixedClassName = fixedClassName.substring(1, fixedClassName.length - 1);

  var methods = classData.getMethods();
  methods.forEach((method: Method) => {
    if (method.accessFlags.isNative()) {
      if (!nativeFound) {
        // Lazily open the class so classes without natives produce no output.
        template.classStart(stream, fixedClassName);
        nativeFound = true;
      }
      template.method(stream, classData.getInternalName(), method.signature, method.accessFlags.isStatic(), method.parameterTypes, method.returnType);
    }
  });

  if (nativeFound) {
    template.classEnd(stream, fixedClassName);
  }
}

/**
 * A Doppioh output template.
 */
interface ITemplate {
  // File extension of the generated stub file ('ts' or 'js').
  getExtension(): string;
  fileStart(stream: NodeJS.WritableStream): void;
  fileEnd(stream: NodeJS.WritableStream): void;
  classStart(stream: NodeJS.WritableStream, className: string): void;
  classEnd(stream: NodeJS.WritableStream, className: string): void;
  method(stream: NodeJS.WritableStream, classDesc: string, methodName: string, isStatic: boolean, argTypes: string[], rv: string): void;
}

/**
 * TypeScript declaration file (JVMTypes.d.ts).
 */
class TSDeclarationFile {
  // Number of headers emitted so far (progress reporting only).
  private headerCount: number = 0;
  // Descriptors already queued/emitted, to avoid duplicate definitions.
  private headerSet: { [clsName: string]: boolean} = {};
  private headerPath: string;
  private headerStream: NodeJS.WritableStream;
  // Work queue; used instead of recursion to avoid stack overflows.
  private generateQueue: ReferenceClassData<JVMTypes.java_lang_Object>[] = [];
  private doppiojvmPath: string;

  constructor(doppiojvmPath: string, outputPath: string) {
    this.headerPath = path.resolve(outputPath, "JVMTypes.d.ts");
    this.doppiojvmPath = path.relative(outputPath, doppiojvmPath);

    // Parse existing types file for existing definitions. We'll remake them.
    // NOTE(review): naive substring scanning of the old file, not a TS parse.
    try {
      var existingHeaders = fs.readFileSync(this.headerPath).toString(),
        searchIdx = 0,
        clsName: string;
      // Pass 1: Classes.
      while ((searchIdx = existingHeaders.indexOf("export class ", searchIdx)) > -1) {
        clsName = existingHeaders.slice(searchIdx + 13, existingHeaders.indexOf(" ", searchIdx + 13));
        if (clsName.indexOf("JVMArray") !== 0) {
          this.generateClassDefinition(this.tstype2jvmtype(clsName));
        }
        searchIdx++;
      }
      searchIdx = 0;
      // Pass 2: Interfaces.
      while ((searchIdx = existingHeaders.indexOf("export interface ", searchIdx)) > -1) {
        clsName = existingHeaders.slice(searchIdx + 17, existingHeaders.indexOf(" ", searchIdx + 17));
        this.generateClassDefinition(this.tstype2jvmtype(clsName));
        searchIdx++;
      }
    } catch (e) {
      // Ignore. (A missing or unreadable existing header file is fine.)
    }
    this.headerStream = fs.createWriteStream(this.headerPath);
    this.headersStart();
    // Generate required types.
    this.generateArrayDefinition();
    this.generateMiscDefinitions();
    this.generateClassDefinition('Ljava/lang/Throwable;');
    if (args.stringOption('force_headers', null)) {
      // ':'-separated list of class names the user wants headers for.
      var clses = args.stringOption('force_headers', null).split(':');
      clses.forEach((clsName: string) => {
        this.generateClassDefinition(int_classname(clsName));
      });
    }
  }

  // Writes the d.ts preamble: doppiojvm imports and the module open brace.
  public headersStart(): void {
    this.headerStream.write(`// TypeScript declaration file for JVM types. Automatically generated by doppioh.
// http://github.com/plasma-umass/doppio
import * as DoppioJVM from '${this.doppiojvmPath.replace(/\\/g, '/')}';
import JVMThread = DoppioJVM.VM.Threading.JVMThread;
import Long = DoppioJVM.VM.Long;
import ClassData = DoppioJVM.VM.ClassFile.ClassData;
import ArrayClassData = DoppioJVM.VM.ClassFile.ArrayClassData;
import ReferenceClassData = DoppioJVM.VM.ClassFile.ReferenceClassData;
import Monitor = DoppioJVM.VM.Monitor;
import ClassLoader = DoppioJVM.VM.ClassFile.ClassLoader;
import Interfaces = DoppioJVM.VM.Interfaces;
declare module JVMTypes {\n`);
  }

  /**
   * Emits TypeScript type declarations. Separated from fileEnd, since one can
   * use doppioh to emit headers only.
   */
  public headersEnd(): void {
    this._processGenerateQueue();
    // Print newline to clear eraseable line.
// (continuation of headersEnd: report progress, close the declare module.)
    printEraseableLine(`Processed ${this.headerCount} classes.\n`);
    this.headerStream.end(`}
export = JVMTypes;\n`, () => {});
  }

  /**
   * Writes the declaration for one class/interface to the header stream:
   * heritage clause, injected members, fields, and methods.
   */
  private _processHeader(cls: ReferenceClassData<JVMTypes.java_lang_Object>): void {
    var desc = cls.getInternalName(),
      interfaces = cls.getInterfaceClassReferences().map((iface: ClassReference) => iface.name),
      superClass = cls.getSuperClassReference(),
      methods = cls.getMethods().concat(cls.getMirandaAndDefaultMethods()),
      fields = cls.getFields(),
      // NOTE(review): methodsSeen is never read below — dead variable.
      methodsSeen: { [name: string]: boolean } = {},
      injectedFields = cls.getInjectedFields(),
      injectedMethods = cls.getInjectedMethods(),
      injectedStaticMethods = cls.getInjectedStaticMethods();

    printEraseableLine(`[${this.headerCount++}] Processing header for ${descriptor2typestr(desc)}...`);

    if (cls.accessFlags.isInterface()) {
      // Interfaces map to TypeScript interfaces.
      this.headerStream.write(` export interface ${this.jvmtype2tstype(desc, false)}`);
    } else {
      this.headerStream.write(` export class ${this.jvmtype2tstype(desc, false)}`);
    }
    // Note: Interface classes have java.lang.Object as a superclass.
    // While java_lang_Object is a class, TypeScript will extract an interface
    // for the class under-the-covers and extract it, correctly providing us
    // with injected JVM methods on interface types (e.g. getClass()).
    if (superClass !== null) {
      this.headerStream.write(` extends ${this.jvmtype2tstype(superClass.name, false)}`);
    }
    if (interfaces.length > 0) {
      if (cls.accessFlags.isInterface()) {
        // Interfaces can extend multiple interfaces, and can extend classes!
        // Add a comma after the guaranteed "java_lang_Object".
        this.headerStream.write(`, `);
      } else {
        // Classes can implement multiple interfaces.
        this.headerStream.write(` implements `);
      }
      this.headerStream.write(`${interfaces.map((ifaceName: string) => this.jvmtype2tstype(ifaceName, false)).join(", ")}`);
    }
    this.headerStream.write(` {\n`);
    Object.keys(injectedFields).forEach((name: string) => this._outputInjectedField(name, injectedFields[name], this.headerStream));
    Object.keys(injectedMethods).forEach((name: string) => this._outputInjectedMethod(name, injectedMethods[name], this.headerStream));
    Object.keys(injectedStaticMethods).forEach((name: string) => this._outputInjectedStaticMethod(name, injectedStaticMethods[name], this.headerStream));
    fields.forEach((f) => this._outputField(f, this.headerStream));
    methods.forEach((m) => this._outputMethod(m, this.headerStream));
    // Default methods not inherited through the interface list still need stubs.
    cls.getUninheritedDefaultMethods().forEach((m) => this._outputMethod(m, this.headerStream));
    this.headerStream.write(` }\n`);
  }

  /**
   * Converts a typestring to its equivalent TypeScript type.
   */
  public jvmtype2tstype(desc: string, prefix: boolean = true): string {
    switch(desc[0]) {
      case '[':
        return (prefix ? 'JVMTypes.' : '') + `JVMArray<${this.jvmtype2tstype(desc.slice(1), prefix)}>`;
      case 'L':
        // Ensure all converted reference types get generated headers.
        this.generateClassDefinition(desc);
        return (prefix ? 'JVMTypes.' : '') + descriptor2typestr(desc).replace(/_/g, '__').replace(/\//g, '_');
      case 'J':
        // JVM longs map to the Long class; other primitives become number.
        return 'Long';
      case 'V':
        return 'void';
      default:
        // Primitives.
        return 'number';
    }
  }

  /**
   * Converts a TypeScript type into its equivalent JVM type.
   */
  private tstype2jvmtype(tsType: string): string {
    if (tsType.indexOf('JVMArray') === 0) {
      // 'JVMArray<X>' => '[' + convert(X); slice strips 'JVMArray<' and '>'.
      return `[${this.tstype2jvmtype(tsType.slice(9, tsType.length - 1))}`;
    } else if (tsType === 'number') {
      throw new Error("Ambiguous.");
    } else if (tsType === 'void') {
      return 'V';
    } else {
      // _ => /, and // => _ since we encode underscores as double underscores.
      return `L${tsType.replace(/_/g, '/').replace(/\/\//g, '_')};`;
    }
  }

  /**
   * Generates a TypeScript class definition for the given class object.
   */
  public generateClassDefinition(desc: string): void {
    if (this.headerSet[desc] !== undefined || is_primitive_type(desc)) {
      // Already generated, or is a primitive.
      return;
    } else if (desc[0] === '[') {
      // Ensure component type is created.
      return this.generateClassDefinition(desc.slice(1));
    } else {
      // Mark this class as queued for headerification. We use a queue instead
      // of a recursive scheme to avoid stack overflows.
      this.headerSet[desc] = true;
      this.generateQueue.push(<ReferenceClassData<JVMTypes.java_lang_Object>> findClass(desc));
    }
  }

  /**
   * Outputs a method signature for the given method on the given stream.
   * NOTE: We require a class argument because default interface methods are
   * defined on classes, not on the interfaces they belong to.
   */
  private _outputMethod(m: Method, stream: NodeJS.WritableStream, nonVirtualOnly: boolean = false) {
    var argTypes = m.parameterTypes,
      rType = m.returnType,
      args: string = "",
      // Callback signature: error plus (for non-void methods) a result value.
      cbSig = `e?: java_lang_Throwable${rType === 'V' ? "" : `, rv?: ${this.jvmtype2tstype(rType, false)}`}`,
      methodSig: string,
      methodFlags = `public${m.accessFlags.isStatic() ? ' static' : ''}`;

    if (argTypes.length > 0) {
      // Arguments are a giant tuple type.
      // NOTE: Long / doubles take up two argument slots. The second argument is always NULL.
      args = `args: [${argTypes.map((type: string, i: number) => `${this.jvmtype2tstype(type, false)}${(type === "J" || type === "D") ? ', any' : ''}`).join(", ")}]`;
    } else {
      args = `args: {}[]`;
    }
    methodSig = `(thread: JVMThread, ${args}, cb?: (${cbSig}) => void): void`;

    // A quick note about methods: It's illegal to have two methods with the
    // same signature in the same class, even if one is static and the other
    // isn't.
    if (m.cls.accessFlags.isInterface()) {
      if (m.accessFlags.isStatic()) {
        // XXX: We ignore static interface methods right now, as reconciling them with TypeScript's
        // type system would be messy. Also, they are brand new in Java 8.
      } else {
        // Virtual only, TypeScript interface syntax.
// (continuation of _outputMethod: emit the interface-style signature.)
        stream.write(` "${m.signature}"${methodSig};\n`);
      }
    } else {
      if (!nonVirtualOnly) {
        stream.write(` ${methodFlags} "${m.signature}"${methodSig};\n`);
      }
      stream.write(` ${methodFlags} "${m.fullSignature}"${methodSig};\n`);
    }
  }

  /**
   * Outputs the field's type for the given field on the given stream.
   */
  private _outputField(f: Field, stream: NodeJS.WritableStream) {
    var fieldType = f.rawDescriptor, cls = f.cls;
    if (cls.accessFlags.isInterface()) {
      // XXX: Ignore static interface fields for now, as reconciling them with TypeScript's
      // type system would be messy.
      return;
    }

    if (f.accessFlags.isStatic()) {
      // Static fields are namespaced by their declaring class.
      stream.write(` public static "${descriptor2typestr(cls.getInternalName())}/${f.name}": ${this.jvmtype2tstype(fieldType, false)};\n`);
    } else {
      stream.write(` public "${descriptor2typestr(cls.getInternalName())}/${f.name}": ${this.jvmtype2tstype(fieldType, false)};\n`);
    }
  }

  /**
   * Outputs information on a field injected by the JVM.
   */
  private _outputInjectedField(name: string, type: string, stream: NodeJS.WritableStream) {
    stream.write(` public ${name}: ${type};\n`);
  }

  /**
   * Output information on a method injected by the JVM.
   */
  private _outputInjectedMethod(name: string, type: string, stream: NodeJS.WritableStream) {
    stream.write(` public ${name}${type};\n`);
  }

  /**
   * Output information on a static method injected by the JVM.
   */
  private _outputInjectedStaticMethod(name: string, type: string, stream: NodeJS.WritableStream) {
    stream.write(` public static ${name}${type};\n`);
  }

  // Drains the work queue; processing an entry may enqueue further classes,
  // so loop until the queue is empty.
  private _processGenerateQueue(): void {
    while (this.generateQueue.length > 0) {
      this._processHeader(this.generateQueue.pop());
    }
  }

  /**
   * Generates the generic JVM array type definition.
   */
  private generateArrayDefinition(): void {
    this.headerStream.write(` export class JVMArray<T> extends java_lang_Object {
    /**
     * NOTE: Our arrays are either JS arrays, or TypedArrays for primitive
     * types.
     */
    public array: T[];
    public getClass(): ArrayClassData<T>;
    /**
     * Create a new JVM array of this type that starts at start, and ends at
     * end. End defaults to the end of the array.
     */
    public slice(start: number, end?: number): JVMArray<T>;
  }\n`);
  }

  // Emits shared helper type aliases used by generated signatures.
  private generateMiscDefinitions(): void {
    this.headerStream.write(` // Basic, valid JVM types.
  export type BasicType = number | java_lang_Object | Long;
  export type JVMFunction = (thread: JVMThread, args: BasicType[], cb: (e?: JVMTypes.java_lang_Object, rv?: BasicType) => void) => void;\n`);
  }
}

/**
 * TypeScript output template.
 */
class TSTemplate implements ITemplate {
  private classesSeen: string[] = [];
  private doppiojvmPath: string;
  // Shared across all TSTemplate instances: one declaration file per run.
  public static declFile: TSDeclarationFile = null;
  constructor(doppiojvmPath: string, outputPath: string) {
    this.doppiojvmPath = path.relative(outputPath, doppiojvmPath);
    if (TSTemplate.declFile === null) {
      TSTemplate.declFile = new TSDeclarationFile(doppiojvmPath, outputPath);
    }
  }
  public getExtension(): string { return 'ts'; }
  public fileStart(stream: NodeJS.WritableStream): void {
    // Reference all of the doppio interfaces.
    stream.write(`import JVMTypes = require("./JVMTypes");
import DoppioJVM = require('${this.doppiojvmPath.replace(/\\/g, '/')}');
import JVMThread = DoppioJVM.VM.Threading.JVMThread;
import Long = DoppioJVM.VM.Long;
declare var registerNatives: (natives: any) => void;\n`);
  }
  public fileEnd(stream: NodeJS.WritableStream): void {
    var i: number;
    // Export everything!
    stream.write("\n// Export line. This is what DoppioJVM sees.\nregisterNatives({");
    for (i = 0; i < this.classesSeen.length; i++) {
      var kls = this.classesSeen[i];
      if (i > 0) stream.write(',');
      stream.write("\n '" + kls.replace(/_/g, '/') + "': " + kls);
    }
    stream.write("\n});\n");
  }
  public classStart(stream: NodeJS.WritableStream, className: string): void {
    stream.write("\nclass " + className + " {\n");
    this.classesSeen.push(className);
    // Ensure the declaration file also covers this class.
    TSTemplate.declFile.generateClassDefinition(`L${className.replace(/_/g, "/")};`);
  }
  public classEnd(stream: NodeJS.WritableStream, className: string): void {
    stream.write("\n}\n");
  }
  public method(stream: NodeJS.WritableStream, classDesc: string, methodName: string, isStatic: boolean, argTypes: string[], rType: string): void {
    var trueRtype = TSTemplate.declFile.jvmtype2tstype(rType), rval = "";
    if (trueRtype === 'number') {
      rval = "0";
    } else if (trueRtype !== 'void') {
      rval = "null";
    }
    // Make sure every referenced type gets a generated header.
    argTypes.concat([rType]).forEach((type: string) => {
      TSTemplate.declFile.generateClassDefinition(type);
    });
    stream.write(` public static '${methodName}'(thread: JVMThread${isStatic ? '' : `, javaThis: ${TSTemplate.declFile.jvmtype2tstype(classDesc)}`}${argTypes.length === 0 ? '' : ', ' + argTypes.map((type: string, i: number) => `arg${i}: ${TSTemplate.declFile.jvmtype2tstype(type)}`).join(", ")}): ${TSTemplate.declFile.jvmtype2tstype(rType)} {
    thread.throwNewException('Ljava/lang/UnsatisfiedLinkError;', 'Native method not implemented.');${rval !== '' ? `\n return ${rval};` : ''}
  }\n`);
  }
}

/**
 * JavaScript output template.
 */
class JSTemplate implements ITemplate {
  // Comma bookkeeping for the generated object literal.
  private firstMethod: boolean = true;
  private firstClass: boolean = true;
  public getExtension(): string { return 'js'; }
  public fileStart(stream: NodeJS.WritableStream): void {
    stream.write("// This entire object is exported. Feel free to define private helper functions above it.\nregisterNatives({");
  }
  public fileEnd(stream: NodeJS.WritableStream): void {
    stream.write("\n});\n");
  }
  public classStart(stream: NodeJS.WritableStream, className: string): void {
    this.firstMethod = true;
    if (this.firstClass) {
      this.firstClass = false;
    } else {
      stream.write(",\n");
    }
    stream.write("\n '" + className.replace(/_/g, '/') + "': {\n");
  }
  public classEnd(stream: NodeJS.WritableStream, className: string): void {
    stream.write("\n\n }");
  }
  public method(stream: NodeJS.WritableStream, classDesc: string, methodName: string, isStatic: boolean, argTypes: string[], rType: string): void {
    // Construct the argument signature, figured out from the methodName.
    var argSig: string = 'thread', i: number;
    if (!isStatic) {
      argSig += ', javaThis';
    }
    for (i = 0; i < argTypes.length; i++) {
      argSig += ', arg' + i;
    }
    if (this.firstMethod) {
      this.firstMethod = false;
    } else {
      // End the previous method.
      stream.write(',\n');
    }
    stream.write("\n '" + methodName + "': function(" + argSig + ") {");
    stream.write("\n thread.throwNewException('Ljava/lang/UnsatisfiedLinkError;', 'Native method not implemented.');");
    stream.write("\n }");
  }
}

const JAVA_HOME = path.resolve(__dirname, "../vendor/java_home");
// JDK classpath entries first, then user-supplied ':'-separated entries.
let classpathPaths = JDKInfo.classpath.map((item) => path.resolve(JAVA_HOME, item)).concat(args.stringOption('classpath', '.').split(':'));
let classNames = args.unparsedArgs();
if (classNames.length === 0) {
  throw new Error(`Must specify a class name.`);
}

if (!fs.existsSync(outputDirectory)) {
  fs.mkdirSync(outputDirectory);
}

// Initialize classpath.
ClasspathFactory(JAVA_HOME, classpathPaths, (items: IClasspathItem[]) => {
  // Normally, JARs are loaded asynchronously. Force them to be loaded, which allows us
  // to load classes synchronously.
  async.each(items, (item: IClasspathItem, cb: (e?: Error) => void) => {
    if (item instanceof UnindexedClasspathJar || item instanceof IndexedClasspathJar) {
      item.loadJar(cb);
    } else {
      cb();
    }
  }, (e?: Error) => {
    if (e) {
      throw e;
    }
    classpath = items;
    try {
      classNames.forEach((className) => {
        // foo.bar.Baz => foo_bar_Baz (output file) / foo/bar/Baz (lookup path).
        let targetName: string = className.replace(/\//g, '_').replace(/\./g, '_'),
          targetPath: string = className.replace(/\./g, '/');

        let template = args.flag('typescript', false) ? new TSTemplate(args.stringOption('doppiojvm-path', 'doppiojvm'), outputDirectory) : new JSTemplate();
        let stream = fs.createWriteStream(path.join(outputDirectory, targetName + '.' + template.getExtension()));
        template.fileStart(stream);
        let classes = getClasses(targetPath);
        for (let i = 0; i < classes.length; i++) {
          let desc = classes[i];
          processClassData(stream, template, <ReferenceClassData<JVMTypes.java_lang_Object>> findClass(desc));
        }
        template.fileEnd(stream);
        stream.end(new Buffer(''), () => {});
        if (args.flag('typescript', false) && args.flag('headers_only', false)) {
          // Headers-only mode: discard the stub file we just wrote; only the
          // declaration file (written via TSDeclarationFile) is kept.
          fs.unlinkSync(path.join(outputDirectory, targetName + '.' + template.getExtension()));
        }
      });

      if (args.flag('typescript', false)) {
        TSTemplate.declFile.headersEnd();
      }
    } catch (e) {
      console.error(`Encountered error: ${e}`);
    }
  });
});
the_stack
import { IWebPartContext } from '@microsoft/sp-webpart-base'; import { SPHttpClient } from '@microsoft/sp-http'; import { IDocument } from "../common/IObjects"; import IDataProvider from "./IDataProvider"; import { Utils } from '../common/Utils'; export default class SharePointDataProvider implements IDataProvider { private _webPartContext: IWebPartContext; private _libraryAbsoluteUrl: string; private _webAbsoluteUrl: string; constructor(value: IWebPartContext, libraryUrl: string) { this._webPartContext = value; this._libraryAbsoluteUrl = libraryUrl.lastIndexOf("/") == libraryUrl.length - 1 ? libraryUrl.substr(0, libraryUrl.length - 1) : libraryUrl; this._webAbsoluteUrl = value.pageContext.web.absoluteUrl; } /** * Check is all settings passed in the constructor are correctly initialized */ public validateSettings(): boolean { if (!this._libraryAbsoluteUrl) { return false; } return true; } /** * Returns all documents from the Search index where the Path contains the library url * Note: Library url is passed as parameter in the constructor */ public readDocumentsFromSearch(): Promise<IDocument[]> { let utility = new Utils(); let searchQuery = '(path:"' + encodeURIComponent(this._libraryAbsoluteUrl) + '*")AND(IsDocument:1)'; let webAbsoluteUrl = this._webPartContext.pageContext.web.absoluteUrl; const searchRequestUrl1: string = `${webAbsoluteUrl}/_api/search/query?querytext='${searchQuery}'` + "&selectproperties='DocId,ContentType,ModifiedBy,LastModifiedTime,FileExtension,Path,SPWebURL,UIVersionStringOWSTEXT,UniqueId'"; // log in the console for debugging purpose console.log(searchQuery); return this._webPartContext.spHttpClient.get( searchRequestUrl1, SPHttpClient.configurations.v1, { headers: { "odata-version": "3.0", "accept": "application/json;odata=verbose" }, method: "GET" }) .then((response: any) => { debugger; if (response.status >= 200 && response.status < 300) { return response.json(); } else { return Promise.reject(new Error(JSON.stringify(response))); } 
}).then((response: any) => { debugger; //convert the reuselts in object with properties let results: any[] = response.d.query.PrimaryQueryResult.RelevantResults.Table.Rows.results; var obj = []; for (let l = 0; l < results.length; l++) { var cells = results[l].Cells.results; var cell = {}; for (let m = 0; m < cells.length; m++) { cell[cells[m].Key] = cells[m].Value; } obj.push(cell); } // use the search results as objects let docs: IDocument[] = []; for (let i = 0; i < obj.length; i++) { docs.push({ Id: parseInt(obj[i].DocId), FileRef: utility.GetFileRef(obj[i].OriginalPath), Modified: utility.GetFormatedDateString(obj[i].LastModifiedTime), ModifiedBy: obj[i].ModifiedBy, FileIcon: utility.GetImgUrlByFileExtension(obj[i].FileExtension), Name: utility.GetFileName(obj[i].Path), VersionString: obj[i].UIVersionStringOWSTEXT, ContentType: utility.GetContentType(obj[i].ContentType), ParentWebUrl: obj[i].SPWebURL, UniqueId: obj[i].UniqueId.replace("{", "").replace("}", "") }); } return docs; }); /* NOTE: the above code use get request for retrieving the search results; alternatively, you can use POST request Sample code: var body = { 'request': { '__metadata': { 'type': 'Microsoft.Office.Server.Search.REST.SearchRequest' }, 'Querytext': searchQuery, 'RowLimit': '100', 'TrimDuplicates': 'False', 'SelectProperties': { 'results': ['DocId', 'ModifiedBy', 'OriginalPath', 'LastModifiedTime', 'FileExtension', 'Path', 'SPWebURL'] } } }; const searchRequestUrl: string = `${webAbsoluteUrl}/_api/search/postquery`; return this._webPartContext.spHttpClient.post( searchRequestUrl, SPHttpClient.configurations.v1, { headers: { "odata-version": "3.0", "accept": "application/json;odata=verbose" }, body: JSON.stringify(body), method: "POST" }) */ } /** * Returns all documents from the library * Note: Library url is passed as parameter in the constructor */ public readDocumentsFromLibrary(): Promise<IDocument[]> { debugger; let utility = new Utils(); let libraryRelativeUrl = 
utility.GetRelativePathFromAbsolute(this._libraryAbsoluteUrl); return this._readListId(libraryRelativeUrl).then((listId: string): Promise<IDocument[]> => { const queryUrlGetAllItems: string = this._webAbsoluteUrl + `/_api/web/lists(guid'${listId}')/Items` + "?$select=ID,DocIcon,FileLeafRef,FileRef,Modified,UniqueId,OData__UIVersionString,ContentTypeId,ContentType/Name,Editor/Title&$expand=Editor,ContentType"; /* The above query will get all items, including folders and items in the folders. After that we remove those items, that are not based on the Document Content Type. Depending on your logic, you can use different endpoints, like: /_api/web/lists(guid'${listId}')/GetItems(query=@v1)?@v1={"FolderServerRelativeUrl" : "${libraryRelativeUrl}", "ViewXml":"<View Scope='RecursiveAll'></View>"} /_api/web/GetFolderByServerRelativePath(decodedurl='${libraryRelativeUrl}')?$select=ID,FileLeafRef,FileRef,ModifiedBy&$expand=Files,ModifiedBy /_api/web/GetFolderByServerRelativeUrl('${libraryRelativeUrl}')/Files?$expand=ListItemAllFields */ return this._webPartContext.spHttpClient.get( queryUrlGetAllItems, SPHttpClient.configurations.v1) .then( (response: any) => { if (response.status >= 200 && response.status < 300) { return response.json(); } else { return Promise.reject(new Error(JSON.stringify(response))); } }) .then((data: any) => { debugger; let documents: IDocument[] = []; if (data) { for (let i = 0; i < data.value.length; i++) { let item = data.value[i]; //check the content type; Include only documents in the response if (item.ContentTypeId.indexOf("0x0101") == 0) { var doc: IDocument = { Id: item.Id, FileRef: item.FileRef, Name: item.FileLeafRef, VersionString: item.OData__UIVersionString, ContentType: item.ContentType.Name, ModifiedBy: item.Editor.Title, Modified: utility.GetFormatedDateString(item.Modified), UniqueId: item.UniqueId, ParentWebUrl: this._webAbsoluteUrl,// this will work in case the library is in the same web as the web part! 
//icon for the Folder content type is a different FileIcon: item.ContentType.Name != "Folder" ? utility.GetImgUrlByFileExtension(item.DocIcon) : utility.GetImgUrlByFileExtension("folder") }; documents.push(doc); } } } return documents; }).catch((ex) => { console.log("readDocumentsFromLibrary > spHttpClient.get()...catch:", ex); throw ex; }); }); } // Helper Methods /** * Returns the list's ID based on its site relative url * listRelativeUrl format: '/sites/mysite/shared documents' * returned value: Guid if succeeded, otherwise - empty string */ private _readListId(listRelativeUrl: string): Promise<string> { let queryUrlGetList = this._webAbsoluteUrl + "/_api/web/GetFolderByServerRelativePath(decodedurl='" + decodeURIComponent(listRelativeUrl) + "')/Properties"; return this._webPartContext.spHttpClient.get( queryUrlGetList, SPHttpClient.configurations.v1) .then( (response: any) => { if (response.status >= 200 && response.status < 300) { return response.json(); } else { return Promise.reject(new Error(JSON.stringify(response))); } }) .then((data: any) => { debugger; if (data) { let listIdValue: string = data.vti_x005f_listname; // string format '{00000000-0000-0000-0000-000000000000}' let listId = listIdValue.replace("{", "").replace("}", ""); return listId; } else { console.log("no list info"); } return ""; }).catch((ex) => { console.log("_readListId > spHttpClient.get()...catch:", ex); throw ex; }); } }
the_stack
import { TestSuiteInfo } from "vscode-test-adapter-api";
import {
  walk,
  getFilesAndAllTestIds,
  IElmBinaries,
  buildElmTestArgs,
  buildElmTestArgsWithReport,
  getFilePath,
  mergeTopLevelSuites,
} from "../util";
import { expect } from "chai";

// Unit tests for the helpers in ../util: suite traversal, test-id -> file
// resolution, elm-test command-line construction, label -> file-path mapping,
// and merging of top-level test suites.
describe("util", () => {
  // Shared fixture: a suite with no children at all.
  const suiteWithoutChildren: TestSuiteInfo = {
    type: "suite",
    id: "a",
    label: "a",
    children: [],
  };

  // Shared fixture: three tests; a/b and a/d live in the same file ("file2").
  const suiteWithFiles: TestSuiteInfo = {
    type: "suite",
    id: "a",
    label: "a",
    file: "file0",
    children: [
      {
        type: "test",
        id: "a/b",
        label: "b",
        file: "file2",
      },
      {
        type: "test",
        id: "a/c",
        label: "c",
        file: "file1",
      },
      {
        type: "test",
        id: "a/d",
        label: "d",
        file: "file2",
      },
    ],
  };

  // walk() must yield the root first, then descend depth-first.
  describe("walk suite", () => {
    it("no children", () => {
      const walked = Array.from(walk(suiteWithoutChildren));
      expect(walked).to.eql([suiteWithoutChildren]);
    });

    it("depth first", () => {
      const suite: TestSuiteInfo = {
        type: "suite",
        id: "a",
        label: "a",
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "b",
            children: [
              {
                type: "test",
                id: "a/b/c",
                label: "c",
              },
              {
                type: "test",
                id: "a/b/d",
                label: "d",
              },
            ],
          },
          {
            type: "suite",
            id: "a/e",
            label: "e",
            children: [],
          },
        ],
      };
      const walked = Array.from(walk(suite));
      expect(walked.map((n) => n.label)).to.eql(["a", "b", "c", "d", "e"]);
    });
  });

  // getFilesAndAllTestIds(): selecting a test selects its file, and therefore
  // every other test sharing that file.
  describe("find files for tests", () => {
    it("empty", () => {
      const ids = ["x"];
      const [files, allIds] = getFilesAndAllTestIds(ids, suiteWithoutChildren);
      // eslint-disable-next-line @typescript-eslint/no-unused-expressions
      expect(files).to.be.empty;
      // eslint-disable-next-line @typescript-eslint/no-unused-expressions
      expect(allIds).to.be.empty;
    });

    it("two tests", () => {
      // Selecting a/b pulls in a/d because both live in "file2".
      const ids = ["a/b"];
      const [files, allIds] = getFilesAndAllTestIds(ids, suiteWithFiles);
      expect(files).to.eql(["file2"]);
      expect(allIds).to.eql(["a/b", "a/d"]);
    });

    it("unique file names", () => {
      // The shared file must not be reported twice.
      const ids = ["a/b", "a/d"];
      const [files, allIds] = getFilesAndAllTestIds(ids, suiteWithFiles);
      expect(files).to.eql(["file2"]);
      expect(allIds).to.eql(["a/b", "a/d"]);
    });
  });

  // buildElmTestArgs(): assemble the elm-test invocation from the detected
  // binaries, optional compiler, and optional file list.
  describe("get elm-test args", () => {
    it("without anything", () => {
      const binaries: IElmBinaries = {};
      const args = buildElmTestArgs(binaries);
      expect(args).to.eql(["elm-test"]);
    });

    it("with local elm-test", () => {
      const binaries: IElmBinaries = {
        elmTest: "local/elm-test",
      };
      const args = buildElmTestArgs(binaries);
      expect(args).to.eql(["local/elm-test"]);
    });

    it("with local elm compiler (0.19)", () => {
      const binaries: IElmBinaries = {
        elmTest: "local/elm-test",
        elm: "local/elm",
      };
      const args = buildElmTestArgs(binaries);
      expect(args).to.eql(["local/elm-test", "--compiler", "local/elm"]);
    });

    it("with files", () => {
      const binaries: IElmBinaries = {
        elmTest: "local/elm-test",
        elm: "local/elm",
      };
      const files = ["file1", "file2"];
      const args = buildElmTestArgs(binaries, files);
      expect(args).to.eql([
        "local/elm-test",
        "--compiler",
        "local/elm",
        "file1",
        "file2",
      ]);
    });

    it("with report", () => {
      const args: string[] = ["path/elm-test", "file"];
      const withReport = buildElmTestArgsWithReport(args);
      expect(withReport).to.eql(["path/elm-test", "file", "--report", "json"]);
    });
  });

  // getFilePath(): map a dotted top-level module label to its .elm file path.
  describe("getFilePathUnderTests", () => {
    it("top level", () => {
      const path = getFilePath({
        tag: "testCompleted",
        labels: ["Module"],
        messages: [],
        status: { tag: "pass" },
        duration: 13,
      });
      expect(path).to.eq("Module.elm");
    });

    it("first level", () => {
      const path = getFilePath({
        tag: "testCompleted",
        labels: ["Module.Sub"],
        messages: [],
        status: { tag: "pass" },
        duration: 13,
      });
      expect(path).to.eq("Module/Sub.elm");
    });

    it("deeper level", () => {
      const path = getFilePath({
        tag: "testCompleted",
        labels: ["Module.Sub.Deep"],
        messages: [],
        status: { tag: "pass" },
        duration: 13,
      });
      expect(path).to.eq("Module/Sub/Deep.elm");
    });
  });

  // mergeTopLevelSuites(): merge an incoming suite tree into an existing one,
  // replacing matching children by id, appending unknown ones, and preserving
  // file/line locations recorded on the existing tree.
  describe("merge test suites", () => {
    // Target tree carrying file/line info that merges must keep.
    const to: TestSuiteInfo = {
      type: "suite",
      id: "top",
      label: "top",
      file: "file1",
      line: 1,
      children: [
        {
          type: "suite",
          id: "a/b",
          label: "b",
          file: "file2",
          line: 2,
          children: [
            {
              type: "suite",
              id: "a/b/deep",
              label: "deep",
              file: "file2",
              line: 22,
              children: [],
            },
          ],
        },
        {
          type: "suite",
          id: "a/e",
          label: "e",
          file: "file3",
          line: 3,
          children: [],
        },
      ],
    };

    it("mismatched", () => {
      // Different top-level id: the incoming suite is appended as a child.
      const from: TestSuiteInfo = {
        type: "suite",
        id: "another-top",
        label: "another top",
        children: [
          {
            type: "suite",
            id: "c",
            label: "c",
            children: [],
          },
        ],
      };
      expect(mergeTopLevelSuites(from, to)).to.eql({
        type: "suite",
        id: "top",
        label: "top",
        file: "file1",
        line: 1,
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "b",
            file: "file2",
            line: 2,
            children: [
              {
                type: "suite",
                id: "a/b/deep",
                label: "deep",
                file: "file2",
                line: 22,
                children: [],
              },
            ],
          },
          {
            type: "suite",
            id: "a/e",
            label: "e",
            file: "file3",
            line: 3,
            children: [],
          },
          {
            type: "suite",
            id: "another-top",
            label: "another top",
            children: [
              {
                type: "suite",
                id: "c",
                label: "c",
                children: [],
              },
            ],
          },
        ],
      });
    });

    it("replace one", () => {
      // Matching child id "a/b": incoming child replaces it, location kept.
      const from: TestSuiteInfo = {
        type: "suite",
        id: "top",
        label: "top",
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "new b",
            children: [],
          },
        ],
      };
      expect(mergeTopLevelSuites(from, to)).to.eql({
        type: "suite",
        id: "top",
        label: "top",
        file: "file1",
        line: 1,
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "new b",
            file: "file2",
            line: 2,
            children: [],
          },
          {
            type: "suite",
            id: "a/e",
            label: "e",
            file: "file3",
            line: 3,
            children: [],
          },
        ],
      });
    });

    it("replace one and keep deep location", () => {
      // Nested replacement must still inherit the deep child's file/line.
      const from: TestSuiteInfo = {
        type: "suite",
        id: "top",
        label: "top",
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "new b",
            children: [
              {
                type: "suite",
                id: "a/b/deep",
                label: "new deep",
                children: [],
              },
            ],
          },
        ],
      };
      expect(mergeTopLevelSuites(from, to)).to.eql({
        type: "suite",
        id: "top",
        label: "top",
        file: "file1",
        line: 1,
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "new b",
            file: "file2",
            line: 2,
            children: [
              {
                type: "suite",
                id: "a/b/deep",
                label: "new deep",
                file: "file2",
                line: 22,
                children: [],
              },
            ],
          },
          {
            type: "suite",
            id: "a/e",
            label: "e",
            file: "file3",
            line: 3,
            children: [],
          },
        ],
      });
    });

    it("append one", () => {
      // Unknown child id "a/d": appended after the existing children.
      const from: TestSuiteInfo = {
        type: "suite",
        id: "top",
        label: "top",
        children: [
          {
            type: "suite",
            id: "a/d",
            label: "new d",
            children: [],
          },
        ],
      };
      expect(mergeTopLevelSuites(from, to)).to.eql({
        type: "suite",
        id: "top",
        label: "top",
        file: "file1",
        line: 1,
        children: [
          {
            type: "suite",
            id: "a/b",
            label: "b",
            file: "file2",
            line: 2,
            children: [
              {
                type: "suite",
                id: "a/b/deep",
                label: "deep",
                file: "file2",
                line: 22,
                children: [],
              },
            ],
          },
          {
            type: "suite",
            id: "a/e",
            label: "e",
            file: "file3",
            line: 3,
            children: [],
          },
          {
            type: "suite",
            id: "a/d",
            label: "new d",
            children: [],
          },
        ],
      });
    });
  });
});
the_stack
import React from 'react'; import isArray from '@antv/util/lib/is-array'; import isNumber from '@antv/util/lib/is-number'; const isBrowser = typeof window !== 'undefined'; const G6 = isBrowser ? require('@antv/g6') : null; const duration = 2000; const animateOpacity = 0.6; const animateBackOpacity = 0.1; const virtualEdgeOpacity = 0.1; const realEdgeOpacity = 0.2; const darkBackColor = 'rgb(43, 47, 51)'; const disableColor = '#777'; const theme = 'dark'; const subjectColors = [ '#3D76DD', '#19A576', '#65789B', '#B98700', '#5349E0', '#5AB8DB', '#7B48A1', '#D77622', '#008685', '#D37099', ]; export const colorSets = G6 ? G6.Util.getColorSetsBySubjectColors(subjectColors, darkBackColor, theme, disableColor) : []; export const global = { node: { style: { fill: '#2B384E', }, labelCfg: { style: { fill: '#acaeaf', stroke: '#191b1c', }, }, stateStyles: { focus: { fill: '#2B384E', }, }, }, edge: { style: { stroke: '#acaeaf', realEdgeStroke: '#acaeaf', //'#f00', realEdgeOpacity, strokeOpacity: realEdgeOpacity, }, labelCfg: { style: { fill: '#acaeaf', realEdgeStroke: '#acaeaf', //'#f00', realEdgeOpacity: 0.5, stroke: '#191b1c', }, }, stateStyles: { focus: { stroke: '#fff', // '#3C9AE8', }, }, }, }; if (G6) { // Custom super node G6.registerNode( 'aggregated-node', { draw(cfg: any, group) { let width = 53, height = 27; const style = cfg.style || {}; const colorSet = cfg.colorSet || colorSets[0]; // halo for hover group.addShape('rect', { attrs: { x: -width * 0.55, y: -height * 0.6, width: width * 1.1, height: height * 1.2, fill: colorSet.mainFill, opacity: 0.9, lineWidth: 0, radius: (height / 2 || 13) * 1.2, }, name: 'halo-shape', visible: false, }); // focus stroke for hover group.addShape('rect', { attrs: { x: -width * 0.55, y: -height * 0.6, width: width * 1.1, height: height * 1.2, fill: colorSet.mainFill, // '#3B4043', stroke: '#AAB7C4', lineWidth: 1, lineOpacty: 0.85, radius: (height / 2 || 13) * 1.2, }, name: 'stroke-shape', visible: false, }); const keyShape = 
group.addShape('rect', { attrs: { ...style, x: -width / 2, y: -height / 2, width, height, fill: colorSet.mainFill, // || '#3B4043', stroke: colorSet.mainStroke, lineWidth: 2, cursor: 'pointer', radius: height / 2 || 13, lineDash: [2, 2], }, name: 'aggregated-node-keyShape', }); let labelStyle = {}; if (cfg.labelCfg) { labelStyle = Object.assign(labelStyle, cfg.labelCfg.style); } group.addShape('text', { attrs: { text: `${cfg.count}`, x: 0, y: 0, textAlign: 'center', textBaseline: 'middle', cursor: 'pointer', fontSize: 12, fill: '#fff', opacity: 0.85, fontWeight: 400, }, name: 'count-shape', className: 'count-shape', draggable: true, }); // tag for new node if (cfg.new) { group.addShape('circle', { attrs: { x: width / 2 - 3, y: -height / 2 + 3, r: 4, fill: '#6DD400', lineWidth: 0.5, stroke: '#FFFFFF', }, name: 'typeNode-tag-circle', }); } return keyShape; }, setState: (name, value, item) => { const group = item.get('group'); if (name === 'layoutEnd' && value) { const labelShape = group.find((e) => e.get('name') === 'text-shape'); if (labelShape) labelShape.set('visible', true); } else if (name === 'hover') { if (item.hasState('focus')) { return; } const halo = group.find((e) => e.get('name') === 'halo-shape'); const keyShape: any = item.getKeyShape(); const colorSet = item.getModel().colorSet || colorSets[0]; if (value) { halo && halo.show(); keyShape.attr('fill', colorSet.activeFill); } else { halo && halo.hide(); keyShape.attr('fill', colorSet.mainFill); } } else if (name === 'focus') { const stroke = group.find((e) => e.get('name') === 'stroke-shape'); const keyShape: any = item.getKeyShape(); const colorSet = item.getModel().colorSet || colorSets[0]; if (value) { stroke && stroke.show(); keyShape.attr('fill', colorSet.selectedFill); } else { stroke && stroke.hide(); keyShape.attr('fill', colorSet.mainFill); } } }, update: undefined, }, 'single-node', ); // Custom real node G6.registerNode( 'real-node', { draw(cfg: any, group) { let r = 30; if 
(isNumber(cfg.size)) { r = (cfg.size as number) / 2; } else if (isArray(cfg.size)) { r = cfg.size[0] / 2; } const style = cfg.style || {}; const colorSet = cfg.colorSet || colorSets[0]; // halo for hover group.addShape('circle', { attrs: { x: 0, y: 0, r: r + 5, fill: style.fill || colorSet.mainFill || '#2B384E', opacity: 0.9, lineWidth: 0, }, name: 'halo-shape', visible: false, }); // focus stroke for hover group.addShape('circle', { attrs: { x: 0, y: 0, r: r + 5, fill: style.fill || colorSet.mainFill || '#2B384E', stroke: '#fff', strokeOpacity: 0.85, lineWidth: 1, }, name: 'stroke-shape', visible: false, }); const keyShape = group.addShape('circle', { attrs: { ...style, x: 0, y: 0, r, fill: colorSet.mainFill, stroke: colorSet.mainStroke, lineWidth: 2, cursor: 'pointer', }, name: 'aggregated-node-keyShape', }); let labelStyle = {}; if (cfg.labelCfg) { labelStyle = Object.assign(labelStyle, cfg.labelCfg.style); } if (cfg.label) { const text = cfg.label; let labelStyle: any = {}; let refY = 0; if (cfg.labelCfg) { labelStyle = Object.assign(labelStyle, cfg.labelCfg.style); refY += cfg.labelCfg.refY || 0; } let offsetY = 0; const fontSize = labelStyle.fontSize < 8 ? 
8 : labelStyle.fontSize; const lineNum = (cfg.labelLineNum as number) || 1; offsetY = lineNum * (fontSize || 12); group.addShape('text', { attrs: { text, x: 0, y: r + refY + offsetY + 5, textAlign: 'center', textBaseLine: 'alphabetic', cursor: 'pointer', fontSize, fill: '#fff', opacity: 0.85, fontWeight: 400, stroke: global.edge.labelCfg.style.stroke, }, name: 'text-shape', className: 'text-shape', }); } // tag for new node if (cfg.new) { group.addShape('circle', { attrs: { x: r - 3, y: -r + 3, r: 4, fill: '#6DD400', lineWidth: 0.5, stroke: '#FFFFFF', }, name: 'typeNode-tag-circle', }); } return keyShape; }, setState: (name, value, item) => { const group = item.get('group'); if (name === 'layoutEnd' && value) { const labelShape = group.find((e) => e.get('name') === 'text-shape'); if (labelShape) labelShape.set('visible', true); } else if (name === 'hover') { if (item.hasState('focus')) { return; } const halo = group.find((e) => e.get('name') === 'halo-shape'); const keyShape: any = item.getKeyShape(); const colorSet = item.getModel().colorSet || colorSets[0]; if (value) { halo && halo.show(); keyShape.attr('fill', colorSet.activeFill); } else { halo && halo.hide(); keyShape.attr('fill', colorSet.mainFill); } } else if (name === 'focus') { const stroke = group.find((e) => e.get('name') === 'stroke-shape'); const label = group.find((e) => e.get('name') === 'text-shape'); const keyShape: any = item.getKeyShape(); const colorSet = item.getModel().colorSet || colorSets[0]; if (value) { stroke && stroke.show(); keyShape.attr('fill', colorSet.selectedFill); label && label.attr('fontWeight', 800); } else { stroke && stroke.hide(); keyShape.attr('fill', colorSet.mainFill); // '#2B384E' label && label.attr('fontWeight', 400); } } }, update: undefined, }, 'aggregated-node', ); // 这样可以继承 aggregated-node 的 setState // Custom the quadratic edge for multiple edges between one node pair G6.registerEdge( 'custom-quadratic', { setState: (name, value, item) => { const group = 
item.get('group'); const model = item.getModel(); if (name === 'focus') { const back = group.find((ele) => ele.get('name') === 'back-line'); if (back) { back.stopAnimate(); back.remove(); back.destroy(); } const keyShape = group.find((ele) => ele.get('name') === 'edge-shape'); const arrow: any = model.style.endArrow; if (value) { if (keyShape.cfg.animation) { keyShape.stopAnimate(true); } keyShape.attr({ strokeOpacity: animateOpacity, opacity: animateOpacity, stroke: '#fff', endArrow: { ...arrow, stroke: '#fff', fill: '#fff', }, }); if (model.isReal) { const { lineWidth, path, endArrow, stroke } = keyShape.attr(); const back = group.addShape('path', { attrs: { lineWidth, path, stroke, endArrow, opacity: animateBackOpacity, }, name: 'back-line', }); back.toBack(); const length = keyShape.getTotalLength(); keyShape.animate( (ratio) => { // the operations in each frame. Ratio ranges from 0 to 1 indicating the prograss of the animation. Returns the modified configurations const startLen = ratio * length; // Calculate the lineDash const cfg = { lineDash: [startLen, length - startLen], }; return cfg; }, { repeat: true, // Whether executes the animation repeatly duration, // the duration for executing once }, ); } else { let index = 0; const lineDash = keyShape.attr('lineDash'); const totalLength = lineDash[0] + lineDash[1]; keyShape.animate( () => { index++; if (index > totalLength) { index = 0; } const res = { lineDash, lineDashOffset: -index, }; // returns the modified configurations here, lineDash and lineDashOffset here return res; }, { repeat: true, // whether executes the animation repeatly duration, // the duration for executing once }, ); } } else { keyShape.stopAnimate(); const stroke = '#acaeaf'; const opacity = model.isReal ? 
realEdgeOpacity : virtualEdgeOpacity; keyShape.attr({ stroke, strokeOpacity: opacity, opacity, endArrow: { ...arrow, stroke, fill: stroke, }, }); } } }, }, 'quadratic', ); // Custom the line edge for single edge between one node pair G6.registerEdge( 'custom-line', { setState: (name, value, item) => { const group = item.get('group'); const model = item.getModel(); if (name === 'focus') { const keyShape = group.find((ele) => ele.get('name') === 'edge-shape'); const back = group.find((ele) => ele.get('name') === 'back-line'); if (back) { back.stopAnimate(); back.remove(); back.destroy(); } const arrow: any = model.style.endArrow; if (value) { if (keyShape.cfg.animation) { keyShape.stopAnimate(true); } keyShape.attr({ strokeOpacity: animateOpacity, opacity: animateOpacity, stroke: '#fff', endArrow: { ...arrow, stroke: '#fff', fill: '#fff', }, }); if (model.isReal) { const { path, stroke, lineWidth } = keyShape.attr(); const back = group.addShape('path', { attrs: { path, stroke, lineWidth, opacity: animateBackOpacity, }, name: 'back-line', }); back.toBack(); const length = keyShape.getTotalLength(); keyShape.animate( (ratio) => { // the operations in each frame. Ratio ranges from 0 to 1 indicating the prograss of the animation. 
Returns the modified configurations const startLen = ratio * length; // Calculate the lineDash const cfg = { lineDash: [startLen, length - startLen], }; return cfg; }, { repeat: true, // Whether executes the animation repeatly duration, // the duration for executing once }, ); } else { const lineDash = keyShape.attr('lineDash'); const totalLength = lineDash[0] + lineDash[1]; let index = 0; keyShape.animate( () => { index++; if (index > totalLength) { index = 0; } const res = { lineDash, lineDashOffset: -index, }; // returns the modified configurations here, lineDash and lineDashOffset here return res; }, { repeat: true, // whether executes the animation repeatly duration, // the duration for executing once }, ); } } else { keyShape.stopAnimate(); const stroke = '#acaeaf'; const opacity = model.isReal ? realEdgeOpacity : virtualEdgeOpacity; keyShape.attr({ stroke, strokeOpacity: opacity, opacity: opacity, endArrow: { ...arrow, stroke, fill: stroke, }, }); } } }, }, 'single-edge', ); } const reacComponent = () => { return <></>; }; export default reacComponent;
the_stack
import { Event24Core } from "./core/Event24Core"; import { Tween24 } from "./Tween24"; import { ClassUtil } from "./utils/ClassUtil"; import { HTMLUtil } from "./utils/HTMLUtil"; export class Event24 { // Etc Events static readonly CANCEL :string = "cancel"; static readonly ERROR :string = "error"; static readonly SCROLL :string = "scroll"; static readonly SELECT :string = "select"; static readonly SHOW :string = "show"; static readonly WHEEL :string = "wheel"; // Connection Events static readonly OFFLINE :string = "offline"; static readonly ONLINE :string = "online"; // Key Events static readonly KEY_DOWN :string = "keydown"; static readonly KEY_UP :string = "keyup"; // Focus Events static readonly FOCUS_BLUR :string = "blur"; static readonly FOCUS :string = "focus"; static readonly FOCUS_IN :string = "focusin"; static readonly FOCUS_OUT :string = "focusout"; // Drag Events static readonly DRAG :string = "drag"; static readonly DRAG_END :string = "dragend"; static readonly DRAG_EXIT :string = "dragexit"; static readonly DRAG_LEAVE :string = "dragleave"; static readonly DRAG_OVER :string = "dragover"; static readonly DRAG_START :string = "dragstart"; static readonly DROP :string = "drop"; // History Events static readonly HASH_CHANGE :string = "hashchange"; static readonly PAGE_HIDE :string = "pagehide"; static readonly PAGE_SHOW :string = "pageshow"; static readonly POP_STATE :string = "popstate"; // Mouse Events static readonly MOUSE_OVER :string = "mouseover"; static readonly MOUSE_OUT :string = "mouseout"; static readonly MOUSE_ENTER :string = "mouseenter"; static readonly MOUSE_LEAVE :string = "mouseleave"; static readonly MOUSE_MOVE :string = "mousemove"; static readonly MOUSE_UP :string = "mouseleave"; static readonly MOUSE_DOWN :string = "mouseleave"; // Click Events static readonly CLICK :string = "click"; static readonly AUX_CLICK :string = "auxclick"; static readonly DOUBLE_CLICK :string = "dblclick"; // Touch Events static readonly TOUCH_START :string = 
"touchstart"; static readonly TOUCH_END :string = "touchend"; static readonly TOUCH_MOVE :string = "touchmove"; static readonly TOUCH_CANCEL :string = "touchcancel"; // Print Events static readonly AFTER_PRINT :string = "afterprint"; static readonly BEFORE_PRINT :string = "beforeprint"; // Messaging Events static readonly MESSAGE :string = "message"; static readonly MESSAGE_ERROR :string = "messageerror"; // Clipboard Events static readonly CLIPBOARD_COPY :string = "copy"; static readonly CLIPBOARD_CUT :string = "cut"; static readonly CLIPBOARD_PASTE :string = "paste"; // Fullscreen Events static readonly FULLSCREEN_CHANGE :string = "fullscreenchange"; static readonly FULLSCREEN_ERROR :string = "fullscreenerror"; // Load, Unload Events static readonly BEFOREUN_LOAD :string = "beforeunload"; static readonly DOM_CONTENT_LOADED :string = "DOMContentLoaded"; static readonly LOAD :string = "load"; static readonly UNLOAD :string = "unload"; // Window Events static readonly LANGUAGE_CHANGE :string = "languagechange"; static readonly ORIENTATION_CHANGE :string = "orientationchange"; static readonly DEVICE_MOTION :string = "devicemotion"; static readonly DEVICE_ORIENTATION :string = "deviceorientation"; static readonly RESIZE :string = "resize"; static readonly STORAGE :string = "storage"; // Pointer Events static readonly POINTER_OVER :string = "pointerover"; static readonly POINTER_ENTER :string = "pointerenter"; static readonly POINTER_DOWN :string = "pointerdown"; static readonly POINTER_MOVE :string = "pointermove"; static readonly POINTER_UP :string = "pointerup"; static readonly POINTER_CANCEL :string = "pointercancel"; static readonly POINTER_OUT :string = "pointerout"; static readonly POINTER_LEAVE :string = "pointerleave"; static readonly GOT_POINTERCAPTURE :string = "gotpointercapture"; static readonly LOST_POINTERCAPTURE :string = "lostpointercapture"; // Animation, Transition Events static readonly ANIMATION_CANCEL :string = "animationcancel"; static readonly 
ANIMATION_END :string = "animationend"; static readonly ANIMATION_ITERATION :string = "animationiteration"; static readonly ANIMATION_START :string = "animationstart"; static readonly TRANSITION_CANCEL :string = "transitioncancel"; static readonly TRANSITION_END :string = "transitionend"; static readonly TRANSITION_RUN :string = "transitionrun"; static readonly TRANSITION_START :string = "transitionstart"; // Promise Rejection Events static readonly REJECTION_HANDLED :string = "rejectionhandled"; static readonly UNHANDLED_REJECTION :string = "unhandledrejection"; // Gamepad Events static readonly GAMEPAD_CONNECTED :string = "gamepadconnected"; static readonly GAMEPAD_DISCONNECTED :string = "gamepaddisconnected"; // Manifest Events static readonly APP_INSTALLED :string = "appinstalled"; static readonly BEFORE_INSTALL_PROMPT :string = "beforeinstallprompt"; // Mouse Force Events static readonly WEBKIT_MOUSE_FORCE_CHANGED :string = "webkitmouseforcechanged"; static readonly WEBKIT_MOUSE_FORCE_DOWN :string = "webkitmouseforcedown"; static readonly WEBKIT_MOUSE_FORCE_WILLBEGIN :string = "webkitmouseforcewillbegin"; static readonly WEBKIT_MOUSE_FORCE_UP :string = "webkitmouseforceup"; // Web VR Events static readonly VR_DISPLAY_ACTIVATE :string = "vrdisplayactivate"; static readonly VR_DISPLAY_BLUR :string = "vrdisplayblur"; static readonly VR_DISPLAY_CONNECT :string = "vrdisplayconnect"; static readonly VR_DISPLAY_DEACTIVATE :string = "vrdisplaydeactivate"; static readonly VR_DISPLAY_DISCONNECT :string = "vrdisplaydisconnect"; static readonly VR_DISPLAY_FOCUS :string = "vrdisplayfocus"; static readonly VR_DISPLAY_PRESENT_CHANGE :string = "vrdisplaypresentchange"; static readonly VR_DISPLAY_POINTER_RESTRICTED :string = "vrdisplaypointerrestricted"; static readonly VR_DISPLAY_POINTER_UNRESTRICTED :string = "vrdisplaypointerunrestricted"; private static _allEvents:Map<any, {[key:string]:Event24Core[]}> = new Map<any, {[key:string]:Event24Core[]}>(); private 
_eventCores:Event24Core[]; constructor () { this._eventCores = []; } private _addEventCore(core:Event24Core) { this._eventCores.push(core); } /** * イベントリスナーを設定します。 * @memberof Event24 */ setEventListener() { for (const eventCore of this._eventCores) { eventCore.setEventListener(); } } /** * イベントリスナーを解除します。 * @memberof Event24 */ removeEventListener() { for (const eventCore of this._eventCores) { eventCore.removeEventListener(); } } /** * 設定したトゥイーンを停止させるイベントを設定します。 * @param {(string|string[])} eventType トゥイーンを停止するイベントタイプ * @memberof Event24 */ addStopEvent(eventType:string|string[]):Event24 { for (const eventCore of this._eventCores) { eventCore.addStopEvent(eventType); } return this; } willChange(use:boolean):Event24 { for (const eventCore of this._eventCores) { eventCore.willChange(use); } return this; } // ------------------------------------------ // // Static Method // // ------------------------------------------ /** * イベントに合わせて再生されるトゥイーンを設定します。 * @static * @param {(any|any[])} target イベントの対象 * @param {(string|string[])} eventType トゥイーンを再生するイベントタイプ * @param {Tween24} tween イベントに合わせて再生されるトゥイーン * @memberof Event24 */ public static add(target:any|any[], eventType:string|string[], tween:Tween24):Event24 { return Event24._add(target, eventType, tween, null); } public static __addCallback(target:any|any[], eventType:string|string[], callback:Function):Event24 { return Event24._add(target, eventType, null, callback); } private static _add(target:any|any[], eventType:string|string[], tween:Tween24|null, callback:Function|null):Event24 { const event:Event24 = new Event24(); const eventTypes = Array.isArray(eventType) ? 
eventType : [eventType]; if (ClassUtil.isString(target)) { target = String(target).split(","); } for (const type of eventTypes) { if (Array.isArray(target)) { if (ClassUtil.isString(target[0])) { for (const query of target) { for (const eventTarget of HTMLUtil.querySelectorAll(query)) { event._addEventCore(Event24._create(eventTarget, query, type, tween, callback)); } } } else { for (const eventTarget of target) { event._addEventCore(Event24._create(eventTarget, null, type, tween, callback)); } } } else { event._addEventCore(Event24._create(target, null, type, tween, callback)); } } return event; } private static _create(target:HTMLElement|any, query:string|null, eventType:string, tween:Tween24|null, callback:Function|null):Event24Core { let events = Event24._allEvents.get(target); if (!events) { events = {}; Event24._allEvents?.set(target, events); } const core = new Event24Core(target, query, eventType, tween, callback); core.setEventListener(); let cores = events[eventType]; if (!cores) cores = events[eventType] = []; cores.push(core); return core; } /** * 対象に設定されたイベントを解除します。 * @static * @param {(any|any[])} target イベントの対象 * @param {(string|string[])} eventType トゥイーンを再生するイベントタイプ * @memberof Event24 */ public static remove(target:any|any[], eventType:string|string[]):void { const eventTypes = Array.isArray(eventType) ? 
eventType : [eventType]; for (const t of Event24._getEventTarget(target)) { let events = Event24._allEvents.get(t); if (events) { for (const type of eventTypes) { let cores = events[type]; if (cores) { for (const core of cores) { core.removeEventListener(); } delete events[type]; } } } } } /** * 対象のイベントをすべて解除します。 * @static * @param {(any|any[])} target イベントの対象 * @memberof Event24 */ public static removeAllByTarget(target:any|any[]):void { for (const t of Event24._getEventTarget(target)) { let events = Event24._allEvents.get(t); if (events) { for (const type in events) { for (const core of events[type]) { core.removeEventListener(); } delete events[type]; } } Event24._allEvents.delete(t); } } /** * 設定したすべてのイベントを解除します。 * @static * @memberof Event24 */ public static removeAll():void { Event24._allEvents.forEach(function(events, target) { for (const type in events) { for (const core of events[type]) { core.removeEventListener(); } delete events[type]; } Event24._allEvents.delete(target); }); } private static _getEventTarget(target:any):any[] { let eventTarget:any[] = []; if (ClassUtil.isString(target)) { target = String(target).split(","); } if (Array.isArray(target)) { if (ClassUtil.isString(target[0])) { for (const t of target) { eventTarget = eventTarget.concat(HTMLUtil.querySelectorAll(t)); } } else { eventTarget = target; } } else { eventTarget.push(target); } return eventTarget; } }
the_stack
import {CustomTokenizer, RegexTokenizer, Token, TokenizerChain} from "parselib"; import {AssertionMap, CharactersMap} from "../builderHelpers"; export const Decompile = (input: string | RegExp) : string => { if(input instanceof RegExp) input = input.toString(); let split = input.split("/"); split.shift(); let flags = split.pop().split("").map(flag => "flag(" + flag + ");").join("\n"); if(flags.length > 0) flags += "\n\n"; const tokens = groupTokenizerChain.run(split.join("/")); return flags + REXSDataToString(Recurse(tokens)); } const REXSDataToString = (data: REXSData[], indent: number = 0) : string => { const indentStr = " ".repeat(indent * 4); let lines: string[] = []; for (let tag of data) { if(tag.tag === "ugroup"){ lines.push(REXSDataToString(tag.body, indent)); continue; } lines.push(indentStr + tag.tag + "(" + (tag.params || "") + ")" + (tag.body ? " {" : ";")); if(tag.body){ lines.push(REXSDataToString(tag.body, indent+1)); lines.push(indentStr + "}"); } } return lines.join("\n"); } //I don't even want to think about this code ever again. 
/**
 * Recursively converts a token stream into a REXS tag tree.
 * `data.startToken` records which bracket opened the sub-sequence being
 * parsed: "(" selects a group-like wrapper (group/ugroup/ahead/before),
 * "[" a character set.
 * NOTE(review): order-sensitive state machine — documented only, code left intact.
 */
const Recurse = (tokens: Token[], data?: RecurseData) : REXSData[] => {
    const stringSeq = tokens.map(token => token.value).join("");
    let outerTag: REXSData = null;
    let isSet = false;
    if(data && data.startToken.value === "(") {
        // Classify the group by its leading "?..." marker and strip that marker.
        if(stringSeq.startsWith("?:")){ outerTag = {tag: "ugroup"}; tokens = tokens.slice(2); }
        else if(stringSeq.startsWith("?=")) { outerTag = {tag: "ahead"}; tokens = tokens.slice(2); }
        else if(stringSeq.startsWith("?!")) { outerTag = {tag: "ahead", params: "not"}; tokens = tokens.slice(2); }
        else if(stringSeq.startsWith("?<=")) { outerTag = {tag: "before"}; tokens = tokens.slice(3); }
        else if(stringSeq.startsWith("?<!")) { outerTag = {tag: "before", params: "not"}; tokens = tokens.slice(3); }
        else { outerTag = {tag: "group"}; }
    } else if(data && data.startToken.value === "[") {
        isSet = true;
        outerTag = {tag: "set"};
    }
    let depth = 0;                  // current bracket nesting depth
    let startPos = -1;              // index of the bracket that opened the current nested run
    let out: REXSData[] = [];
    let curRepeat: REXSData = null; // pending repeat(...) wrapping the previously emitted tag
    let awaitingEnd = false;        // inside a "{...}" quantifier, collecting until "}"
    let onOR = false;
    let orOut: REXSData = null;     // accumulates "or" alternatives split on "|"
    let recurseOnSet = false;       // inside "[...]": defer contents to the set recursion
    //We need to go through each token, and find the highest nested sequences.
    for(let i = 0; i < tokens.length; i++){
        if(tokens[i].isToken && ["(", "["].includes(tokens[i].value) && !recurseOnSet && !isSet){
            // Opening bracket at this level: flush any pending repeat, then start
            // (or deepen) a nested run that will be handled recursively on close.
            if(curRepeat){ curRepeat.params = getRepeatParams(curRepeat.params); out.push(curRepeat); curRepeat = null; awaitingEnd = false; }
            if (depth === 0) startPos = i;
            depth++;
            if(tokens[i].value === "[") recurseOnSet = true;
        } else if(tokens[i].isToken && [")", "]"].includes(tokens[i].value) && !isSet && !(recurseOnSet && tokens[i].value === ")")) {
            if(startPos === -1) throw new Error("The input contains an invalid sequence.");
            if(depth === 1) {
                // Outermost close: recurse into the bracketed slice (brackets excluded).
                out.push(...Recurse(tokens.slice(startPos + 1, i), {startToken: tokens[startPos]}));
            }
            depth--;
            recurseOnSet = false;
        } else if(depth === 0) {
            let token = tokens[i];
            if(!isSet){
                if(["*", "+", "?", "{"].includes(token.value) && !curRepeat) {
                    // Quantifier: wrap the previously emitted tag in repeat(...).
                    // If that tag was a multi-character literal, only its final
                    // character is quantified; the prefix is re-emitted as-is.
                    const popped = out.pop();
                    if(popped.tag === "match" && popped.params.startsWith("\"") && popped.params.endsWith("\"")) {
                        const sub = popped.params.substring(1, popped.params.length-1);
                        if(sub.length !== 1) {
                            out.push({tag: "match", params: "\""+sub.substring(0, sub.length-1)+"\""});
                            popped.params = "\""+sub.substring(sub.length-1)+"\"";
                        }
                    }
                    curRepeat = {tag: "repeat", params: token.value, body: [popped]};
                    if(token.value === "{") awaitingEnd = true;
                } else if(curRepeat && awaitingEnd){
                    // Accumulate "{m,n}" characters until the closing brace.
                    curRepeat.params += token.value;
                    if(token.value === "}") { awaitingEnd = false; }
                } else if(curRepeat && token.value === "?") {
                    // Lazy-quantifier suffix on the pending repeat.
                    curRepeat.params = getRepeatParams(curRepeat.params + "?");
                    out.push(curRepeat);
                    curRepeat = null;
                    awaitingEnd = false;
                } else if(token.value === "|") {
                    // Alternation: flush the pending repeat, then bank the current
                    // run of tags as one orpart and start a fresh run.
                    if(curRepeat){ curRepeat.params = getRepeatParams(curRepeat.params); out.push(curRepeat); curRepeat = null; awaitingEnd = false; }
                    if(!onOR || !orOut) { orOut = {tag: "or", body: []}; onOR = true; }
                    orOut.body.push({tag: "orpart", body: out});
                    out = [];
                } else if(curRepeat) {
                    // Ordinary token finalizes the pending repeat, then is emitted itself.
                    curRepeat.params = getRepeatParams(curRepeat.params);
                    out.push(curRepeat);
                    curRepeat = null;
                    awaitingEnd = false;
                    pushMatch(tokenToREXS(token, isSet), out);
                } else {
                    pushMatch(tokenToREXS(token, isSet), out);
                }
            } else if(isSet && outerTag) {
                // Inside a character set: "^" (first position only) negates the set,
                // "-" (not at either end) becomes a range marker, all else matches.
                if(token.value === "^" && i === 0){
                    outerTag.params = "not";
                } else if(token.value === "-" && i !== 0 && i !== tokens.length-1) {
                    out.push({tag: "to"});
                } else if(token.isToken && !["]", "\\", "^", "-"].includes(token.value)) {
                    pushMatch(tokenToREXS(token, isSet), out);
                } else {
                    pushMatch(tokenToREXS(token, isSet), out);
                }
            } else {
                if(curRepeat){ curRepeat.params = getRepeatParams(curRepeat.params); out.push(curRepeat); curRepeat = null; awaitingEnd = false; }
                pushMatch(tokenToREXS(token, isSet), out);
            }
        }
        if(depth < 0){
            throw new Error("The input contains an invalid sequence.");
        }
    }
    if(depth !== 0) throw new Error("The input contains an invalid sequence.");
    // Flush any trailing repeat / alternation, then wrap in the group/set tag.
    if(curRepeat){ curRepeat.params = getRepeatParams(curRepeat.params); out.push(curRepeat); }
    if(onOR) {
        orOut.body.push({tag: "orpart", body: out});
        out = [orOut];
    }
    if(outerTag) {
        outerTag.body = out;
        out = [outerTag];
    }
    return out;
}

// Records which bracket token opened the sub-sequence passed to Recurse.
interface RecurseData { startToken: Token; }

// One node of the decompiled REXS tree: tag name, optional params, optional children.
interface REXSData { tag: string; params?: string; body?: REXSData[]; }

/**
 * Maps a single token to a REXS tag: named character classes and assertions
 * first, then control/hex/unicode escapes, backreferences (or NULL for "\0"),
 * and finally a quoted literal match.
 * (Continues past this chunk boundary.)
 */
const tokenToREXS = (token: Token, isSet: boolean) : REXSData => {
    const character = Object.keys(CharactersMap).filter(key => CharactersMap[key] === token.value);
    if(character.length > 0) return {tag: "match", params: character[0]};
    const assertion = Object.keys(AssertionMap).filter(key => AssertionMap[key] === token.value);
    if(assertion.length > 0 && !isSet) return {tag: "assert", params: assertion[0]};
    if(token.value.startsWith("\\c")){ return {tag: "match", params: "CONTROL, " + token.value.substring(2)}; }
    if(token.value.startsWith("\\x")){ return {tag: "match", params: "HEX, " + token.value.substring(2)}; }
    if(token.value.startsWith("\\u")){ return {tag: "match", params: "HEX, " + token.value.substring(2)}; }
    if(token.value.startsWith("\\")){
        const parse = parseInt(token.value.substring(1));
        if(!isNaN(parse)){
            if(parse === 0){
                return
{tag: "match", params: "NULL"}; // (continuation of tokenToREXS from the previous chunk)
            } else {
                return {tag: "backref", params: token.value.substring(1)};
            }
        }
    }
    return {tag: "match", params: "\""+unEscape(token.value)+"\""};
}

/**
 * Appends a match tag to `out`, merging adjacent quoted-literal matches into a
 * single match("...") so runs of plain characters decompile to one tag.
 */
const pushMatch = (match: REXSData, out: REXSData[]) => {
    if(match.tag !== "match" || out.length < 1 || out[out.length-1].tag !== "match" || !out[out.length-1].params || !out[out.length-1].params.startsWith("\"") || !match.params || !match.params.startsWith("\"")) {
        out.push(match);
        return;
    }
    // Both are quoted literals: splice the new characters inside the previous quotes.
    out[out.length-1].params = out[out.length-1].params.substring(0, out[out.length-1].params.length-1) + match.params.substring(1, match.params.length-1) + "\"";
}

/**
 * Converts a raw quantifier string ("*", "+", "?", "{m,n}", optionally with a
 * trailing "?" for lazy matching) into repeat(...) parameters
 * "start, end[, nongreedy]".
 */
const getRepeatParams = (params: string) : string => {
    let startVal: string = "";
    let endVal: string = "";
    let greedy: string = "";
    if(params.startsWith("*")){ startVal = "0"; endVal = "inf"; }
    if(params.startsWith("+")){ startVal = "1"; endVal = "inf"; }
    if(params.startsWith("?")){ startVal = "0"; endVal = "1"; }
    if(params.startsWith("{")){
        // NOTE(review): for a lazy brace quantifier like "{2,3}?" this slice keeps
        // the closing "}" (endVal becomes "3}"); the trailing "?" should probably
        // be stripped before slicing — confirm and fix.
        const split = params.substring(1, params.length-1).split(",");
        if(split.length === 1){ startVal = split[0]; }
        if(split.length === 2){
            startVal = split[0];
            if(split[1]){ endVal = split[1]; } else { endVal = "inf"; }
        }
    }
    if(params.length > 1 && params[params.length-1] === "?"){ greedy = "nongreedy"; }
    // Drop empty slots so "repeat(n)" and "repeat(m, n)" render cleanly.
    return [startVal, endVal, greedy].filter(Boolean).join(", ");
}

//expressions/unescape.rexs
// Removes one level of backslash escaping while leaving escaped backslashes intact.
const unEscape = (val: string) : string => {
    return val.replace(/(?<!\\)(?:\\\\)*\\(.)/g, "$1").replace(/\\\\/g, "\\");
}

//expressions/tokenizer.rexs
// Regex stage: captures special regex tokens (operators, escapes, classes),
// allowing any run of escaped backslashes before them. Custom stage: splits a
// captured chunk into its leading "\\" run (plain text) and the special token
// proper; short chunks are passed through as tokens unchanged.
const groupTokenizerChain = new TokenizerChain(new RegexTokenizer(/(?<!\\)(?:\\\\)*((?:[\*\.\^\$\|\[\]\-\(\)\+\?\{\}\,<=!:]|\\d|\\D|\\w|\\W|\\s|\\S|\\t|\\r|\\n|\\v|\\f|\[\\b\]|\\\d|\\c[A-Z]|\\x(?:[0-9a-f]){2}|\\u(?:[0-9a-f]){4}|\\b|\\B))/g)).token(new CustomTokenizer(token => {
    let out = [];
    if(token.length > 2) {
        const value = token.replace(/\\\\/g, "");
        token = token.substring(0, token.length - value.length);
        if (token) out.push({value: token, isToken: false});
        out.push({value: value, isToken: true});
    } else {
        out.push({value: token, isToken: true});
    }
    return out;
}));
the_stack
import {Mutable, Proto, Equals, FromAny} from "@swim/util";
import {Affinity} from "../fastener/Affinity";
import {FastenerContext} from "../fastener/FastenerContext";
import {FastenerOwner, FastenerInit, FastenerClass, Fastener} from "../fastener/Fastener";
import {StringProperty} from "./"; // forward import
import {NumberProperty} from "./"; // forward import
import {BooleanProperty} from "./"; // forward import

// --- Helper type aliases: extract value/init types from Property-typed members. ---

/** @internal */
export type MemberPropertyValue<O, K extends keyof O> = O[K] extends Property<any, infer T> ? T : never;

/** @internal */
export type MemberPropertyValueInit<O, K extends keyof O> = O[K] extends Property<any, any, infer U> ? U : never;

/** @internal */
export type MemberPropertyInit<O, K extends keyof O> = O[K] extends Property<any, infer T, infer U> ? T | U : never;

/** @internal */
export type MemberPropertyInitMap<O> = {-readonly [K in keyof O as O[K] extends Property ? K : never]?: MemberPropertyInit<O, K>};

/** @internal */
export type PropertyValue<P extends Property<any, any>> = P extends Property<any, infer T> ? T : never;

/** @internal */
export type PropertyValueInit<P extends Property<any, any>> = P extends Property<any, infer T, infer U> ? T | U : never;

/**
 * Declarative configuration accepted when defining a Property fastener.
 * T is the stored value type; U is the loosest type accepted by setters
 * (converted to T via fromAny). All hooks are optional overrides.
 * @public
 */
export interface PropertyInit<T = unknown, U = T> extends FastenerInit {
  extends?: {prototype: Property<any, any>} | string | boolean | null;
  type?: unknown;
  value?: T | U;
  updateFlags?: number;
  willInherit?(superFastener: Property<unknown, T>): void;
  didInherit?(superFastener: Property<unknown, T>): void;
  willUninherit?(superFastener: Property<unknown, T>): void;
  didUninherit?(superFastener: Property<unknown, T>): void;
  willBindSuperFastener?(superFastener: Property<unknown, T>): void;
  didBindSuperFastener?(superFastener: Property<unknown, T>): void;
  willUnbindSuperFastener?(superFastener: Property<unknown, T>): void;
  didUnbindSuperFastener?(superFastener: Property<unknown, T>): void;
  transformSuperValue?(superValue: T): T;
  transformValue?(value: T): T;
  willSetValue?(newValue: T, oldValue: T): void;
  didSetValue?(newValue: T, oldValue: T): void;
  initValue?(): T | U;
  definedValue?(value: T): boolean;
  equalValues?(newValue: T, oldValue: T | undefined): boolean;
  fromAny?(value: T | U): T;
}

/** @public */
export type PropertyDescriptor<O = unknown, T = unknown, U = T, I = {}> = ThisType<Property<O, T, U> & I> & PropertyInit<T, U> & Partial<I>;

/** @public */
export interface PropertyClass<P extends Property<any, any> = Property<any, any>> extends FastenerClass<P> {
}

/**
 * Factory interface: callable as a property decorator, with typed overloads
 * that pick a specialized subclass based on the declared `type`.
 * @public
 */
export interface PropertyFactory<P extends Property<any, any> = Property<any, any>> extends PropertyClass<P> {
  extend<I = {}>(className: string, classMembers?: Partial<I> | null): PropertyFactory<P> & I;

  specialize(type: unknown): PropertyFactory | null;

  define<O, T, U = T>(className: string, descriptor: PropertyDescriptor<O, T, U>): PropertyFactory<Property<any, T, U>>;
  define<O, T, U = T, I = {}>(className: string, descriptor: {implements: unknown} & PropertyDescriptor<O, T, U, I>): PropertyFactory<Property<any, T, U> & I>;

  // Decorator call signatures: the `type` field selects String/Number/Boolean
  // specializations; a FromAny-capable type (or explicit fromAny) handles the rest.
  <O, T extends string | undefined = string | undefined, U extends string | undefined = string | undefined>(descriptor: {type: typeof String} & PropertyDescriptor<O, T, U>): PropertyDecorator;
  <O, T extends number | undefined = number | undefined, U extends number | string | undefined = number | string | undefined>(descriptor: {type: typeof Number} & PropertyDescriptor<O, T, U>): PropertyDecorator;
  <O, T extends boolean | undefined = boolean | undefined, U extends boolean | string | undefined = boolean | string | undefined>(descriptor: {type: typeof Boolean} & PropertyDescriptor<O, T, U>): PropertyDecorator;
  <O, T, U = T>(descriptor: ({type: FromAny<T, U>} | {fromAny(value: T | U): T}) & PropertyDescriptor<O, T, U>): PropertyDecorator;
  <O, T, U = T>(descriptor: PropertyDescriptor<O, T, U>): PropertyDecorator;
  <O, T, U = T, I = {}>(descriptor: {implements: unknown} & PropertyDescriptor<O, T, U, I>): PropertyDecorator;
}

/**
 * A value-holding fastener that can inherit its value from a super fastener
 * and propagate changes to sub fasteners. Callable: () reads the value;
 * (value, affinity?) writes it and returns the owner.
 * @public
 */
export interface Property<O = unknown, T = unknown, U = T> extends Fastener<O> {
  (): T;
  (value: T | U, affinity?: Affinity): O;

  /** @override */
  get fastenerType(): Proto<Property<any, any>>;

  /** @internal @override */
  setInherited(inherited: boolean, superFastener: Property<unknown, T>): void;

  /** @protected @override */
  willInherit(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  onInherit(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  didInherit(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  willUninherit(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  onUninherit(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  didUninherit(superFastener: Property<unknown, T>): void;

  /** @override */
  readonly superFastener: Property<unknown, T> | null;

  /** @internal @override */
  getSuperFastener(): Property<unknown, T> | null;

  /** @protected @override */
  willBindSuperFastener(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  onBindSuperFastener(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  didBindSuperFastener(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  willUnbindSuperFastener(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  onUnbindSuperFastener(superFastener: Property<unknown, T>): void;

  /** @protected @override */
  didUnbindSuperFastener(superFastener: Property<unknown, T>): void;

  /** @internal */
  readonly subFasteners: ReadonlyArray<Property<unknown, T>> | null;

  /** @internal @override */
  attachSubFastener(subFastener: Property<unknown, T>): void;

  /** @internal @override */
  detachSubFastener(subFastener: Property<unknown, T>): void;

  get superValue(): T | undefined;

  getSuperValue(): NonNullable<T>;

  getSuperValueOr<E>(elseValue: E): NonNullable<T> | E;

  transformSuperValue(superValue: T): T;

  readonly value: T;

  getValue(): NonNullable<T>;

  getValueOr<E>(elseValue: E): NonNullable<T> | E;

  transformValue(value: T): T;

  setValue(newValue: T | U, affinity?: Affinity): void;

  /** @protected */
  willSetValue(newValue: T, oldValue: T): void;

  /** @protected */
  onSetValue(newValue: T, oldValue: T): void;

  /** @protected */
  didSetValue(newValue: T, oldValue: T): void;

  /** @internal @protected */
  decohereSubFasteners(): void;

  /** @internal @protected */
  decohereSubFastener(subFastener: Property<unknown, T>): void;

  /** @override */
  recohere(t: number): void;

  get updateFlags(): number | undefined; // optional prototype field

  /** @internal */
  definedValue(value: T): boolean;

  /** @internal */
  equalValues(newValue: T, oldValue: T | undefined): boolean;

  /** @internal */
  fromAny(value: T | U): T;
}

/** @public */
export const Property = (function (_super: typeof Fastener) {
  const Property: PropertyFactory = _super.extend("Property");

  Object.defineProperty(Property.prototype, "fastenerType", {
    get: function (this: Property): Proto<Property<any, any>> {
      return Property;
    },
    configurable: true,
  });

  // Adopting a super fastener: copy its (transformed) value reflexively.
  // (Statement continues into the next chunk.)
  Property.prototype.onInherit = function <T>(this: Property<unknown, T>, superFastener: Property<unknown, T>): void {
    const
superValue = this.transformSuperValue(superFastener.value);
    this.setValue(superValue, Affinity.Reflexive);
  };

  Property.prototype.onBindSuperFastener = function <T>(this: Property<unknown, T>, superFastener: Property<unknown, T>): void {
    (this as Mutable<typeof this>).superFastener = superFastener;
    _super.prototype.onBindSuperFastener.call(this, superFastener);
  };

  Property.prototype.onUnbindSuperFastener = function <T>(this: Property<unknown, T>, superFastener: Property<unknown, T>): void {
    _super.prototype.onUnbindSuperFastener.call(this, superFastener);
    (this as Mutable<typeof this>).superFastener = null;
  };

  // Sub-fastener bookkeeping: the array is lazily allocated on first attach.
  Property.prototype.attachSubFastener = function <T>(this: Property<unknown, T>, subFastener: Property<unknown, T>): void {
    let subFasteners = this.subFasteners as Property<unknown, T>[] | null;
    if (subFasteners === null) {
      subFasteners = [];
      (this as Mutable<typeof this>).subFasteners = subFasteners;
    }
    subFasteners.push(subFastener);
  };

  Property.prototype.detachSubFastener = function <T>(this: Property<unknown, T>, subFastener: Property<unknown, T>): void {
    const subFasteners = this.subFasteners as Property<unknown, T>[] | null;
    if (subFasteners !== null) {
      const index = subFasteners.indexOf(subFastener);
      if (index >= 0) {
        subFasteners.splice(index, 1);
      }
    }
  };

  // superValue reads through to the bound super fastener, if any.
  Object.defineProperty(Property.prototype, "superValue", {
    get: function <T>(this: Property<unknown, T>): T | undefined {
      const superFastener = this.superFastener;
      return superFastener !== null ? superFastener.value : void 0;
    },
    configurable: true,
  });

  // Throws a TypeError naming the fastener when the super value is nullish.
  Property.prototype.getSuperValue = function <T>(this: Property<unknown, T>): NonNullable<T> {
    const superValue = this.superValue;
    if (superValue === void 0 || superValue === null) {
      let message = superValue + " ";
      if (this.name.length !== 0) {
        message += this.name + " ";
      }
      message += "super value";
      throw new TypeError(message);
    }
    return superValue as NonNullable<T>;
  };

  Property.prototype.getSuperValueOr = function <T, E>(this: Property<unknown, T>, elseValue: E): NonNullable<T> | E {
    let superValue: T | E | undefined = this.superValue;
    if (superValue === void 0 || superValue === null) {
      superValue = elseValue;
    }
    return superValue as NonNullable<T> | E;
  };

  // Identity transform by default; subclasses may override.
  Property.prototype.transformSuperValue = function <T>(this: Property<unknown, T>, superValue: T): T {
    return superValue;
  };

  // Throws a TypeError naming the fastener when the value is nullish.
  Property.prototype.getValue = function <T>(this: Property<unknown, T>): NonNullable<T> {
    const value = this.value;
    if (value === void 0 || value === null) {
      let message = value + " ";
      if (this.name.length !== 0) {
        message += this.name + " ";
      }
      message += "value";
      throw new TypeError(message);
    }
    return value as NonNullable<T>;
  };

  Property.prototype.getValueOr = function <T, E>(this: Property<unknown, T>, elseValue: E): NonNullable<T> | E {
    let value: T | E = this.value;
    if (value === void 0 || value === null) {
      value = elseValue;
    }
    return value as NonNullable<T> | E;
  };

  Property.prototype.transformValue = function <T>(this: Property<unknown, T>, value: T): T {
    return value;
  };

  // Core setter: affinity-gated, converts/transforms the input, and only fires
  // the will/on/did hooks and sub-fastener decoherence on an actual change.
  Property.prototype.setValue = function <T, U>(this: Property<unknown, T, U>, newValue: T | U, affinity?: Affinity): void {
    if (affinity === void 0) {
      affinity = Affinity.Extrinsic;
    }
    if (this.minAffinity(affinity)) {
      newValue = this.fromAny(newValue);
      newValue = this.transformValue(newValue);
      const oldValue = this.value;
      if (!this.equalValues(newValue, oldValue)) {
        this.willSetValue(newValue, oldValue);
        (this as Mutable<typeof this>).value = newValue;
        this.onSetValue(newValue, oldValue);
        this.didSetValue(newValue, oldValue);
        this.setCoherent(true);
        this.decohereSubFasteners();
      }
    }
  };

  Property.prototype.willSetValue = function <T>(this: Property<unknown, T>, newValue: T, oldValue: T): void {
    // hook
  };

  // Requests an owner update when the property declares updateFlags.
  Property.prototype.onSetValue = function <T>(this: Property<unknown, T>, newValue: T, oldValue: T): void {
    const updateFlags = this.updateFlags;
    const fastenerContext = this.owner;
    if (updateFlags !== void 0 && FastenerContext.has(fastenerContext, "requireUpdate")) {
      fastenerContext.requireUpdate(updateFlags);
    }
  };

  Property.prototype.didSetValue = function <T>(this: Property<unknown, T>, newValue: T, oldValue: T): void {
    // hook
  };

  Property.prototype.decohereSubFasteners = function (this: Property): void {
    const subFasteners = this.subFasteners;
    for (let i = 0, n = subFasteners !== null ? subFasteners.length : 0; i < n; i += 1) {
      this.decohereSubFastener(subFasteners![i]!);
    }
  };

  // Either re-establishes inheritance on the sub fastener (when our affinity
  // dominates) or marks an already-inheriting sub fastener incoherent.
  Property.prototype.decohereSubFastener = function (this: Property, subFastener: Property): void {
    if ((subFastener.flags & Fastener.InheritedFlag) === 0 && Math.min(this.flags & Affinity.Mask, Affinity.Intrinsic) >= (subFastener.flags & Affinity.Mask)) {
      subFastener.setInherited(true, this);
    } else if ((subFastener.flags & Fastener.InheritedFlag) !== 0 && (subFastener.flags & Fastener.DecoherentFlag) === 0) {
      subFastener.setCoherent(false);
      subFastener.decohere();
    }
  };

  // Recohere by re-pulling the (transformed) value from the super fastener.
  Property.prototype.recohere = function (this: Property, t: number): void {
    if ((this.flags & Fastener.InheritedFlag) !== 0) {
      const superFastener = this.superFastener;
      if (superFastener !== null) {
        const superValue = this.transformSuperValue(superFastener.value);
        this.setValue(superValue, Affinity.Reflexive);
      }
    }
  };

  Property.prototype.definedValue = function <T>(this: Property<unknown, T>, value: T): boolean {
    return value !== void 0 && value !== null;
  };

  Property.prototype.equalValues = function <T>(this: Property<unknown, T>, newValue: T, oldValue: T | undefined): boolean {
    return Equals(newValue, oldValue);
  };

  Property.prototype.fromAny = function <T, U>(this: Property<unknown, T, U>, value: T | U): T {
    return value as T;
  };

  // Builds the callable fastener instance: a closure that reads the value with
  // no args or writes it (returning the owner) with args, re-prototyped onto
  // the property class.
  Property.construct = function <P extends Property<any, any>>(propertyClass: {prototype: P}, property: P | null, owner: FastenerOwner<P>): P {
    if (property === null) {
      property = function (value?: PropertyValue<P> | PropertyValueInit<P>, affinity?: Affinity): PropertyValue<P> | FastenerOwner<P> {
        if (arguments.length === 0) {
          return property!.value;
        } else {
          property!.setValue(value!, affinity);
          return property!.owner;
        }
      } as P;
      delete (property as Partial<Mutable<P>>).name; // don't clobber prototype name
      Object.setPrototypeOf(property, propertyClass.prototype);
    }
    property = _super.construct(propertyClass, property, owner) as P;
    Object.defineProperty(property, "superFastener", { // override getter
      value: null,
      writable: true,
      enumerable: true,
      configurable: true,
    });
    (property as Mutable<typeof property>).subFasteners = null;
    (property as Mutable<typeof property>).value = void 0 as unknown as PropertyValue<P>;
    return property;
  };

  // Maps primitive constructor types to their specialized property factories.
  Property.specialize = function (type: unknown): PropertyFactory | null {
    if (type === String) {
      return StringProperty;
    } else if (type === Number) {
      return NumberProperty;
    } else if (type === Boolean) {
      return BooleanProperty;
    }
    return null;
  };

  // Creates a concrete subclass from a descriptor: lifts construction-time
  // options (affinity, inherits, value/initValue) out of the descriptor, picks
  // a specialized superclass from `type`, and wires a construct override that
  // applies those options to each new instance.
  Property.define = function <O, T, U>(className: string, descriptor: PropertyDescriptor<O, T, U>): PropertyFactory<Property<any, T, U>> {
    let superClass = descriptor.extends as PropertyFactory | null | undefined;
    const affinity = descriptor.affinity;
    const inherits = descriptor.inherits;
    const value = descriptor.value;
    const initValue = descriptor.initValue;
    delete descriptor.extends;
    delete descriptor.implements;
    delete descriptor.affinity;
    delete descriptor.inherits;
    delete descriptor.value;
    delete descriptor.initValue;
    if (superClass === void 0 || superClass === null) {
      superClass = this.specialize(descriptor.type);
    }
    if (superClass === null) {
      superClass = this;
      if (descriptor.fromAny === void 0 && FromAny.is<T, U>(descriptor.type)) {
        descriptor.fromAny = descriptor.type.fromAny;
      }
    }
    const propertyClass = superClass.extend(className, descriptor);
    propertyClass.construct = function (propertyClass: {prototype: Property<any, any>}, property: Property<O, T, U> | null, owner: O): Property<O, T, U> {
      property = superClass!.construct(propertyClass, property, owner);
      if (affinity !== void 0) {
        property.initAffinity(affinity);
      }
      if (inherits !== void 0) {
        property.initInherits(inherits);
      }
      if (initValue !== void 0) {
        (property as Mutable<typeof property>).value = property.fromAny(initValue());
      } else if (value !== void 0) {
        (property as Mutable<typeof property>).value = property.fromAny(value);
      }
      return property;
    };
    return propertyClass;
  };

  return Property;
})(Fastener);
the_stack
//@ts-check ///<reference path="devkit.d.ts" /> declare namespace DevKit { namespace FormEmailServerProfile_Information { interface tab_tab_3_Sections { _2EB17E5B_3A06_43BD_BB50_23F8630CD9F8_SECTION_1: DevKit.Controls.Section; _2EB17E5B_3A06_43BD_BB50_23F8630CD9F8_SECTION_2: DevKit.Controls.Section; _2EB17E5B_3A06_43BD_BB50_23F8630CD9F8_SECTION_3: DevKit.Controls.Section; } interface tab_tab_4_Sections { tab_4_section_1: DevKit.Controls.Section; } interface tab_tab_3 extends DevKit.Controls.ITab { Section: tab_tab_3_Sections; } interface tab_tab_4 extends DevKit.Controls.ITab { Section: tab_tab_4_Sections; } interface Tabs { tab_3: tab_tab_3; tab_4: tab_tab_4; } interface Body { Tab: Tabs; /** Type additional information that describes the email server profile. */ Description: DevKit.Controls.String; /** Email Server Type Name */ EmailServerTypeName: DevKit.Controls.String; /** Type the tenant ID of Exchange Online. */ ExchangeOnlineTenantId: DevKit.Controls.String; /** Select the incoming email authentication protocol that is used for connecting to the email server. */ IncomingAuthenticationProtocol: DevKit.Controls.OptionSet; /** Select how credentials will be retrieved for incoming email. */ IncomingCredentialRetrieval: DevKit.Controls.OptionSet; /** Type the password for incoming email. */ IncomingPassword: DevKit.Controls.String; /** Type the Exchange port number for incoming mail. */ IncomingPortNumber: DevKit.Controls.Integer; /** Type the location of the server for incoming email. */ IncomingServerLocation: DevKit.Controls.String; /** Select whether to use impersonation to access the mailbox to process incoming emails. */ IncomingUseImpersonation: DevKit.Controls.Boolean; /** Type the user name for incoming email. */ IncomingUserName: DevKit.Controls.String; /** Select whether to use the Secure Sockets Layer (SSL) protocol for incoming email. 
*/ IncomingUseSSL: DevKit.Controls.Boolean; /** Maximum number of concurrent connections allowed to the email server per authenticated user. */ MaxConcurrentConnections: DevKit.Controls.Integer; /** Minimum polling interval, in minutes, for mailboxes that are associated with this email server profile. */ MinPollingIntervalInMinutes: DevKit.Controls.Integer; /** Indicates whether to move undelivered incoming emails to the Undeliverable folder in Microsoft Exchange. */ MoveUndeliveredEmails: DevKit.Controls.Boolean; /** Type a meaningful name for the email server profile. This name is displayed when you need to select an email server profile. */ Name: DevKit.Controls.String; notescontrol: DevKit.Controls.Note; /** ClientId used for OAuth athentication scheme */ OauthClientId: DevKit.Controls.String; /** Client secret used for the OAuth authentication scheme */ OauthClientSecret: DevKit.Controls.String; /** Select the outgoing email authentication protocol that is used for connecting to the email server. */ OutgoingAuthenticationProtocol: DevKit.Controls.OptionSet; /** Select how credentials will be retrieved for outgoing email. */ OutgoingCredentialRetrieval: DevKit.Controls.OptionSet; /** Type the password for outgoing email. */ OutgoingPassword: DevKit.Controls.String; /** Type the Exchange port number for outgoing mail. */ OutgoingPortNumber: DevKit.Controls.Integer; /** Type the location of the server for outgoing email. */ OutgoingServerLocation: DevKit.Controls.String; /** Select whether to use impersonation for accessing the mailbox to process outgoing emails. */ OutgoingUseImpersonation: DevKit.Controls.Boolean; /** Type the user name for outgoing email. */ OutgoingUsername: DevKit.Controls.String; /** Select whether to use the Secure Sockets Layer (SSL) protocol for outgoing email. */ OutgoingUseSSL: DevKit.Controls.Boolean; /** Enter the user or team who is assigned to manage the record. 
This field is updated every time the record is assigned to a different user. */ OwnerId: DevKit.Controls.Lookup; /** Shows the date and time after which email messages that are received will be processed for mailboxes associated with the email server profile. */ ProcessEmailsReceivedAfter: DevKit.Controls.DateTime; /** Select whether to send an email alert if more than 50% of the mailboxes in this email server profile failed to synchronize in an hour period. */ SendEmailAlert: DevKit.Controls.Boolean; /** Select the profile's email server type. */ ServerType: DevKit.Controls.OptionSet; /** Select whether to timeout a single mailbox. */ TimeoutMailboxConnection: DevKit.Controls.Boolean; /** Type the number of milliseconds to timeout a single mailbox. The upper limit is 100 seconds. */ TimeoutMailboxConnectionAfterAmount: DevKit.Controls.Integer; /** Select whether to automatically discover the server location */ UseAutoDiscover: DevKit.Controls.Boolean; /** Select whether to use the Exchange Online Tenant ID obtained from running Microsoft Azure PowerShell cmdlets (highly recommended). If you select No, you can edit this field manually */ UseDefaultTenantId: DevKit.Controls.Boolean; /** Select whether to use the same settings for incoming and outgoing connections. */ UseSameSettingsForOutgoingConnections: DevKit.Controls.Boolean; } interface Footer extends DevKit.Controls.IFooter { /** Shows whether the email server profile is active or inactive. */ StateCode: DevKit.Controls.OptionSet; } } class FormEmailServerProfile_Information extends DevKit.IForm { /** * DynamicsCrm.DevKit form EmailServerProfile_Information * @param executionContext the execution context * @param defaultWebResourceName default resource name. 
E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form EmailServerProfile_Information */ Body: DevKit.FormEmailServerProfile_Information.Body; /** The Footer section of form EmailServerProfile_Information */ Footer: DevKit.FormEmailServerProfile_Information.Footer; } class EmailServerProfileApi { /** * DynamicsCrm.DevKit EmailServerProfileApi * @param entity The entity object */ constructor(entity?: any); /** * Get the value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedValue(alias: string, isMultiOptionSet?: boolean): any; /** * Get the formatted value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string; /** The entity object */ Entity: any; /** The entity name */ EntityName: string; /** The entity collection name */ EntityCollectionName: string; /** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */ "@odata.etag": string; /** AAD ResourceId used for OAuth athentication scheme */ AadResourceId: DevKit.WebApi.StringValue; /** Shows who created the record. */ CreatedBy: DevKit.WebApi.LookupValueReadonly; /** Shows the date and time when the record was created. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */ CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Shows who created the record on behalf of another user. */ CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Type the default location of the server. */ DefaultServerLocation: DevKit.WebApi.StringValue; /** Type additional information that describes the email server profile. 
*/ Description: DevKit.WebApi.StringValue; /** Unique identifier of the email server profile. */ EmailServerProfileId: DevKit.WebApi.GuidValue; /** Email Server Type Name */ EmailServerTypeName: DevKit.WebApi.StringValueReadonly; /** Indicates the code page to use when encoding email content. */ EncodingCodePage: DevKit.WebApi.StringValue; /** The default image for the entity. */ EntityImage: DevKit.WebApi.StringValue; EntityImage_Timestamp: DevKit.WebApi.BigIntValueReadonly; EntityImage_URL: DevKit.WebApi.StringValueReadonly; /** For internal use only. */ EntityImageId: DevKit.WebApi.GuidValueReadonly; /** Type the tenant ID of Exchange Online. */ ExchangeOnlineTenantId: DevKit.WebApi.StringValue; /** Select the version of Exchange that is on the email server. */ ExchangeVersion: DevKit.WebApi.OptionSetValue; /** Select the incoming email authentication protocol that is used for connecting to the email server. */ IncomingAuthenticationProtocol: DevKit.WebApi.OptionSetValue; /** Select how credentials will be retrieved for incoming email. */ IncomingCredentialRetrieval: DevKit.WebApi.OptionSetValue; /** Indicates the incoming partner application. */ IncomingPartnerApplication: DevKit.WebApi.LookupValueReadonly; /** Type the password for incoming email. */ IncomingPassword: DevKit.WebApi.StringValue; /** Type the Exchange port number for incoming mail. */ IncomingPortNumber: DevKit.WebApi.IntegerValue; /** Type the location of the server for incoming email. */ IncomingServerLocation: DevKit.WebApi.StringValue; /** Select whether to use impersonation to access the mailbox to process incoming emails. */ IncomingUseImpersonation: DevKit.WebApi.BooleanValue; /** Type the user name for incoming email. */ IncomingUserName: DevKit.WebApi.StringValue; /** Select whether to use the Secure Sockets Layer (SSL) protocol for incoming email. 
*/ IncomingUseSSL: DevKit.WebApi.BooleanValue; IsIncomingPasswordSet: DevKit.WebApi.BooleanValueReadonly; IsOauthClientSecretSet: DevKit.WebApi.BooleanValueReadonly; IsOutgoingPasswordSet: DevKit.WebApi.BooleanValueReadonly; /** The Azure Key Vault reference id */ keyvaultreferenceid: DevKit.WebApi.LookupValue; /** Shows the last test authorization status of email server profile */ LastAuthorizationStatus: DevKit.WebApi.OptionSetValue; /** Shows the Dynamics 365 message obtained during the Last Test */ LastCrmMessage: DevKit.WebApi.StringValue; /** Shows the last test Execution status of email server profile */ LastTestExecutionStatus: DevKit.WebApi.OptionSetValue; /** Shows the EWS Request created during the Last Test */ LastTestRequest: DevKit.WebApi.StringValue; /** Shows the EWS Response obtained during the Last Test */ LastTestResponse: DevKit.WebApi.StringValue; /** Shows the Last Test Start date and time */ LastTestStartTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Shows the Time taken while running the last test */ LastTestTotalExecutionTime: DevKit.WebApi.BigIntValue; /** Shows the last test Validation status of email server profile */ LastTestValidationStatus: DevKit.WebApi.OptionSetValue; /** The managed identity id */ managedidentityid: DevKit.WebApi.LookupValue; /** Maximum number of concurrent connections allowed to the email server per authenticated user. */ MaxConcurrentConnections: DevKit.WebApi.IntegerValue; /** Minimum polling interval, in minutes, for mailboxes that are associated with this email server profile. */ MinPollingIntervalInMinutes: DevKit.WebApi.IntegerValue; /** Shows who last updated the record. */ ModifiedBy: DevKit.WebApi.LookupValueReadonly; /** Shows the date and time when the record was last updated. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. 
*/ ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Shows who last updated the record on behalf of another user. */ ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Indicates whether to move undelivered incoming emails to the Undeliverable folder in Microsoft Exchange. */ MoveUndeliveredEmails: DevKit.WebApi.BooleanValue; /** Type a meaningful name for the email server profile. This name is displayed when you need to select an email server profile. */ Name: DevKit.WebApi.StringValue; /** ClientId used for OAuth athentication scheme */ OauthClientId: DevKit.WebApi.StringValue; /** Client secret used for the OAuth authentication scheme */ OauthClientSecret: DevKit.WebApi.StringValue; /** Unique identifier of the organization associated with the record. */ OrganizationId: DevKit.WebApi.LookupValueReadonly; /** Select the outgoing email authentication protocol that is used for connecting to the email server. */ OutgoingAuthenticationProtocol: DevKit.WebApi.OptionSetValue; /** Indicates whether the email connector will grant delegate access permissions to the accessing user when required while processing outgoing emails. */ OutgoingAutoGrantDelegateAccess: DevKit.WebApi.BooleanValue; /** Select how credentials will be retrieved for outgoing email. */ OutgoingCredentialRetrieval: DevKit.WebApi.OptionSetValue; /** Indicates the outgoing partner application. */ OutgoingPartnerApplication: DevKit.WebApi.LookupValueReadonly; /** Type the password for outgoing email. */ OutgoingPassword: DevKit.WebApi.StringValue; /** Type the Exchange port number for outgoing mail. */ OutgoingPortNumber: DevKit.WebApi.IntegerValue; /** Type the location of the server for outgoing email. */ OutgoingServerLocation: DevKit.WebApi.StringValue; /** Select whether to use impersonation for accessing the mailbox to process outgoing emails. */ OutgoingUseImpersonation: DevKit.WebApi.BooleanValue; /** Type the user name for outgoing email. 
*/ OutgoingUsername: DevKit.WebApi.StringValue; /** Select whether to use the Secure Sockets Layer (SSL) protocol for outgoing email. */ OutgoingUseSSL: DevKit.WebApi.BooleanValue; /** Email Server Profile Owner's email address */ OwnerEmailAddress: DevKit.WebApi.StringValue; /** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */ OwnerId_systemuser: DevKit.WebApi.LookupValue; /** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */ OwnerId_team: DevKit.WebApi.LookupValue; /** Select the business unit that owns the record. */ OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for the team that owns the record. */ OwningTeam: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for the user that owns the record. */ OwningUser: DevKit.WebApi.LookupValueReadonly; /** Shows the date and time after which email messages that are received will be processed for mailboxes associated with the email server profile. */ ProcessEmailsReceivedAfter_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Select whether to send an email alert if more than 50% of the mailboxes in this email server profile failed to synchronize in an hour period. */ SendEmailAlert: DevKit.WebApi.BooleanValue; /** Select the profile's email server type. */ ServerType: DevKit.WebApi.OptionSetValue; /** Shows whether the email server profile is active or inactive. */ StateCode: DevKit.WebApi.OptionSetValue; /** Select the email server profile's status. */ StatusCode: DevKit.WebApi.OptionSetValue; /** Select whether to timeout a single mailbox. */ TimeoutMailboxConnection: DevKit.WebApi.BooleanValue; /** Type the number of milliseconds to timeout a single mailbox. The upper limit is 100 seconds. */ TimeoutMailboxConnectionAfterAmount: DevKit.WebApi.IntegerValue; /** For internal use only. 
*/ TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue; /** Select whether to automatically discover the server location */ UseAutoDiscover: DevKit.WebApi.BooleanValue; /** Select whether to use the Exchange Online Tenant ID obtained from running Microsoft Azure PowerShell cmdlets (highly recommended). If you select No, you can edit this field manually */ UseDefaultTenantId: DevKit.WebApi.BooleanValue; /** Select whether to use the same settings for incoming and outgoing connections. */ UseSameSettingsForOutgoingConnections: DevKit.WebApi.BooleanValue; /** Time zone code that was in use when the record was created. */ UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue; /** Version number of the email server profile. */ VersionNumber: DevKit.WebApi.BigIntValueReadonly; } } declare namespace OptionSet { namespace EmailServerProfile { enum ExchangeVersion { /** 0 */ Exchange_2007, /** 1 */ Exchange_2007_SP1, /** 2 */ Exchange_2010, /** 3 */ Exchange_2010_SP1, /** 4 */ Exchange_2010_SP2, /** 5 */ Exchange_2013 } enum IncomingAuthenticationProtocol { /** 0 */ Auto_Detect, /** 3 */ Basic, /** 1 */ Negotiate, /** 2 */ NTLM, /** 4 */ OAuth } enum IncomingCredentialRetrieval { /** 7 */ Azure_Active_Directory_OAuth, /** 0 */ Credentials_Specified_by_a_User_or_Queue, /** 1 */ Credentials_Specified_in_Email_Server_Profile, /** 6 */ Exchange_Hybrid_Modern_Auth_HMA, /** 5 */ Gmail_OAuth, /** 2 */ Server_to_Server_Authentication, /** 3 */ Windows_Integrated_Authentication, /** 4 */ Without_Credentials_Anonymous } enum LastAuthorizationStatus { /** 0 */ Failure, /** 1 */ Success } enum LastTestExecutionStatus { /** 0 */ Failure, /** 1 */ Success, /** 2 */ Warning } enum LastTestValidationStatus { /** 0 */ Failure, /** 1 */ Success } enum OutgoingAuthenticationProtocol { /** 0 */ Auto_Detect, /** 3 */ Basic, /** 1 */ Negotiate, /** 2 */ NTLM, /** 4 */ OAuth } enum OutgoingCredentialRetrieval { /** 7 */ Azure_Active_Directory_OAuth, /** 0 */ 
Credentials_Specified_by_a_User_or_Queue, /** 1 */ Credentials_Specified_in_Email_Server_Profile, /** 6 */ Exchange_Hybrid_Modern_Auth_HMA, /** 5 */ Gmail_OAuth, /** 2 */ Server_to_Server_Authentication, /** 3 */ Windows_Integrated_Authentication, /** 4 */ Without_Credentials_Anonymous } enum ServerType { /** 3 */ Exchange_Online_Hybrid, /** 0 */ Exchange_Server, /** 2 */ Exchange_Server_Hybrid, /** 4 */ IMAPSMTP, /** 1 */ Other_POP3SMTP } enum StateCode { /** 0 */ Active, /** 1 */ Inactive } enum StatusCode { /** 1 */ Active, /** 2 */ Inactive } enum RollupState { /** 0 - Attribute value is yet to be calculated */ NotCalculated, /** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */ Calculated, /** 2 - Attribute value calculation lead to overflow error */ OverflowError, /** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */ OtherError, /** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */ RetryLimitExceeded, /** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */ HierarchicalRecursionLimitReached, /** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */ LoopDetected } } } //{'JsForm':['Information'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
the_stack
import SObject from '../Core/SObject'; import {SClass} from '../Core/Decorator'; import Observable from '../Core/Observable'; import {IResourceEntity, IResourceState, IInstantOptions} from '../types/Resource'; import ResourceLoader from '../Resource/ResourceLoader'; import {TConstructor} from '../types/Common'; import MemberConflictException from '../Exception/MemberConflictException'; import MissingMemberException from '../Exception/MissingMemberException'; import Game from '../Core/Game'; import StateActor from '../Info/StateActor'; import Debug from '../Debug'; import ResourceLoadException from '../Exception/ResourceLoadException'; import GlTFLoader from './GlTFLoader'; /** * @hidden */ function getExt(url: string) { return '.' + url.split('.').pop(); } /** * 判断一个实例是否为`ResourceManager`。 */ export function isResourceManager(value: SObject): value is ResourceManager { return (value as ResourceManager).isResourceManager; } /** * 资源管理器类。作为资源的集中管理容器,承担着引擎所有的资源加载器的注册、销毁,以及资源的添加、加载和释放。 * * @template IDefaultLoaders 用于标注所有资源的名称以及对应的事件参数类型。 * @noInheritDoc */ @SClass({className: 'ResourceManager'}) export default class ResourceManager<IDefaultLoaders extends {[type: string]: any} = {}> extends SObject { public isResourceManager = true; protected _onError = new Observable<{error: Error, state: IResourceState}>(this); protected _onLoading = new Observable<IResourceState>(this); protected _onLoaded = new Observable<IResourceState>(this); protected _loaders: {[type: string]: ResourceLoader} = {}; protected _loadersFormat: {[format: string]: string} = {}; protected _queue: {[name: string]: {entity: IResourceEntity, pending: Promise<any>}} = {}; protected _store: {[name: string]: IResourceEntity} = {}; protected _state: IResourceState = { totalCount: 0, loadedCount: 0, progress: 0, totalWeight: 0, loadDone: true, current: null }; protected _game: Game; /** * @hidden */ constructor(game: Game) { super(); this._game = game; } /** * 此批资源是否加载完毕。 */ get loadDone() { return 
this._state.loadDone; } /** * 获取父级Game实例。 */ get parent() { return this._game; } /** * 此批资源加载错误时的可观察实例。 */ get onError() { return this._onError; } /** * 此批资源加载进度更新时的可观察实例。 */ get onLoading() { return this._onLoading; } /** * 此批资源加载完毕时的可观察实例。 */ get onLoaded() { return this._onLoaded; } /** * 获取一个指定的Loader。 */ public getLoader<TKey extends keyof IDefaultLoaders>(type: TKey): IDefaultLoaders[TKey]['loader']; public getLoader<TLoader extends ResourceLoader = ResourceLoader>(type: string): TLoader; public getLoader(type: string): ResourceLoader { return this._loaders[type]; } /** * 获取一个指定格式的Loader。 * * **指定格式需要在`ResourceLoader.EXTENSIONS`静态变量中定义!** */ public getLoaderByFormat<TLoader extends ResourceLoader = ResourceLoader>(format: string): TLoader { return this.getLoader<TLoader>(this._loadersFormat[format]); } /** * 指定资源类型`type`和加载器`LoaderClass`,注册一个Loader。 * 关于加载器,请见[ResourceLoader](../resourceloader)。 */ public register<TKey extends keyof IDefaultLoaders>( type: TKey, LoaderClass?: TConstructor<ResourceLoader<IDefaultLoaders[TKey]['entity']>> ): this; public register<IResource extends IResourceEntity = IResourceEntity>( type: string, LoaderClass?: TConstructor<ResourceLoader<IResource>> ): this; public register( type: string, LoaderClass?: TConstructor<ResourceLoader<IResourceEntity>> ): this { if (this._loaders[type]) { throw new MemberConflictException(this, 'Loader', type, this, 'You should unregister it at first !'); } const loader = new LoaderClass(); loader.game = this._game; this._loaders[type] = loader; ((LoaderClass as any).FORMATS || []).forEach((format: string) => { this._loadersFormat[format] = type; }); return this; } /** * 卸载一个Loader。 */ public unregister<TKey extends keyof IDefaultLoaders>(type: TKey): this; public unregister(type: string): this; public unregister(type: string): this { if (!this._loaders[type]) { return; } ((this._loaders[type].constructor as any) || []).FORMATS.forEach((format: string) => { delete this._loadersFormat[format]; }); 
delete this._loaders[type]; return this; } /** * 直接添加一个加载过的、或构造好的资源到资源管理器中。 */ public add<TKey extends keyof IDefaultLoaders>( type: TKey, name: string, resource: IDefaultLoaders[TKey]['entity']['result'] ): this; public add<IResource extends IResourceEntity['result'] = IResourceEntity['result']>( type: string, name: string, resource: IResource ): this; public add(type: string, name: string, resource: IResourceEntity['result']): this { if (!this._loaders[type]) { throw new MissingMemberException(this, 'Loader', type, this, 'Register it before adding resource !'); } if (this._store[name]) { return; } this._store[name] = {name, result: resource, type, url: ''}; return this; } /** * 判断一个资源是否已经存在。 */ public has(name: string): boolean { return !!this._store[name]; } /** * 获取一个指定的资源实例。 */ public get<TKey extends keyof IDefaultLoaders>(name: string): IDefaultLoaders[TKey]['entity']['result']; public get<IResource extends IResourceEntity = IResourceEntity>(name: string): IResource['result']; public get(name: string) { if (!this._store[name]) { Debug.warn(`Resource ${name} is not existed !`); return null; } return this._store[name].result; } /** * 释放一个指定的资源。 */ public release(name: string): this { const entity = this._store[name]; if (!entity) { throw new MissingMemberException(this, 'Entity', name, this); } this._loaders[entity.type].release(entity); delete this._store[name]; return this; } /** * 清除所有资源。 */ public clear(): this { for (const name in this._store) { this.release(name); } return this; } /** * 取消特定资源加载。 */ public cancel(name: string): this { if (!this._queue[name]) { return; } const {entity} = this._queue[name]; delete this._queue[name]; this._loaders[entity.type].cancel(entity); this._state.loadedCount += 1; if (this._state.loadedCount === this._state.totalCount) { this.handleLoadDone(); } return this; } /** * 取消当前所有资源加载。 */ public cancelAll(): this { for (const key in this._queue) { this.cancel(key); } return this; } /** * 
指定`type`、`name`和`url`等,加载一个资源,实际上会代理到特定加载器的`load`方法。 * 此方法很灵活,其返回一个`Promise`,让你可以在资源加载完成时直接取得。 * 也可以配合`LevelScriptActor`的`onPreload`和`onLoading`声明周期,实现关卡资源的批量预加载。 */ public async load<TKey extends keyof IDefaultLoaders>( entity: IDefaultLoaders[TKey]['entity'] ): Promise<IDefaultLoaders[TKey]['entity']['result']>; public async load<IResource extends IResourceEntity = IResourceEntity>( entity: IResourceEntity ): Promise<IResource['result']>; public async load(entity: IResourceEntity): Promise<IResourceEntity['result']> { if (typeof entity.url === 'function') { entity.url = (entity as any).url(this._game); } let type = entity.type || this._loadersFormat[getExt(entity.url)]; if (!this._loaders[type]) { throw new MissingMemberException(this, 'Loader', type, this, 'Register it before adding resource !'); } if (this._store[entity.name]) { return this._store[entity.name]; } if (this._queue[entity.name]) { return this._queue[entity.name]; } const loader = this._loaders[type]; entity.type = type; entity.weight = entity.weight || 1; entity.preProgress = 0; entity.canceled = false; this._state.totalCount += 1; this._state.totalWeight += entity.weight; this._state.loadDone = false; const pending = new Promise((resolve, reject) => { loader.load(entity, { onLoading: this.handleLoadingOne, onLoaded: () => { // resource has been canceled if (!this._queue[entity.name]) { return; } this.handleLoadedOne(entity); resolve(entity.result); }, onError: (_, error) => { // resource has been canceled if (!this._queue[entity.name]) { return; } const {stack} = error; error = new ResourceLoadException(entity.name, this, error.message); error.stack = stack; this.handleLoadedOne(entity, error); reject(error); } }); }); this._queue[entity.name] = {pending, entity}; return this._queue[entity.name].pending; } /** * 指定资源名和配置,通过资源实例化一个对象。 * 比如你可以指定一个`GlTF`资源,将其实例化为一个具体的`SceneActor`或者`SceneComponent`。 */ public instantiate<TKey extends keyof IDefaultLoaders>( resourceName: string, options?: 
IDefaultLoaders[TKey]['instantOptions'] ): IDefaultLoaders[TKey]['instantResult']; public instantiate( resourceName: string, options?: IInstantOptions ): any { const entity = this._store[resourceName]; if (!entity) { throw new MissingMemberException(this, 'Entity', resourceName, this); } return this._loaders[entity.type].instantiate(entity, options || {}); } private handleLoadingOne = (entity: IResourceEntity, progress: number) => { // resource has been canceled if (!this._queue[entity.name]) { return; } const {weight, preProgress} = entity; entity.preProgress = progress; this._state.current = entity; this._state.progress += (weight * (progress - preProgress) / this._state.totalWeight); this.onLoading.notify(Object.assign({}, this._state)); } private handleLoadedOne = (entity: IResourceEntity, error: Error = null) => { const {weight, preProgress} = entity; const progress = 1; entity.preProgress = progress; this._state.current = entity; this._state.progress += (weight * (progress - preProgress) / this._state.totalWeight); this._state.loadedCount += 1; delete this._queue[entity.name]; if (!error) { this._store[entity.name] = entity; } else { this.onError.notify({error, state: Object.assign({}, this._state)}); } this.onLoading.notify(Object.assign(error ? {error} : {}, this._state)); if (this._state.loadedCount === this._state.totalCount) { this.handleLoadDone(); } } private handleLoadDone() { (GlTFLoader as any).clearCache(); this._state.loadDone = true; this._state.progress = 1; const state = Object.assign({}, this._state); this._state.totalCount = 0; this._state.loadedCount = 0; this._state.totalWeight = 0; this._state.progress = 0; this._state.current = null; this.onLoaded.notify(state); } /** * 销毁,继承请先`super.onDestroy()`。 */ public onDestroy() { if (!this._state.loadDone) { this.cancelAll(); } this.clear(); this._loaders = {}; } }
the_stack
// Monarch language definition (monaco-editor style) for the Snowflake SQL
// dialect. `ignoreCase` makes all keyword/function matching case-insensitive.
export default {
  defaultToken: '',
  ignoreCase: true,
  // Bracket pairs reported to the editor for matching and colorization.
  brackets: [
    { open: '[', close: ']', token: 'delimiter.square' },
    { open: '(', close: ')', token: 'delimiter.parenthesis' }
  ],
  // Reserved words of the Snowflake SQL grammar.
  keywords: [
    'ACCOUNT', 'ALL', 'ALTER', 'AND', 'ANY', 'AS', 'BETWEEN', 'BY', 'CASE', 'CAST', 'CHECK', 'COLUMN', 'CONNECT', 'CONNECTION', 'CONSTRAINT', 'CREATE', 'CROSS', 'CURRENT', 'CURRENT_DATE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER', 'DATABASE', 'DELETE', 'DISTINCT', 'DROP', 'ELSE', 'EXISTS', 'FALSE', 'FOLLOWING', 'FOR', 'FROM', 'FULL', 'GRANT', 'GROUP', 'GSCLUSTER', 'HAVING', 'ILIKE', 'IN', 'INCREMENT', 'INNER', 'INSERT', 'INTERSECT', 'INTO', 'IS', 'ISSUE', 'JOIN', 'LATERAL', 'LEFT', 'LIKE', 'LOCALTIME', 'LOCALTIMESTAMP', 'MINUS', 'NATURAL', 'NOT', 'NULL', 'OF', 'ON', 'OR', 'ORDER', 'ORGANIZATION', 'QUALIFY', 'REGEXP', 'REVOKE', 'RIGHT', 'RLIKE', 'ROW', 'ROWS', 'SAMPLE', 'SCHEMA', 'SELECT', 'SET', 'SOME', 'START', 'TABLE', 'TABLESAMPLE', 'THEN', 'TO', 'TRIGGER', 'TRUE', 'TRY_CAST', 'UNION', 'UNIQUE', 'UPDATE', 'USING', 'VALUES', 'VIEW', 'WHEN', 'WHENEVER', 'WHERE', 'WITH'
  ],
  // Word-form logical / set operators (symbolic operators are matched by the
  // regex rule at the end of `root`).
  operators: [
    'AND', 'NOT', 'OR', 'INTERSECT', 'MINUS', 'EXCEPT', 'UNION', 'EXISTS', 'NOT EXISTS', 'ANY', 'ALL', 'IN', 'NOT IN'
  ],
  // Built-in Snowflake functions; highlighted as 'predefined'. Some entries
  // (e.g. 'MIN / MAX', 'POW, POWER') are combined doc-page titles rather than
  // single identifiers — they can never match the `[\w@#$]+` tokenizer rule.
  builtinFunctions: [
    'ABS', 'ACOS', 'ACOSH', 'ADD_MONTHS', 'ALL_USER_NAMES', 'ANY_VALUE', 'APPROX_COUNT_DISTINCT', 'APPROX_PERCENTILE', 'APPROX_PERCENTILE_ACCUMULATE', 'APPROX_PERCENTILE_COMBINE', 'APPROX_PERCENTILE_ESTIMATE', 'APPROX_TOP_K', 'APPROX_TOP_K_ACCUMULATE', 'APPROX_TOP_K_COMBINE', 'APPROX_TOP_K_ESTIMATE', 'APPROXIMATE_JACCARD_INDEX', 'APPROXIMATE_SIMILARITY', 'ARRAY_AGG', 'ARRAY_APPEND', 'ARRAY_CAT', 'ARRAY_COMPACT', 'ARRAY_CONSTRUCT', 'ARRAY_CONSTRUCT_COMPACT', 'ARRAY_CONTAINS', 'ARRAY_INSERT', 'ARRAY_INTERSECTION', 'ARRAY_POSITION', 'ARRAY_PREPEND', 'ARRAY_SIZE', 'ARRAY_SLICE', 'ARRAY_TO_STRING', 'ARRAYS_OVERLAP', 'AS_ARRAY', 'AS_BINARY', 'AS_BOOLEAN', 'AS_CHAR', 'AS_VARCHAR', 'AS_DATE', 'AS_DECIMAL', 'AS_NUMBER', 'AS_DOUBLE', 'AS_REAL', 'AS_INTEGER',
    'AS_OBJECT', 'AS_TIME', 'ASCII', 'ASIN', 'ASINH', 'ATAN', 'ATAN2', 'ATANH', 'AUTOMATIC_CLUSTERING_HISTORY', 'AVG', 'BASE64_DECODE_BINARY', 'BASE64_DECODE_STRING', 'BASE64_ENCODE', 'BETWEEN', 'NOT BETWEEN', 'BIT_LENGTH', 'BITAND', 'BITAND_AGG', 'BITNOT', 'BITOR', 'BITOR_AGG', 'BITSHIFTLEFT', 'BITSHIFTRIGHT', 'BITXOR', 'BITXOR_AGG', 'BOOLAND', 'BOOLAND_AGG', 'BOOLNOT', 'BOOLOR', 'BOOLOR_AGG', 'BOOLXOR', 'BOOLXOR_AGG', 'CASE', 'CAST', 'CBRT', 'CEIL', 'CHARINDEX', 'CHECK_JSON', 'CHECK_XML', 'CHR', 'CHAR', 'COALESCE', 'COLLATE', 'COLLATION', 'COMPRESS', 'CONCAT', 'CONCAT_WS', 'CONDITIONAL_CHANGE_EVENT', 'CONDITIONAL_TRUE_EVENT', 'CONTAINS', 'CONVERT_TIMEZONE', 'COPY_HISTORY', 'CORR', 'COS', 'COSH', 'COT', 'COUNT', 'COUNT_IF', 'COVAR_POP', 'COVAR_SAMP', 'CUME_DIST', 'CURRENT_ACCOUNT', 'CURRENT_AVAILABLE_ROLES', 'CURRENT_CLIENT', 'CURRENT_DATABASE', 'CURRENT_DATE', 'CURRENT_IP_ADDRESS', 'CURRENT_REGION', 'CURRENT_ROLE', 'CURRENT_SCHEMA', 'CURRENT_SCHEMAS', 'CURRENT_SESSION', 'CURRENT_STATEMENT', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_TRANSACTION', 'CURRENT_USER', 'CURRENT_VERSION', 'CURRENT_WAREHOUSE', 'DATA_TRANSFER_HISTORY', 'DATABASE_REFRESH_HISTORY', 'DATABASE_REFRESH_PROGRESS', 'DATABASE_REFRESH_PROGRESS_BY_JOB', 'DATABASE_STORAGE_USAGE_HISTORY', 'DATE_FROM_PARTS', 'DATE_PART', 'DATE_TRUNC', 'DATEADD', 'DATEDIFF', 'DAYNAME', 'DECODE', 'DECOMPRESS_BINARY', 'DECOMPRESS_STRING', 'DECRYPT', 'DECRYPT_RAW', 'DEGREES', 'DENSE_RANK', 'DIV0', 'EDITDISTANCE', 'ENCRYPT', 'ENCRYPT_RAW', 'ENDSWITH', 'EQUAL_NULL', 'EXP', 'EXPLAIN_JSON', 'EXTERNAL_FUNCTIONS_HISTORY', 'EXTERNAL_TABLE_FILES', 'EXTERNAL_TABLE_FILE_REGISTRATION_HISTORY', 'EXTRACT', 'FACTORIAL', 'FIRST_VALUE', 'FLATTEN', 'FLOOR', 'GENERATE_COLUMN_DESCRIPTION', 'GENERATOR', 'GET', 'GET_DDL', 'GET_IGNORE_CASE', 'GET_OBJECT_REFERENCES', 'GET_PATH', 'GETBIT', 'GREATEST', 'GROUPING', 'GROUPING_ID', 'HASH', 'HASH_AGG', 'HAVERSINE', 'HEX_DECODE_BINARY', 'HEX_DECODE_STRING', 'HEX_ENCODE', 'HLL', 'HLL_ACCUMULATE',
    'HLL_COMBINE', 'HLL_ESTIMATE', 'HLL_EXPORT', 'HLL_IMPORT', 'HOUR', 'MINUTE', 'SECOND', 'IFF', 'IFNULL', 'ILIKE', 'ILIKE ANY', 'IN', 'NOT IN', 'INFER_SCHEMA', 'INITCAP', 'INSERT', 'INVOKER_ROLE', 'INVOKER_SHARE', 'IS DISTINCT FROM', 'IS NOT DISTINCT FROM', 'IS NULL', 'IS NOT NULL', 'IS_ARRAY', 'IS_BINARY', 'IS_BOOLEAN', 'IS_CHAR', 'IS_VARCHAR', 'IS_DATE', 'IS_DATE_VALUE', 'IS_DECIMAL', 'IS_DOUBLE', 'IS_REAL', 'IS_GRANTED_TO_INVOKER_ROLE', 'IS_INTEGER', 'IS_NULL_VALUE', 'IS_OBJECT', 'IS_ROLE_IN_SESSION', 'IS_TIME', 'JSON_EXTRACT_PATH_TEXT', 'KURTOSIS', 'LAG', 'LAST_DAY', 'LAST_QUERY_ID', 'LAST_TRANSACTION', 'LAST_VALUE', 'LEAD', 'LEAST', 'LEFT', 'LENGTH', 'LEN', 'LIKE', 'LIKE ALL', 'LIKE ANY', 'LISTAGG', 'LN', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOG', 'LOGIN_HISTORY', 'LOGIN_HISTORY_BY_USER', 'LOWER', 'LPAD', 'LTRIM', 'MATERIALIZED_VIEW_REFRESH_HISTORY', 'MD5', 'MD5_HEX', 'MD5_BINARY', 'MD5_NUMBER_LOWER64', 'MD5_NUMBER_UPPER64', 'MEDIAN', 'MIN / MAX', 'MINHASH', 'MINHASH_COMBINE', 'MOD', 'MODE', 'MONTHNAME', 'MONTHS_BETWEEN', 'NEXT_DAY', 'NORMAL', 'NTH_VALUE', 'NTILE', 'NULLIF', 'NULLIFZERO', 'NVL', 'NVL2', 'OBJECT_AGG', 'OBJECT_CONSTRUCT', 'OBJECT_CONSTRUCT_KEEP_NULL', 'OBJECT_DELETE', 'OBJECT_INSERT', 'OBJECT_KEYS', 'OBJECT_PICK', 'OCTET_LENGTH', 'PARSE_IP', 'PARSE_JSON', 'PARSE_URL', 'PARSE_XML', 'PERCENT_RANK', 'PERCENTILE_CONT', 'PERCENTILE_DISC', 'PI', 'PIPE_USAGE_HISTORY', 'POLICY_REFERENCES', 'POSITION', 'POW, POWER', 'PREVIOUS_DAY', 'QUERY_HISTORY', 'RADIANS', 'RANDOM', 'RANDSTR', 'RANK', 'RATIO_TO_REPORT', 'REGEXP', 'REGEXP_COUNT', 'REGEXP_INSTR', 'REGEXP_LIKE', 'REGEXP_REPLACE', 'REGEXP_SUBSTR', 'REGR_AVGX', 'REGR_AVGY', 'REGR_COUNT', 'REGR_INTERCEPT', 'REGR_R2', 'REGR_SLOPE', 'REGR_SXX', 'REGR_SXY', 'REGR_SYY', 'REGR_VALX', 'REGR_VALY', 'REPEAT', 'REPLACE', 'REPLICATION_USAGE_HISTORY', 'REST_EVENT_HISTORY', 'RESULT_SCAN', 'REVERSE', 'RIGHT', 'RLIKE', 'ROUND', 'ROW_NUMBER', 'RPAD', 'RTRIM', 'RTRIMMED_LENGTH', 'SEARCH_OPTIMIZATION_HISTORY', 'SEQ1', 'SEQ2',
    'SEQ4', 'SEQ8', 'SHA1', 'SHA1_HEX', 'SHA1_BINARY', 'SHA2', 'SHA2_HEX', 'SHA2_BINARY', 'SIGN', 'SIN', 'SINH', 'SKEW', 'SOUNDEX', 'SPACE', 'SPLIT', 'SPLIT_PART', 'SPLIT_TO_TABLE', 'SQRT', 'SQUARE', 'ST_AREA', 'ST_ASEWKB', 'ST_ASEWKT', 'ST_ASGEOJSON', 'ST_ASWKB', 'ST_ASBINARY', 'ST_ASWKT', 'ST_ASTEXT', 'ST_AZIMUTH', 'ST_CENTROID', 'ST_COLLECT', 'ST_CONTAINS', 'ST_COVEREDBY', 'ST_COVERS', 'ST_DIMENSION', 'ST_DISJOINT', 'ST_DISTANCE', 'ST_DWITHIN', 'ST_GEOGFROMGEOHASH', 'ST_GEOGPOINTFROMGEOHASH', 'ST_GEOGRAPHYFROMWKB', 'ST_GEOGRAPHYFROMWKT', 'ST_GEOHASH', 'ST_HAUSDORFFDISTANCE', 'ST_INTERSECTS', 'ST_LENGTH', 'ST_MAKELINE', 'ST_MAKEPOINT', 'ST_POINT', 'ST_MAKEPOLYGON', 'ST_POLYGON', 'ST_NPOINTS', 'ST_NUMPOINTS', 'ST_PERIMETER', 'ST_SIMPLIFY', 'ST_SRID', 'ST_WITHIN', 'ST_X', 'ST_XMAX', 'ST_XMIN', 'ST_Y', 'ST_YMAX', 'ST_YMIN', 'STAGE_STORAGE_USAGE_HISTORY', 'STARTSWITH', 'STDDEV', 'STDDEV_POP', 'STDDEV_SAMP', 'STRIP_NULL_VALUE', 'STRTOK', 'STRTOK_SPLIT_TO_TABLE', 'STRTOK_TO_ARRAY', 'SUBSTR', 'SUBSTRING', 'SUM', 'SYSDATE', 'SYSTEM$ABORT_SESSION', 'SYSTEM$ABORT_TRANSACTION', 'SYSTEM$BEHAVIOR_CHANGE_BUNDLE_STATUS', 'SYSTEM$CANCEL_ALL_QUERIES', 'SYSTEM$CANCEL_QUERY', 'SYSTEM$CLUSTERING_DEPTH', 'SYSTEM$CLUSTERING_INFORMATION', 'SYSTEM$CLUSTERING_RATIO — Deprecated', 'SYSTEM$CURRENT_USER_TASK_NAME', 'SYSTEM$DATABASE_REFRESH_HISTORY', 'SYSTEM$DATABASE_REFRESH_PROGRESS', 'SYSTEM$DATABASE_REFRESH_PROGRESS_BY_JOB', 'SYSTEM$DISABLE_BEHAVIOR_CHANGE_BUNDLE', 'SYSTEM$ENABLE_BEHAVIOR_CHANGE_BUNDLE', 'SYSTEM$ESTIMATE_SEARCH_OPTIMIZATION_COSTS', 'SYSTEM$EXPLAIN_JSON_TO_TEXT', 'SYSTEM$EXPLAIN_PLAN_JSON', 'SYSTEM$EXTERNAL_TABLE_PIPE_STATUS', 'SYSTEM$GENERATE_SAML_CSR', 'SYSTEM$GENERATE_SCIM_ACCESS_TOKEN', 'SYSTEM$GET_AWS_SNS_IAM_POLICY', 'SYSTEM$GET_PREDECESSOR_RETURN_VALUE', 'SYSTEM$GET_PRIVATELINK_CONFIG', 'SYSTEM$GET_SNOWFLAKE_PLATFORM_INFO', 'SYSTEM$GLOBAL_ACCOUNT_SET_PARAMETER', 'SYSTEM$LAST_CHANGE_COMMIT_TIME', 'SYSTEM$MIGRATE_SAML_IDP_REGISTRATION', 'SYSTEM$PIPE_FORCE_RESUME',
    'SYSTEM$PIPE_STATUS', 'SYSTEM$SET_RETURN_VALUE', 'SYSTEM$SHOW_OAUTH_CLIENT_SECRETS', 'SYSTEM$STREAM_GET_TABLE_TIMESTAMP', 'SYSTEM$STREAM_HAS_DATA', 'SYSTEM$TASK_DEPENDENTS_ENABLE', 'SYSTEM$TYPEOF', 'SYSTEM$USER_TASK_CANCEL_ONGOING_EXECUTIONS', 'SYSTEM$VERIFY_EXTERNAL_OAUTH_TOKEN', 'SYSTEM$WAIT', 'SYSTEM$WHITELIST', 'SYSTEM$WHITELIST_PRIVATELINK', 'TAN', 'TANH', 'TASK_DEPENDENTS', 'TASK_HISTORY', 'TIME_FROM_PARTS', 'TIME_SLICE', 'TIMEADD', 'TIMEDIFF', 'TIMESTAMP_FROM_PARTS', 'TIMESTAMPADD', 'TIMESTAMPDIFF', 'TO_ARRAY', 'TO_BINARY', 'TO_BOOLEAN', 'TO_CHAR', 'TO_VARCHAR', 'TO_DATE', 'DATE', 'TO_DECIMAL', 'TO_NUMBER', 'TO_NUMERIC', 'TO_DOUBLE', 'TO_GEOGRAPHY', 'TO_JSON', 'TO_OBJECT', 'TO_TIME', 'TIME', 'TO_TIMESTAMP', 'TO_VARIANT', 'TO_XML', 'TRANSLATE', 'TRIM', 'TRUNCATE', 'TRUNC', 'TRY_BASE64_DECODE_BINARY', 'TRY_BASE64_DECODE_STRING', 'TRY_CAST', 'TRY_HEX_DECODE_BINARY', 'TRY_HEX_DECODE_STRING', 'TRY_PARSE_JSON', 'TRY_TO_BINARY', 'TRY_TO_BOOLEAN', 'TRY_TO_DATE', 'TRY_TO_DECIMAL', 'TRY_TO_NUMBER', 'TRY_TO_NUMERIC', 'TRY_TO_DOUBLE', 'TRY_TO_GEOGRAPHY', 'TRY_TO_TIME', 'TRY_TO_TIMESTAMP', 'TYPEOF', 'UNICODE', 'UNIFORM', 'UPPER', 'UUID_STRING', 'VALIDATE', 'VALIDATE_PIPE_LOAD', 'VAR_POP', 'VAR_SAMP', 'VARIANCE', 'VARIANCE_SAMP', 'VARIANCE_POP', 'WAREHOUSE_LOAD_HISTORY', 'WAREHOUSE_METERING_HISTORY', 'WIDTH_BUCKET', 'XMLGET', 'YEAR', 'DAY', 'WEEK', 'MONTH', 'QUARTER', 'ZEROIFNULL', 'ZIPF'
  ],
  builtinVariables: [
    // NOT SUPPORTED
  ],
  pseudoColumns: [
    // NOT SUPPORTED
  ],
  // Monarch state machine. `root` is the entry state; `@include` pulls in the
  // named sub-rule lists below.
  tokenizer: {
    root: [
      { include: '@comments' },
      { include: '@whitespace' },
      { include: '@pseudoColumns' },
      { include: '@numbers' },
      { include: '@strings' },
      { include: '@complexIdentifiers' },
      { include: '@scopes' },
      [/[;,.]/, 'delimiter'],
      [/[()]/, '@brackets'],
      // Custom rule: matches `{{<21 chars of [0-9a-zA-Z-_]>( as alias)?}}`
      // placeholders — presumably nanoid-style document references injected
      // into queries (not part of upstream monaco; verify against callers).
      [/\{\{[0-9a-zA-Z\-_]{21}( as \w+)?\}\}/, 'transclusion'],
      // Bare identifiers: classified via the lists above, falling back to
      // plain 'identifier'.
      [
        /[\w@#$]+/,
        {
          cases: {
            '@keywords': 'keyword',
            '@operators': 'operator',
            '@builtinVariables': 'predefined',
            '@builtinFunctions': 'predefined',
            '@default': 'identifier'
          }
        }
      ],
      // Symbolic operators.
      [/[<>=!%&+\-*/|~^]/, 'operator']
    ],
    whitespace: [[/\s+/, 'white']],
    comments: [
      [/--+.*/, 'comment'],
      [/\/\*/, { token: 'comment.quote', next: '@comment' }]
    ],
    comment: [
      [/[^*/]+/, 'comment'],
      // Not supporting nested comments, as nested comments seem to not be standard?
      // i.e. http://stackoverflow.com/questions/728172/are-there-multiline-comment-delimiters-in-sql-that-are-vendor-agnostic
      // [/\/\*/, { token: 'comment.quote', next: '@push' }],    // nested comment not allowed :-(
      [/\*\//, { token: 'comment.quote', next: '@pop' }],
      [/./, 'comment']
    ],
    pseudoColumns: [
      [
        /[$][A-Za-z_][\w@#$]*/,
        {
          cases: {
            '@pseudoColumns': 'predefined',
            '@default': 'identifier'
          }
        }
      ]
    ],
    numbers: [
      [/0[xX][0-9a-fA-F]*/, 'number'],
      [/[$][+-]*\d*(\.\d*)?/, 'number'],
      [/((\d+(\.\d*)?)|(\.\d+))([eE][-+]?\d+)?/, 'number']
    ],
    // Single-quoted strings; '' is an escaped quote.
    strings: [[/'/, { token: 'string', next: '@string' }]],
    string: [
      [/[^']+/, 'string'],
      [/''/, 'string'],
      [/'/, { token: 'string', next: '@pop' }]
    ],
    // Double-quoted (delimited) identifiers; "" is an escaped quote.
    complexIdentifiers: [[/"/, { token: 'identifier.quote', next: '@quotedIdentifier' }]],
    quotedIdentifier: [
      [/[^"]+/, 'identifier'],
      [/""/, 'identifier'],
      [/"/, { token: 'identifier.quote', next: '@pop' }]
    ],
    // Block/branch keywords that editors use for folding and indentation.
    scopes: [
      [/(BEGIN|CASE)\b/i, { token: 'keyword.block' }],
      [/END\b/i, { token: 'keyword.block' }],
      [/WHEN\b/i, { token: 'keyword.choice' }],
      [/THEN\b/i, { token: 'keyword.choice' }]
    ]
  }
}
the_stack
import {isNullOrUndefined, isUndefined} from 'util'; import {Injectable, Injector} from '@angular/core'; import {TranslateService} from '@ngx-translate/core'; import {CommonUtil} from '@common/util/common.util'; import {StringUtil} from '@common/util/string.util'; import {Loading} from '@common/util/loading.util'; import {Page} from '@domain/common/page'; import {PopupService} from '@common/service/popup.service'; import {AbstractService} from '@common/service/abstract.service'; import {Dataflows, PrDataflow} from '@domain/data-preparation/pr-dataflow'; import {Datasets, PrDataset} from '@domain/data-preparation/pr-dataset'; import {PreparationAlert} from '../../util/preparation-alert.util'; import {SnapShotCreateDomain} from '../../component/create-snapshot-popup.component'; @Injectable() export class DataflowService extends AbstractService { private dataflowListParams: any = null; constructor(protected injector: Injector, private popupService: PopupService, public translateService: TranslateService ) { super(injector); } public getParamsForDataflowList() { return this.dataflowListParams ? 
this.dataflowListParams : {}; } public setParamsForDataflowList(params: any) { this.dataflowListParams = params; } // 데이터 플로우 목록 조회 public getDataflows(searchText: string, page: Page, projection: string): Promise<Dataflows> { let url = this.API_URL + `preparationdataflows/search/findByDfNameContaining?dfName=${encodeURIComponent(searchText)}&project=${projection}`; url += '&' + CommonUtil.objectToUrlString(page); return this.get(url); } public getDataflowList(param) { let url = this.API_URL + `preparationdataflows/search/findByDfNameContaining?`; url += CommonUtil.objectToUrlString(param); return this.get(url); } // 데이터 플로우 상세조회 public getDataflow(dfId: string): Promise<PrDataflow> { const url = this.API_URL + 'preparationdataflows/' + dfId + '?projection=detail'; return this.get(url); } // 업스트림 조회 public getUpstreams(dfId: string, isUpdate?: boolean): Promise<any> { let url = this.API_URL + 'preparationdataflows/' + dfId + '/upstreammap'; if (isUpdate) { url += '?forUpdate=' + isUpdate; } return this.get(url); } // 데이터셋 목록 조회 public getDatasets(searchText: string, page: Page, _projection?: string, dsType: string = '', _importType: string = ''): Promise<Datasets> { let url = this.API_URL + `preparationdatasets/search/`; if (StringUtil.isNotEmpty(dsType)) { url += `findByDsNameContainingAndDsType?dsType=${dsType}`; } else { url += 'findByDsNameContaining?'; } url += `&dsName=${encodeURIComponent(searchText)}`; url += '&' + CommonUtil.objectToUrlString(page); return this.get(url); } // 데이터셋 상세 조회 public getDataset(dsId: string): Promise<PrDataset> { const url = this.API_URL + 'preparationdatasets/' + dsId + '?preview=true'; return this.get(url); } // 데이터 플로우 생성 public createDataflow(dataflow: PrDataflow) { const popupService = this.popupService; return this.post(this.API_URL + 'preparationdataflows', dataflow) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); 
PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // 데이터 플로우 삭제 public deleteDataflow(dfId: string) { const popupService = this.popupService; return this.delete(this.API_URL + 'preparationdataflows/' + dfId) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // 데이터 플로우 수정 public updateDataflow(dataflow: any): Promise<PrDataflow> { const popupService = this.popupService; return this.patch(this.API_URL + 'preparationdataflows/' + dataflow.dfId, dataflow) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // 데이터 플로우 룰생성 public createWrangledDataset(datasetId: string, dataflowId: string): Promise<any> { const popupService = this.popupService; const params = {dfId: dataflowId}; return this.post(this.API_URL + `preparationdatasets/${datasetId}/transform`, params) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // transform action GET or PUT public transformAction(id: string, method: string, params: any): Promise<any> { if (method === 'put') { return this.put(this.API_URL + `preparationdatasets/${id}/transform`, params) } else { let url = this.API_URL + `preparationdatasets/${id}/transform`; const param: string[] = []; if 
(isNullOrUndefined(params.ruleIdx)) { (param.push(`ruleIdx=`)); } else { (param.push(`ruleIdx=${params.ruleIdx}`)); } (isNullOrUndefined(params.offset)) || (param.push(`offset=${params.offset}`)); (isNullOrUndefined(params.count)) || (param.push(`count=${params.count}`)); (0 < param.length) && (url = url + '?' + param.join('&')); return this.get(url); } } // function - transformAction // Wrangled 데이터셋 조회 (조인과 유니온에서 사용) public getDatasetWrangledData(datasetId: string, count?: number, pageNum?: number, _ruleIdx?: number): Promise<any> { // TODO : temp value count = 1000; pageNum = 0; let url = this.API_URL + `preparationdatasets/${datasetId}/transform`; const params: string[] = []; (params.push(`ruleIdx=`)); (isNullOrUndefined(pageNum)) || (params.push(`offset=${pageNum}`)); (isNullOrUndefined(count)) || (params.push(`count=${count}`)); (0 < params.length) && (url = url + '?' + params.join('&')); return this.get(url); } // 데이터셋 추가 public updateDataSets(dataflowId: string, dsIds: any): Promise<any> { return this.put(this.API_URL + `preparationdataflows/${dataflowId}/update_datasets`, dsIds); } // fileUri등 설정 정보 public getConfiguration(datasetId: string): Promise<any> { const popupService = this.popupService; return this.get(this.API_URL + `preparationdatasets/${datasetId}/transform/configuration`) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // 데이터 스냅샷 생성 public createDataSnapshot(datasetId: string, datasnapshot: SnapShotCreateDomain): Promise<any> { const popupService = this.popupService; return this.post(this.API_URL + `preparationdatasets/${datasetId}/transform/snapshot`, datasnapshot) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); 
PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // 룰 적용 public applyRules(datasetId: string, param: any): Promise<any> { const popupService = this.popupService; param['count'] = 100; return this.put(this.API_URL + `preparationdatasets/${datasetId}/transform`, param) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } /* searchText 제거 public getSearchCountDataSets(datasetId: string, searchWord: string, count: number) { let popupService = this.popupService; return this.get(this.API_URL + `preparationdatasets/${datasetId}/transform?searchWord=` + encodeURIComponent(searchWord) + '&targetLines=' + count) */ public getSearchCountDataSets(datasetId: string, ruleIdx?: number, pageNum?: number, count?: number) { const popupService = this.popupService; let url = this.API_URL + `preparationdatasets/${datasetId}/transform`; const params: string[] = []; if (isNullOrUndefined(ruleIdx)) { (params.push(`ruleIdx=`)); } else { (params.push(`ruleIdx=${ruleIdx}`)); } (isNullOrUndefined(pageNum)) || (params.push(`offset=${pageNum}`)); (isNullOrUndefined(count)) || (params.push(`count=${count}`)); (0 < params.length) && (url = url + '?' 
+ params.join('&')); return this.get(url) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } // when editing, getting previous data public fetchPreviousData(datasetId: string, op: any): Promise<any> { const popupService = this.popupService; return this.put(this.API_URL + `preparationdatasets/${datasetId}/transform`, op) .catch((error) => { if (true !== isUndefined(error.code) && error.code === 'PR5102') { Loading.hide(); PreparationAlert.success(this.translateService.instant(error.details)); popupService.notiPopup({name: 'update-dataflow', data: null}); return Promise.reject(null); } throw error; }); } public deleteChainDataflow(dfId: string, dsId: string) { return this.delete(this.API_URL + 'preparationdataflows/delete_chain/' + dfId + '/' + dsId); } /** * Clone dataSet * @param {string} dsId * @return {Promise<any>} */ public cloneWrangledDataset(dsId: string): Promise<any> { const params = {}; return this.post(this.API_URL + `preparationdatasets/${dsId}/clone`, params); } public autoComplete(ruleString: string, ruleCommand: string, rulePart: string): Promise<any> { const params = { ruleString: ruleString, ruleCommand: ruleCommand, rulePart: rulePart }; return this.post(this.API_URL + `preparationdatasets/autocomplete`, params); } /** * Validate expression from advanced input popup * @param exprString */ public validateExpr(exprString: string): Promise<any> { return this.post(this.API_URL + `preparationdatasets/validate_expr`, exprString); } /** * 각 컬럼별 히스토그램 정보 조회 * @param {string} dsId * @param {any} params * { * ruleIdx : 몇 번째 ruleIdx에 해당하는 histogram을 얻을 것인지 * colnos[] : histogram을 얻으려는 column number array * colWidths[] : 위의 각 column의 확정된 column 폭 * } * @returns {Promise<any>} */ public getHistogramInfo(dsId: string, params: 
any): Promise<any> { return this.post(this.API_URL + `preparationdatasets/${dsId}/transform/histogram`, params) } /** * 타임스탬프 format 중 가장 유사한 format을 추천해주는 리스트 * @param {string} datasetId * @param colNames * @return {Promise<any>} */ public getTimestampFormatSuggestions(datasetId: string, colNames: any): Promise<any> { const url = this.API_URL + `preparationdatasets/${datasetId}/transform/timestampFormat`; return this.post(url, colNames); } /** * 룰 편집 화면에서 스냅샷 탭 목록 불러오기 * @return {Promise<any>} */ public getWorkList(params): Promise<any> { const url = this.API_URL + 'preparationsnapshots/' + params.dsId + '/work_list'; return this.get(url); } /** * Fetch function list used in Advanced Function popup * @returns {Promise<any>} */ public getFunctionList(): Promise<any> { const url = this.API_URL + `preparationdatasets/function_list`; return this.get(url); } public swapDataset(param: any): Promise<any> { const url = this.API_URL + `preparationdataflows/${param.dfId}/swap_upstream`; delete param.dfId; return this.post(url, param); } }
the_stack
import React, { useEffect, useMemo } from "react";
import Table from "./Table";
import {
  ColumnTypes,
  CompactMode,
  ReactTableColumnProps,
  ReactTableFilter,
} from "./Constants";
import { Row } from "react-table";
import { EventType } from "constants/AppsmithActionConstants/ActionConstants";
import { isEqual } from "lodash";

// Entry for the column header menu (e.g. rename / hide / type options).
export interface ColumnMenuOptionProps {
  content: string | JSX.Element;
  closeOnClick?: boolean;
  isSelected?: boolean;
  editColumnName?: boolean;
  columnAccessor?: string;
  id?: string;
  category?: boolean;
  options?: ColumnMenuSubOptionProps[];
  onClick?: (columnIndex: number, isSelected: boolean) => void;
}

// Nested entry inside a ColumnMenuOptionProps' options list.
export interface ColumnMenuSubOptionProps {
  content: string | JSX.Element;
  isSelected?: boolean;
  closeOnClick?: boolean;
  onClick?: (columnIndex: number) => void;
  id?: string;
  category?: boolean;
  isHeader?: boolean;
}

// Props for the table widget wrapper; callbacks are supplied by the widget
// layer (selection, paging, sorting, filtering, column resize/reorder).
interface ReactTableComponentProps {
  widgetId: string;
  widgetName: string;
  searchKey: string;
  isDisabled?: boolean;
  isVisible?: boolean;
  isLoading: boolean;
  editMode: boolean;
  width: number;
  height: number;
  pageSize: number;
  totalRecordsCount?: number;
  tableData: Array<Record<string, unknown>>;
  disableDrag: (disable: boolean) => void;
  onRowClick: (rowData: Record<string, unknown>, rowIndex: number) => void;
  onCommandClick: (dynamicTrigger: string, onComplete: () => void) => void;
  selectAllRow: (pageData: Row<Record<string, unknown>>[]) => void;
  unSelectAllRow: (pageData: Row<Record<string, unknown>>[]) => void;
  updatePageNo: (pageNo: number, event?: EventType) => void;
  sortTableColumn: (column: string, asc: boolean) => void;
  nextPageClick: () => void;
  prevPageClick: () => void;
  pageNo: number;
  serverSidePaginationEnabled: boolean;
  selectedRowIndex: number;
  selectedRowIndices: number[];
  multiRowSelection?: boolean;
  hiddenColumns?: string[];
  triggerRowSelection: boolean;
  columnSizeMap?: { [key: string]: number };
  handleResizeColumn: (columnSizeMap: { [key: string]: number }) => void;
  handleReorderColumn: (columnOrder: string[]) => void;
  searchTableData: (searchKey: any) => void;
  filters?: ReactTableFilter[];
  applyFilter: (filters: ReactTableFilter[]) => void;
  columns: ReactTableColumnProps[];
  compactMode?: CompactMode;
  isVisibleSearch?: boolean;
  isVisibleFilters?: boolean;
  isVisibleDownload?: boolean;
  isVisiblePagination?: boolean;
  delimiter: string;
}

// Wrapper around Table that adds native drag-and-drop column reordering and
// adapts widget-level callbacks (sort, row select) to Table's API.
function ReactTableComponent(props: ReactTableComponentProps) {
  const {
    applyFilter,
    columns,
    columnSizeMap,
    compactMode,
    delimiter,
    disableDrag,
    editMode,
    filters,
    handleReorderColumn,
    handleResizeColumn,
    height,
    isLoading,
    isVisibleDownload,
    isVisibleFilters,
    isVisiblePagination,
    isVisibleSearch,
    multiRowSelection,
    nextPageClick,
    onRowClick,
    pageNo,
    pageSize,
    prevPageClick,
    searchKey,
    searchTableData,
    selectAllRow,
    selectedRowIndex,
    selectedRowIndices,
    serverSidePaginationEnabled,
    sortTableColumn: _sortTableColumn,
    tableData,
    totalRecordsCount,
    triggerRowSelection,
    unSelectAllRow,
    updatePageNo,
    widgetId,
    widgetName,
    width,
  } = props;

  // Split the column list into visible (ordered) and hidden accessors.
  const { columnOrder, hiddenColumns } = useMemo(() => {
    const order: string[] = [];
    const hidden: string[] = [];
    columns.forEach((item) => {
      if (item.isHidden) {
        hidden.push(item.accessor);
      } else {
        order.push(item.accessor);
      }
    });
    return { columnOrder: order, hiddenColumns: hidden };
  }, [columns]);

  // Wire native HTML5 drag-and-drop onto the rendered header cells so columns
  // can be reordered. NOTE(review): there is no dependency array, so handlers
  // are re-attached on every render — presumably to cover re-rendered headers;
  // confirm this is intentional.
  useEffect(() => {
    // Index of the header currently being dragged; -1 means none.
    let dragged = -1;
    const headers = Array.prototype.slice.call(
      document.querySelectorAll(`#table${widgetId} .draggable-header`),
    );
    headers.forEach((header, i) => {
      header.setAttribute("draggable", true);

      header.ondragstart = (e: React.DragEvent<HTMLDivElement>) => {
        // Visually lift the dragged header and remember its index.
        header.style =
          "background: #efefef; border-radius: 4px; z-index: 100; width: 100%; text-overflow: none; overflow: none;";
        e.stopPropagation();
        dragged = i;
      };

      header.ondrag = (e: React.DragEvent<HTMLDivElement>) => {
        e.stopPropagation();
      };

      header.ondragend = (e: React.DragEvent<HTMLDivElement>) => {
        header.style = "";
        e.stopPropagation();
        // Delay the reset so a following drop handler still sees the index.
        setTimeout(() => (dragged = -1), 1000);
      };

      // the dropped header
      header.ondragover = (e: React.DragEvent<HTMLDivElement>) => {
        // Highlight the side of the hovered header where the column would land.
        if (i !== dragged && dragged !== -1) {
          if (dragged > i) {
            header.parentElement.className = "th header-reorder highlight-left";
          } else if (dragged < i) {
            header.parentElement.className =
              "th header-reorder highlight-right";
          }
        }
        e.preventDefault();
      };

      header.ondragenter = (e: React.DragEvent<HTMLDivElement>) => {
        if (i !== dragged && dragged !== -1) {
          if (dragged > i) {
            header.parentElement.className = "th header-reorder highlight-left";
          } else if (dragged < i) {
            header.parentElement.className =
              "th header-reorder highlight-right";
          }
        }
        e.preventDefault();
      };

      header.ondragleave = (e: React.DragEvent<HTMLDivElement>) => {
        // Clear the highlight when the drag leaves this header.
        header.parentElement.className = "th header-reorder";
        e.preventDefault();
      };

      header.ondrop = (e: React.DragEvent<HTMLDivElement>) => {
        header.style = "";
        header.parentElement.className = "th header-reorder";
        if (i !== dragged && dragged !== -1) {
          e.preventDefault();
          // Move the dragged accessor to the drop position, then append the
          // hidden columns so their accessors are never lost.
          const newColumnOrder = [...columnOrder];
          // The dragged column
          const movedColumnName = newColumnOrder.splice(dragged, 1);
          // If the dragged column exists
          if (movedColumnName && movedColumnName.length === 1) {
            newColumnOrder.splice(i, 0, movedColumnName[0]);
          }
          handleReorderColumn([...newColumnOrder, ...hiddenColumns]);
        } else {
          dragged = -1;
        }
      };
    });
  });

  // Adapt Table's index-based sort callback to the accessor-based widget
  // callback; image/video columns are not sortable.
  const sortTableColumn = (columnIndex: number, asc: boolean) => {
    if (columnIndex === -1) {
      _sortTableColumn("", asc);
    } else {
      const column = columns[columnIndex];
      const columnType = column.metaProperties?.type || ColumnTypes.TEXT;
      if (
        columnType !== ColumnTypes.IMAGE &&
        columnType !== ColumnTypes.VIDEO
      ) {
        _sortTableColumn(column.accessor, asc);
      }
    }
  };

  // Forward a row click with the row's original data and index.
  const selectTableRow = (row: {
    original: Record<string, unknown>;
    index: number;
  }) => {
    onRowClick(row.original, row.index);
  };

  // Select or deselect every row on the current page.
  const toggleAllRowSelect = (
    isSelect: boolean,
    pageData: Row<Record<string, unknown>>[],
  ) => {
    if (isSelect) {
      selectAllRow(pageData);
    } else {
      unSelectAllRow(pageData);
    }
  };

  return (
    <Table
      applyFilter={applyFilter}
      columnSizeMap={columnSizeMap}
      columns={columns}
      compactMode={compactMode}
      data={tableData}
      delimiter={delimiter}
      disableDrag={() => {
        disableDrag(true);
      }}
      editMode={editMode}
      enableDrag={() => {
        disableDrag(false);
      }}
      filters={filters}
      handleResizeColumn={handleResizeColumn}
      height={height}
      isLoading={isLoading}
      isVisibleDownload={isVisibleDownload}
      isVisibleFilters={isVisibleFilters}
      isVisiblePagination={isVisiblePagination}
      isVisibleSearch={isVisibleSearch}
      multiRowSelection={multiRowSelection}
      nextPageClick={nextPageClick}
      pageNo={pageNo - 1}
      pageSize={pageSize || 1}
      prevPageClick={prevPageClick}
      searchKey={searchKey}
      searchTableData={searchTableData}
      selectTableRow={selectTableRow}
      selectedRowIndex={selectedRowIndex}
      selectedRowIndices={selectedRowIndices}
      serverSidePaginationEnabled={serverSidePaginationEnabled}
      sortTableColumn={sortTableColumn}
      toggleAllRowSelect={toggleAllRowSelect}
      totalRecordsCount={totalRecordsCount}
      triggerRowSelection={triggerRowSelection}
      updatePageNo={updatePageNo}
      widgetId={widgetId}
      widgetName={widgetName}
      width={width}
    />
  );
}

// Memoized with an explicit comparator: reference-compare the stable
// callbacks and scalars, deep-compare the data-ish props.
export default React.memo(ReactTableComponent, (prev, next) => {
  return (
    prev.applyFilter === next.applyFilter &&
    prev.compactMode === next.compactMode &&
    prev.delimiter === next.delimiter &&
    prev.disableDrag === next.disableDrag &&
    prev.editMode === next.editMode &&
    prev.filters === next.filters &&
    prev.handleReorderColumn === next.handleReorderColumn &&
    prev.handleResizeColumn === next.handleResizeColumn &&
    prev.height === next.height &&
    prev.isLoading === next.isLoading &&
    prev.isVisibleDownload === next.isVisibleDownload &&
    prev.isVisibleFilters === next.isVisibleFilters &&
    prev.isVisiblePagination === next.isVisiblePagination &&
    prev.isVisibleSearch === next.isVisibleSearch &&
    prev.nextPageClick === next.nextPageClick &&
    prev.onRowClick === next.onRowClick &&
    prev.pageNo === next.pageNo &&
    prev.pageSize === next.pageSize &&
    prev.prevPageClick === next.prevPageClick &&
    prev.searchKey === next.searchKey &&
    prev.searchTableData === next.searchTableData &&
    prev.selectedRowIndex === next.selectedRowIndex &&
    prev.selectedRowIndices === next.selectedRowIndices &&
    prev.serverSidePaginationEnabled === next.serverSidePaginationEnabled &&
    prev.sortTableColumn === next.sortTableColumn &&
    prev.totalRecordsCount === next.totalRecordsCount &&
    prev.triggerRowSelection === next.triggerRowSelection &&
    prev.updatePageNo === next.updatePageNo &&
    prev.widgetId === next.widgetId &&
    prev.widgetName === next.widgetName &&
    prev.width === next.width &&
    isEqual(prev.columnSizeMap, next.columnSizeMap) &&
    isEqual(prev.tableData, next.tableData) &&
    // Using JSON stringify because isEqual doesn't work with functions,
    // and we are not changing the columns manually.
    JSON.stringify(prev.columns) === JSON.stringify(next.columns)
  );
});
the_stack
import {Interface, Result} from '@ethersproject/abi'; import {Log, TransactionResponse} from '@ethersproject/abstract-provider'; import {BigNumber} from '@ethersproject/bignumber'; import {hexDataSlice} from '@ethersproject/bytes'; import {ApiPromise} from '@polkadot/api'; import {EthTransaction, EvmLog, ExitReason} from '@polkadot/types/interfaces'; import { SubqlDatasourceProcessor, SubqlCustomDatasource, SubqlHandlerKind, SubqlNetworkFilter, SubstrateEvent, SecondLayerHandlerProcessor, SubstrateExtrinsic, SubqlCustomHandler, SubqlMapping, DictionaryQueryEntry, } from '@subql/types'; import {plainToClass} from 'class-transformer'; import { IsOptional, validateSync, ValidatorConstraint, ValidatorConstraintInterface, Validate, IsEthereumAddress, IsString, } from 'class-validator'; import {eventToTopic, functionToSighash, hexStringEq, stringNormalizedEq} from './utils'; type TopicFilter = string | null | undefined; export type MoonbeamDatasource = SubqlCustomDatasource< 'substrate/Moonbeam', SubqlNetworkFilter, SubqlMapping<SubqlCustomHandler>, MoonbeamProcessorOptions >; export interface MoonbeamEventFilter { topics?: [TopicFilter, TopicFilter, TopicFilter, TopicFilter]; } export interface MoonbeamCallFilter { from?: string; function?: string; } export type MoonbeamEvent<T extends Result = Result> = Log & {args?: T; blockTimestamp: Date}; export type MoonbeamCall<T extends Result = Result> = Omit<TransactionResponse, 'wait' | 'confirmations'> & { args?: T; success: boolean; }; @ValidatorConstraint({name: 'topifFilterValidator', async: false}) export class TopicFilterValidator implements ValidatorConstraintInterface { validate(value: TopicFilter): boolean { try { return !value || (typeof value === 'string' ? 
!!eventToTopic(value) : false); } catch (e) { return false; } } defaultMessage(): string { return 'Value must be either null, undefined, hex string or hex string[]'; } } class MoonbeamProcessorOptions { @IsOptional() @IsString() abi?: string; @IsOptional() @IsEthereumAddress() address?: string; } class MoonbeamEventFilterImpl implements MoonbeamEventFilter { @IsOptional() @Validate(TopicFilterValidator, {each: true}) topics?: [TopicFilter, TopicFilter, TopicFilter, TopicFilter]; } class MoonbeamCallFilterImpl implements MoonbeamCallFilter { @IsOptional() @IsEthereumAddress() from?: string; @IsOptional() @IsString() function?: string; } type RawEvent = { address: string; topics: string[]; data: string; }; type RawTransaction = { nonce: number; gasPrice: string; gasLimit: string; action: { call: string; // hex string }; value: string; input: string; // hex string signature: { v: number; r: string; // hex string s: string; // hex string }; }; type ExecutionEvent = { from: string; to?: string; // Can be undefined for contract creation hash: string; status: ExitReason; }; function getExecutionEvent(extrinsic: SubstrateExtrinsic): ExecutionEvent { const executionEvent = extrinsic.events.find( (evt) => evt.event.section === 'ethereum' && evt.event.method === 'Executed' ); if (!executionEvent) { throw new Error('eth execution failed'); } const [from, to, hash, status] = executionEvent.event.data; return { from: from.toHex(), to: to.toHex(), hash: hash.toHex(), status: status as ExitReason, }; } async function getEtheruemBlockHash(api: ApiPromise, blockNumber: number): Promise<string> { return undefined; // This is too expensive to call for each call/event, we need to find a more efficient approach // In the mean time blockNumber can be used // See https://github.com/subquery/subql/issues/568 for more info const block = await api.rpc.eth.getBlockByNumber(blockNumber, false); return block.unwrap().blockHash.toHex(); } const contractInterfaces: Record<string, Interface> = {}; 
function buildInterface(ds: MoonbeamDatasource, assets: Record<string, string>): Interface | undefined { const abi = ds.processor?.options?.abi; if (!abi) { return; } if (!ds.assets?.get(abi)) { throw new Error(`ABI named "${abi}" not referenced in assets`); } // This assumes that all datasources have a different abi name or they are the same abi if (!contractInterfaces[abi]) { // Constructing the interface validates the ABI try { contractInterfaces[abi] = new Interface(assets[abi]); } catch (e) { (global as any).logger.error(`Unable to parse ABI: ${e.message}`); throw new Error('ABI is invalid'); } } return contractInterfaces[abi]; } const EventProcessor: SecondLayerHandlerProcessor< SubqlHandlerKind.Event, MoonbeamEventFilter, MoonbeamEvent, MoonbeamDatasource > = { baseFilter: [{module: 'evm', method: 'Log'}], baseHandlerKind: SubqlHandlerKind.Event, async transformer( original: SubstrateEvent, ds: MoonbeamDatasource, api: ApiPromise, assets: Record<string, string> ): Promise<MoonbeamEvent> { const [eventData] = original.event.data; const baseFilter = Array.isArray(EventProcessor.baseFilter) ? EventProcessor.baseFilter : [EventProcessor.baseFilter]; const evmEvents = original.extrinsic?.events.filter((evt) => baseFilter.find((filter) => filter.module === evt.event.section && filter.method === evt.event.method) ) ?? []; const {hash} = getExecutionEvent(original.extrinsic); // shouldn't fail here const log: MoonbeamEvent = { ...(eventData.toJSON() as unknown as RawEvent), blockNumber: original.block.block.header.number.toNumber(), blockHash: await getEtheruemBlockHash(api, original.block.block.header.number.toNumber()), blockTimestamp: original.block.timestamp, transactionIndex: original.extrinsic?.idx ?? 
-1, transactionHash: hash, removed: false, logIndex: evmEvents.indexOf(original), }; try { const iface = buildInterface(ds, assets); log.args = iface?.parseLog(log).args; } catch (e) { // TODO setup ts config with global defs (global as any).logger.warn(`Unable to parse log arguments, will be omitted from result: ${e.message}`); } return log; }, filterProcessor(filter: MoonbeamEventFilter | undefined, input: SubstrateEvent, ds: MoonbeamDatasource): boolean { const [eventData] = input.event.data; const rawEvent = eventData as EvmLog; if ( ds.processor?.options?.address && !stringNormalizedEq(ds.processor.options.address, rawEvent.address.toString()) ) { return false; } // Follows bloom filters https://docs.ethers.io/v5/concepts/events/#events--filters if (filter?.topics) { for (let i = 0; i < Math.min(filter.topics.length, 4); i++) { const topic = filter.topics[i]; if (!topic) { continue; } if (!hexStringEq(eventToTopic(topic), rawEvent.topics[i].toHex())) { return false; } } } return true; }, filterValidator(filter?: MoonbeamEventFilter): void { if (!filter) return; const filterCls = plainToClass(MoonbeamEventFilterImpl, filter); const errors = validateSync(filterCls, {whitelist: true, forbidNonWhitelisted: true}); if (errors?.length) { const errorMsgs = errors.map((e) => e.toString()).join('\n'); throw new Error(`Invalid Moonbeam event filter.\n${errorMsgs}`); } }, dictionaryQuery(filter: MoonbeamEventFilter, ds: MoonbeamDatasource): DictionaryQueryEntry { const queryEntry: DictionaryQueryEntry = { entity: 'evmLogs', conditions: [], }; if (ds.processor?.options?.address) { queryEntry.conditions.push({field: 'address', value: ds.processor?.options?.address}); } else { return; } // Follows bloom filters https://docs.ethers.io/v5/concepts/events/#events--filters if (filter?.topics) { for (let i = 0; i < Math.min(filter.topics.length, 4); i++) { const topic = filter.topics[i]; if (!topic) { continue; } const field = `topics${i}`; queryEntry.conditions.push({field, 
value: eventToTopic(topic)}); } } return queryEntry; }, }; const CallProcessor: SecondLayerHandlerProcessor< SubqlHandlerKind.Call, MoonbeamCallFilter, MoonbeamCall, MoonbeamDatasource > = { baseFilter: [{module: 'ethereum', method: 'transact'}], baseHandlerKind: SubqlHandlerKind.Call, async transformer( original: SubstrateExtrinsic, ds: MoonbeamDatasource, api: ApiPromise, assets: Record<string, string> ): Promise<MoonbeamCall> { const [tx] = original.extrinsic.method.args as [EthTransaction]; const rawTx = tx.toJSON() as unknown as RawTransaction; let from, hash, to, success; try { const executionEvent = getExecutionEvent(original); from = executionEvent.from; to = executionEvent.to; hash = executionEvent.hash; success = executionEvent.status.isSucceed; } catch (e) { success = false; } const call: MoonbeamCall = { // Transaction properties from, to, // when contract creation nonce: rawTx.nonce, gasLimit: BigNumber.from(rawTx.gasLimit), gasPrice: BigNumber.from(rawTx.gasPrice), data: rawTx.input, value: BigNumber.from(rawTx.value), chainId: undefined, // TODO ...rawTx.signature, // Transaction response properties hash, blockNumber: original.block.block.header.number.toNumber(), blockHash: await getEtheruemBlockHash(api, original.block.block.header.number.toNumber()), timestamp: Math.round(original.block.timestamp.getTime() / 1000), success, }; try { const iface = buildInterface(ds, assets); call.args = iface?.decodeFunctionData(iface.getFunction(hexDataSlice(call.data, 0, 4)), call.data); } catch (e) { // TODO setup ts config with global defs (global as any).logger.warn(`Unable to parse call arguments, will be omitted from result`); } return call; }, filterProcessor(filter: MoonbeamCallFilter | undefined, input: SubstrateExtrinsic, ds: MoonbeamDatasource): boolean { try { const {from, to} = getExecutionEvent(input); if (filter?.from && !stringNormalizedEq(filter.from, from)) { return false; } const [tx] = input.extrinsic.method.args as [EthTransaction]; // if `to` 
is null then we handle contract creation if ( (ds.processor?.options?.address && !stringNormalizedEq(ds.processor.options.address, to)) || (ds.processor?.options?.address === null && !tx.action.isCreate) ) { return false; } if (filter?.function && tx.input.toHex().indexOf(functionToSighash(filter.function)) !== 0) { return false; } return true; } catch (e) { (global as any).logger.warn('Unable to properly filter input'); return false; } }, filterValidator(filter?: MoonbeamCallFilter): void { if (!filter) return; const filterCls = plainToClass(MoonbeamCallFilterImpl, filter); const errors = validateSync(filterCls, {whitelist: true, forbidNonWhitelisted: true}); if (errors?.length) { const errorMsgs = errors.map((e) => e.toString()).join('\n'); throw new Error(`Invalid Moonbeam call filter.\n${errorMsgs}`); } }, dictionaryQuery(filter: MoonbeamCallFilter, ds: MoonbeamDatasource): DictionaryQueryEntry { const queryEntry: DictionaryQueryEntry = { entity: 'evmTransactions', conditions: [], }; if (ds.processor?.options?.address) { queryEntry.conditions.push({field: 'to', value: ds.processor?.options?.address}); } if (filter?.from) { queryEntry.conditions.push({field: 'from', value: filter?.from}); } if (filter?.function) { queryEntry.conditions.push({field: 'func', value: functionToSighash(filter.function)}); } return queryEntry; }, }; export const MoonbeamDatasourcePlugin: SubqlDatasourceProcessor< 'substrate/Moonbeam', SubqlNetworkFilter, MoonbeamDatasource > = { kind: 'substrate/Moonbeam', validate(ds: MoonbeamDatasource, assets: Record<string, string>): void { if (ds.processor.options) { const opts = plainToClass(MoonbeamProcessorOptions, ds.processor.options); const errors = validateSync(opts, {whitelist: true, forbidNonWhitelisted: true}); if (errors?.length) { const errorMsgs = errors.map((e) => e.toString()).join('\n'); throw new Error(`Invalid Moonbeam call filter.\n${errorMsgs}`); } } buildInterface(ds, assets); // Will throw if unable to construct return; }, 
dsFilterProcessor(ds: MoonbeamDatasource): boolean { return ds.kind === this.kind; }, handlerProcessors: { 'substrate/MoonbeamEvent': EventProcessor, 'substrate/MoonbeamCall': CallProcessor, }, }; export default MoonbeamDatasourcePlugin;
the_stack
import * as React from "react"; import * as data from "./data"; import * as sui from "./sui"; import * as simulator from "./simulator"; import * as screenshot from "./screenshot"; import * as qr from "./qr"; import { fireClickOnEnter } from "./util"; type ISettingsProps = pxt.editor.ISettingsProps; export enum ShareMode { Code, Url, Editor, Simulator } export interface ShareEditorProps extends ISettingsProps { loading?: boolean; } export enum ShareRecordingState { None, ScreenshotSnap, GifLoading, GifRecording, GifRendering } // This Component overrides shouldComponentUpdate, be sure to update that if the state is updated export interface ShareEditorState { mode?: ShareMode; pubId?: string; visible?: boolean; sharingError?: Error; loading?: boolean; projectName?: string; projectNameChanged?: boolean; thumbnails?: boolean; screenshotUri?: string; recordingState?: ShareRecordingState; recordError?: string; qrCodeUri?: string; qrCodeExpanded?: boolean; title?: string; } export class ShareEditor extends data.Component<ShareEditorProps, ShareEditorState> { private loanedSimulator: HTMLElement; private _gifEncoder: screenshot.GifEncoder; constructor(props: ShareEditorProps) { super(props); this.state = { pubId: undefined, visible: false, screenshotUri: undefined, recordingState: ShareRecordingState.None, recordError: undefined, title: undefined } this.hide = this.hide.bind(this); this.setAdvancedMode = this.setAdvancedMode.bind(this); this.handleProjectNameChange = this.handleProjectNameChange.bind(this); this.restartSimulator = this.restartSimulator.bind(this); this.handleRecordClick = this.handleRecordClick.bind(this); this.handleScreenshotClick = this.handleScreenshotClick.bind(this); this.handleScreenshotMessage = this.handleScreenshotMessage.bind(this); this.handleCreateGitHubRepository = this.handleCreateGitHubRepository.bind(this); this.handleQrCodeClick = this.handleQrCodeClick.bind(this); } hide() { if (this.state.qrCodeExpanded) { 
pxt.tickEvent('share.qrtoggle'); const { qrCodeExpanded } = this.state; this.setState({ qrCodeExpanded: !qrCodeExpanded }); return; } if (this._gifEncoder) { this._gifEncoder.cancel(); this._gifEncoder = undefined; } if (this.loanedSimulator) { simulator.driver.unloanSimulator(); this.loanedSimulator = undefined; this.props.parent.popScreenshotHandler(); simulator.driver.stopRecording(); } this.setState({ visible: false, screenshotUri: undefined, projectName: undefined, projectNameChanged: false, recordingState: ShareRecordingState.None, recordError: undefined, qrCodeUri: undefined, title: undefined }); } show(title?: string) { const { header } = this.props.parent.state; if (!header) return; // TODO investigate why edge does not render well // upon hiding dialog, the screen does not redraw properly const thumbnails = pxt.appTarget.cloud && pxt.appTarget.cloud.thumbnails && (pxt.appTarget.appTheme.simScreenshot || pxt.appTarget.appTheme.simGif); if (thumbnails) { this.loanedSimulator = simulator.driver.loanSimulator(); this.props.parent.pushScreenshotHandler(this.handleScreenshotMessage); } this.setState({ thumbnails, visible: true, mode: ShareMode.Code, pubId: undefined, sharingError: undefined, screenshotUri: undefined, qrCodeUri: undefined, qrCodeExpanded: false, title, projectName: header.name }, thumbnails ? 
(() => this.props.parent.startSimulator()) : undefined); } handleScreenshotMessage(msg: pxt.editor.ScreenshotData) { const { visible } = this.state; if (!msg || !visible) return; if (msg.event === "start") { switch (this.state.recordingState) { case ShareRecordingState.None: this.gifRecord(); break; default: // ignore break; } return; } else if (msg.event == "stop") { switch (this.state.recordingState) { case ShareRecordingState.GifRecording: this.gifRender(); break; default: // ignore break; } return; } if (this.state.recordingState == ShareRecordingState.GifRecording) { if (this._gifEncoder.addFrame(msg.data, msg.delay)) this.gifRender(); } else if (this.state.recordingState == ShareRecordingState.ScreenshotSnap || this.state.recordingState === ShareRecordingState.None) { // received a screenshot this.setState({ screenshotUri: pxt.BrowserUtils.imageDataToPNG(msg.data), recordingState: ShareRecordingState.None, recordError: undefined }) } else { // ignore // make sure simulator is stopped simulator.driver.stopRecording(); } } UNSAFE_componentWillReceiveProps(newProps: ShareEditorProps) { const newState: ShareEditorState = {} if (!this.state.projectNameChanged && newProps.parent.state.projectName != this.state.projectName) { newState.projectName = newProps.parent.state.projectName; } if (newProps.loading != this.state.loading) { newState.loading = newProps.loading; } if (Object.keys(newState).length > 0) { this.setState(newState); } } componentDidMount() { document.addEventListener("keydown", this.handleKeyDown); } componentWillUnmount() { document.removeEventListener("keydown", this.handleKeyDown); } shouldComponentUpdate(nextProps: ShareEditorProps, nextState: ShareEditorState, nextContext: any): boolean { return this.state.visible != nextState.visible || this.state.mode != nextState.mode || this.state.pubId != nextState.pubId || this.state.sharingError !== nextState.sharingError || this.state.projectName != nextState.projectName || this.state.projectNameChanged 
!= nextState.projectNameChanged || this.state.loading != nextState.loading || this.state.recordingState != nextState.recordingState || this.state.screenshotUri != nextState.screenshotUri || this.state.qrCodeUri != nextState.qrCodeUri || this.state.qrCodeExpanded != nextState.qrCodeExpanded || this.state.title != nextState.title ; } private setAdvancedMode(mode: ShareMode) { this.setState({ mode: mode }); } handleProjectNameChange(name: string) { this.setState({ projectName: name, projectNameChanged: true }); } restartSimulator() { pxt.tickEvent('share.restart', undefined, { interactiveConsent: true }); this.props.parent.restartSimulator(); } handleQrCodeClick(e: React.MouseEvent<HTMLImageElement>) { pxt.tickEvent('share.qrtoggle'); e.stopPropagation(); const { qrCodeExpanded } = this.state; this.setState({ qrCodeExpanded: !qrCodeExpanded }); } handleScreenshotClick() { pxt.tickEvent("share.takescreenshot", { view: 'computer', collapsedTo: '' + !this.props.parent.state.collapseEditorTools }, { interactiveConsent: true }); if (this.state.recordingState != ShareRecordingState.None) return; this.setState({ recordingState: ShareRecordingState.ScreenshotSnap, recordError: undefined }, () => this.screenshotAsync()); } screenshotAsync = () => { return this.props.parent.requestScreenshotAsync() .then(img => { const st: ShareEditorState = { recordingState: ShareRecordingState.None, recordError: undefined }; if (img) st.screenshotUri = img; else st.recordError = lf("Oops, screenshot failed. 
Please try again.") this.setState(st); }); } handleRecordClick() { switch (this.state.recordingState) { case ShareRecordingState.None: this.gifRecord(); break; case ShareRecordingState.GifRecording: this.gifRender(); break; default: // ignore break; } } private loadEncoderAsync(): Promise<screenshot.GifEncoder> { if (this._gifEncoder) return Promise.resolve(this._gifEncoder); return screenshot.loadGifEncoderAsync() .then(encoder => this._gifEncoder = encoder); } gifRecord() { pxt.tickEvent("share.gifrecord", { view: 'computer', collapsedTo: '' + !this.props.parent.state.collapseEditorTools }, { interactiveConsent: true }); if (this.state.recordingState != ShareRecordingState.None) return; this.setState({ recordingState: ShareRecordingState.GifLoading, screenshotUri: undefined }, () => this.loadEncoderAsync() .then(encoder => { if (!encoder) { this.setState({ recordingState: ShareRecordingState.None, recordError: lf("Oops, gif encoder could not load. Please try again.") }); } else { encoder.start(); const gifwidth = pxt.appTarget.appTheme.simGifWidth || 160; this.setState({ recordingState: ShareRecordingState.GifRecording }, () => simulator.driver.startRecording(gifwidth)); } }) .catch(e => { pxt.reportException(e); this.setState({ recordingState: ShareRecordingState.None, recordError: lf("Oops, gif recording failed. Please try again.") }); if (this._gifEncoder) { this._gifEncoder.cancel(); } }) ); } gifRender() { pxt.debug(`render gif`) simulator.driver.stopRecording(); if (!this._gifEncoder) return; this.setState({ recordingState: ShareRecordingState.GifRendering, recordError: undefined }, () => { this.props.parent.stopSimulator(); this._gifEncoder.renderAsync() .then(uri => { pxt.log(`gif: ${uri ? 
uri.length : 0} chars`) const maxSize = pxt.appTarget.appTheme.simScreenshotMaxUriLength; let recordError: string = undefined; if (uri) { if (maxSize && uri.length > maxSize) { pxt.tickEvent(`gif.toobig`, { size: uri.length }); uri = undefined; recordError = lf("Gif is too big, try recording a shorter time."); } else pxt.tickEvent(`gif.ok`, { size: uri.length }); } this.setState({ recordingState: ShareRecordingState.None, screenshotUri: uri, recordError }) // give a breather to the browser to render the gif pxt.Util.delay(1000).then(() => this.props.parent.startSimulator()); }) }); } handleCreateGitHubRepository() { pxt.tickEvent("share.github.create", undefined, { interactiveConsent: true }); this.hide(); this.props.parent.createGitHubRepositoryAsync(); } renderCore() { const { visible, projectName: newProjectName, loading, recordingState, screenshotUri, thumbnails, recordError, pubId, qrCodeUri, qrCodeExpanded, title, sharingError } = this.state; const targetTheme = pxt.appTarget.appTheme; const header = this.props.parent.state.header; const hideEmbed = !!targetTheme.hideShareEmbed || qrCodeExpanded; const socialOptions = targetTheme.socialOptions; const showSocialIcons = !!socialOptions && !pxt.BrowserUtils.isUwpEdge() && !qrCodeExpanded; const ready = !!pubId; let mode = this.state.mode; let url = ''; let embed = ''; let shareUrl = pxt.appTarget.appTheme.shareUrl || "https://makecode.com/"; if (!/\/$/.test(shareUrl)) shareUrl += '/'; let rootUrl = pxt.appTarget.appTheme.embedUrl if (!/\/$/.test(rootUrl)) rootUrl += '/'; const verPrefix = pxt.webConfig.verprefix || ''; if (header) { if (ready) { url = `${shareUrl}${pubId}`; let editUrl = `${rootUrl}${verPrefix}#pub:${pubId}`; switch (mode) { case ShareMode.Code: embed = pxt.docs.codeEmbedUrl(`${rootUrl}${verPrefix}`, pubId); break; case ShareMode.Editor: embed = pxt.docs.embedUrl(`${rootUrl}${verPrefix}`, "pub", pubId); break; case ShareMode.Simulator: let padding = '81.97%'; // TODO: parts aspect ratio let 
simulatorRunString = `${verPrefix}---run`; if (pxt.webConfig.runUrl) { if (pxt.webConfig.isStatic) { simulatorRunString = pxt.webConfig.runUrl; } else { // Always use live, not /beta etc. simulatorRunString = pxt.webConfig.runUrl.replace(pxt.webConfig.relprefix, "/---") } } if (pxt.appTarget.simulator) padding = (100 / pxt.appTarget.simulator.aspectRatio).toPrecision(4) + '%'; const runUrl = rootUrl + simulatorRunString.replace(/^\//, ''); embed = pxt.docs.runUrl(runUrl, padding, pubId); break; case ShareMode.Url: embed = editUrl; break; } } } const publish = () => { pxt.tickEvent("menu.embed.publish", undefined, { interactiveConsent: true }); this.setState({ sharingError: undefined, loading: true }); let p = Promise.resolve(); if (newProjectName && this.props.parent.state.projectName != newProjectName) { // save project name if we've made a change change p = this.props.parent.updateHeaderNameAsync(newProjectName); } // if screenshots are enabled, always take one if (targetTheme.simScreenshot && !screenshotUri) { p = p.then(this.screenshotAsync); } p.then(() => this.props.parent.anonymousPublishAsync(this.state.screenshotUri)) .then((id) => { this.setState({ pubId: id, qrCodeUri: undefined, qrCodeExpanded: false }); if (pxt.appTarget.appTheme.qrCode) qr.renderAsync(`${shareUrl}${id}`) .then(qruri => { if (this.state.pubId == id) // race this.setState({ qrCodeUri: qruri }); }); this.forceUpdate(); }) .catch((e: Error) => { pxt.tickEvent("menu.embed.error", { code: (e as any).statusCode }) this.setState({ pubId: undefined, sharingError: e, qrCodeUri: undefined, qrCodeExpanded: false }); }); this.forceUpdate(); } const formats = [ { mode: ShareMode.Code, label: lf("Code") }, { mode: ShareMode.Editor, label: lf("Editor") }, { mode: ShareMode.Simulator, label: lf("Simulator") }, ]; const action = !ready ? 
lf("Publish project") : undefined; const actionLoading = loading && !this.state.sharingError; let actions: sui.ModalButton[] = []; if (action) { actions.push({ label: action, onclick: publish, icon: 'share alternate', loading: actionLoading, className: 'primary', disabled: recordingState != ShareRecordingState.None }) } const light = !!pxt.options.light; const disclaimer = lf("You need to publish your project to share it or embed it in other web pages.") + " " + lf("You acknowledge having consent to publish this project."); const screenshotDisabled = actionLoading || recordingState != ShareRecordingState.None; const screenshotText = this.loanedSimulator && targetTheme.simScreenshotKey ? lf("Take Screenshot (shortcut: {0})", targetTheme.simScreenshotKey) : lf("Take Screenshot"); const screenshot = targetTheme.simScreenshot; const gif = !light && !!targetTheme.simGif; const isGifRecording = recordingState == ShareRecordingState.GifRecording; const isGifRendering = recordingState == ShareRecordingState.GifRendering; const gifIcon = isGifRecording ? "stop" : "circle"; const gifTitle = isGifRecording ? (targetTheme.simGifKey ? lf("Stop recording (shortcut: {0})", targetTheme.simGifKey) : lf("Stop recording")) : isGifRendering ? lf("Cancel rendering") : (targetTheme.simGifKey ? lf("Start recording (shortcut: {0})", targetTheme.simGifKey) : lf("Start recording")); const gifRecordingClass = isGifRecording ? "glow" : ""; const gifDisabled = actionLoading; const gifLoading = recordingState == ShareRecordingState.GifLoading || isGifRendering; const screenshotMessage = recordError ? recordError : isGifRecording ? lf("Recording in progress...") : isGifRendering ? lf("Rendering gif...") : undefined; const screenshotMessageClass = recordError ? 
"warning" : ""; const tooBigErrorSuggestGitHub = sharingError && (sharingError as any).statusCode === 413 && pxt.appTarget?.cloud?.cloudProviders?.github; const unknownError = sharingError && !tooBigErrorSuggestGitHub; const qrCodeFull = !!qrCodeUri && qrCodeExpanded; const classes = this.props.parent.createModalClasses("sharedialog"); return ( <sui.Modal isOpen={visible} className={classes} size={thumbnails ? "" : "small"} onClose={this.hide} dimmer={true} header={title || lf("Share Project")} closeIcon={true} buttons={actions} closeOnDimmerClick closeOnDocumentClick closeOnEscape> <div className={`ui form`}> {action && !this.loanedSimulator ? <div className="ui field"> <div> <sui.Input ref="filenameinput" placeholder={lf("Name")} autoFocus={!pxt.BrowserUtils.isMobile()} id={"projectNameInput"} ariaLabel={lf("Type a name for your project")} autoComplete={false} value={newProjectName || ''} onChange={this.handleProjectNameChange} /> </div> </div> : undefined} {action && this.loanedSimulator ? <div className="ui fields"> <div id="shareLoanedSimulator" className={`simulator ui six wide field landscape only ${gifRecordingClass}`}></div> <div className="ui ten wide field"> <sui.Input ref="filenameinput" placeholder={lf("Name")} autoFocus={!pxt.BrowserUtils.isMobile()} id={"projectNameInput"} ariaLabel={lf("Type a name for your project")} autoComplete={false} value={newProjectName || ''} onChange={this.handleProjectNameChange} /> <label></label> <div className="ui buttons landscape only"> <sui.Button icon="refresh" title={lf("Restart")} ariaLabel={lf("Restart")} onClick={this.restartSimulator} disabled={screenshotDisabled} /> {screenshot ? <sui.Button icon="camera" title={screenshotText} ariaLabel={screenshotText} onClick={this.handleScreenshotClick} disabled={screenshotDisabled} /> : undefined} {gif ? 
<sui.Button icon={gifIcon} title={gifTitle} loading={gifLoading} onClick={this.handleRecordClick} disabled={gifDisabled} /> : undefined} </div> {screenshotUri || screenshotMessage ? <div className={`ui ${screenshotMessageClass} segment landscape only`}>{ (screenshotUri && !screenshotMessage) ? <img className="ui small centered image" src={screenshotUri} alt={lf("Recorded gif")} /> : <p className="no-select">{screenshotMessage}</p>}</div> : undefined} <p className="ui tiny message info">{disclaimer}</p> </div> </div> : undefined} {action && !this.loanedSimulator ? <p className="ui tiny message info">{disclaimer}</p> : undefined} {tooBigErrorSuggestGitHub && <p className="ui orange inverted segment">{lf("Oops! Your project is too big. You can create a GitHub repository to share it.")} <sui.Button className="inverted basic" text={lf("Create")} icon="github" onClick={this.handleCreateGitHubRepository} /> </p>} {unknownError && <p className="ui red inverted segment">{lf("Oops! There was an error. Please ensure you are connected to the Internet and try again.")}</p>} {url && ready ? <div> {!qrCodeFull && <p>{lf("Your project is ready! Use the address below to share your projects.")}</p>} {!qrCodeFull && <sui.Input id="projectUri" class="mini" readOnly={true} lines={1} value={url} copy={true} autoFocus={!pxt.BrowserUtils.isMobile()} selectOnClick={true} aria-describedby="projectUriLabel" autoComplete={false} />} {!qrCodeFull && <label htmlFor="projectUri" id="projectUriLabel" className="accessible-hidden">{lf("This is the read-only internet address of your project.")}</label>} {!!qrCodeUri && <img className={`ui ${qrCodeFull ? "huge" : "small"} image ${qrCodeExpanded ? "centered" : "floated right"} button pixelart`} alt={lf("QR Code of the saved program")} src={qrCodeUri} onClick={this.handleQrCodeClick} title={lf("Click to expand or collapse.")} tabIndex={0} aria-label={lf("QR Code of the saved program")} onKeyDown={fireClickOnEnter}/>} {showSocialIcons ? 
<div className="social-icons"> <SocialButton url={url} ariaLabel="Facebook" type='facebook' heading={lf("Share on Facebook")} /> <SocialButton url={url} ariaLabel="Twitter" type='twitter' heading={lf("Share on Twitter")} /> {socialOptions.discourse ? <SocialButton url={url} icon={"comments"} ariaLabel={lf("Post to Forum")} type='discourse' heading={lf("Share on Forum")} /> : undefined} </div> : undefined} </div> : undefined} {(ready && !hideEmbed) && <div> <div className="ui divider"></div> <sui.ExpandableMenu title={lf("Embed")}> <sui.Menu pointing secondary> {formats.map(f => <EmbedMenuItem key={`tab${f.label}`} onClick={this.setAdvancedMode} currentMode={mode} {...f} />)} </sui.Menu> <sui.Field> <sui.Input id="embedCode" class="mini" readOnly={true} lines={4} value={embed} copy={ready} disabled={!ready} selectOnClick={true} autoComplete={false} /> <label htmlFor="embedCode" id="embedCodeLabel" className="accessible-hidden">{lf("This is the read-only code for the selected tab.")}</label> </sui.Field> </sui.ExpandableMenu> </div>} </div> </sui.Modal > ) } componentDidUpdate() { const container = document.getElementById("shareLoanedSimulator"); if (container && this.loanedSimulator && !this.loanedSimulator.parentNode) container.appendChild(this.loanedSimulator); } protected handleKeyDown = (e: KeyboardEvent) => { const { visible } = this.state; const targetTheme = pxt.appTarget.appTheme; const pressed = e.key.toLocaleLowerCase(); // Don't fire events if component is hidden or if they are typing in a name if (!visible || (document.activeElement && document.activeElement.tagName === "INPUT")) return; if (targetTheme.simScreenshotKey && pressed === targetTheme.simScreenshotKey.toLocaleLowerCase()) { this.handleScreenshotClick(); } else if (targetTheme.simGifKey && pressed === targetTheme.simGifKey.toLocaleLowerCase()) { this.handleRecordClick(); } } } interface SocialButtonProps { url?: string; type?: "facebook" | "twitter" | "discourse"; icon?: string; // override 
label?: string;
    ariaLabel?: string;
    heading?: string;
}

/** Button for sharing the project URL on one social network; opens a share popup. */
class SocialButton extends data.Component<SocialButtonProps, {}> {
    constructor(props: SocialButtonProps) {
        super(props);
        this.state = {
        }

        this.handleClick = this.handleClick.bind(this);
    }

    handleClick(e: React.MouseEvent<any>) {
        const { type, url: shareUrl, heading } = this.props;
        const socialOptions = pxt.appTarget.appTheme.socialOptions;
        pxt.tickEvent(`share.${type}`, undefined, { interactiveConsent: true })

        // Build the network-specific share URL for the published project link.
        let url = '';
        switch (type) {
            case "facebook": {
                url = `https://www.facebook.com/sharer/sharer.php?u=${encodeURIComponent(shareUrl)}`;
                break;
            }
            case "twitter": {
                // Mention the target's and/or org's handle when configured.
                let twitterText = lf("Check out what I made!");
                if (socialOptions.twitterHandle && socialOptions.orgTwitterHandle) {
                    twitterText = lf("Check out what I made with @{0} and @{1}!", socialOptions.twitterHandle, socialOptions.orgTwitterHandle);
                } else if (socialOptions.twitterHandle) {
                    twitterText = lf("Check out what I made with @{0}!", socialOptions.twitterHandle);
                } else if (socialOptions.orgTwitterHandle) {
                    twitterText = lf("Check out what I made with @{0}!", socialOptions.orgTwitterHandle);
                }
                url = `https://twitter.com/intent/tweet?url=${encodeURIComponent(shareUrl)}` +
                    `&text=${encodeURIComponent(twitterText)}` +
                    (socialOptions.hashtags ? `&hashtags=${encodeURIComponent(socialOptions.hashtags)}` : '') +
                    (socialOptions.related ? `&related=${encodeURIComponent(socialOptions.related)}` : '');
                break;
            }
            case "discourse": {
                // https://meta.discourse.org/t/compose-a-new-pre-filled-topic-via-url/28074
                url = `${socialOptions.discourse || "https://forum.makecode.com/"}new-topic?title=${encodeURIComponent(shareUrl)}`;
                if (socialOptions.discourseCategory)
                    url += `&category=${encodeURIComponent(socialOptions.discourseCategory)}`;
                break;
            }
        }
        pxt.BrowserUtils.popupWindow(url, heading, 600, 600);
        e.preventDefault();
    }

    renderCore() {
        const { type, label, ariaLabel, icon } = this.props;
        return <a role="button" className={`ui button large ${label ? "labeled" : ""} icon ${type}`} tabIndex={0} aria-label={ariaLabel} onClick={this.handleClick}
            onKeyDown={fireClickOnEnter}><sui.Icon icon={icon || type} />{label}</a>
    }
}

interface EmbedMenuItemProps {
    label: string;
    mode: ShareMode;
    currentMode: ShareMode;
    onClick: (mode: ShareMode) => void;
}

/** One tab of the "Embed" menu; reports its ShareMode to the parent on click. */
class EmbedMenuItem extends sui.StatelessUIElement<EmbedMenuItemProps> {
    constructor(props: EmbedMenuItemProps) {
        super(props);
        this.handleClick = this.handleClick.bind(this);
    }

    handleClick() {
        this.props.onClick(this.props.mode);
    }

    renderCore() {
        const { label, mode, currentMode } = this.props;
        return <sui.MenuItem id={`tab${mode}`} active={currentMode == mode} tabIndex={0} name={label} onClick={this.handleClick} />
    }
}
the_stack
import { BoxProps, Slider, Typography, useTheme } from "@material-ui/core";
import classNames from "classnames";
import Map from "components/Map";
import MapSearch from "components/MapSearch";
import TextField from "components/TextField";
import maplibregl, {
  GeoJSONSource,
  LngLat,
  MapMouseEvent,
  MapTouchEvent,
} from "maplibre-gl";
import React, { useRef, useState } from "react";
import makeStyles from "utils/makeStyles";
import { userLocationMaxRadius, userLocationMinRadius } from "../constants";
import {
  DISPLAY_LOCATION,
  DISPLAY_LOCATION_NOT_EMPTY,
  getRadiusText,
  INVALID_COORDINATE,
  LOCATION_ACCURACY,
  LOCATION_PUBLICLY_VISIBLE,
  LOCATION_WARN,
  MAP_IS_BLANK,
} from "./constants";

const useStyles = makeStyles({
  root: {
    margin: "auto",
    maxWidth: 700,
  },
  map: {
    height: 400,
    position: "relative",
  },
  grow: {
    height: "100%",
    width: "100%",
  },
  displayLocation: {
    width: "100%",
  },
});

/** A user-visible address plus a coordinate and a fuzzing radius (metres). */
export interface ApproximateLocation {
  address: string;
  lat: number;
  lng: number;
  radius: number;
}

export interface EditLocationMapProps extends BoxProps {
  initialLocation?: ApproximateLocation;
  // this function is called on mouse release; null means "no valid location yet"
  updateLocation: (value: ApproximateLocation | null) => void;
  grow?: boolean;
  // whether to show the radius slider
  showRadiusSlider?: boolean;
  // whether we are selecting an exact point (for pages, etc) or approx circle, doesn't make sense with radius slider
  exact?: boolean;
}

/**
 * Map widget for picking an approximate (circle) or exact (point) location.
 * The map is driven imperatively through refs; React state is used only for
 * the error message and label shrinking.
 */
export default function EditLocationMap({
  initialLocation,
  updateLocation,
  className,
  grow,
  showRadiusSlider,
  exact,
  ...otherProps
}: EditLocationMapProps) {
  const classes = useStyles();
  const theme = useTheme();
  const [error, setError] = useState("");

  const map = useRef<maplibregl.Map | null>(null);

  // map is imperative so these don't need to cause re-render
  const location = useRef<ApproximateLocation>({
    address: initialLocation?.address ?? "",
    // exact mode uses a zero radius; otherwise default to 250 m
    radius: initialLocation?.radius ?? (exact ? 0 : 250),
    lat: initialLocation?.lat ?? 0,
    lng: initialLocation?.lng ??
0, }); // have not selected a location in any way yet const isBlank = useRef<boolean>( !(initialLocation?.lng || initialLocation?.lat) ); const locationDisplayRef = useRef<HTMLInputElement>(null); const [shrinkLabel, setShrinkLabel] = useState( location.current.address !== "" ); const onCircleMouseDown = (e: MapMouseEvent | MapTouchEvent) => { if (!map.current) return; // Prevent the default map drag behavior. e.preventDefault(); map.current.getCanvas().style.cursor = "grab"; if (e.type === "touchstart") { const handleTouchMove = (e: MapTouchEvent) => onCircleMove(e); map.current.on("touchmove", handleTouchMove); map.current.once("touchend", (e) => handleCoordinateMoved(e, handleTouchMove) ); } else { const handleMove = (e: MapMouseEvent) => onCircleMove(e); map.current.on("mousemove", handleMove); map.current.once("mouseup", (e) => handleCoordinateMoved(e, handleMove)); } }; const onCircleMove = (e: MapMouseEvent | MapTouchEvent) => { const wrapped = e.lngLat.wrap(); commit( { lat: wrapped.lat, lng: wrapped.lng, }, false ); redrawMap(); }; const handleCoordinateMoved = ( e: MapMouseEvent | MapTouchEvent, moveHandler: (x: any) => void = () => null ) => { if (!map.current) return; map.current.off("mousemove", moveHandler); map.current.off("touchmove", moveHandler); map.current.getCanvas().style.cursor = "move"; const wrapped = e.lngLat.wrap(); commit({ lat: wrapped.lat, lng: wrapped.lng, }); if (!isBlank.current) { map.current.setLayoutProperty("circle", "visibility", "visible"); } redrawMap(); }; const redrawMap = () => { if (!map.current) return; if (!exact) { (map.current.getSource("circle") as GeoJSONSource).setData( circleGeoJson(extractLngLat(location.current), location.current.radius) ); } else { (map.current.getSource("circle") as GeoJSONSource).setData( pointGeoJson(extractLngLat(location.current)) ); } }; const commit = ( updates: Partial<ApproximateLocation>, shouldUpdate = true ) => { const addressNotEmpty = !!updates.address; if (updates.address !== 
undefined) { setShrinkLabel(addressNotEmpty); location.current.address = updates.address; } if (updates.radius !== undefined && !exact) { location.current.radius = updates.radius; } if (updates.lat !== undefined && updates.lng !== undefined) { location.current.lat = updates.lat; location.current.lng = updates.lng; isBlank.current = false; } if (shouldUpdate) { if (isBlank.current) { // haven't selected a location yet setError(addressNotEmpty ? MAP_IS_BLANK : ""); updateLocation(null); } else if (location.current.lat === 0 && location.current.lng === 0) { // somehow have lat/lng == 0 setError(INVALID_COORDINATE); updateLocation(null); } else if (location.current.address === "") { // missing display address setError(DISPLAY_LOCATION_NOT_EMPTY); setShrinkLabel(false); updateLocation(null); } else { setError(""); setShrinkLabel(true); updateLocation({ ...location.current }); } } }; const initializeMap = (mapRef: maplibregl.Map) => { map.current = mapRef; map.current.once("load", () => { if (!map.current) return; if (!exact) { map.current.addSource("circle", { data: circleGeoJson( extractLngLat(location.current), location.current.radius ), type: "geojson", }); map.current.addLayer({ id: "circle", layout: { visibility: isBlank.current ? "none" : "visible", }, paint: { "fill-color": theme.palette.primary.main, "fill-opacity": 0.5, }, source: "circle", type: "fill", }); } else { map.current.addSource("circle", { data: pointGeoJson(extractLngLat(location.current)), type: "geojson", }); map.current.addLayer({ id: "circle", layout: { visibility: isBlank.current ? 
"none" : "visible", }, paint: { "circle-color": theme.palette.primary.main, "circle-radius": 8, "circle-stroke-color": "#fff", "circle-stroke-width": 1, }, source: "circle", type: "circle", }); } // if no user is specified, ask to get the location from browser if (!initialLocation && navigator.geolocation) { navigator.geolocation.getCurrentPosition((position) => { flyToSearch( new LngLat(position.coords.longitude, position.coords.latitude) ); }); } }); const onDblClick = (e: MapMouseEvent & maplibregl.EventData) => { e.preventDefault(); handleCoordinateMoved(e); }; map.current.on("dblclick", onDblClick); const onCircleTouch = ( e: MapTouchEvent & { features?: maplibregl.MapboxGeoJSONFeature[] | undefined; } & maplibregl.EventData ) => { if (e.points.length !== 1) return; onCircleMouseDown(e); }; map.current.on("mousedown", "circle", onCircleMouseDown); map.current.on("touchstart", "circle", onCircleTouch); const canvas = map.current.getCanvas(); const setCursorMove = () => (canvas.style.cursor = "move"); const unsetCursor = () => (canvas.style.cursor = ""); map.current.on("mouseenter", "circle", setCursorMove); map.current.on("mouseleave", "circle", unsetCursor); }; const flyToSearch = (coords: LngLat) => { if (!map.current) return; map.current.flyTo({ center: coords, zoom: 12.5 }); if (!exact) { const randomizedLocation = displaceLngLat( coords, Math.random() * location.current.radius, Math.random() * 2 * Math.PI ); handleCoordinateMoved({ lngLat: randomizedLocation, } as MapMouseEvent); } else { handleCoordinateMoved({ lngLat: coords, } as MapMouseEvent); } }; return ( <> <div className={classNames( classes.root, { [classes.grow]: grow }, className )} > <div className={classNames(classes.map)}> <Map // (10, 35, 0.5) is just a pretty view initialZoom={isBlank.current ? 0.5 : 12.5} initialCenter={ isBlank.current ? 
new LngLat(10, 35) : extractLngLat(location.current) } postMapInitialize={initializeMap} grow {...otherProps} /> <MapSearch setError={setError} setResult={(coordinate, _, simplified) => { commit({ address: simplified }, false); if (locationDisplayRef.current) { locationDisplayRef.current.value = simplified; setShrinkLabel(true); } flyToSearch(coordinate); }} /> </div> {showRadiusSlider && ( <RadiusSlider commit={commit} initialRadius={location.current.radius} redrawMap={redrawMap} /> )} <TextField defaultValue={location.current.address} onChange={(e) => { commit({ address: e.target.value }); }} error={error !== ""} id="display-address" inputRef={locationDisplayRef} InputLabelProps={{ shrink: shrinkLabel }} fullWidth variant="standard" label={DISPLAY_LOCATION} helperText={error !== "" ? error : LOCATION_PUBLICLY_VISIBLE} onFocus={() => setShrinkLabel(true)} onBlur={() => !location.current.address && setShrinkLabel(false)} /> </div> </> ); } interface RadiusSliderProps { commit(updates: Partial<ApproximateLocation>, shouldUpdate?: boolean): void; initialRadius: number; redrawMap(): void; } function RadiusSlider({ commit, initialRadius, redrawMap }: RadiusSliderProps) { const [radius, setRadius] = useState(initialRadius); return ( <> <Typography variant="body2" gutterBottom> {LOCATION_WARN} </Typography> <Typography id="location-radius" gutterBottom> {LOCATION_ACCURACY} </Typography> <Slider aria-labelledby="location-radius" aria-valuetext={getRadiusText(radius)} value={radius} step={5} min={userLocationMinRadius} max={userLocationMaxRadius} onChange={(_, value) => { setRadius(value as number); commit({ radius: value as number }, false); redrawMap(); }} onChangeCommitted={(_, value) => { commit({ radius: value as number }); redrawMap(); }} /> </> ); } function extractLngLat(loc: ApproximateLocation): LngLat { return new LngLat(loc.lng, loc.lat); } function pointGeoJson( coords: LngLat ): GeoJSON.FeatureCollection<GeoJSON.Geometry> { return { features: [ { geometry: { 
coordinates: coords.toArray(),
          type: "Point",
        },
        properties: {},
        type: "Feature",
      },
    ],
    type: "FeatureCollection",
  };
}

/**
 * Builds a GeoJSON polygon approximating a circle of `radius` metres
 * centred on `coords`, used to render the approximate-location area.
 */
function circleGeoJson(
  coords: LngLat,
  radius: number
): GeoJSON.FeatureCollection<GeoJSON.Geometry> {
  return {
    features: [
      {
        geometry: {
          //create a circle of 60 points
          coordinates: [
            [
              ...Array(60)
                .fill(0)
                .map((_, index) => {
                  return displaceLngLat(
                    coords,
                    radius,
                    (index * 2 * Math.PI) / 60
                  ).toArray();
                }),
              // repeat the first point to close the linear ring
              displaceLngLat(coords, radius, 0).toArray(),
            ],
          ],
          type: "Polygon",
        },
        properties: {},
        type: "Feature",
      },
    ],
    type: "FeatureCollection",
  };
}

/**
 * Returns `coords` displaced by `distance` metres in the direction `angle`
 * (radians; angle 0 displaces due north since lat uses cos, lng uses sin).
 * Uses an equirectangular approximation, correcting longitude by cos(lat).
 */
function displaceLngLat(coords: LngLat, distance: number, angle: number) {
  // see https://gis.stackexchange.com/a/2964
  // 111111 m ~ 1 degree
  let lat = coords.lat + (1 / 111111) * distance * Math.cos(angle);
  let lng =
    coords.lng +
    (1 / (111111 * Math.cos((coords.lat / 360) * 2 * Math.PI))) *
      distance *
      Math.sin(angle);
  return new LngLat(lng, lat);
}
the_stack
import {
  PipelineResponse,
  PipelineRequest,
  SendRequest,
  PipelinePolicy,
  RestError,
} from "@azure/core-rest-pipeline";
import {
  OperationRequest,
  OperationResponseMap,
  FullOperationResponse,
  OperationSpec,
  SerializerOptions,
  XmlOptions,
  XML_CHARKEY,
  RequiredSerializerOptions,
} from "./interfaces";
import { MapperTypeNames } from "./serializer";
import { getOperationRequestInfo } from "./operationHelpers";

// Content types treated as JSON / XML when the caller does not override them.
const defaultJsonContentTypes = ["application/json", "text/json"];
const defaultXmlContentTypes = ["application/xml", "application/atom+xml"];

/**
 * The programmatic identifier of the deserializationPolicy.
 */
export const deserializationPolicyName = "deserializationPolicy";

/**
 * Options to configure API response deserialization.
 */
export interface DeserializationPolicyOptions {
  /**
   * Configures the expected content types for the deserialization of
   * JSON and XML response bodies.
   */
  expectedContentTypes?: DeserializationContentTypes;

  /**
   * A function that is able to parse XML. Required for XML support.
   */
  parseXML?: (str: string, opts?: XmlOptions) => Promise<any>;

  /**
   * Configures behavior of xml parser and builder.
   */
  serializerOptions?: SerializerOptions;
}

/**
 * The content-types that will indicate that an operation response should be deserialized in a
 * particular way.
 */
export interface DeserializationContentTypes {
  /**
   * The content-types that indicate that an operation response should be deserialized as JSON.
   * Defaults to [ "application/json", "text/json" ].
   */
  json?: string[];

  /**
   * The content-types that indicate that an operation response should be deserialized as XML.
   * Defaults to [ "application/xml", "application/atom+xml" ].
   */
  xml?: string[];
}

/**
 * This policy handles parsing out responses according to OperationSpecs on the request.
 */
export function deserializationPolicy(options: DeserializationPolicyOptions = {}): PipelinePolicy {
  // Fall back to the module-level defaults when the caller supplies no overrides.
  const jsonContentTypes = options.expectedContentTypes?.json ??
defaultJsonContentTypes;
  const xmlContentTypes = options.expectedContentTypes?.xml ?? defaultXmlContentTypes;
  const parseXML = options.parseXML;
  const serializerOptions = options.serializerOptions;
  // Normalize the (optional) serializer options into the fully-populated shape
  // the deserializer expects, applying documented XML defaults.
  const updatedOptions: RequiredSerializerOptions = {
    xml: {
      rootName: serializerOptions?.xml.rootName ?? "",
      includeRoot: serializerOptions?.xml.includeRoot ?? false,
      xmlCharKey: serializerOptions?.xml.xmlCharKey ?? XML_CHARKEY,
    },
  };

  return {
    name: deserializationPolicyName,
    // Pipeline hook: let the request complete, then deserialize the body.
    async sendRequest(request: PipelineRequest, next: SendRequest): Promise<PipelineResponse> {
      const response = await next(request);
      return deserializeResponseBody(
        jsonContentTypes,
        xmlContentTypes,
        response,
        updatedOptions,
        parseXML
      );
    },
  };
}

/**
 * Looks up the OperationResponseMap for this response's status code from the
 * OperationSpec attached to the request, honoring a custom
 * operationResponseGetter when one was registered.
 * Returns undefined when the request carries no operation spec or the status
 * code has no matching entry.
 */
function getOperationResponseMap(
  parsedResponse: PipelineResponse
): undefined | OperationResponseMap {
  let result: OperationResponseMap | undefined;
  const request: OperationRequest = parsedResponse.request;
  const operationInfo = getOperationRequestInfo(request);
  const operationSpec = operationInfo?.operationSpec;
  if (operationSpec) {
    if (!operationInfo?.operationResponseGetter) {
      result = operationSpec.responses[parsedResponse.status];
    } else {
      result = operationInfo?.operationResponseGetter(operationSpec, parsedResponse);
    }
  }
  return result;
}

/**
 * Decides whether the response body should be deserialized.
 * The per-request `shouldDeserialize` flag may be absent (default: true),
 * a boolean, or a predicate taking the parsed response.
 */
function shouldDeserializeResponse(parsedResponse: PipelineResponse): boolean {
  const request: OperationRequest = parsedResponse.request;
  const operationInfo = getOperationRequestInfo(request);
  const shouldDeserialize = operationInfo?.shouldDeserialize;
  let result: boolean;
  if (shouldDeserialize === undefined) {
    result = true;
  } else if (typeof shouldDeserialize === "boolean") {
    result = shouldDeserialize;
  } else {
    result = shouldDeserialize(parsedResponse);
  }
  return result;
}

/**
 * Parses the raw body (JSON or XML) and then deserializes body and headers
 * according to the OperationSpec attached to the request.
 * Throws a RestError for unexpected/error status codes or deserialization failures.
 */
async function deserializeResponseBody(
  jsonContentTypes: string[],
  xmlContentTypes: string[],
  response: PipelineResponse,
  options: RequiredSerializerOptions,
  parseXML?: (str: string, opts?: XmlOptions) => Promise<any>
): Promise<PipelineResponse>
{
  // First, turn bodyAsText into parsedBody (JSON.parse or parseXML).
  const parsedResponse = await parse(
    jsonContentTypes,
    xmlContentTypes,
    response,
    options,
    parseXML
  );
  if (!shouldDeserializeResponse(parsedResponse)) {
    return parsedResponse;
  }

  // Without an operation spec there is nothing further to deserialize against.
  const operationInfo = getOperationRequestInfo(parsedResponse.request);
  const operationSpec = operationInfo?.operationSpec;
  if (!operationSpec || !operationSpec.responses) {
    return parsedResponse;
  }

  const responseSpec = getOperationResponseMap(parsedResponse);

  // Classify the status code: an error here is thrown; shouldReturnResponse
  // short-circuits mapper-based deserialization below.
  const { error, shouldReturnResponse } = handleErrorResponse(
    parsedResponse,
    operationSpec,
    responseSpec
  );
  if (error) {
    throw error;
  } else if (shouldReturnResponse) {
    return parsedResponse;
  }

  // An operation response spec does exist for current status code, so
  // use it to deserialize the response.
  if (responseSpec) {
    if (responseSpec.bodyMapper) {
      let valueToDeserialize: any = parsedResponse.parsedBody;
      // XML sequences live under their element name rather than at the root.
      if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperTypeNames.Sequence) {
        valueToDeserialize =
          typeof valueToDeserialize === "object"
            ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]
: [];
      }
      try {
        parsedResponse.parsedBody = operationSpec.serializer.deserialize(
          responseSpec.bodyMapper,
          valueToDeserialize,
          "operationRes.parsedBody"
        );
      } catch (deserializeError) {
        // Wrap mapper failures in a RestError that carries the raw body for diagnosis.
        const restError = new RestError(
          `Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,
          {
            statusCode: parsedResponse.status,
            request: parsedResponse.request,
            response: parsedResponse,
          }
        );
        throw restError;
      }
    } else if (operationSpec.httpMethod === "HEAD") {
      // head methods never have a body, but we return a boolean to indicate presence/absence of the resource
      parsedResponse.parsedBody = response.status >= 200 && response.status < 300;
    }

    if (responseSpec.headersMapper) {
      parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(
        responseSpec.headersMapper,
        parsedResponse.headers.toJSON(),
        "operationRes.parsedHeaders"
      );
    }
  }
  return parsedResponse;
}

/**
 * True when the operation spec declares no per-status responses (empty, or
 * only a "default" entry) — in that case plain 2xx success is assumed.
 */
function isOperationSpecEmpty(operationSpec: OperationSpec): boolean {
  const expectedStatusCodes = Object.keys(operationSpec.responses);
  return (
    expectedStatusCodes.length === 0 ||
    (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")
  );
}

/**
 * Determines whether the response represents an error and, if so, builds a
 * RestError enriched with whatever the error spec (or the "default" spec)
 * can deserialize out of the body and headers.
 * Returns { error: null, shouldReturnResponse: false } for expected,
 * non-error status codes.
 */
function handleErrorResponse(
  parsedResponse: FullOperationResponse,
  operationSpec: OperationSpec,
  responseSpec: OperationResponseMap | undefined
): { error: RestError | null; shouldReturnResponse: boolean } {
  const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;
  // With an "empty" spec any 2xx counts as expected; otherwise the status must
  // have an explicit entry in operationSpec.responses.
  const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)
    ? isSuccessByStatus
    : !!responseSpec;
  if (isExpectedStatusCode) {
    if (responseSpec) {
      if (!responseSpec.isError) {
        return { error: null, shouldReturnResponse: false };
      }
    } else {
      return { error: null, shouldReturnResponse: false };
    }
  }

  // Unexpected status (or an expected one flagged isError): construct the error.
  const errorResponseSpec = responseSpec ?? operationSpec.responses.default;
  // Streamed responses have no usable bodyAsText, so use a generic message.
  const initialErrorMessage = parsedResponse.request.streamResponseStatusCodes?.has(
    parsedResponse.status
  )
    ?
`Unexpected status code: ${parsedResponse.status}`
    : (parsedResponse.bodyAsText as string);

  const error = new RestError(initialErrorMessage, {
    statusCode: parsedResponse.status,
    request: parsedResponse.request,
    response: parsedResponse,
  });

  // If the item failed but there's no error spec or default spec to deserialize the error,
  // we should fail so we just throw the parsed response
  if (!errorResponseSpec) {
    throw error;
  }

  const defaultBodyMapper = errorResponseSpec.bodyMapper;
  const defaultHeadersMapper = errorResponseSpec.headersMapper;

  try {
    // If error response has a body, try to deserialize it using default body mapper.
    // Then try to extract error code & message from it
    if (parsedResponse.parsedBody) {
      const parsedBody = parsedResponse.parsedBody;
      let deserializedError;

      if (defaultBodyMapper) {
        let valueToDeserialize: any = parsedBody;
        // XML sequence bodies are nested under their element name; default to []
        // when the element is absent.
        if (operationSpec.isXML && defaultBodyMapper.type.name === MapperTypeNames.Sequence) {
          valueToDeserialize = [];
          const elementName = defaultBodyMapper.xmlElementName;
          if (typeof parsedBody === "object" && elementName) {
            valueToDeserialize = parsedBody[elementName];
          }
        }
        deserializedError = operationSpec.serializer.deserialize(
          defaultBodyMapper,
          valueToDeserialize,
          "error.response.parsedBody"
        );
      }

      // Prefer an explicit `error` property, then the mapped error, then the raw body.
      const internalError: any = parsedBody.error || deserializedError || parsedBody;
      error.code = internalError.code;
      if (internalError.message) {
        error.message = internalError.message;
      }

      if (defaultBodyMapper) {
        (error.response! as FullOperationResponse).parsedBody = deserializedError;
      }
    }

    // If error response has headers, try to deserialize it using default header mapper
    if (parsedResponse.headers && defaultHeadersMapper) {
      (error.response!
as FullOperationResponse).parsedHeaders = operationSpec.serializer.deserialize(
        defaultHeadersMapper,
        parsedResponse.headers.toJSON(),
        "operationRes.parsedHeaders"
      );
    }
  } catch (defaultError) {
    // Deserializing the error itself failed: keep the RestError but explain why.
    error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`;
  }

  return { error, shouldReturnResponse: false };
}

/**
 * Parses `bodyAsText` into `parsedBody` based on the Content-Type header:
 * JSON via JSON.parse, XML via the injected parseXML. Streamed responses
 * (status codes in streamResponseStatusCodes) and empty bodies are left alone.
 * An absent/unrecognized Content-Type is treated as JSON.
 * Parse failures are rethrown as RestError with code PARSE_ERROR (unless the
 * underlying error supplies its own code).
 */
async function parse(
  jsonContentTypes: string[],
  xmlContentTypes: string[],
  operationResponse: FullOperationResponse,
  opts: RequiredSerializerOptions,
  parseXML?: (str: string, opts?: XmlOptions) => Promise<any>
): Promise<FullOperationResponse> {
  if (
    !operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) &&
    operationResponse.bodyAsText
  ) {
    const text = operationResponse.bodyAsText;
    const contentType: string = operationResponse.headers.get("Content-Type") || "";
    // "application/json; charset=utf-8" -> ["application/json", " charset=utf-8"]
    const contentComponents: string[] = !contentType
      ? []
      : contentType.split(";").map((component) => component.toLowerCase());

    try {
      if (
        contentComponents.length === 0 ||
        contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)
      ) {
        operationResponse.parsedBody = JSON.parse(text);
        return operationResponse;
      } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {
        if (!parseXML) {
          throw new Error("Parsing XML not supported.");
        }
        const body = await parseXML(text, opts.xml);
        operationResponse.parsedBody = body;
        return operationResponse;
      }
    } catch (err) {
      const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;
      const errCode = err.code || RestError.PARSE_ERROR;
      const e = new RestError(msg, {
        code: errCode,
        statusCode: operationResponse.status,
        request: operationResponse.request,
        response: operationResponse,
      });
      throw e;
    }
  }

  return operationResponse;
}
the_stack
import * as tg from './core/types/typegram'
import * as tt from './telegram-types'
import { Deunionize, PropOr, UnionKeys } from './deunionize'
import ApiClient from './core/network/client'
import Telegram from './telegram'

// Drops the first element of a tuple type: Tail<[A, B, C]> = [B, C].
type Tail<T> = T extends [unknown, ...infer U] ? U : never

// Parameters of a Telegram API method minus its leading id argument, so
// context shorthands (e.g. ctx.reply) can forward the rest.
type Shorthand<FName extends Exclude<keyof Telegram, keyof ApiClient>> = Tail<
  Parameters<Telegram[FName]>
>

export class Context<U extends Deunionize<tg.Update> = tg.Update> {
  // Arbitrary per-update scratch storage for middleware to share.
  readonly state: Record<string | symbol, any> = {}

  constructor(
    readonly update: U,
    readonly tg: Telegram,
    readonly botInfo: tg.UserFromGetMe
  ) {}

  // The single object-valued key of the update (e.g. 'message',
  // 'callback_query'). Throws if zero or multiple candidates are found.
  get updateType() {
    const types = Object.keys(this.update).filter(
      (k) => typeof this.update[k as keyof U] === 'object'
    )
    if (types.length !== 1) {
      throw new Error(
        `Cannot determine \`updateType\` of ${JSON.stringify(this.update)}`
      )
    }
    return types[0] as UpdateTypes<U>
  }

  // The bot's own username (from getMe).
  get me() {
    return this.botInfo?.username
  }

  get telegram() {
    return this.tg
  }

  // The getters below each expose one optional field of the raw update,
  // narrowed via PropOr to the concrete update type U.
  get message() {
    return this.update.message as PropOr<U, 'message'>
  }

  get editedMessage() {
    return this.update.edited_message as PropOr<U, 'edited_message'>
  }

  get inlineQuery() {
    return this.update.inline_query as PropOr<U, 'inline_query'>
  }

  get shippingQuery() {
    return this.update.shipping_query as PropOr<U, 'shipping_query'>
  }

  get preCheckoutQuery() {
    return this.update.pre_checkout_query as PropOr<U, 'pre_checkout_query'>
  }

  get chosenInlineResult() {
    return this.update.chosen_inline_result as PropOr<U, 'chosen_inline_result'>
  }

  get channelPost() {
    return this.update.channel_post as PropOr<U, 'channel_post'>
  }

  get editedChannelPost() {
    return this.update.edited_channel_post as PropOr<U, 'edited_channel_post'>
  }

  get callbackQuery() {
    return this.update.callback_query as PropOr<U, 'callback_query'>
  }

  get poll() {
    return this.update.poll as PropOr<U, 'poll'>
  }

  get pollAnswer() {
    return this.update.poll_answer as PropOr<U, 'poll_answer'>
  }

  get myChatMember() {
    return this.update.my_chat_member as PropOr<U, 'my_chat_member'>
  }

  get
chatMember() { return this.update.chat_member as PropOr<U, 'chat_member'> } get chatJoinRequest() { return this.update.chat_join_request } get chat(): Getter<U, 'chat'> { return ( this.chatMember ?? this.myChatMember ?? this.chatJoinRequest ?? getMessageFromAnySource(this) )?.chat as Getter<U, 'chat'> } get senderChat() { return getMessageFromAnySource(this)?.sender_chat as Getter< U, 'sender_chat' > } get from() { return ( this.callbackQuery ?? this.inlineQuery ?? this.shippingQuery ?? this.preCheckoutQuery ?? this.chosenInlineResult ?? this.chatMember ?? this.myChatMember ?? this.chatJoinRequest ?? getMessageFromAnySource(this) )?.from as Getter<U, 'from'> } get inlineMessageId() { return (this.callbackQuery ?? this.chosenInlineResult)?.inline_message_id } get passportData() { if (this.message == null) return undefined if (!('passport_data' in this.message)) return undefined return this.message?.passport_data } /** * @deprecated use {@link Telegram.webhookReply} */ get webhookReply(): boolean { return this.tg.webhookReply } set webhookReply(enable: boolean) { this.tg.webhookReply = enable } private assert<T extends string | object>( value: T | undefined, method: string ): asserts value is T { if (value === undefined) { throw new TypeError( `Telegraf: "${method}" isn't available for "${this.updateType}"` ) } } /** * @see https://core.telegram.org/bots/api#answerinlinequery */ answerInlineQuery(this: Context, ...args: Shorthand<'answerInlineQuery'>) { this.assert(this.inlineQuery, 'answerInlineQuery') return this.telegram.answerInlineQuery(this.inlineQuery.id, ...args) } /** * @see https://core.telegram.org/bots/api#answercallbackquery */ answerCbQuery(this: Context, ...args: Shorthand<'answerCbQuery'>) { this.assert(this.callbackQuery, 'answerCbQuery') return this.telegram.answerCbQuery(this.callbackQuery.id, ...args) } /** * @see https://core.telegram.org/bots/api#answercallbackquery */ answerGameQuery(this: Context, ...args: Shorthand<'answerGameQuery'>) { 
this.assert(this.callbackQuery, 'answerGameQuery') return this.telegram.answerGameQuery(this.callbackQuery.id, ...args) } /** * @see https://core.telegram.org/bots/api#answershippingquery */ answerShippingQuery( this: Context, ...args: Shorthand<'answerShippingQuery'> ) { this.assert(this.shippingQuery, 'answerShippingQuery') return this.telegram.answerShippingQuery(this.shippingQuery.id, ...args) } /** * @see https://core.telegram.org/bots/api#answerprecheckoutquery */ answerPreCheckoutQuery( this: Context, ...args: Shorthand<'answerPreCheckoutQuery'> ) { this.assert(this.preCheckoutQuery, 'answerPreCheckoutQuery') return this.telegram.answerPreCheckoutQuery( this.preCheckoutQuery.id, ...args ) } /** * @see https://core.telegram.org/bots/api#editmessagetext */ editMessageText( this: Context, text: string, extra?: tt.ExtraEditMessageText ) { this.assert(this.callbackQuery ?? this.inlineMessageId, 'editMessageText') return this.telegram.editMessageText( this.chat?.id, this.callbackQuery?.message?.message_id, this.inlineMessageId, text, extra ) } /** * @see https://core.telegram.org/bots/api#editmessagecaption */ editMessageCaption( this: Context, caption: string | undefined, extra?: tt.ExtraEditMessageCaption ) { this.assert( this.callbackQuery ?? this.inlineMessageId, 'editMessageCaption' ) return this.telegram.editMessageCaption( this.chat?.id, this.callbackQuery?.message?.message_id, this.inlineMessageId, caption, extra ) } /** * @see https://core.telegram.org/bots/api#editmessagemedia */ editMessageMedia( this: Context, media: tg.InputMedia, extra?: tt.ExtraEditMessageMedia ) { this.assert(this.callbackQuery ?? 
this.inlineMessageId, 'editMessageMedia') return this.telegram.editMessageMedia( this.chat?.id, this.callbackQuery?.message?.message_id, this.inlineMessageId, media, extra ) } /** * @see https://core.telegram.org/bots/api#editmessagereplymarkup */ editMessageReplyMarkup( this: Context, markup: tg.InlineKeyboardMarkup | undefined ) { this.assert( this.callbackQuery ?? this.inlineMessageId, 'editMessageReplyMarkup' ) return this.telegram.editMessageReplyMarkup( this.chat?.id, this.callbackQuery?.message?.message_id, this.inlineMessageId, markup ) } /** * @see https://core.telegram.org/bots/api#editmessagelivelocation */ editMessageLiveLocation( this: Context, latitude: number, longitude: number, extra?: tt.ExtraEditMessageLiveLocation ) { this.assert( this.callbackQuery ?? this.inlineMessageId, 'editMessageLiveLocation' ) return this.telegram.editMessageLiveLocation( this.chat?.id, this.callbackQuery?.message?.message_id, this.inlineMessageId, latitude, longitude, extra ) } /** * @see https://core.telegram.org/bots/api#stopmessagelivelocation */ stopMessageLiveLocation(this: Context, markup?: tg.InlineKeyboardMarkup) { this.assert( this.callbackQuery ?? 
this.inlineMessageId, 'stopMessageLiveLocation' ) return this.telegram.stopMessageLiveLocation( this.chat?.id, this.callbackQuery?.message?.message_id, this.inlineMessageId, markup ) } /** * @see https://core.telegram.org/bots/api#sendmessage */ reply(this: Context, ...args: Shorthand<'sendMessage'>) { this.assert(this.chat, 'reply') return this.telegram.sendMessage(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#getchat */ getChat(this: Context, ...args: Shorthand<'getChat'>) { this.assert(this.chat, 'getChat') return this.telegram.getChat(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#exportchatinvitelink */ exportChatInviteLink( this: Context, ...args: Shorthand<'exportChatInviteLink'> ) { this.assert(this.chat, 'exportChatInviteLink') return this.telegram.exportChatInviteLink(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#createchatinvitelink */ createChatInviteLink( this: Context, ...args: Shorthand<'createChatInviteLink'> ) { this.assert(this.chat, 'createChatInviteLink') return this.telegram.createChatInviteLink(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#editchatinvitelink */ editChatInviteLink(this: Context, ...args: Shorthand<'editChatInviteLink'>) { this.assert(this.chat, 'editChatInviteLink') return this.telegram.editChatInviteLink(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#revokechatinvitelink */ revokeChatInviteLink( this: Context, ...args: Shorthand<'revokeChatInviteLink'> ) { this.assert(this.chat, 'revokeChatInviteLink') return this.telegram.revokeChatInviteLink(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#banchatmember */ banChatMember(this: Context, ...args: Shorthand<'banChatMember'>) { this.assert(this.chat, 'banChatMember') return this.telegram.banChatMember(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#banchatmember * @deprecated since API 5.3. 
Use {@link Context.banChatMember} */ get kickChatMember() { return this.banChatMember } /** * @see https://core.telegram.org/bots/api#unbanchatmember */ unbanChatMember(this: Context, ...args: Shorthand<'unbanChatMember'>) { this.assert(this.chat, 'unbanChatMember') return this.telegram.unbanChatMember(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#restrictchatmember */ restrictChatMember(this: Context, ...args: Shorthand<'restrictChatMember'>) { this.assert(this.chat, 'restrictChatMember') return this.telegram.restrictChatMember(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#promotechatmember */ promoteChatMember(this: Context, ...args: Shorthand<'promoteChatMember'>) { this.assert(this.chat, 'promoteChatMember') return this.telegram.promoteChatMember(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#setchatadministratorcustomtitle */ setChatAdministratorCustomTitle( this: Context, ...args: Shorthand<'setChatAdministratorCustomTitle'> ) { this.assert(this.chat, 'setChatAdministratorCustomTitle') return this.telegram.setChatAdministratorCustomTitle(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#setchatphoto */ setChatPhoto(this: Context, ...args: Shorthand<'setChatPhoto'>) { this.assert(this.chat, 'setChatPhoto') return this.telegram.setChatPhoto(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#deletechatphoto */ deleteChatPhoto(this: Context, ...args: Shorthand<'deleteChatPhoto'>) { this.assert(this.chat, 'deleteChatPhoto') return this.telegram.deleteChatPhoto(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#setchattitle */ setChatTitle(this: Context, ...args: Shorthand<'setChatTitle'>) { this.assert(this.chat, 'setChatTitle') return this.telegram.setChatTitle(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#setchatdescription */ setChatDescription(this: Context, ...args: Shorthand<'setChatDescription'>) { 
this.assert(this.chat, 'setChatDescription') return this.telegram.setChatDescription(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#pinchatmessage */ pinChatMessage(this: Context, ...args: Shorthand<'pinChatMessage'>) { this.assert(this.chat, 'pinChatMessage') return this.telegram.pinChatMessage(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#unpinchatmessage */ unpinChatMessage(this: Context, ...args: Shorthand<'unpinChatMessage'>) { this.assert(this.chat, 'unpinChatMessage') return this.telegram.unpinChatMessage(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#unpinallchatmessages */ unpinAllChatMessages( this: Context, ...args: Shorthand<'unpinAllChatMessages'> ) { this.assert(this.chat, 'unpinAllChatMessages') return this.telegram.unpinAllChatMessages(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#leavechat */ leaveChat(this: Context, ...args: Shorthand<'leaveChat'>) { this.assert(this.chat, 'leaveChat') return this.telegram.leaveChat(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#setchatpermissions */ setChatPermissions(this: Context, ...args: Shorthand<'setChatPermissions'>) { this.assert(this.chat, 'setChatPermissions') return this.telegram.setChatPermissions(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#getchatadministrators */ getChatAdministrators( this: Context, ...args: Shorthand<'getChatAdministrators'> ) { this.assert(this.chat, 'getChatAdministrators') return this.telegram.getChatAdministrators(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#getchatmember */ getChatMember(this: Context, ...args: Shorthand<'getChatMember'>) { this.assert(this.chat, 'getChatMember') return this.telegram.getChatMember(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#getchatmembercount */ getChatMembersCount( this: Context, ...args: Shorthand<'getChatMembersCount'> ) { 
this.assert(this.chat, 'getChatMembersCount') return this.telegram.getChatMembersCount(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#setpassportdataerrors */ setPassportDataErrors( this: Context, errors: readonly tg.PassportElementError[] ) { this.assert(this.from, 'setPassportDataErrors') return this.telegram.setPassportDataErrors(this.from.id, errors) } /** * @see https://core.telegram.org/bots/api#replywithphoto */ replyWithPhoto(this: Context, ...args: Shorthand<'sendPhoto'>) { this.assert(this.chat, 'replyWithPhoto') return this.telegram.sendPhoto(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithmediagroup */ replyWithMediaGroup(this: Context, ...args: Shorthand<'sendMediaGroup'>) { this.assert(this.chat, 'replyWithMediaGroup') return this.telegram.sendMediaGroup(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithaudio */ replyWithAudio(this: Context, ...args: Shorthand<'sendAudio'>) { this.assert(this.chat, 'replyWithAudio') return this.telegram.sendAudio(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithdice */ replyWithDice(this: Context, ...args: Shorthand<'sendDice'>) { this.assert(this.chat, 'replyWithDice') return this.telegram.sendDice(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithdocument */ replyWithDocument(this: Context, ...args: Shorthand<'sendDocument'>) { this.assert(this.chat, 'replyWithDocument') return this.telegram.sendDocument(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithsticker */ replyWithSticker(this: Context, ...args: Shorthand<'sendSticker'>) { this.assert(this.chat, 'replyWithSticker') return this.telegram.sendSticker(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithvideo */ replyWithVideo(this: Context, ...args: Shorthand<'sendVideo'>) { this.assert(this.chat, 'replyWithVideo') return this.telegram.sendVideo(this.chat.id, 
...args) } /** * @see https://core.telegram.org/bots/api#replywithanimation */ replyWithAnimation(this: Context, ...args: Shorthand<'sendAnimation'>) { this.assert(this.chat, 'replyWithAnimation') return this.telegram.sendAnimation(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithvideonote */ replyWithVideoNote(this: Context, ...args: Shorthand<'sendVideoNote'>) { this.assert(this.chat, 'replyWithVideoNote') return this.telegram.sendVideoNote(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithinvoice */ replyWithInvoice(this: Context, ...args: Shorthand<'sendInvoice'>) { this.assert(this.chat, 'replyWithInvoice') return this.telegram.sendInvoice(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithgame */ replyWithGame(this: Context, ...args: Shorthand<'sendGame'>) { this.assert(this.chat, 'replyWithGame') return this.telegram.sendGame(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithvoice */ replyWithVoice(this: Context, ...args: Shorthand<'sendVoice'>) { this.assert(this.chat, 'replyWithVoice') return this.telegram.sendVoice(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithpoll */ replyWithPoll(this: Context, ...args: Shorthand<'sendPoll'>) { this.assert(this.chat, 'replyWithPoll') return this.telegram.sendPoll(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithquiz */ replyWithQuiz(this: Context, ...args: Shorthand<'sendQuiz'>) { this.assert(this.chat, 'replyWithQuiz') return this.telegram.sendQuiz(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#stoppoll */ stopPoll(this: Context, ...args: Shorthand<'stopPoll'>) { this.assert(this.chat, 'stopPoll') return this.telegram.stopPoll(this.chat.id, ...args) } /** * @see https://core.telegram.org/bots/api#replywithchataction */ replyWithChatAction(this: Context, ...args: Shorthand<'sendChatAction'>) { 
this.assert(this.chat, 'replyWithChatAction')
    return this.telegram.sendChatAction(this.chat.id, ...args)
  }

  /**
   * Sends a chat location to the current chat.
   * @see https://core.telegram.org/bots/api#replywithlocation
   */
  replyWithLocation(this: Context, ...args: Shorthand<'sendLocation'>) {
    this.assert(this.chat, 'replyWithLocation')
    return this.telegram.sendLocation(this.chat.id, ...args)
  }

  /**
   * Sends a venue to the current chat.
   * @see https://core.telegram.org/bots/api#replywithvenue
   */
  replyWithVenue(this: Context, ...args: Shorthand<'sendVenue'>) {
    this.assert(this.chat, 'replyWithVenue')
    return this.telegram.sendVenue(this.chat.id, ...args)
  }

  /**
   * Sends a contact to the current chat.
   * @see https://core.telegram.org/bots/api#replywithcontact
   */
  replyWithContact(this: Context, ...args: Shorthand<'sendContact'>) {
    this.assert(this.chat, 'replyWithContact')
    return this.telegram.sendContact(this.chat.id, ...args)
  }

  /**
   * Fetches a sticker set by name. Does not depend on the current update.
   * @deprecated use {@link Telegram.getStickerSet}
   * @see https://core.telegram.org/bots/api#getstickerset
   */
  getStickerSet(this: Context, setName: string) {
    return this.telegram.getStickerSet(setName)
  }

  /**
   * Sets the sticker set of the current chat.
   * @see https://core.telegram.org/bots/api#setchatstickerset
   */
  setChatStickerSet(this: Context, setName: string) {
    this.assert(this.chat, 'setChatStickerSet')
    return this.telegram.setChatStickerSet(this.chat.id, setName)
  }

  /**
   * Removes the sticker set of the current chat.
   * @see https://core.telegram.org/bots/api#deletechatstickerset
   */
  deleteChatStickerSet(this: Context) {
    this.assert(this.chat, 'deleteChatStickerSet')
    return this.telegram.deleteChatStickerSet(this.chat.id)
  }

  /**
   * Moves a sticker within its set. Does not depend on the current update.
   * @deprecated use {@link Telegram.setStickerPositionInSet}
   * @see https://core.telegram.org/bots/api#setstickerpositioninset
   */
  setStickerPositionInSet(this: Context, sticker: string, position: number) {
    return this.telegram.setStickerPositionInSet(sticker, position)
  }

  /**
   * Sets a sticker set thumbnail. Does not depend on the current update.
   * @deprecated use {@link Telegram.setStickerSetThumb}
   * @see https://core.telegram.org/bots/api#setstickersetthumb
   */
  setStickerSetThumb(
    this: Context,
    ...args: Parameters<Telegram['setStickerSetThumb']>
  ) {
    return this.telegram.setStickerSetThumb(...args)
  }

  /**
   * Deletes a sticker from its set. Does not depend on the current update.
   * @deprecated use {@link Telegram.deleteStickerFromSet}
   * @see https://core.telegram.org/bots/api#deletestickerfromset
   */
  deleteStickerFromSet(this: Context, sticker: string) {
    return this.telegram.deleteStickerFromSet(sticker)
  }

  /**
   * Uploads a sticker file on behalf of the current update's sender.
   * @see https://core.telegram.org/bots/api#uploadstickerfile
   */
  uploadStickerFile(this: Context, ...args: Shorthand<'uploadStickerFile'>) {
    this.assert(this.from, 'uploadStickerFile')
    return this.telegram.uploadStickerFile(this.from.id, ...args)
  }

  /**
   * Creates a new sticker set owned by the current update's sender.
   * @see https://core.telegram.org/bots/api#createnewstickerset
   */
  createNewStickerSet(
    this: Context,
    ...args: Shorthand<'createNewStickerSet'>
  ) {
    this.assert(this.from, 'createNewStickerSet')
    return this.telegram.createNewStickerSet(this.from.id, ...args)
  }

  /**
   * Adds a sticker to a set owned by the current update's sender.
   * @see https://core.telegram.org/bots/api#addstickertoset
   */
  addStickerToSet(this: Context, ...args: Shorthand<'addStickerToSet'>) {
    this.assert(this.from, 'addStickerToSet')
    return this.telegram.addStickerToSet(this.from.id, ...args)
  }

  /**
   * Fetches the bot's command list. Does not depend on the current update.
   * @deprecated use {@link Telegram.getMyCommands}
   * @see https://core.telegram.org/bots/api#getmycommands
   */
  getMyCommands(this: Context) {
    return this.telegram.getMyCommands()
  }

  /**
   * Replaces the bot's command list. Does not depend on the current update.
   * @deprecated use {@link Telegram.setMyCommands}
   * @see https://core.telegram.org/bots/api#setmycommands
   */
  setMyCommands(this: Context, commands: readonly tg.BotCommand[]) {
    return this.telegram.setMyCommands(commands)
  }

  /**
   * Replies with `parse_mode: 'Markdown'` (legacy Markdown) preset.
   * @see https://core.telegram.org/bots/api#sendmessage
   */
  replyWithMarkdown(
    this: Context,
    markdown: string,
    extra?: tt.ExtraReplyMessage
  ) {
    return this.reply(markdown, { parse_mode: 'Markdown', ...extra })
  }

  /**
   * Replies with `parse_mode: 'MarkdownV2'` preset.
   * @see https://core.telegram.org/bots/api#sendmessage
   */
  replyWithMarkdownV2(
    this: Context,
    markdown: string,
    extra?: tt.ExtraReplyMessage
  ) {
    return this.reply(markdown, { parse_mode: 'MarkdownV2', ...extra })
  }

  /**
   * Replies with `parse_mode: 'HTML'` preset.
   * @see https://core.telegram.org/bots/api#sendmessage
   */
  replyWithHTML(this: Context, html: string, extra?: tt.ExtraReplyMessage) {
    return this.reply(html, { parse_mode: 'HTML', ...extra })
  }

  /**
   * Deletes `messageId` in the current chat, or — when omitted — the message
   * carried by the current update (whatever source it came from).
   * @see https://core.telegram.org/bots/api#deletemessage
   */
  deleteMessage(this: Context, messageId?: number) {
    this.assert(this.chat, 'deleteMessage')
    if (typeof messageId !== 'undefined') {
      return this.telegram.deleteMessage(this.chat.id, messageId)
    }
    const message = getMessageFromAnySource(this)
    this.assert(message, 'deleteMessage')
    return this.telegram.deleteMessage(this.chat.id, message.message_id)
  }

  /**
   * Forwards the current update's message to `chatId`.
   * @see https://core.telegram.org/bots/api#forwardmessage
   */
  forwardMessage(
    this: Context,
    chatId: string | number,
    extra?: { disable_notification?: boolean }
  ) {
    const message = getMessageFromAnySource(this)
    this.assert(message, 'forwardMessage')
    return this.telegram.forwardMessage(
      chatId,
      message.chat.id,
      message.message_id,
      extra
    )
  }

  /**
   * Copies the current update's message to `chatId` (no forward header).
   * @see https://core.telegram.org/bots/api#copymessage
   */
  copyMessage(
    this: Context,
    chatId: string | number,
    extra?: tt.ExtraCopyMessage
  ) {
    const message = getMessageFromAnySource(this)
    this.assert(message, 'copyMessage')
    return this.telegram.copyMessage(
      chatId,
      message.chat.id,
      message.message_id,
      extra
    )
  }

  /**
   * Approves a chat join request.
   * NOTE(review): the message assertion only guards that the update carries a
   * message; the forwarded call uses the explicit chatId/userId arguments.
   * @see https://core.telegram.org/bots/api#approvechatjoinrequest
   */
  approveChatJoinRequest(
    this: Context,
    chatId: number | string,
    userId: number
  ) {
    const message = getMessageFromAnySource(this)
    this.assert(message, 'approveChatJoinRequest')
    return this.telegram.approveChatJoinRequest(chatId, userId)
  }

  /**
   * Declines a chat join request.
   * NOTE(review): same as approveChatJoinRequest — the message assertion is a
   * guard only; the call uses the explicit arguments.
   * @see https://core.telegram.org/bots/api#declinechatjoinrequest
   */
  declineChatJoinRequest(
    this: Context,
    chatId: number | string,
    userId: number
  ) {
    const message = getMessageFromAnySource(this)
    this.assert(message, 'declineChatJoinRequest')
    return this.telegram.declineChatJoinRequest(chatId, userId)
  }

  /**
   * Bans a sender chat (channel) from the current chat.
   * @see https://core.telegram.org/bots/api#banchatsenderchat
   */
  banChatSenderChat(this: Context, senderChatId: number) {
    this.assert(this.chat, 'banChatSenderChat')
    return this.telegram.banChatSenderChat(this.chat.id, senderChatId)
  }

  /**
   * Unbans a previously banned sender chat in the current chat.
   * @see https://core.telegram.org/bots/api#unbanchatsenderchat
   */
  unbanChatSenderChat(this: Context, senderChatId: number) {
    this.assert(this.chat, 'unbanChatSenderChat')
    return this.telegram.unbanChatSenderChat(this.chat.id, senderChatId)
  }
}

export default Context

// Update-type keys actually present on a (deunionized) update `U`.
type UpdateTypes<U extends Deunionize<tg.Update>> = Extract<
  UnionKeys<U>,
  tt.UpdateType
>

// The payload carried by update `U`; callback queries unwrap to their message.
export type GetUpdateContent<
  U extends tg.Update
> = U extends tg.Update.CallbackQueryUpdate
  ? U['callback_query']['message']
  : U[UpdateTypes<U>]

// Looks up property `P` on the payload of update `U` (absent -> undefined).
type Getter<U extends Deunionize<tg.Update>, P extends string> = PropOr<
  GetUpdateContent<U>,
  P
>

// Returns the first message-like payload present on the context, checked in
// priority order: message, edited message, callback-query message, channel
// post, edited channel post. May be undefined if the update has none.
function getMessageFromAnySource<U extends tg.Update>(ctx: Context<U>) {
  return (
    ctx.message ??
    ctx.editedMessage ??
    ctx.callbackQuery?.message ??
    ctx.channelPost ??
    ctx.editedChannelPost
  )
}
the_stack
import * as o from '../../x86/operand';
import {Code} from '../../x86/x64/code';
import {Abi} from '../../abi';
import * as t from '../../../typebase/typebase';
import {UInt64} from '../../util';
var StaticBuffer = require('../../../static-buffer/buffer').StaticBuffer;
var libsys = require('../../../libsys/libsys');

// Create a queue where syscall parameters written to memory, threads run in the background
// and execute the syscalls and write result back to the blocks. Block format:
//
//  <---------- 32 bits ---------->  <---------- 32 bits ----------->
// +================================================================+
// |              Lock              |            Block ID           |   Block 0
// +----------------------------------------------------------------+
// |                         Syscall number                         |
// +----------------------------------------------------------------+
// |                           Argument 1                           |
// +----------------------------------------------------------------+
// |                           Argument 2                           |
// +----------------------------------------------------------------+
// |                           Argument 3                           |
// +----------------------------------------------------------------+
// |                           Argument 4                           |
// +----------------------------------------------------------------+
// |                     Argument 5 / Thread ID                     |
// +----------------------------------------------------------------+
// |                      Argument 6 / Result                       |
// +================================================================+
// |                              ....                              |   Block 1
// +----------------------------------------------------------------+
// +                              ....                              |

const __DEBUG__ = true;

const enum CONST {
    STDOUT = 1,
}

// Linux x86-64 syscall numbers used by the generated code.
const enum SYS {
    write = 1,
    mmap = 9,
    clone = 56,
    exit = 60,
    sched_yield = 24,
    getuid = 102,
    getpid = 39,
}

// clone(2) flag bits; THREAD_FLAGS creates a thread sharing VM/FS/files/signals.
const enum CLONE {
    VM = 0x00000100,
    FS = 0x00000200,
    FILES = 0x00000400,
    SIGHAND = 0x00000800,
    PARENT = 0x00008000,
    THREAD = 0x00010000,
    IO = 0x80000000,
    THREAD_FLAGS = CLONE.VM | CLONE.FS | CLONE.FILES | CLONE.SIGHAND | CLONE.PARENT | CLONE.THREAD | CLONE.IO,
}

// State machine for the lock word at offset 0 of every queue block.
const enum LOCK {
    UNINITIALIZED = 0, // Block not used yet.
    FREE = 1, // Block ready to be acquired by a thread.
    LOCKED = 2, // Block locked by a thread, thread is executing syscall.
    DONE = 3, // Thread done executing syscall, result stored at offset 8.
    EXIT = 4, // Thread has to perform SYS_exit syscall.
}

// mmap(2) constants — declared but not referenced in this chunk.
const MAP_GROWSDOWN = 0x0100;
const MAP_ANONYMOUS = 0x0020;
const MAP_PRIVATE = 0x0002;
const PROT_READ = 0x1;
const PROT_WRITE = 0x2;
const PROT_EXEC = 0x4;

/**
 * Asynchronous syscall executor: compiles a small x86-64 program that spawns
 * worker threads (via clone) which spin over a ring of fixed-size blocks in a
 * shared buffer, execute the syscall described by each FREE block, and write
 * the result back. JS side writes blocks and polls for LOCK.DONE.
 */
export class Asyscall {
    sbuf: any = null; // StaticBuffer holding code + stacks + queue (rwe memory).
    threads: number = 0;
    queue: number = 100; // Number of blocks in the ring.
    intsize = 8; // Bytes per slot (64-bit).
    stackSize = 10 * this.intsize; // Per-thread stack size, bytes.
    stacksSize = 0;
    queueBlockSize = 8 * this.intsize; // control INT + syscall num + 6 args
    queueLength = 0;
    queueSize = 0;
    id: number = 0; // ID of syscall incrementing every call.
    offset: number = 0; // Offset of the next block to be written in
    offsetStart: number = 0; // Offset of the first block
    offsetEnd: number = 0; // Offset of the last block
    errorTimeout = UInt64.toNumber64(-1); // [lo, hi] pair passed to callbacks on queue overflow.

    // Next call ID, wrapping at 2^31 - 1 so it fits a signed int32 slot.
    nextId() {
        return (this.id + 1) % 0x7FFFFFFF;
    }

    // Ring advance: next block offset, wrapping back to the first block.
    nextOffset() {
        var offset = this.offset + this.queueBlockSize;
        if(offset > this.offsetEnd) return this.offsetStart;
        else return offset;
    }

    nextTick(callback) {
        setImmediate(callback, 1);
    }

    // Compiles the machine code and starts the worker threads immediately.
    build(threads = 4, queue = 100) {
        this.threads = threads;
        this.stacksSize = this.threads * this.stackSize;
        this.queue = queue;
        this.queueSize = this.queue * this.queueBlockSize;
        this.compile();
        this.sbuf.call([]);
    }

    exec(num, arg1?, arg2?, arg3?, arg4?, arg5?, arg6?, callback?);
    // Writes one syscall block into the ring and polls until a worker marks it
    // DONE (callback receives [lo, hi] result) or the ring wraps past it
    // (callback receives errorTimeout).
    exec() {
        var id = this.id = this.nextId();
        var offset = this.offset;
        var buf = this.sbuf;
        // Mark lock of next block as UNINITIALIZED so that threads stop at
        // that and wait until something is written there.
        buf.writeInt32LE(LOCK.UNINITIALIZED, this.nextOffset());
        // Block ID -- each block has a unique ID, in case queue is overfilled, blocks determine that
        // they time-out by their ID.
        buf.writeInt32LE(id, offset + 4);
        // Write arguments to block and find callback function.
        // NOTE(review): arguments[0] is the syscall number, so j = 0 lands it
        // at offset + 8, matching the block layout above.
        var offset_args = offset + 8;
        var callback;
        for(var j = 0; j < arguments.length; j++) {
            var arg = arguments[j];
            if(typeof arg === 'function') {
                callback = arg;
                break;
            } else {
                if(typeof arg === 'number') {
                    var [lo, hi] = UInt64.toNumber64(arg);
                    buf.writeInt32LE(lo, offset_args + (j * 8));
                    buf.writeInt32LE(hi, offset_args + (j * 8) + 4);
                } else if(arg instanceof Array) {
                    // Already a [lo, hi] 64-bit pair.
                    buf.writeInt32LE(arg[0], offset_args + (j * 8));
                    buf.writeInt32LE(arg[1], offset_args + (j * 8) + 4);
                } else if(typeof arg === 'string') {
                    // ...
                }
            }
        }
        // Fill the rest of the block with 0x00
        // NOTE(review): this starts at arguments.length, so when a callback was
        // passed at index k the slot for index k keeps stale data from the
        // block's previous use — verify whether that is intentional.
        for(var j = arguments.length; j < 7; j++) {
            buf.writeInt32LE(0, offset_args + (j * 8));
            buf.writeInt32LE(0, offset_args + (j * 8) + 4);
        }
        // The last thing we do, is mark this block as available for threads.
        buf.writeInt32LE(LOCK.FREE, offset);
        this.offset = this.nextOffset();
        var poll = () => {
            // console.log('polling');
            this.nextTick(() => {
                // Check ID first, if ID does not match, then our queue has overflown
                // and we timeout this call.
                var id_read = buf.readInt32LE(offset + 4);
                if(id_read !== id) {
                    callback(this.errorTimeout);
                    return;
                }
                // Reading a single byte is enough: LOCK values fit in the low
                // byte of the little-endian int32 lock word.
                var lock = buf[offset];
                if(lock === LOCK.DONE) {
                    // Result is stored in slot 7 (in place of argument 6).
                    var result = [buf.readInt32LE(offset + (7 * 8)), buf.readInt32LE(offset + (7 * 8) + 4)];
                    // var thread_id = buf.readInt32LE(offset + (6 * 8));
                    // callback(result, thread_id);
                    callback(result);
                } else poll();
            });
        };
        poll();
    }

    // Marks every block EXIT so each worker thread performs SYS_exit.
    stop() {
        for(var offset = this.offsetStart; offset <= this.offsetEnd; offset += this.queueBlockSize) {
            this.sbuf.writeInt32LE(LOCK.EXIT, offset);
            this.id = this.nextId();
            this.sbuf.writeInt32LE(this.id, offset + 4);
        }
        // this.sbuf.free();
    }

    /**
     * Emits the x86-64 program: a main() that clones `threads` workers, the
     * worker loop, and the data section (stacks + queue ring). Stores the
     * executable StaticBuffer in this.sbuf and computes the queue offsets.
     */
    compile(): number[] {
        var _ = new Code;
        var abi = new Abi(_);
        var func_create_thread = abi.func('func_create_thread', false, [o.rax, o.rsi, o.rcx, o.rdx]);
        var func_thread = abi.func('func_thread');
        var lbl_stacks = _.lbl('stacks');
        var lbl_queue = _.lbl('queue');
        // main()
        for(var j = 1; j <= this.threads; j++) {
            abi.call(func_create_thread, [j], []);
        }
        _._('ret');
        func_create_thread._(() => {
            _._('mov', [o.rax, o.rdi]); // Thread index, starting from 1
            _._('mov', [o.rcx, this.stackSize]); // Stack size
            _._('mul', o.rcx); // Stack offset
            _._('lea', [o.rsi, o.rip.disp(lbl_stacks.rel(-this.intsize * 2))]); // Address of stack frame bottom + 1
            _._('add', [o.rsi, o.rax]); // Address of stack top for this thread, RSI second arg to syscall
            _._('lea', [o.rdx, o.rip.disp(func_thread.lbl)]); // Address of thread function code in top of stack
            _._('mov', [o.rsi.ref(), o.rdx]); // Top of stack, RET address
            _._('mov', [o.rsi.disp(this.intsize), o.rdi]); // Thread ID in bottom of stack
            // long clone(unsigned long flags, void *child_stack);
            abi.syscall([SYS.clone, CLONE.THREAD_FLAGS]); // 2nd arg RSI, stack top address
            // When thread starts the address of its starting function is
            // stored on its stack, the next instruction here is `RET` so it
            // jumps to that address.
        });
        func_thread._(() => {
            var r_block = o.r13; // Current block address
            var r_first_block = o.r14;
            var r_last_block = o.r15;
            var thread_stop = _.lbl('thread_stop');
            _._('lea', [r_first_block, o.rip.disp(lbl_queue)]); // R14 = Queue start address
            _._('mov', [r_last_block, r_first_block]);
            _._('add', [r_last_block, this.queueSize - this.queueBlockSize]); // R15 = Last block address
            _._('mov', [r_block, r_first_block]); // R13 = Current block address
            var loop = _.label('loop'); // loop start
            (() => {
                var lbl_process_block = _.lbl('process_block');
                var lbl_execute_block = _.lbl('execute_block');
                var lbl_skip_to_next_block = _.lbl('skip_to_next_block');
                _._('cmp', [r_block, r_last_block]); // check iterator bounds
                _._('jbe', lbl_process_block);
                _._('mov', [r_block, r_first_block]);
                _.insert(lbl_process_block);
                _._('mov', [o.eax, r_block.ref()]); // Lock in RAX
                _._('cmp', [o.eax, LOCK.EXIT]); // if(lock == LOCK.EXIT) -> stop thread
                _._('je', thread_stop);
                _._('cmp', [o.eax, LOCK.UNINITIALIZED]); // Wait for this block until something is written to it
                _._('jne', lbl_execute_block);
                abi.syscall([SYS.sched_yield]); // yield and ...
                _._('jmp', lbl_process_block); // ... try this block again
                _.insert(lbl_execute_block);
                _._('cmp', [o.eax, LOCK.FREE]); // Check block is possibly available
                _._('jne', lbl_skip_to_next_block);
                _._('mov', [o.edx, LOCK.LOCKED]);
                _._('cmpxchg', [r_block.ref(), o.edx]).lock(); // Try to acquire lock for this block
                _._('cmp', [r_block.ref(), LOCK.LOCKED], 32); // Check we actually got the lock
                _._('jne', lbl_skip_to_next_block);
                abi.syscall([ // Execute the syscall
                    r_block.disp(this.intsize),
                    r_block.disp(this.intsize * 2),
                    r_block.disp(this.intsize * 3),
                    r_block.disp(this.intsize * 4),
                    r_block.disp(this.intsize * 5),
                    r_block.disp(this.intsize * 6),
                    r_block.disp(this.intsize * 7),
                ]);
                _._('mov', [r_block.disp(this.intsize * 7), o.rax]); // Store syscall result in memory, in place of 6th argument
                _._('mov', [r_block.ref(), LOCK.DONE], 32); // Mark block as DONE
                // Store ID of this thread in place of 5th argument, for DEBUG purposes
                // _._('mov', [o.rax, o.rsp.ref()]);
                // _._('mov', [r_block.disp(this.intsize * 6), o.rax]);
                _.insert(lbl_skip_to_next_block);
                _._('add', [r_block, this.queueBlockSize]); // r_block += block_size
                _._('jmp', loop);
            })();
            _.insert(thread_stop);
            _._('mov', [r_block.disp(8), 0xBABE]);
            abi.syscall([SYS.exit]);
        });
        // Data section: guard words, per-thread stacks, then the queue ring.
        _.align(8);
        _.dq(0xFF);
        _.insert(lbl_stacks);
        _.db(0, this.stacksSize);
        _.align(8);
        _.dq(0xFF);
        _.insert(lbl_queue);
        _.db(0, this.queueSize);
        var bin = _.compile();
        this.sbuf = StaticBuffer.from(bin, 'rwe');
        // The queue sits at the very end of the compiled image.
        this.offsetStart = this.sbuf.length - this.queueSize;
        this.offset = this.offsetStart;
        this.offsetEnd = this.sbuf.length - this.queueBlockSize;
        console.log(_.toString());
        return bin;
    }
}

// Demo script: start 4 worker threads over a 10-block queue.
var asyscall = new Asyscall;
asyscall.build(4, 10);
console.log(asyscall.errorTimeout);
asyscall.exec(SYS.getpid, function(res, thread) { console.log('result pid:', res, thread); });
asyscall.exec(SYS.getpid, function(res, thread) { console.log('result pid:', res, thread); });
asyscall.exec(SYS.getpid, function(res, thread) { console.log('result pid:', res, thread); });
// Queue three more getpid calls; each callback just logs its [lo, hi] result.
for (let call = 0; call < 3; call++) {
    asyscall.exec(SYS.getpid, function(res, thread) { console.log('result pid:', res, thread); });
}

// Queue three write(2) calls that print the same greeting to stdout (fd 1).
var buf = new Buffer("Hello World\n");
var addr = libsys.addressBuffer64(buf);
for (let call = 0; call < 3; call++) {
    asyscall.exec(SYS.write, 1, addr, buf.length, function(res, thread) { console.log('write: ', res, thread); });
}

console.log('id', asyscall.id);

// Let the workers drain the queue, then ask them to exit and dump the buffer.
setTimeout(() => {
    asyscall.stop();
    setTimeout(() => {
        asyscall.sbuf.print();
    }, 100);
}, 100);
the_stack
import {
  JsxAttribute,
  JsxAttributes,
  JsxChild,
  JsxClosingElement,
  JsxClosingFragment,
  JsxElement,
  JsxExpression,
  JsxFragment,
  JsxOpeningElement,
  JsxOpeningFragment,
  JsxSelfClosingElement,
  JsxSpreadAttribute,
  JsxTagNameExpression,
  JsxText,
  Node,
  SyntaxKind,
  JsxAttributeLike,
} from 'typescript';
import {
  emptyArray,
  ILogger,
} from '@aurelia/kernel';
import {
  Realm,
  ExecutionContext,
} from '../realm.js';
import {
  $Any,
  $AnyNonEmpty,
} from '../types/_shared.js';
import {
  I$Node,
  Context,
  $identifier,
  $$AssignmentExpressionOrHigher,
  $assignmentExpression,
  $AssignmentExpressionNode,
  $$JsxOpeningLikeElement,
  $i,
} from './_shared.js';
import {
  $$ESModuleOrScript,
} from './modules.js';
import {
  $Identifier,
  $PropertyAccessExpression,
  $ThisExpression,
} from './expressions.js';
import {
  $StringLiteral,
} from './literals.js';

// Node kinds that can contain JSX children.
export type $$JsxParent = (
  $JsxElement |
  $JsxFragment
);

// Node kinds that can appear as a child of a JSX element or fragment.
export type $$JsxChild = (
  $JsxText |
  $JsxExpression |
  $JsxElement |
  $JsxSelfClosingElement |
  $JsxFragment
);

/**
 * Wraps each TypeScript JsxChild in the matching $-prefixed AST class,
 * preserving order and child index. Returns the shared emptyArray when
 * there are no children.
 */
export function $$jsxChildList(
  nodes: readonly JsxChild[],
  parent: $$JsxParent,
  ctx: Context,
): readonly $$JsxChild[] {
  if (nodes === void 0 || nodes.length === 0) {
    return emptyArray;
  }
  const len = nodes.length;
  const $nodes: $$JsxChild[] = Array(len);
  for (let i = 0; i < len; ++i) {
    switch (nodes[i].kind) {
      case SyntaxKind.JsxText:
        $nodes[i] = new $JsxText(nodes[i] as JsxText, parent, ctx, i);
        break;
      case SyntaxKind.JsxExpression:
        $nodes[i] = new $JsxExpression(nodes[i] as JsxExpression, parent, ctx, i);
        break;
      case SyntaxKind.JsxElement:
        $nodes[i] = new $JsxElement(nodes[i] as JsxElement, parent, ctx, i);
        break;
      case SyntaxKind.JsxSelfClosingElement:
        $nodes[i] = new $JsxSelfClosingElement(nodes[i] as JsxSelfClosingElement, parent, ctx, i);
        break;
      case SyntaxKind.JsxFragment:
        $nodes[i] = new $JsxFragment(nodes[i] as JsxFragment, parent, ctx, i);
        break;
    }
  }
  return $nodes;
}

/**
 * AST wrapper for a `<tag ...>children</tag>` JSX element.
 * Evaluate is currently a stub returning intrinsic undefined.
 */
export class $JsxElement implements I$Node {
  public get $kind(): SyntaxKind.JsxElement { return SyntaxKind.JsxElement; }

  public readonly $openingElement: $JsxOpeningElement;
  public readonly $children: readonly $$JsxChild[];
  public readonly $closingElement: $JsxClosingElement;

  public constructor(
    public readonly node: JsxElement,
    public readonly parent: $$JsxParent,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxElement`,
  ) {
    this.$openingElement = new $JsxOpeningElement(node.openingElement, this, ctx);
    this.$children = $$jsxChildList(node.children, this, ctx);
    this.$closingElement = new $JsxClosingElement(node.closingElement, this, ctx);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $AnyNonEmpty {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.undefined; // TODO: implement this
  }
}

// Node kinds that carry a JSX tag name.
export type $$JsxNamed = (
  $JsxOpeningElement |
  $JsxClosingElement |
  $JsxSelfClosingElement
);

// Property access whose base expression is itself a valid JSX tag name.
export type $$JsxTagNamePropertyAccess = $PropertyAccessExpression & {
  expression: $$JsxTagNameExpression;
};

// Valid JSX tag name shapes: `Foo`, `this`, or `a.b.c`.
export type $$JsxTagNameExpression = (
  $Identifier |
  $ThisExpression |
  $$JsxTagNamePropertyAccess
);

/**
 * Wraps a TypeScript JsxTagNameExpression in the matching $-prefixed class.
 * Throws on any unexpected syntax kind.
 */
export function $$jsxTagNameExpression(
  node: JsxTagNameExpression,
  parent: $$JsxNamed,
  ctx: Context,
  idx: number,
): $$JsxTagNameExpression {
  switch (node.kind) {
    case SyntaxKind.Identifier:
      return new $Identifier(node, parent, ctx, idx);
    case SyntaxKind.ThisKeyword:
      return new $ThisExpression(node, parent, ctx, idx);
    case SyntaxKind.PropertyAccessExpression:
      return new $PropertyAccessExpression(node, parent, ctx, idx) as $$JsxTagNamePropertyAccess;
    default:
      throw new Error(`Unexpected syntax node: ${SyntaxKind[(node as Node).kind]}.`);
  }
}

/**
 * AST wrapper for a self-closing `<tag ... />` JSX element.
 * Evaluate is currently a stub returning intrinsic undefined.
 */
export class $JsxSelfClosingElement implements I$Node {
  public get $kind(): SyntaxKind.JsxSelfClosingElement { return SyntaxKind.JsxSelfClosingElement; }

  public readonly $tagName: $$JsxTagNameExpression;
  public readonly $attributes: $JsxAttributes;

  public constructor(
    public readonly node: JsxSelfClosingElement,
    public readonly parent: $$JsxParent,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxSelfClosingElement`,
  ) {
    this.$tagName = $$jsxTagNameExpression(node.tagName, this, ctx, -1);
    this.$attributes = new $JsxAttributes(node.attributes, this, ctx);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $AnyNonEmpty {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.undefined; // TODO: implement this
  }
}

/**
 * AST wrapper for a `<>children</>` JSX fragment.
 * Evaluate is currently a stub returning intrinsic undefined.
 */
export class $JsxFragment implements I$Node {
  public get $kind(): SyntaxKind.JsxFragment { return SyntaxKind.JsxFragment; }

  public readonly $openingFragment: $JsxOpeningFragment;
  public readonly $children: readonly $$JsxChild[];
  public readonly $closingFragment: $JsxClosingFragment;

  public constructor(
    public readonly node: JsxFragment,
    public readonly parent: $$JsxParent,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxFragment`,
  ) {
    this.$openingFragment = new $JsxOpeningFragment(node.openingFragment, this, ctx);
    this.$children = $$jsxChildList(node.children, this, ctx);
    this.$closingFragment = new $JsxClosingFragment(node.closingFragment, this, ctx);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $AnyNonEmpty {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.undefined; // TODO: implement this
  }
}

/**
 * AST wrapper for literal text between JSX tags.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxText implements I$Node {
  public get $kind(): SyntaxKind.JsxText { return SyntaxKind.JsxText; }

  public constructor(
    public readonly node: JsxText,
    public readonly parent: $$JsxParent,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxText`,
  ) {}

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for the opening `<tag ...>` of a JSX element.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxOpeningElement implements I$Node {
  public get $kind(): SyntaxKind.JsxOpeningElement { return SyntaxKind.JsxOpeningElement; }

  public readonly $tagName: $$JsxTagNameExpression;
  public readonly $attributes: $JsxAttributes;

  public constructor(
    public readonly node: JsxOpeningElement,
    public readonly parent: $JsxElement,
    public readonly ctx: Context,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}.JsxOpeningElement`,
  ) {
    this.$tagName = $$jsxTagNameExpression(node.tagName, this, ctx, -1);
    this.$attributes = new $JsxAttributes(node.attributes, this, ctx);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for the closing `</tag>` of a JSX element.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxClosingElement implements I$Node {
  public get $kind(): SyntaxKind.JsxClosingElement { return SyntaxKind.JsxClosingElement; }

  public readonly $tagName: $$JsxTagNameExpression;

  public constructor(
    public readonly node: JsxClosingElement,
    public readonly parent: $JsxElement,
    public readonly ctx: Context,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}.JsxClosingElement`,
  ) {
    this.$tagName = $$jsxTagNameExpression(node.tagName, this, ctx, -1);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for the opening `<>` of a JSX fragment.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxOpeningFragment implements I$Node {
  public get $kind(): SyntaxKind.JsxOpeningFragment { return SyntaxKind.JsxOpeningFragment; }

  public constructor(
    public readonly node: JsxOpeningFragment,
    public readonly parent: $JsxFragment,
    public readonly ctx: Context,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}.JsxOpeningFragment`,
  ) {}

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for the closing `</>` of a JSX fragment.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxClosingFragment implements I$Node {
  public get $kind(): SyntaxKind.JsxClosingFragment { return SyntaxKind.JsxClosingFragment; }

  public constructor(
    public readonly node: JsxClosingFragment,
    public readonly parent: $JsxFragment,
    public readonly ctx: Context,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}.JsxClosingFragment`,
  ) {}

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for a named JSX attribute, with an optional string-literal or
 * `{expression}` initializer. No Evaluate — attributes are consumed by their
 * owner element.
 */
export class $JsxAttribute implements I$Node {
  public get $kind(): SyntaxKind.JsxAttribute { return SyntaxKind.JsxAttribute; }

  public readonly $name: $Identifier;
  public readonly $initializer: $StringLiteral | $JsxExpression | undefined;

  public constructor(
    public readonly node: JsxAttribute,
    public readonly parent: $JsxAttributes,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxAttribute`,
  ) {
    this.$name = $identifier(node.name, this, ctx, -1);
    if (node.initializer === void 0) {
      // Bare attribute, e.g. `<input disabled>`.
      this.$initializer = void 0;
    } else {
      if (node.initializer.kind === SyntaxKind.StringLiteral) {
        this.$initializer = new $StringLiteral(node.initializer, this, ctx, -1);
      } else {
        this.$initializer = new $JsxExpression(node.initializer, this, ctx, -1);
      }
    }
  }
}

// Either a named attribute or a `{...spread}` attribute.
export type $$JsxAttributeLike = (
  $JsxAttribute |
  $JsxSpreadAttribute
);

/**
 * Wraps each TypeScript JsxAttributeLike in the matching $-prefixed class,
 * preserving order and index. Returns the shared emptyArray when empty.
 */
export function $$jsxAttributeLikeList(
  nodes: readonly JsxAttributeLike[],
  parent: $JsxAttributes,
  ctx: Context,
): readonly $$JsxAttributeLike[] {
  if (nodes === void 0 || nodes.length === 0) {
    return emptyArray;
  }
  const len = nodes.length;
  const $nodes: $$JsxAttributeLike[] = Array(len);
  for (let i = 0; i < len; ++i) {
    switch (nodes[i].kind) {
      case SyntaxKind.JsxAttribute:
        $nodes[i] = new $JsxAttribute(nodes[i] as JsxAttribute, parent, ctx, i);
        break;
      case SyntaxKind.JsxSpreadAttribute:
        $nodes[i] = new $JsxSpreadAttribute(nodes[i] as JsxSpreadAttribute, parent, ctx, i);
        break;
    }
  }
  return $nodes;
}

/**
 * AST wrapper for the attribute list of an opening/self-closing element.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxAttributes implements I$Node {
  public get $kind(): SyntaxKind.JsxAttributes { return SyntaxKind.JsxAttributes; }

  public readonly $properties: readonly $$JsxAttributeLike[];

  public constructor(
    public readonly node: JsxAttributes,
    public readonly parent: $$JsxOpeningLikeElement,
    public readonly ctx: Context,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}.JsxAttributes`,
  ) {
    this.$properties = $$jsxAttributeLikeList(node.properties, this, ctx);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for a `{...expr}` spread attribute.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxSpreadAttribute implements I$Node {
  public get $kind(): SyntaxKind.JsxSpreadAttribute { return SyntaxKind.JsxSpreadAttribute; }

  public readonly $expression: $$AssignmentExpressionOrHigher;

  public constructor(
    public readonly node: JsxSpreadAttribute,
    public readonly parent: $JsxAttributes,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxSpreadAttribute`,
  ) {
    this.$expression = $assignmentExpression(node.expression as $AssignmentExpressionNode, this, ctx, -1);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}

/**
 * AST wrapper for a `{expr}` JSX expression container.
 * NOTE(review): node.expression is undefined for an empty container `{}`,
 * but the constructor passes it to $assignmentExpression unconditionally —
 * verify that helper tolerates undefined.
 * Evaluate is currently a stub returning intrinsic empty.
 */
export class $JsxExpression implements I$Node {
  public get $kind(): SyntaxKind.JsxExpression { return SyntaxKind.JsxExpression; }

  public readonly $expression: $$AssignmentExpressionOrHigher | undefined;

  public constructor(
    public readonly node: JsxExpression,
    public readonly parent: $$JsxParent | $$JsxAttributeLike,
    public readonly ctx: Context,
    public readonly idx: number,
    public readonly mos: $$ESModuleOrScript = parent.mos,
    public readonly realm: Realm = parent.realm,
    public readonly depth: number = parent.depth + 1,
    public readonly logger: ILogger = parent.logger,
    public readonly path: string = `${parent.path}${$i(idx)}.JsxExpression`,
  ) {
    this.$expression = $assignmentExpression(node.expression as $AssignmentExpressionNode, this, ctx, -1);
  }

  public Evaluate(
    ctx: ExecutionContext,
  ): $Any {
    ctx.checkTimeout();
    const realm = ctx.Realm;
    const intrinsics = realm['[[Intrinsics]]'];
    this.logger.debug(`${this.path}.Evaluate(#${ctx.id})`);
    return intrinsics.empty; // TODO: implement this
  }
}
the_stack
// Unit tests for the McYearView calendar component: rendering of the 12-month
// grid, month selection, and keyboard navigation (including RTL handling).
import { Direction, Directionality } from '@angular/cdk/bidi';
import { Component, ViewChild } from '@angular/core';
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DOWN_ARROW, END, HOME, LEFT_ARROW, PAGE_DOWN, PAGE_UP, RIGHT_ARROW, UP_ARROW } from '@ptsecurity/cdk/keycodes';
import { dispatchFakeEvent, dispatchKeyboardEvent } from '@ptsecurity/cdk/testing';
import { McMomentDateModule } from '@ptsecurity/mosaic-moment-adapter/adapter';

import { McCalendarBody } from './calendar-body.component';
import { McYearView } from './year-view.component';

// Depending on whether rollup is used, moment needs to be imported differently.
// Since Moment.js doesn't have a default export, we normally need to import using the `* as`
// syntax. However, rollup creates a synthetic default module and we thus need to import it using
// the `default as` syntax.
// tslint:disable-next-line:ordered-imports
import * as _moment from 'moment';
// @ts-ignore
// tslint:disable-next-line:no-duplicate-imports
import { default as _rollupMoment, Moment } from 'moment';

// tslint:disable-next-line
const moment = _rollupMoment || _moment;

describe('McYearView', () => {
    // Mutable directionality stub shared with the Directionality provider below,
    // so individual tests can flip between 'ltr' and 'rtl'.
    let dir: { value: Direction };

    beforeEach(waitForAsync(() => {
        TestBed.configureTestingModule({
            imports: [
                McMomentDateModule
            ],
            declarations: [
                McCalendarBody,
                McYearView,

                // Test components.
                StandardYearView,
                YearViewWithDateFilter
            ],
            providers: [
                { provide: Directionality, useFactory: () => dir = { value: 'ltr' } }
            ]
        });

        TestBed.compileComponents();
    }));

    describe('standard year view', () => {
        let fixture: ComponentFixture<StandardYearView>;
        let testComponent: StandardYearView;
        let yearViewNativeElement: Element;

        beforeEach(() => {
            fixture = TestBed.createComponent(StandardYearView);
            fixture.detectChanges();

            const yearViewDebugElement = fixture.debugElement.query(By.directive(McYearView));
            yearViewNativeElement = yearViewDebugElement.nativeElement;
            testComponent = fixture.componentInstance;
        });

        it('has correct year label', () => {
            const labelEl = yearViewNativeElement.querySelector('.mc-calendar__body-label')!;
            expect(labelEl.innerHTML.trim()).toBe('2017');
        });

        it('has 12 months', () => {
            const cellEls = yearViewNativeElement.querySelectorAll('.mc-calendar__body-cell');
            expect(cellEls.length).toBe(12);
        });

        it('shows selected month if in same year', () => {
            const selectedEl = yearViewNativeElement.querySelector('.mc-selected')!;
            expect(selectedEl.innerHTML.trim()).toBe('Mar');
        });

        it('does not show selected month if in different year', () => {
            testComponent.selected = moment([2016, 2, 10]);
            fixture.detectChanges();

            const selectedEl = yearViewNativeElement.querySelector('.mc-selected');
            expect(selectedEl).toBeNull();
        });

        it('fires selected change event on cell clicked', () => {
            const cellEls = yearViewNativeElement.querySelectorAll('.mc-calendar__body-cell');
            (cellEls[cellEls.length - 1] as HTMLElement).click();
            fixture.detectChanges();

            const selectedEl = yearViewNativeElement.querySelector('.mc-selected')!;
            expect(selectedEl.innerHTML.trim()).toBe('Dec');
        });

        it('should emit the selected month on cell clicked', () => {
            const cellEls = yearViewNativeElement.querySelectorAll('.mc-calendar__body-cell');
            (cellEls[cellEls.length - 1] as HTMLElement).click();
            fixture.detectChanges();

            // Months are zero-based: 11 === December.
            const normalizedMonth: Moment = fixture.componentInstance.selectedMonth;
            expect(normalizedMonth.month()).toEqual(11);
        });

        it('should mark active date', () => {
            const cellEls = yearViewNativeElement.querySelectorAll('.mc-calendar__body-cell');
            expect((cellEls[0] as HTMLElement).innerText.trim()).toBe('Jan');
            expect(cellEls[0].classList).toContain('mc-calendar__body_active');
        });

        it('should allow selection of month with less days than current active date', () => {
            // Jul 31 -> June: June has only 30 days, so the day should clamp to 30.
            testComponent.date = moment([2017, 6, 31]);
            fixture.detectChanges();
            // tslint:disable-next-line:no-void-expression
            expect(testComponent.yearView.onMonthSelected(5));
            fixture.detectChanges();

            expect(testComponent.selected.toDate()).toEqual(new Date(2017, 5, 30));
        });

        describe('a11y', () => {
            describe('calendar body', () => {
                let calendarBodyEl: HTMLElement;
                let calendarInstance: StandardYearView;

                beforeEach(() => {
                    calendarInstance = fixture.componentInstance;
                    calendarBodyEl = fixture.debugElement.nativeElement.querySelector('.mc-calendar__body') as HTMLElement;
                    expect(calendarBodyEl).not.toBeNull();
                    dir.value = 'ltr';
                    // All keyboard tests start from Jan 5, 2017 with the body focused.
                    fixture.componentInstance.date = moment([2017, 0, 5]);
                    dispatchFakeEvent(calendarBodyEl, 'focus');
                    fixture.detectChanges();
                });

                it('should decrement month on left arrow press', () => {
                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', LEFT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2016, 11, 5));

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', LEFT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2016, 10, 5));
                });

                it('should increment month on left arrow press in rtl', () => {
                    // In RTL the horizontal arrows are mirrored.
                    dir.value = 'rtl';

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', LEFT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 1, 5));

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', LEFT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 2, 5));
                });

                it('should increment month on right arrow press', () => {
                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', RIGHT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 1, 5));

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', RIGHT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 2, 5));
                });

                it('should decrement month on right arrow press in rtl', () => {
                    dir.value = 'rtl';

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', RIGHT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2016, 11, 5));

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', RIGHT_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2016, 10, 5));
                });

                it('should go up a row on up arrow press', () => {
                    // The month grid is 4 columns wide, so up/down move by 4 months.
                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', UP_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2016, 8, 5));

                    calendarInstance.date = moment([2017, 6, 1]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', UP_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 2, 1));

                    calendarInstance.date = moment([2017, 11, 10]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', UP_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 7, 10));
                });

                it('should go down a row on down arrow press', () => {
                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', DOWN_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 4, 5));

                    calendarInstance.date = moment([2017, 5, 1]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', DOWN_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 9, 1));

                    calendarInstance.date = moment([2017, 8, 30]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', DOWN_ARROW);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2018, 0, 30));
                });

                it('should go to first month of the year on home press', () => {
                    calendarInstance.date = moment([2017, 8, 30]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', HOME);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 0, 30));

                    // Pressing HOME again is a no-op.
                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', HOME);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 0, 30));
                });

                it('should go to last month of the year on end press', () => {
                    calendarInstance.date = moment([2017, 9, 31]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', END);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 11, 31));

                    // Pressing END again is a no-op.
                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', END);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 11, 31));
                });

                it('should go back one year on page up press', () => {
                    // Feb 29 (leap year) clamps to Feb 28 in non-leap years.
                    calendarInstance.date = moment([2016, 1, 29]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', PAGE_UP);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2015, 1, 28));

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', PAGE_UP);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2014, 1, 28));
                });

                it('should go forward one year on page down press', () => {
                    calendarInstance.date = moment([2016, 1, 29]);
                    fixture.detectChanges();

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', PAGE_DOWN);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2017, 1, 28));

                    dispatchKeyboardEvent(calendarBodyEl, 'keydown', PAGE_DOWN);
                    fixture.detectChanges();

                    expect(calendarInstance.date.toDate()).toEqual(new Date(2018, 1, 28));
                });
            });
        });
    });

    describe('year view with date filter', () => {
        let fixture: ComponentFixture<YearViewWithDateFilter>;
        let yearViewNativeElement: Element;

        beforeEach(() => {
            fixture = TestBed.createComponent(YearViewWithDateFilter);
            fixture.detectChanges();

            const yearViewDebugElement = fixture.debugElement.query(By.directive(McYearView));
            yearViewNativeElement = yearViewDebugElement.nativeElement;
        });

        it('should disable months with no enabled days', () => {
            // Per YearViewWithDateFilter.dateFilter: January has one enabled day,
            // February has none.
            const cells = yearViewNativeElement.querySelectorAll('.mc-calendar__body-cell');
            expect(cells[0].classList).not.toContain('mc-calendar__body_disabled');
            expect(cells[1].classList).toContain('mc-calendar__body_disabled');
        });
    });
});

// Host component with two-way bound active/selected dates and a handle on the
// McYearView instance, used by the 'standard year view' suite.
@Component({
    template: `
        <mc-year-view [(activeDate)]="date"
                      [(selected)]="selected"
                      (monthSelected)="selectedMonth=$event"></mc-year-view>`
})
class StandardYearView {
    date = moment([2017, 0, 5]);
    selected = moment([2017, 2, 10]);
    selectedMonth: Moment;

    @ViewChild(McYearView, {static: false}) yearView: McYearView<Moment>;
}

// Host component exercising the [dateFilter] input.
@Component({
    template: `
        <mc-year-view [activeDate]="activeDate" [dateFilter]="dateFilter"></mc-year-view>`
})
class YearViewWithDateFilter {
    activeDate = moment([2017, 0, 1]);

    dateFilter(date: Moment) {
        // January: only the 10th is enabled; February: fully disabled.
        if (date.month() === 0) {
            return date.date() === 10;
        }

        if (date.month() === 1) {
            return false;
        }

        return true;
    }
}
the_stack
export default { defaultToken: '', ignoreCase: true, brackets: [ { open: '[', close: ']', token: 'delimiter.square' }, { open: '(', close: ')', token: 'delimiter.parenthesis' } ], keywords: [ 'ALL', 'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERT_ROWS_MODIFIED', 'AT', 'BETWEEN', 'BY', 'CASE', 'CAST', 'COLLATE', 'CONTAINS', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'DEFAULT', 'DEFINE', 'DESC', 'DISTINCT', 'ELSE', 'END', 'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXISTS', 'EXTRACT', 'FALSE', 'FETCH', 'FOLLOWING', 'FOR', 'FROM', 'FULL', 'GROUP', 'GROUPING', 'GROUPS', 'HASH', 'HAVING', 'IF', 'IGNORE', 'IN', 'INNER', 'INTERSECT', 'INTERVAL', 'INTO', 'IS', 'JOIN', 'LATERAL', 'LEFT', 'LIKE', 'LIMIT', 'LOOKUP', 'MERGE', 'NATURAL', 'NEW', 'NO', 'NOT', 'NULL', 'NULLS', 'OF', 'ON', 'OR', 'ORDER', 'OUTER', 'OVER', 'PARTITION', 'PRECEDING', 'PROTO', 'RANGE', 'RECURSIVE', 'RESPECT', 'RIGHT', 'ROLLUP', 'ROWS', 'SELECT', 'SET', 'SOME', 'STRUCT', 'TABLESAMPLE', 'THEN', 'TO', 'TREAT', 'TRUE', 'UNBOUNDED', 'UNION', 'UNNEST', 'USING', 'WHEN', 'WHERE', 'WINDOW', 'WITH', 'WITHIN' ], operators: [ 'LIKE', 'NOT LIKE', 'BETWEEN', 'NOT BETWEEN', 'IN', 'NOT IN', 'IS NULL', 'IS NOT NULL', 'IS TRUE', 'IS NOT TRUE', 'IS FALSE', 'IS NOT FALSE', 'NOT', 'AND', 'OR' ], builtinFunctions: [ 'ANY_VALUE', 'ARRAY_AGG', 'ARRAY_CONCAT_AGG', 'AVG', 'BIT_AND', 'BIT_OR', 'BIT_XOR', 'COUNT', 'COUNTIF', 'LOGICAL_AND', 'LOGICAL_OR', 'MAX', 'MIN', 'STRING_AGG', 'SUM', 'CORR', 'COVAR_POP', 'COVAR_SAMP', 'STDDEV_POP', 'STDDEV_SAMP', 'STDDEV', 'VAR_POP', 'VAR_SAMP', 'VARIANCE', 'APPROX_COUNT_DISTINCT', 'APPROX_QUANTILES', 'APPROX_TOP_COUNT', 'APPROX_TOP_SUM', 'HLL_COUNT.INIT', 'HLL_COUNT.MERGE', 'HLL_COUNT.MERGE_PARTIAL', 'HLL_COUNT.EXTRACT', 'RANK', 'DENSE_RANK', 'PERCENT_RANK', 'CUME_DIST', 'NTILE', 'ROW_NUMBER', 'BIT_COUNT', 'CAST AS ARRAY', 'CAST AS BIGNUMERIC', 'CAST AS BOOL', 'CAST AS BYTES', 'CAST AS DATE', 'CAST AS DATETIME', 'CAST AS FLOAT64', 'CAST AS INT64', 'CAST AS NUMERIC', 'CAST AS STRING', 'CAST AS STRUCT', 
'CAST AS TIME', 'CAST AS TIMESTAMP', 'PARSE_BIGNUMERIC', 'PARSE_NUMERIC', 'SAFE_CAST', 'ABS', 'SIGN', 'IS_INF', 'IS_NAN', 'IEEE_DIVIDE', 'RAND', 'SQRT', 'POW', 'POWER', 'EXP', 'LN', 'LOG', 'LOG10', 'GREATEST', 'LEAST', 'DIV', 'SAFE_DIVIDE', 'SAFE_MULTIPLY', 'SAFE_NEGATE', 'SAFE_ADD', 'SAFE_SUBTRACT', 'MOD', 'ROUND', 'TRUNC', 'CEIL', 'CEILING', 'FLOOR', 'COS', 'COSH', 'ACOS', 'ACOSH', 'SIN', 'SINH', 'ASIN', 'ASINH', 'TAN', 'TANH', 'ATAN', 'ATANH', 'ATAN2', 'RANGE_BUCKET', 'FIRST_VALUE', 'LAST_VALUE', 'NTH_VALUE', 'LEAD', 'LAG', 'PERCENTILE_CONT', 'PERCENTILE_DISC', 'FARM_FINGERPRINT', 'MD5', 'SHA1', 'SHA256', 'SHA512', 'ASCII', 'BYTE_LENGTH', 'CHAR_LENGTH', 'CHARACTER_LENGTH', 'CHR', 'CODE_POINTS_TO_BYTES', 'CODE_POINTS_TO_STRING', 'CONCAT', 'CONTAINS_SUBSTR', 'ENDS_WITH', 'FORMAT', 'FROM_BASE32', 'FROM_BASE64', 'FROM_HEX', 'INITCAP', 'INSTR', 'LEFT', 'LENGTH', 'LPAD', 'LOWER', 'LTRIM', 'NORMALIZE', 'NORMALIZE_AND_CASEFOLD', 'OCTET_LENGTH', 'REGEXP_CONTAINS', 'REGEXP_EXTRACT', 'REGEXP_EXTRACT_ALL', 'REGEXP_INSTR', 'REGEXP_REPLACE', 'REGEXP_SUBSTR', 'REPLACE', 'REPEAT', 'REVERSE', 'RIGHT', 'RPAD', 'RTRIM', 'SAFE_CONVERT_BYTES_TO_STRING', 'SOUNDEX', 'SPLIT', 'STARTS_WITH', 'STRPOS', 'SUBSTR', 'SUBSTRING', 'TO_BASE32', 'TO_BASE64', 'TO_CODE_POINTS', 'TO_HEX', 'TRANSLATE', 'TRIM', 'UNICODE', 'UPPER', 'JSON_EXTRACT', 'JSON_QUERY', 'JSON_EXTRACT_SCALAR', 'JSON_VALUE', 'JSON_EXTRACT_ARRAY', 'JSON_QUERY_ARRAY', 'JSON_EXTRACT_STRING_ARRAY', 'JSON_VALUE_ARRAY', 'TO_JSON_STRING', 'ARRAY', 'ARRAY_CONCAT', 'ARRAY_LENGTH', 'ARRAY_TO_STRING', 'GENERATE_ARRAY', 'GENERATE_DATE_ARRAY', 'GENERATE_TIMESTAMP_ARRAY', 'OFFSET', 'ORDINAL', 'ARRAY_REVERSE', 'SAFE_OFFSET', 'SAFE_ORDINAL', 'CURRENT_DATE', 'EXTRACT', 'DATE', 'DATE_ADD', 'DATE_SUB', 'DATE_DIFF', 'DATE_TRUNC', 'DATE_FROM_UNIX_DATE', 'FORMAT_DATE', 'LAST_DAY', 'PARSE_DATE', 'UNIX_DATE', 'CURRENT_DATETIME', 'DATETIME', 'EXTRACT', 'DATETIME_ADD', 'DATETIME_SUB', 'DATETIME_DIFF', 'DATETIME_TRUNC', 'FORMAT_DATETIME', 'LAST_DAY', 
'PARSE_DATETIME', 'CURRENT_TIME', 'TIME', 'EXTRACT', 'TIME_ADD', 'TIME_SUB', 'TIME_DIFF', 'TIME_TRUNC', 'FORMAT_TIME', 'PARSE_TIME', 'CURRENT_TIMESTAMP', 'EXTRACT', 'STRING', 'TIMESTAMP', 'TIMESTAMP_ADD', 'TIMESTAMP_SUB', 'TIMESTAMP_DIFF', 'TIMESTAMP_TRUNC', 'FORMAT_TIMESTAMP', 'PARSE_TIMESTAMP', 'TIMESTAMP_SECONDS', 'TIMESTAMP_MILLIS', 'TIMESTAMP_MICROS', 'UNIX_SECONDS', 'UNIX_MILLIS', 'UNIX_MICROS', 'ST_AREA', 'ST_ASBINARY', 'ST_ASGEOJSON', 'ST_ASTEXT', 'ST_BOUNDARY', 'ST_CENTROID', 'ST_CENTROID_AGG', 'ST_CLOSESTPOINT', 'ST_CLUSTERDBSCAN', 'ST_CONTAINS', 'ST_CONVEXHULL', 'ST_COVEREDBY', 'ST_COVERS', 'ST_DIFFERENCE', 'ST_DIMENSION', 'ST_DISJOINT', 'ST_DISTANCE', 'ST_DUMP', 'ST_DWITHIN', 'ST_ENDPOINT', 'ST_EQUALS', 'ST_EXTERIORRING', 'ST_GEOGFROM', 'ST_GEOGFROMGEOJSON', 'ST_GEOGFROMTEXT', 'ST_GEOGFROMWKB', 'ST_GEOGPOINT', 'ST_GEOGPOINTFROMGEOHASH', 'ST_GEOHASH', 'ST_INTERSECTION', 'ST_INTERSECTS', 'ST_INTERSECTSBOX', 'ST_ISCOLLECTION', 'ST_ISEMPTY', 'ST_LENGTH', 'ST_MAKELINE', 'ST_MAKEPOLYGON', 'ST_MAKEPOLYGONORIENTED', 'ST_MAXDISTANCE', 'ST_NPOINTS', 'ST_NUMPOINTS', 'ST_PERIMETER', 'ST_POINTN', 'ST_SIMPLIFY', 'ST_SNAPTOGRID', 'ST_STARTPOINT', 'ST_TOUCHES', 'ST_UNION', 'ST_UNION_AGG', 'ST_WITHIN', 'ST_X', 'ST_Y', 'SESSION_USER', 'GENERATE_UUID', 'NET.IP_FROM_STRING', 'NET.SAFE_IP_FROM_STRING', 'NET.IP_TO_STRING', 'NET.IP_NET_MASK', 'NET.IP_TRUNC', 'NET.IPV4_FROM_INT64', 'NET.IPV4_TO_INT64', 'NET.HOST', 'NET.PUBLIC_SUFFIX', 'NET.REG_DOMAIN', 'CASE', 'COALESCE', 'IF', 'IFNULL', 'NULLIF' ], builtinVariables: [], pseudoColumns: [], tokenizer: { root: [ { include: '@comments' }, { include: '@whitespace' }, { include: '@pseudoColumns' }, { include: '@numbers' }, { include: '@strings' }, { include: '@complexIdentifiers' }, { include: '@scopes' }, [/[;,.]/, 'delimiter'], [/[()]/, '@brackets'], [/\{\{[0-9a-zA-Z\-_]{21}( as \w+)?\}\}/, 'transclusion'], [ /[\w@#$]+/, { cases: { '@keywords': 'keyword', '@operators': 'operator', '@builtinVariables': 'predefined', 
'@builtinFunctions': 'predefined', '@default': 'identifier' } } ], [/[<>=!%&+\-*/|~^]/, 'operator'] ], whitespace: [[/\s+/, 'white']], comments: [ [/--+.*/, 'comment'], [/\/\*/, { token: 'comment.quote', next: '@comment' }] ], comment: [ [/[^*/]+/, 'comment'], // Not supporting nested comments, as nested comments seem to not be standard? // i.e. http://stackoverflow.com/questions/728172/are-there-multiline-comment-delimiters-in-sql-that-are-vendor-agnostic // [/\/\*/, { token: 'comment.quote', next: '@push' }], // nested comment not allowed :-( [/\*\//, { token: 'comment.quote', next: '@pop' }], [/./, 'comment'] ], pseudoColumns: [ [ /[$][A-Za-z_][\w@#$]*/, { cases: { '@pseudoColumns': 'predefined', '@default': 'identifier' } } ] ], numbers: [ [/0[xX][0-9a-fA-F]*/, 'number'], [/[$][+-]*\d*(\.\d*)?/, 'number'], [/((\d+(\.\d*)?)|(\.\d+))([eE][-+]?\d+)?/, 'number'] ], strings: [ [/[rR]?'/, { token: 'string', next: '@singleQuotedString' }], [/[rR]?"/, { token: 'string', next: '@doubleQuotedString' }], [/[rR]?'{3}/, { token: 'string', next: '@tripleQuotedString' }], [/[rR]?"{3}/, { token: 'string', next: '@tripleQuotedString2' }] ], singleQuotedString: [ [/[rR]?[^']+/, 'string'], [/''/, 'string'], [/'/, { token: 'string', next: '@pop' }] ], doubleQuotedString: [ [/[rR]?[^"]+/, 'string'], [/""/, 'string'], [/"/, { token: 'string', next: '@pop' }] ], tripleQuotedString: [ [/[rR]?[^'{3}]+/, 'string'], [/'{6}/, 'string'], [/'{3}/, { token: 'string', next: '@pop' }] ], tripleQuotedString2: [ [/[rR]?[^"{3}]+/, 'string'], [/"{6}/, 'string'], [/"{3}/, { token: 'string', next: '@pop' }] ], complexIdentifiers: [['`', { token: 'identifier.quote', next: '@backquotedIdentifier' }]], backquotedIdentifier: [ [/[^`]+/, 'identifier'], [/``/, 'identifier'], [/`/, { token: 'identifier.quote', next: '@pop' }] ], scopes: [ [/(BEGIN|CASE)\b/i, { token: 'keyword.block' }], [/END\b/i, { token: 'keyword.block' }], [/WHEN\b/i, { token: 'keyword.choice' }], [/THEN\b/i, { token: 'keyword.choice' 
}] ] } }
the_stack
import { Component, Event, EventEmitter, Listen, Prop, State, Watch } from '@stencil/core';
import { Column } from './grid-helpers';
import { renderRow, RowOptions, RowSelectionPattern } from './row';
import { renderHeaderCell, Sort } from './header-cell';

/**
 * Data grid component supporting sorting, filtering, row selection, and
 * in-place cell editing. Several props (editable, editOnClick, simpleEditable,
 * useApplicationRole, rowSelection) toggle alternative interaction patterns
 * used for usability testing.
 */
@Component({
  tag: 'sui-grid',
  styleUrl: './grid.css'
})
export class SuiGrid {
  /**
   * Grid data
   */
  @Prop() cells: string[][];

  /**
   * Column definitions
   */
  @Prop() columns: Column[];

  /**
   * Caption/description for the grid
   */
  @Prop() description: string;

  /**
   * Grid type: grids have controlled focus and fancy behavior, tables are simple static content
   */
  @Prop() gridType: 'grid' | 'table';

  /**
   * String ID of labelling element
   */
  @Prop() labelledBy: string;

  /**
   * Number of rows in one "page": used to compute pageUp/pageDown key behavior, and when paging is used
   */
  @Prop() pageLength = 30;

  /**
   * Custom function to control the render of cell content
   */
  @Prop() renderCustomCell: (content: string, colIndex: number, rowIndex: number) => string | HTMLElement;

  /**
   * Index of the column that best labels a row
   */
  @Prop() titleColumn = 0;

  /** Properties for Usability test case behaviors: **/
  @Prop() editable: boolean = true;
  @Prop() editOnClick: boolean;
  @Prop() headerActionsMenu: boolean;
  @Prop() rowSelection: RowSelectionPattern;
  @Prop() simpleEditable = false;
  @Prop() useApplicationRole = false;

  /**
   * Emit a custom filter event
   */
  @Event({
    eventName: 'filter'
  }) filterEvent: EventEmitter;

  /**
   * Emit a custom row selection event
   */
  @Event({
    eventName: 'rowSelect'
  }) rowSelectionEvent: EventEmitter;

  /**
   * Emit a custom edit event when cell content change is submitted
   */
  @Event({
    eventName: 'editCell'
  }) editCellEvent: EventEmitter<{value: string; column: number; row: number;}>;

  /**
   * Save number of selected rows
   */
  @State() selectedRowCount = 0;

  /**
   * Save column sort state
   */
  @State() sortedColumn: number;
  @State() sortState: Sort;

  // save cell focus and edit states
  // active cell refers to the [column, row] indices of the cell
  @State() activeCell: [number, number] = [0, 0];
  @State() isEditing = false;

  /**
   * Save current filter strings
   */
  private filters: WeakMap<Column, string> = new WeakMap();

  /**
   * Save selection state by row
   */
  private selectedRows: WeakMap<string[], boolean> = new WeakMap();

  /**
   * Save current sorted cell array
   * Will likely need to be moved out of component to allow on-demand and paged grids
   */
  private sortedCells: string[][];

  /*
   * DOM Refs:
   */
  // Save a reference to whatever element should receive focus
  private focusRef: HTMLElement;

  /*
   * Private properties used to trigger DOM methods in the correct lifecycle callback
   */
  private callFocus = false;
  private callInputSelection = false;
  private preventSave = false; // prevent saves on escape
  private mouseDown = false; // handle focus/click behavior

  // Re-sort incoming data and recompute how many of the new rows are selected
  // (selection is keyed by row-array identity in the WeakMap).
  @Watch('cells')
  watchOptions(newValue: string[][]) {
    this.sortedCells = this.getSortedCells(newValue);

    // reset selectedRowCount
    let selectedRowCount = 0;
    newValue.forEach((row: string[]) => {
      this.selectedRows.has(row) && selectedRowCount++;
    });
    this.selectedRowCount = selectedRowCount;
  }

  componentWillLoad() {
    this.sortedCells = this.cells;
  }

  // Focus/selection must happen after render; the callFocus/callInputSelection
  // flags are set by event handlers and consumed (then cleared) here.
  componentDidUpdate() {
    // handle focus
    this.callFocus && this.focusRef && this.focusRef.focus();
    this.callFocus = false;

    // handle input text selection
    this.callInputSelection && this.focusRef && (this.focusRef as HTMLInputElement).select();
    this.callInputSelection = false;
  }

  // Exit edit mode when focus leaves the edit input (except in the
  // button-driven simpleEditable mode, where blur is handled by the buttons).
  @Listen('focusout')
  onBlur(event: FocusEvent) {
    if (this.isEditing && event.relatedTarget && event.relatedTarget !== this.focusRef && !this.simpleEditable) {
      this.updateEditing(false, false);
    }
  }

  render() {
    const {
      columns = [],
      description,
      editable,
      gridType = 'table',
      headerActionsMenu,
      rowSelection,
      selectedRows,
      sortedCells = [],
      sortedColumn,
      sortState,
      useApplicationRole
    } = this;
    const rowSelectionState = this.getSelectionState();
    const tableRole = useApplicationRole ? 'application' : gridType;

    return <table role={tableRole} aria-roledescription={useApplicationRole ? 'editable data grid' : null} class="grid" aria-labelledby={this.labelledBy} aria-readonly={editable ? null : 'true'}>
      {description ? <caption>{description}</caption> : null}
      <thead role="rowgroup" class="grid-header">
        <tr role="row" class="row">
          {rowSelection !== RowSelectionPattern.None ?
            <th role="columnheader" aria-labelledby="select-all-header" class={{'checkbox-cell': true, 'indeterminate': rowSelectionState === 'indeterminate'}}>
              <span class="visuallyHidden" id="select-all-header">select row</span>
              <input
                type="checkbox"
                aria-label="select all rows"
                checked={!!rowSelectionState}
                ref={(el) => {
                  // native checkboxes only expose indeterminate via the DOM property
                  if (rowSelectionState === 'indeterminate') {
                    el.indeterminate = true;
                  }
                }}
                onChange={(event) => this.onSelectAll((event.target as HTMLInputElement).checked)} />
              <span class="selection-indicator"></span>
            </th>
          : null}
          {columns.map((column, index) => {
            return renderHeaderCell({
              column,
              colIndex: index,
              actionsMenu: headerActionsMenu,
              isSortedColumn: sortedColumn === index,
              sortDirection: sortState,
              onSort: this.onSortColumn.bind(this),
              onFilter: this.onFilterInput.bind(this)
            });
          })}
        </tr>
      </thead>
      <tbody role="rowgroup" class="grid-body" onKeyDown={this.onCellKeydown.bind(this)}>
        {sortedCells.map((cells = [], index) => {
          const isSelected = !!selectedRows.get(cells);
          let rowOptions: RowOptions = {
            cells,
            index,
            isSelected,
            selection: rowSelection,
            renderCell: this.renderCell.bind(this),
            renderCheckboxCell: this.renderCheckboxCell.bind(this),
            onSelectionChange: this.onRowSelect.bind(this)
          };
          if (this.rowSelection === RowSelectionPattern.Aria) {
            // Aria selection pattern: focus lives on the row, not a cell
            const isActiveRow = this.activeCell[1] === index;
            rowOptions = {
              ...rowOptions,
              isActiveRow,
              setFocusRef: (el) => this.focusRef = el,
              onRowKeyDown: this.onRowKeyDown.bind(this)
            }
          }
          return renderRow(rowOptions);
        })}
      </tbody>
    </table>;
  }

  // tri-state select-all: false / true / 'indeterminate'
  private getSelectionState(): boolean | 'indeterminate' {
    return this.selectedRowCount === 0 ? false : this.selectedRowCount === this.cells.length ? true : 'indeterminate';
  }

  private getSortedCells(cells: string[][]) {
    if (this.sortedColumn !== undefined && this.sortState !== Sort.None) {
      return [ ...cells ].sort(this.getSortFunction(this.sortedColumn, this.sortState));
    }

    return cells;
  }

  // Case-insensitive lexicographic comparator for the given column/direction.
  private getSortFunction(columnIndex: number, order: Sort) {
    return function(row1, row2) {
      const a = row1[columnIndex].toLowerCase();
      const b = row2[columnIndex].toLowerCase();
      if (a < b) {
        return order === Sort.Ascending ? -1 : 1;
      }
      else if (a > b) {
        return order === Sort.Ascending ? 1 : -1;
      }
      else {
        return 0;
      }
    }
  }

  private onCellClick(row, column) {
    if (this.simpleEditable) return;

    // always edit on click if clicking the active cell
    if (this.editOnClick || (this.activeCell[0] === column && this.activeCell[1] === row)) {
      this.updateEditing(true, true);
    }
    this.activeCell = [column, row];
  }

  private onCellDoubleClick(event) {
    if (!this.editOnClick && !this.simpleEditable) {
      this.updateEditing(true, true);
      event.preventDefault();
    }
  }

  private onCellFocus(row, column) {
    // ignore the focus event triggered by mousedown; onCellClick handles it
    if (this.mouseDown) {
      this.mouseDown = false;
      return;
    }

    this.activeCell = [column, row];
  }

  // Keyboard navigation for cell-focus mode (arrow/home/end/page keys move the
  // active cell; enter/space enter edit mode).
  private onCellKeydown(event: KeyboardEvent) {
    const { pageLength } = this;
    // with checkbox selection the checkbox column shifts cell indices right by one
    const maxCellIndex = this.rowSelection === RowSelectionPattern.Checkbox ? this.columns.length : this.columns.length - 1;
    let [colIndex, rowIndex] = this.activeCell;
    switch(event.key) {
      case 'ArrowUp':
        rowIndex = Math.max(0, rowIndex - 1);
        break;
      case 'ArrowDown':
        rowIndex = Math.min(this.cells.length - 1, rowIndex + 1);
        break;
      case 'ArrowLeft':
        colIndex = Math.max(0, colIndex - 1);
        break;
      case 'ArrowRight':
        colIndex = Math.min(maxCellIndex, colIndex + 1);
        break;
      case 'Home':
        colIndex = 0;
        break;
      case 'End':
        colIndex = maxCellIndex;
        break;
      case 'Enter':
      case ' ':
        if (this.simpleEditable) return;
        event.preventDefault();
        this.updateEditing(true, true);
        break;
      case 'PageUp':
        rowIndex = Math.max(0, rowIndex - pageLength);
        break;
      case 'PageDown':
        rowIndex = Math.min(this.cells.length - 1, rowIndex + pageLength);
        break;
    }

    if (this.updateActiveCell(colIndex, rowIndex)) {
      event.preventDefault();
    }
  }

  // Handles the edit/save/cancel buttons rendered in simpleEditable mode.
  // NOTE(review): when save=true, this reads (focusRef as HTMLInputElement).value
  // after updateEditing(edit, true) has run — confirm focusRef still points at
  // the edit input (not the button) at that moment.
  private onEditButtonClick(event: MouseEvent, row: number, column: number, edit: boolean, save = false) {
    event.stopPropagation();
    this.activeCell = [column, row];
    this.updateEditing(edit, true);
    if (save) {
      this.saveCell(column, row, (this.focusRef as HTMLInputElement).value);
    }
  }

  // Collects all non-empty filter strings into a {columnIndex: string} map and
  // emits a single 'filter' event.
  private onFilterInput(value: string, column: Column) {
    this.filters.set(column, value);

    const filters = {};
    this.columns.forEach((column, index) => {
      if (column.filterable && this.filters.has(column)) {
        const filterString = this.filters.get(column);
        if (filterString.trim() !== '') {
          filters[index] = filterString;
        }
      }
    });

    this.filterEvent.emit(filters);
  }

  private onInputBlur(event: FocusEvent) {
    if (!this.simpleEditable) {
      // save value on blur; adjust for the checkbox column offset
      const cellIndex = this.rowSelection === RowSelectionPattern.Checkbox ? this.activeCell[0] - 1 : this.activeCell[0];
      this.saveCell(cellIndex, this.activeCell[1], (event.target as HTMLInputElement).value);
    }
  }

  // Key handling inside the edit input: escape cancels (preventSave), enter
  // saves and exits, tab/shift+tab move across cells in editOnClick mode.
  private onInputKeyDown(event: KeyboardEvent) {
    // allow input to handle its own keystrokes
    event.stopPropagation();

    const { key, shiftKey } = event;

    if (key === 'Escape') {
      this.preventSave = true;
    }

    // switch out of edit mode on enter or escape
    if (key === 'Escape' || key === 'Enter') {
      this.updateEditing(false, true);
    }

    // save value on enter
    if (key === 'Enter') {
      const cellIndex = this.rowSelection === RowSelectionPattern.Checkbox ? this.activeCell[0] - 1 : this.activeCell[0];
      this.saveCell(cellIndex, this.activeCell[1], (event.target as HTMLInputElement).value);
    }

    // allow tab and shift+tab to move through cells in a row for edit on click grid
    else if (key === 'Tab' && this.editOnClick) {
      const maxCellIndex = this.rowSelection === RowSelectionPattern.Checkbox ? this.columns.length : this.columns.length - 1;
      if (shiftKey && this.activeCell[0] > 0) {
        this.saveCell(this.activeCell[0], this.activeCell[1], (event.target as HTMLInputElement).value);
        this.updateActiveCell(this.activeCell[0] - 1, this.activeCell[1]);
        // NOTE(review): preventSave is set AFTER saveCell here — presumably to
        // suppress the subsequent blur-save of the same value; confirm intent.
        this.preventSave = true;
        event.preventDefault();
      }
      else if (!shiftKey && this.activeCell[0] < maxCellIndex) {
        this.saveCell(this.activeCell[0], this.activeCell[1], (event.target as HTMLInputElement).value);
        this.updateActiveCell(this.activeCell[0] + 1, this.activeCell[1]);
        this.preventSave = true;
        event.preventDefault();
      }
    }
  }

  // Row-level keyboard navigation used by the Aria row-selection pattern
  // (vertical movement only; columns are not navigable in this mode).
  private onRowKeyDown(event: KeyboardEvent) {
    const { pageLength } = this;
    let [colIndex, rowIndex] = this.activeCell;
    switch(event.key) {
      case 'ArrowUp':
        rowIndex = Math.max(0, rowIndex - 1);
        break;
      case 'ArrowDown':
        rowIndex = Math.min(this.cells.length - 1, rowIndex + 1);
        break;
      case 'PageUp':
        rowIndex = Math.max(0, rowIndex - pageLength);
        break;
      case 'PageDown':
        rowIndex = Math.min(this.cells.length - 1, rowIndex + pageLength);
        break;
    }

    if (this.updateActiveCell(colIndex, rowIndex)) {
      event.preventDefault();
      event.stopPropagation();
    }
  }

  private onRowSelect(row: string[], selected: boolean) {
    this.selectedRows.set(row, selected);
    this.selectedRowCount = this.selectedRowCount + (selected ? 1 : -1);
  }

  private onSelectAll(selected: boolean) {
    this.cells.forEach((row) => {
      this.selectedRows.set(row, selected);
    });
    this.selectedRowCount = selected ? this.cells.length : 0;
  }

  // Toggle direction when re-sorting the same column; otherwise sort the new
  // column ascending.
  private onSortColumn(columnIndex: number) {
    if (columnIndex === this.sortedColumn) {
      this.sortState = this.sortState === Sort.Descending ? Sort.Ascending : Sort.Descending;
    }
    else {
      this.sortedColumn = columnIndex;
      this.sortState = Sort.Ascending;
    }

    this.sortedCells = this.getSortedCells(this.cells);
  }

  // Renders one data cell, switching between display and edit presentation for
  // the active cell, plus the edit/save/cancel buttons in simpleEditable mode.
  private renderCell(rowIndex: number, cellIndex: number, content: string) {
    const activeCellId = this.activeCell.join('-');
    const currentCellKey = `${cellIndex}-${rowIndex}`;
    // checkbox selection shifts data columns right by one
    const cellColumn = this.rowSelection === RowSelectionPattern.Checkbox ? this.columns[cellIndex - 1] : this.columns[cellIndex];
    const isActiveCell = activeCellId === currentCellKey && !cellColumn.actionsColumn;
    const isGrid = this.gridType === 'grid';
    return <td
      role={isGrid ? 'gridcell' : 'cell'}
      id={`cell-${rowIndex}-${cellIndex}`}
      class={{'cell': true, 'editing': this.isEditing && isActiveCell }}
      aria-label={this.useApplicationRole ? `${cellColumn.name} ${content}` : null}
      aria-readonly={!this.editable || cellColumn.actionsColumn ? 'true' : null}
      tabIndex={isGrid && this.rowSelection !== RowSelectionPattern.Aria ? isActiveCell ? 0 : -1 : null}
      ref={isActiveCell && !this.isEditing && this.rowSelection !== RowSelectionPattern.Aria ? (el) => { this.focusRef = el; } : null}
      onFocus={() => { this.onCellFocus(rowIndex, cellIndex)}}
      onClick={this.editable ? () => { this.onCellClick(rowIndex, cellIndex); } : null}
      onDblClick={this.editable ? this.onCellDoubleClick.bind(this) : null}
      onMouseDown={() => { this.mouseDown = true; }}
    >
      {this.isEditing && isActiveCell
        ? <input value={content} class="cell-edit" onKeyDown={this.onInputKeyDown.bind(this)} onBlur={this.onInputBlur.bind(this)} ref={(el) => this.focusRef = el} />
        : <span class="cell-content">{this.renderCellContent(content, cellIndex, rowIndex)}</span>
      }
      {this.simpleEditable && !cellColumn.actionsColumn ?
        this.isEditing && isActiveCell ?
          [
            <button class="grid-button" key={`${currentCellKey}-save`} type="button" onClick={(event) => { this.onEditButtonClick(event, rowIndex, cellIndex, false, true) }}><img src="/assets/ok.svg" alt="Save" role="img" /></button>,
            <button class="grid-button" key={`${currentCellKey}-cancel`} type="button" onClick={(event) => { this.onEditButtonClick(event, rowIndex, cellIndex, false) }}><img src="/assets/cancel.svg" alt="Cancel" role="img" /></button>
          ]
          : <button class="grid-button" key={`${currentCellKey}-edit`} type="button" ref={isActiveCell ? (el) => { this.focusRef = el; } : null} onClick={(event) => { this.onEditButtonClick(event, rowIndex, cellIndex, true) }}>
            <img src="/assets/edit.svg" alt="Edit" role="img" />
          </button>
        : null
      }
    </td>;
  }

  private renderCellContent(content: string, colIndex: number, rowIndex: number) {
    const { gridType, renderCustomCell = (content) => content } = this;
    const isActionsColumn = this.columns[colIndex] && this.columns[colIndex].actionsColumn;
    if (isActionsColumn) {
      const isActiveCell = this.activeCell.join('-') === `${colIndex}-${rowIndex}`;
      // spoof an action button
      return <button
        class="test-actions grid-button"
        id={`action-${rowIndex}-${colIndex}`}
        aria-labelledby={`action-${rowIndex}-${colIndex} cell-${rowIndex}-${this.titleColumn}`}
        tabIndex={gridType === 'grid' ? isActiveCell ? 0 : -1 : null}
        ref={isActiveCell && this.rowSelection !== RowSelectionPattern.Aria ? (el) => { this.focusRef = el; } : null}
        onClick={(() => alert(`This is just a test, you successfully activated the ${content} button`))}
      >
        {content}
      </button>;
    }
    else {
      return renderCustomCell(content, colIndex, rowIndex);
    }
  }

  // Row-selection checkbox cell; space/enter are stopped so the tbody keydown
  // handler does not also treat them as edit triggers.
  private renderCheckboxCell(rowIndex: number, selected: boolean) {
    const activeCellId = this.activeCell.join('-');
    return <td role="gridcell" class="checkbox-cell">
      <input
        type="checkbox"
        checked={selected}
        aria-labelledby={`cell-${rowIndex}-${this.titleColumn + 1}`}
        tabIndex={activeCellId === `0-${rowIndex}` ? 0 : -1}
        ref={activeCellId === `0-${rowIndex}` ? (el) => { this.focusRef = el; } : null}
        onChange={(event) => this.onRowSelect(this.sortedCells[rowIndex], (event.target as HTMLInputElement).checked)}
        onKeyDown={(event) => { (event.key === ' ' || event.key === 'Enter') && event.stopPropagation(); }} />
      <span class="selection-indicator"></span>
    </td>;
  }

  // Emit the edit event unless the pending save was cancelled (escape/tab).
  private saveCell(column: number, row: number, value: string) {
    if (this.preventSave) {
      this.preventSave = false;
      return;
    }

    this.editCellEvent.emit({ column, row, value });
  }

  // Returns true (and schedules focus) only when the active cell actually moved.
  private updateActiveCell(colIndex, rowIndex): boolean {
    if (colIndex !== this.activeCell[0] || rowIndex !== this.activeCell[1]) {
      this.callFocus = true;
      this.activeCell = [colIndex, rowIndex];
      return true;
    }

    return false;
  }

  private updateEditing(editing: boolean, callFocus: boolean) {
    if (!this.editable && !this.simpleEditable) { return };

    this.isEditing = editing;
    this.callFocus = callFocus;
    this.callInputSelection = editing && callFocus;
  }
}
the_stack
declare namespace Mock { // Interface for global namespace 'betterMock' interface BetterMock { mock: Mock; setup: Setup; Random: Random; valid: Valid; toJSONSchema: ToJSONSchema; version: number; } interface MockCbOptions { url: string; type: string; body: string | null; } // Mock.mock() interface Mock { (rurl: string | RegExp, rtype: string, template: ((options: MockCbOptions) => any) | any): BetterMock; (rurl: string | RegExp, template: ((options: MockCbOptions) => any) | any): BetterMock; (template: any): any; } interface SetupSettings { timeout?: number | string; } // Mock.setup() type Setup = (settings: SetupSettings) => void; type StringPool = 'lower' | 'upper' | 'number' | 'symbol'; /** * Mock.Random - Basic */ interface RandomBasic { /** * 返回一个随机的布尔值 * @param min 指示参数 current 出现的概率 * @param max 指示参数 current 的相反值 !current 出现的概率 * @param current 可选值为布尔值 true 或 false */ boolean(min?: number, max?: number, current?: boolean): boolean; /** * 返回一个随机的自然数 * @param min 指示随机自然数的最小值。默认值为 0 * @param max 指示随机自然数的最大值。默认值为 9007199254740992 */ natural(min?: number, max?: number): number; /** * 返回一个随机的整数 * @param min 指示随机整数的最小值。默认值为 -9007199254740992 * @param max 指示随机整数的最大值。默认值为 9007199254740992 */ integer(min?: number, max?: number): number; /** * 返回一个随机的浮点数 * @param min 整数部分的最小值。默认值为 -9007199254740992 * @param max 整数部分的最大值。默认值为 9007199254740992 * @param dmin 小数部分位数的最小值。默认值为 0 * @param dmax 小数部分位数的最大值。默认值为 17 */ float(min?: number, max?: number, dmin?: number, dmax?: number): number; /** * 返回一个随机字符 * @param pool - 字符池,如果传入了 'lower'、'upper'、'number'、'symbol',将从内置的字符池从选取 */ character(pool?: StringPool | string): string; /** * 返回一个随机字符串 * @param pool 字符池,如果传入了 'lower'、'upper'、'number'、'symbol',将从内置的字符池从选取 * @param min 随机字符串的最小长度。默认值为 3 * @param max 随机字符串的最大长度。默认值为 7 */ string(pool: StringPool | string, min: number, max: number): string; /** * 返回一个随机字符串 * @param min 随机字符串的最小长度。默认值为 3 * @param max 随机字符串的最大长度。默认值为 7 */ string(min: number, max: number): string; /** * 返回一个随机字符串 * 
@param pool 字符池,如果传入了 'lower'、'upper'、'number'、'symbol',将从内置的字符池从选取 * @param length 字符串长度 */ string(pool: StringPool | string, length: number): string; /** * 返回一个随机字符串 * @param pool 字符池,如果传入了 'lower'、'upper'、'number'、'symbol',将从内置的字符池从选取 */ string(pool: StringPool | string): string; /** * 返回一个随机字符串 * @param length 字符串长度 */ string(length: number): string; /** * 返回一个随机字符串 */ string(): string; /** * 返回一个整型数组 * @param start 数组中整数的起始值 * @param stop 数组中整数的结束值(不包含在返回值中) * @param step 数组中整数之间的步长。默认值为 1 */ range(start: number, stop: number, step: number): number; /** * 返回一个整型数组 * @param start 数组中整数的起始值 * @param stop 数组中整数的结束值(不包含在返回值中) */ range(start: number, stop: number): number; /** * 返回一个整型数组 * @param stop 数组中整数的结束值(不包含在返回值中) */ range(stop: number): number; } // Mock.Random - Date type RandomDateUtilString = 'year' | 'month' | 'week' | 'day' | 'hour' | 'minute' | 'second' | 'week'; interface RandomDate { /** * 返回一个随机的日期字符串 * @param format 生成的日期字符串的格式,默认值为 yyyy-MM-dd */ date(format?: string): string; /** * 返回一个随机的时间字符串 * @param format 生成的时间字符串的格式,默认值为 HH:mm:ss */ time(format?: string): string; /** * 返回一个随机的日期和时间字符串 * @param format 生成的日期和时间字符串的格式,默认值为 yyyy-MM-dd HH:mm:ss */ datetime(format?: string): string; /** * 返回当前的日期和时间字符串 * @param util 时间单位,可选值有:year、month、week、day、hour、minute、second、week * @param format 生成的日期和时间字符串的格式,默认值为 yyyy-MM-dd HH:mm:ss */ now(util?: RandomDateUtilString | string, format?: string): string; /** * 随机生成一个时间戳 */ timestamp(): number; } // Mock.Random - Image interface RandomImage { /** * 随机生成一个随机的图片地址 * @param size 图片的宽高,格式为 '宽x高' * @param background 图片的背景色。默认值为 '#000000' * @param foreground 图片的前景色(文字)。默认值为 '#FFFFFF' * @param format 片的格式。默认值为 'png',可选值包括:'png'、'gif'、'jpg' * @param text 指示图片上的文字。默认值为参数 size */ image(size: string, background: string, foreground: string, format: 'png' | 'gif' | 'jpg', text: string): string; /** * 随机生成一个随机的图片地址 * @param size 图片的宽高,格式为 '宽x高' * @param background 图片的背景色。默认值为 '#000000' * @param foreground 图片的前景色(文字)。默认值为 
'#FFFFFF' * @param text 指示图片上的文字。默认值为参数 size */ image(size: string, background: string, foreground: string, text: string): string; /** * 随机生成一个随机的图片地址 * @param size 图片的宽高,格式为 '宽x高' * @param background 图片的背景色。默认值为 '#000000' * @param text 指示图片上的文字。默认值为参数 size */ image(size: string, background: string, text: string): string; /** * 随机生成一个随机的图片地址 * @param size 图片的宽高,格式为 '宽x高' * @param text 指示图片上的文字。默认值为参数 size */ image(size: string, text: string): string; /** * 随机生成一个随机的图片地址 */ image(): string; /** * 随机生成一段随机的 Base64 图片编码 * @param size 图片的宽高 * @param text 图片上的文字 */ dataImage(size?: string, text?: string): string; } // Mock.Random - Color interface RandomColor { /** * 随机生成一个有吸引力的颜色,格式为 '#RRGGBB' */ color(): string; /** * 随机生成一个有吸引力的颜色,格式为 '#RRGGBB' */ hex(): string; /** * 随机生成一个有吸引力的颜色,格式为 'rgb(r, g, b)' */ rgb(): string; /** * 随机生成一个有吸引力的颜色,格式为 'rgba(r, g, b, a)' */ rgba(): string; /** * 随机生成一个有吸引力的颜色,格式为 'hsl(h, s, l)' */ hsl(): string; } // Mock.Random - Text interface RandomText { /** * 随机生成一段文本 * @param min 指示文本中句子的最小个数。默认值为 3 * @param max 指示文本中句子的最大个数。默认值为 7 */ paragraph(min?: number, max?: number): string; /** * 随机生成一段中文文本 * @param min 指示文本中句子的最小个数。默认值为 3 * @param max 指示文本中句子的最大个数。默认值为 7 */ cparagraph(min?: number, max?: number): string; /** * 随机生成一个句子,第一个单词的首字母大写 * @param min 指示句子中单词的最小个数。默认值为 12 * @param max 指示句子中单词的最大个数。默认值为 18 */ sentence(min?: number, max?: number): string; /** * 随机生成一段中文句子 * @param min 句子中汉字的最小个数。默认值为 12 * @param max 句子中汉字的最大个数。默认值为 18 */ csentence(min?: number, max?: number): string; /** * 随机生成一个单词 * @param min 单词中字符的最小个数。默认值为 3 * @param max 单词中字符的最大个数。默认值为 10 */ word(min?: number, max?: number): string; /** * 随机生成一个汉字 * @param pool 汉字字符串。表示汉字字符池 * @param min 随机汉字字符串的最小长度。默认值为 1 * @param max 随机汉字字符串的最大长度。默认值为 1 */ cword(pool?: string | number, min?: number, max?: number): string; /** * 随机生成一句标题,其中每个单词的首字母大写 * @param min 单词中字符的最小个数。默认值为 3 * @param max 单词中字符的最大个数。默认值为 7 */ title(min?: number, max?: number): string; /** * 随机生成一句中文标题 * @param 
min 单词中字符的最小个数。默认值为 3 * @param max 单词中字符的最大个数。默认值为 7 */ ctitle(min?: number, max?: number): string; } // Mock.Random - Name interface RandomName { /** * 随机生成一个常见的英文名 */ first(): string; /** * 随机生成一个常见的英文姓 */ last(): string; /** * 随机生成一个常见的英文姓名 * @param middle 是否生成中间名 */ name(middle?: boolean): string; /** * 随机生成一个常见的中文名 */ cfirst(): string; /** * 随机生成一个常见的中文姓 */ clast(): string; /** * 随机生成一个常见的中文姓名 */ cname(): string; } // Mock.Random - Web type RandomWebProtocol = 'http' | 'ftp' | 'gopher' | 'mailto' | 'mid' | 'cid' | 'news' | 'nntp' | 'prospero' | 'telnet' | 'rlogin' | 'tn3270' | 'wais'; interface RandomWeb { /** * 随机生成一个 URL * @param protocol URL 协议。例如 http * @param host URL 域名和端口号。例如 baidu.com */ url(protocol?: string, host?: string): string; /** * 随机生成一个 URL 协议 */ protocol(): RandomWebProtocol; /** * 随机生成一个域名 */ domain(): string; /** * 随机生成一个顶级域名 */ dtl(): string; /** * 随机生成一个邮件地址 * @param domain 邮件地址的域名。例如 nuysoft.com */ email(domain?: string): string; /** * 随机生成一个 IP 地址 */ ip(): string; } // Mock.Random - Address interface RandomAddress { /** * 随机生成一个(中国)大区 */ region(): string; /** * 随机生成一个(中国)省(或直辖市、自治区、特别行政区) */ province(): string; /** * 随机生成一个(中国)市 * @param prefix 是否生成所属的省 */ city(prefix?: boolean): string; /** * 随机生成一个(中国)县 * @param prefix 否生成所属的省、市 */ country(prefix?: boolean): string; /** * 随机生成一个邮政编码(六位数字) */ zip(): string; } // Mock.Random - Helper interface RandomHelper { /** * 把字符串的第一个字母转换为大写 * @param word 字符串 */ capitalize(word: string): string; /** * 把字符串转换为大写 * @param str 字符串 */ upper(str: string): string; /** * 把字符串转换为小写 * @param str 字符串 */ lower(str: string): string; /** * 从数组中随机选取一个元素 * @param arr 数组 */ pick<T = any>(arr: T[]): T; /** * 打乱数组中元素的顺序,并返回 * @param arr 数组 * @param min 返回的数组的最小长度 * @param max 返回的数组的最大长度 */ shuffle<T = any>(arr: T[], min?: number, max?: number): T[]; } // Mock.Random - Miscellaneous interface RandomMiscellaneous { /** * 随机生成一个 GUID */ guid(): string; /** * 随机生成一个 18 位身份证 */ id(): string; /** * 生成一个全局的自增整数 * @param 
step 整数自增的步长。默认值为 1 */ increment(step?: number): number; /** * 随机生成一个版本号,每一位的最大值不超过10 * @param step 版本号的层级,默认为 3 */ version(depth?: number): string; /** * 生成一个中国的手机号 */ phone(): string; } type RandomExtendSource = { [prop: string]: Function } // Mock.Random interface Random extends RandomBasic, RandomDate, RandomImage, RandomColor, RandomAddress, RandomHelper, RandomMiscellaneous, RandomName, RandomText, RandomWeb { extend(source: RandomExtendSource): Random } interface ValidRsItem { action: string; actual: string; expected: string; message: string; path: string[]; type: string; } // Mock.valid() type Valid = (template: any, data: any) => ValidRsItem[]; interface ToJSONSchemaRs { name: string | undefined; template: any; type: string; rule: object; path: string[]; properties?: ToJSONSchemaRs[]; items?: ToJSONSchemaRs[]; } // Mock.toJSONSchema() type ToJSONSchema = (template: any) => ToJSONSchemaRs; let mock: Mock; let setup: Setup; let Random: Random; let valid: Valid; let toJSONSchema: ToJSONSchema; let version: number; } export = Mock;
the_stack
namespace gdjs {
  export namespace evtTools {
    export namespace firebaseTools {
      /**
       * Firebase Cloud Firestore Event Tools.
       * @namespace
       */
      export namespace firestore {
        // Registry of named queries built up incrementally by the query-building
        // functions below (startQuery, queryWhere, queryOrderBy, ...).
        const queries = new Map<string, firebase.firestore.Query>();

        /**
         * Converts a firebase document snapshot to a plain dictionary,
         * so that it may be serialized or converted to a {@link gdjs.Variable}.
         *
         * @param doc - The document snapshot.
         * @returns - The converted object.
         */
        const documentSnapshotToSerializable = (
          doc: firebase.firestore.DocumentSnapshot
        ) => ({
          data: doc.data(),
          exists: doc.exists,
          id: doc.id,
        });

        /**
         * Converts a firebase query snapshot to a plain dictionary,
         * so that it may be serialized or converted to a {@link gdjs.Variable}.
         *
         * @param query - The query snapshot.
         * @returns - The converted object.
         */
        const querySnapshotToSerializable = (
          query: firebase.firestore.QuerySnapshot
        ) => ({
          size: query.size,
          empty: query.empty,
          docs: query.docs.map(documentSnapshotToSerializable),
        });

        /**
         * Initiate a query over a collection.
         * @param queryID - The name of the new query.
         * @param collectionName - The name of the collection to query.
         */
        export const startQuery = (queryID: string, collectionName: string) => {
          queries.set(queryID, firebase.firestore().collection(collectionName));
        };

        /**
         * Create a new query from a base query.
         * Silently does nothing if the source query does not exist.
         * @param queryID - The name of the new query.
         * @param sourceQueryID - The name of the source query.
         */
        export const startQueryFrom = (
          queryID: string,
          sourceQueryID: string
        ) => {
          if (queries.has(sourceQueryID))
            queries.set(queryID, queries.get(sourceQueryID)!);
        };

        /**
         * Filters out documents whose fields do not match a condition
         * from a query.
         * @param queryID - The query to add the filter to.
         * @param field - The field to run the condition on.
         * @param op - The condition operator.
         * @param value - The value to check against.
         */
        export const queryWhere = (
          queryID: string,
          field: string,
          op: Exclude<
            firebase.firestore.WhereFilterOp,
            // Exclude unsupported "batch" operations (as they require an array as value to check)
            'in' | 'array-contains-any' | 'not-in'
          >,
          value: string | number
        ) => {
          if (queries.has(queryID))
            queries.set(queryID, queries.get(queryID)!.where(field, op, value));
        };

        /**
         * Orders the documents in a query.
         *
         * @param queryID - The query to add the filter to.
         * @param field - The field to order by.
         * @param direction - The direction of ordering (ascending or descending).
         */
        export const queryOrderBy = (
          queryID: string,
          field: string,
          direction: firebase.firestore.OrderByDirection
        ) => {
          if (queries.has(queryID))
            queries.set(
              queryID,
              queries.get(queryID)!.orderBy(field, direction)
            );
        };

        /**
         * Limits the amount of documents returned by the query.
         *
         * @param queryID - The query to add the filter to.
         * @param amount - The amount of documents to limit to
         * @param last - If true, limits to the last documents instead of the first documents.
         */
        export const queryLimit = (
          queryID: string,
          amount: integer,
          last: boolean
        ) => {
          if (queries.has(queryID))
            queries.set(
              queryID,
              // Picks the limitToLast or limit method by computed name.
              queries.get(queryID)![last ? 'limitToLast' : 'limit'](amount)
            );
        };

        /**
         * Makes a query skip documents after or before a certain
         * value of a field the query was ordered with.
         *
         * @param queryID - The query to add the filter to.
         * @param value - The value of the field ordered by.
         * @param before - If set to true, all documents before the document are skipped, else all documents after it are skipped.
         * @param includeSelf - If set to true, doesn't skip the document.
         */
        export const querySkipSome = (
          queryID: string,
          value: number,
          before: boolean,
          includeSelf: boolean
        ) => {
          if (queries.has(queryID))
            queries.set(
              queryID,
              queries
                .get(queryID)!
                // Maps (before, includeSelf) onto the four Firestore cursor methods.
                [
                  before
                    ? includeSelf
                      ? 'endAt'
                      : 'endBefore'
                    : includeSelf
                    ? 'startAt'
                    : 'startAfter'
                ](value)
            );
        };

        /**
         * Execute a query and store results in a callback variable.
         *
         * @param queryID - The query to execute.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStatusVariable] - The variable where to store if the operation was successful.
         */
        export const executeQuery = (
          queryID: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStatusVariable?: gdjs.Variable
        ) => {
          if (!queries.has(queryID)) return;
          queries
            .get(queryID)!
            .get()
            .then((snapshot) => {
              if (typeof callbackStatusVariable !== 'undefined')
                callbackStatusVariable.setString('ok');
              if (typeof callbackValueVariable !== 'undefined')
                callbackValueVariable.fromJSObject(
                  querySnapshotToSerializable(snapshot)
                );
            })
            .catch((error) => {
              if (typeof callbackStatusVariable !== 'undefined')
                callbackStatusVariable.setString(error.message);
            });
        };

        /**
         * Watch a query and store results in a callback
         * variable whenever a documents starts/stops
         * matching the query or a document matching
         * the query is modified.
         *
         * @param queryID - The query to execute.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStatusVariable] - The variable where to store if the operation was successful.
         */
        export const watchQuery = (
          queryID: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStatusVariable?: gdjs.Variable
        ) => {
          if (!queries.has(queryID)) return;
          queries.get(queryID)!.onSnapshot(
            (snapshot) => {
              if (typeof callbackStatusVariable !== 'undefined')
                callbackStatusVariable.setString('ok');
              if (typeof callbackValueVariable !== 'undefined')
                callbackValueVariable.fromJSObject(
                  querySnapshotToSerializable(snapshot)
                );
            },
            (error) => {
              if (typeof callbackStatusVariable !== 'undefined')
                callbackStatusVariable.setString(error.message);
            }
          );
        };

        /**
         * Adds a variable in a collection as document with a unique name.
         * @param collectionName - The collection where to store the variable.
         * @param variable - The variable to write.
         * @param [callbackStateVariable] - The variable where to store the result.
         */
        export const addDocument = (
          collectionName: string,
          variable: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .add(replaceTimestampsInObject(variable.toJSObject()))
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Writes a variable in a collection as document.
         * @param collectionName - The collection where to store the variable.
         * @param variableName - The name under which the variable will be saved (document name).
         * @param variable - The variable to write.
         * @param [callbackStateVariable] - The variable where to store the result.
         */
        export const writeDocument = (
          collectionName: string,
          variableName: string,
          variable: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(variableName)
            .set(replaceTimestampsInObject(variable.toJSObject()))
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Writes a field of a document.
         * @param collectionName - The collection where to store the document.
         * @param documentName - The name of the document where to write a field.
         * @param field - The field where to write.
         * @param value - The value to write.
         * @param [callbackStateVariable] - The variable where to store the result.
         * @param [merge] - Should the new field replace the document or be merged with the document?
         */
        export const writeField = (
          collectionName: string,
          documentName: string,
          field: string,
          value: any,
          callbackStateVariable?: gdjs.Variable,
          merge: boolean = true
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .set({ [field]: replaceTimestampInString(value) }, { merge: merge })
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Updates a variable/document.
         * @param collectionName - The collection where the document is stored.
         * @param variableName - The name under which the variable will be saved (document name).
         * @param variable - The variable to update.
         * @param [callbackStateVariable] - The variable where to store the result.
         */
        export const updateDocument = (
          collectionName: string,
          variableName: string,
          variable: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(variableName)
            .update(replaceTimestampsInObject(variable.toJSObject()))
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Updates a field of a document.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document where to update a field.
         * @param field - The field where to update.
         * @param value - The value to write.
         * @param [callbackStateVariable] - The variable where to store the result.
         */
        export const updateField = (
          collectionName: string,
          documentName: string,
          field: string,
          value: any,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .update({ [field]: replaceTimestampInString(value) })
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Deletes a document.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document to delete.
         * @param [callbackStateVariable] - The variable where to store the result.
         */
        export const deleteDocument = (
          collectionName: string,
          documentName: string,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .delete()
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Deletes a field of a document.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document where to delete a field.
         * @param field - The field to delete.
         * @param [callbackStateVariable] - The variable where to store the result.
         */
        export const deleteField = (
          collectionName: string,
          documentName: string,
          field: string,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .update({ [field]: firebase.firestore.FieldValue.delete() })
            .then(() => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Gets a document and store it in a variable.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document to get.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStateVariable] - The variable where to store if the operation was successful.
         */
        export const getDocument = (
          collectionName: string,
          documentName: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .get()
            .then((doc) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
              // NOTE(review): doc.data() is undefined when the document does not
              // exist — confirm fromJSObject tolerates undefined input.
              if (callbackValueVariable)
                callbackValueVariable.fromJSObject(doc.data());
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Gets a field of a document and store it in a variable.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document.
         * @param field - The field to get.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStateVariable] - The variable where to store if the operation was successful.
         */
        export const getField = (
          collectionName: string,
          documentName: string,
          field: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .get()
            .then((doc) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
              if (callbackValueVariable)
                callbackValueVariable.fromJSObject(doc.get(field));
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Checks for existence of a document.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document to check.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStateVariable] - The variable where to store if the operation was successful.
         */
        export const hasDocument = (
          collectionName: string,
          documentName: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .get()
            .then((doc) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
              if (callbackValueVariable)
                callbackValueVariable.setBoolean(doc.exists);
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Checks for existence of a field.
         * @param collectionName - The collection where the document is stored.
         * @param documentName - The name of the document.
         * @param field - The field to check.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStateVariable] - The variable where to store if the operation was successful.
         */
        export const hasField = (
          collectionName: string,
          documentName: string,
          field: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .doc(documentName)
            .get()
            .then((doc) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString('ok');
              if (callbackValueVariable) {
                // Use an estimated server timestamp so a pending write never
                // reads back as a missing (null) field.
                const value = doc.get(field, { serverTimestamps: 'estimate' });
                callbackValueVariable.setBoolean(
                  doc.exists && value !== undefined && value !== null
                );
              }
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Lists all the documents in a collection.
         * @param collectionName - The collection where to count documents.
         * @param [callbackValueVariable] - The variable where to store the result.
         * @param [callbackStateVariable] - The variable where to store if the operation was successful.
         *
         * @deprecated Use a query without filters instead.
         */
        export const listDocuments = (
          collectionName: string,
          callbackValueVariable?: gdjs.Variable,
          callbackStateVariable?: gdjs.Variable
        ) => {
          firebase
            .firestore()
            .collection(collectionName)
            .get()
            .then((snapshot) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(
                  snapshot.empty ? 'empty' : 'ok'
                );
              if (callbackValueVariable)
                callbackValueVariable.fromJSObject(
                  snapshot.docs.map((doc) => doc.id)
                );
            })
            .catch((error) => {
              if (typeof callbackStateVariable !== 'undefined')
                callbackStateVariable.setString(error.message);
            });
        };

        /**
         * Returns a special string replaced by a firebase serverTimestamp field value.
         */
        export const getServerTimestamp = () =>
          '[{__FIREBASE_SERVERSIDE_TIMESTAMP}]';

        // Replaces the sentinel string (see getServerTimestamp) with a Firestore
        // server-timestamp field value; any other value is returned unchanged.
        const replaceTimestampInString = (str: any) => {
          if (str === '[{__FIREBASE_SERVERSIDE_TIMESTAMP}]')
            return firebase.firestore.FieldValue.serverTimestamp();
          else return str;
        };

        // Recursively replaces every sentinel timestamp string in an object,
        // in place, and returns the same object.
        const replaceTimestampsInObject = (object: object): object => {
          for (const i in object) {
            const item = object[i];
            if (typeof item === 'object') replaceTimestampsInObject(item);
            else if (item === '[{__FIREBASE_SERVERSIDE_TIMESTAMP}]')
              object[i] = firebase.firestore.FieldValue.serverTimestamp();
          }
          return object;
        };
      }
    }
  }
}
the_stack
import Augur from "augur.js"; import { BigNumber } from "bignumber.js"; import * as t from "io-ts"; import * as Knex from "knex"; import * as _ from "lodash"; import { FrozenFunds } from "../../blockchain/log-processors/profit-loss/frozen-funds"; import { BN_WEI_PER_ETHER, ZERO } from "../../constants"; import { Address, MarketsRow, OutcomeParam, ReportingState, SortLimitParams } from "../../types"; import { fixedPointToDecimal, numTicksToTickSize } from "../../utils/convert-fixed-point-to-decimal"; import { Percent, safePercent, Tokens } from "../../utils/dimension-quantity"; import { getRealizedProfitPercent, getTotalProfitPercent, getUnrealizedProfitPercent } from "../../utils/financial-math"; import { getAllOutcomesProfitLoss, ProfitLossResult } from "./get-profit-loss"; export const UserTradingPositionsParamsSpecific = t.type({ universe: t.union([t.string, t.null, t.undefined]), marketId: t.union([t.string, t.null, t.undefined]), account: t.union([t.string, t.null, t.undefined]), outcome: t.union([OutcomeParam, t.number, t.null, t.undefined]), }); export const UserTradingPositionsParams = t.intersection([ UserTradingPositionsParamsSpecific, SortLimitParams, t.partial({ endTime: t.number, }), ]); // TradingPosition represents a user's current or historical // trading activity in one market outcome. See NetPosition. export interface TradingPosition extends ProfitLossResult, FrozenFunds { position: string; } // AggregatedTradingPosition is an aggregation of TradingPosition for some // scope, eg. an aggregation of all TradingPosition in a user's portfolio. export interface AggregatedTradingPosition extends Pick<ProfitLossResult, "realized" | "unrealized" | "total" | "unrealizedCost" | "realizedCost" | "totalCost" | "realizedPercent" | "unrealizedPercent" | "totalPercent" | "unrealizedRevenue" | "unrealizedRevenue24hAgo" | "unrealizedRevenue24hChangePercent" >, FrozenFunds { } // MarketTradingPosition is a market-level aggregation of // TradingPositions, ie. 
an aggregation of all outcomes in one market. export interface MarketTradingPosition extends AggregatedTradingPosition, Pick<ProfitLossResult, "timestamp" | "marketId"> { } // GetUserTradingPositionsResponse is the response type for getUserTradingPositions() export interface GetUserTradingPositionsResponse { tradingPositions: Array<TradingPosition>; // per-outcome TradingPosition, where unrealized profit is relative to an outcome's last price (as traded by anyone) tradingPositionsPerMarket: { // per-market aggregation of trading positions [marketId: string]: MarketTradingPosition, }; tradingPositionsTotal: AggregatedTradingPosition | undefined; // portfolio-level aggregation of all user's trading positions. Undefined if and only if getUserTradingPositions() was filtered by marketId frozenFundsTotal: FrozenFunds | undefined; // user's total frozen funds. Undefined if and only if getUserTradingPositions() was filtered by marketId. WARNING - frozenFundsTotal is greater than tradingPositionsTotal.frozenFunds (in general) because frozenFundsTotal also includes sum of market validity bonds for active markets this user created } interface RawPosition { marketId: string; outcome: number; balance: BigNumber; maxPrice: BigNumber; minPrice: BigNumber; numTicks: BigNumber; seen: boolean; } async function queryUniverse(db: Knex, marketId: Address): Promise<Address> { const market = await db .first("universe") .from("markets") .where({ marketId }); if (!market || market.universe == null) throw new Error("If universe isn't provided, you must provide a valid marketId"); return market.universe; } export async function getUserTradingPositions(db: Knex, augur: Augur, params: t.TypeOf<typeof UserTradingPositionsParams>): Promise<GetUserTradingPositionsResponse> { if (params.universe == null && params.marketId == null) throw new Error("Must provide reference to universe, specify universe or marketId"); if (params.account == null) throw new Error("Missing required parameter: account"); 
const endTime = params.endTime || Date.now() / 1000; const universeId = params.universe || (await queryUniverse(db, params.marketId!)); const { profit: profitsPerMarket } = await getAllOutcomesProfitLoss(db, { universe: universeId, account: params.account, marketId: params.marketId || null, startTime: 0, endTime, periodInterval: endTime, }); const rawPositionsQuery = db .select(["tokens.marketId", "tokens.outcome", "balances.balance", "markets.maxPrice", "markets.minPrice", "markets.numticks"]) .from("balances") .innerJoin("tokens", "tokens.contractAddress", "balances.token") .innerJoin("markets", "tokens.marketId", "markets.marketId") .whereNotNull("tokens.marketId") .whereNotNull("tokens.outcome") .andWhere("balances.owner", params.account) .andWhere("markets.universe", universeId); if (params.marketId) rawPositionsQuery.andWhere("markets.marketId", params.marketId); const getSumOfMarketValidityBondsPromise = getEthEscrowedInValidityBonds(db, params.account); // do this here so that awaits are concurrent const rawPositions: Array<RawPosition> = await rawPositionsQuery; const rawPositionsMapping: { [key: string]: RawPosition } = _.reduce(rawPositions, (result, rawPosition) => { const key = rawPosition.marketId.concat(rawPosition.outcome.toString()); const tickSize = numTicksToTickSize(rawPosition.numTicks, rawPosition.minPrice, rawPosition.maxPrice); rawPosition.balance = augur.utils.convertOnChainAmountToDisplayAmount(new BigNumber(rawPosition.balance, 10), tickSize); result[key] = rawPosition; return result; }, {} as { [key: string]: RawPosition }); const marketToLargestShort: { [key: string]: BigNumber } = {}; let positions: Array<TradingPosition> = _.flatten(_.map(profitsPerMarket, (outcomePls: Array<Array<ProfitLossResult>>) => { const lastTimestampPls = _.last(outcomePls)!; return _.map(lastTimestampPls, (plr) => { const key = plr.marketId.concat(plr.outcome.toString()); const rawPosition = rawPositionsMapping[key]; let position = "0"; 
marketToLargestShort[plr.marketId] = BigNumber.min(marketToLargestShort[plr.marketId] || ZERO, plr.netPosition); if (rawPosition) { rawPositionsMapping[key].seen = true; position = rawPosition.balance.toString(); } return Object.assign( { position }, plr, ); }); })); // Show outcomes with just a raw position if they have some quantity not accounted for via trades (e.g manual transfers) const rawPositionOnlyToShow = _.filter(rawPositionsMapping, (rawPosition) => { const largestShort = marketToLargestShort[rawPosition.marketId]; return !rawPosition.seen && largestShort && largestShort.abs().lt(rawPosition.balance); }); const noPLPositions: Array<TradingPosition> = _.map(rawPositionOnlyToShow, (rawPosition) => { return { position: rawPosition.balance.toString(), marketId: rawPosition.marketId, outcome: rawPosition.outcome, netPosition: ZERO, averagePrice: ZERO, realized: ZERO, unrealized: ZERO, total: ZERO, timestamp: 0, unrealizedCost: ZERO, realizedCost: ZERO, totalCost: ZERO, realizedPercent: ZERO, unrealizedPercent: ZERO, totalPercent: ZERO, unrealizedRevenue: ZERO, frozenFunds: ZERO, lastTradePrice: ZERO, lastTradePrice24hAgo: ZERO, lastTradePrice24hChangePercent: ZERO, unrealizedRevenue24hAgo: ZERO, unrealizedRevenue24hChangePercent: ZERO, }; }); positions = positions.concat(noPLPositions); if (params.outcome !== null && typeof params.outcome !== "undefined") { positions = _.filter(positions, { outcome: params.outcome }); } // frozenFundsTotal is undefined iff request included a marketId, // because the data to compute totals is unavailable when filtering // by a marketId. By our business definition, a user's total frozen // funds includes their market validity bonds. (Validity bonds are // paid in ETH and are escrowed until the markets resolve valid.) const frozenFundsTotal: FrozenFunds | undefined = params.marketId ? 
undefined : { frozenFunds: positions.reduce<BigNumber>((sum: BigNumber, p: TradingPosition) => sum.plus(p.frozenFunds), ZERO).plus(await getSumOfMarketValidityBondsPromise), }; // tradingPositionsTotal is undefined iff request included a marketId, because // the data to compute totals is unavailable when filtering by a marketId. const tradingPositionsTotal: AggregatedTradingPosition | undefined = params.marketId ? undefined : getAggregatedTradingPosition(positions); return { tradingPositions: positions, tradingPositionsPerMarket: aggregateMarketTradingPositions(positions), tradingPositionsTotal, frozenFundsTotal, }; } // getEthEscrowedInValidityBonds returns the sum of all market validity bonds for // non-finalized markets created by the passed marketCreator. Ie. how much ETH // this creator has escrowed in validity bonds. Denominated in Eth (whole tokens). async function getEthEscrowedInValidityBonds(db: Knex, marketCreator: Address): Promise<BigNumber> { const marketsRow: Array<Pick<MarketsRow<BigNumber>, "validityBondSize">> = await db.select("validityBondSize", "reportingState").from("markets") .leftJoin("market_state", "markets.marketStateId", "market_state.marketStateId") .whereNot({ reportingState: ReportingState.FINALIZED }) .where({ marketCreator }); let totalValidityBonds = ZERO; for (const market of marketsRow) { // market.validityBondSize is in attoETH and totalValidityBonds is in ETH totalValidityBonds = totalValidityBonds.plus( fixedPointToDecimal(market.validityBondSize, BN_WEI_PER_ETHER)); } return totalValidityBonds; } function aggregateMarketTradingPositions(tps: Array<TradingPosition>): { [marketId: string]: MarketTradingPosition } { const tpsByMarketId = _.groupBy(tps, (tp) => tp.marketId); return _.mapValues(tpsByMarketId, (tpsForOneMarketId: Array<TradingPosition>) => { return { timestamp: tpsForOneMarketId[0].timestamp, marketId: tpsForOneMarketId[0].marketId, ...getAggregatedTradingPosition(tpsForOneMarketId), }; }); } function 
getAggregatedTradingPosition(tps: Array<TradingPosition>): AggregatedTradingPosition { const partialAggregatedTradingPosition: Pick<AggregatedTradingPosition, Exclude<keyof AggregatedTradingPosition, "realizedPercent" | "unrealizedPercent" | "totalPercent" | "unrealizedRevenue24hChangePercent">> = { realized: sum(tps, (tp) => tp.realized), unrealized: sum(tps, (tp) => tp.unrealized), total: sum(tps, (tp) => tp.total), unrealizedCost: sum(tps, (tp) => tp.unrealizedCost), realizedCost: sum(tps, (tp) => tp.realizedCost), totalCost: sum(tps, (tp) => tp.totalCost), unrealizedRevenue: sum(tps, (tp) => tp.unrealizedRevenue), frozenFunds: sum(tps, (tp) => tp.frozenFunds), unrealizedRevenue24hAgo: sum(tps, (tp) => tp.unrealizedRevenue24hAgo), }; const { realizedProfitPercent } = getRealizedProfitPercent({ realizedCost: new Tokens(partialAggregatedTradingPosition.realizedCost), realizedProfit: new Tokens(partialAggregatedTradingPosition.realized), }); const { unrealizedProfitPercent } = getUnrealizedProfitPercent({ unrealizedCost: new Tokens(partialAggregatedTradingPosition.unrealizedCost), unrealizedProfit: new Tokens(partialAggregatedTradingPosition.unrealized), }); const { totalProfitPercent } = getTotalProfitPercent({ totalCost: new Tokens(partialAggregatedTradingPosition.totalCost), totalProfit: new Tokens(partialAggregatedTradingPosition.total), }); const unrealizedRevenue24hChangePercent: Percent = safePercent({ numerator: new Tokens(partialAggregatedTradingPosition.unrealizedRevenue), denominator: new Tokens(partialAggregatedTradingPosition.unrealizedRevenue24hAgo), subtractOne: true, }); return { realizedPercent: realizedProfitPercent.magnitude, unrealizedPercent: unrealizedProfitPercent.magnitude, totalPercent: totalProfitPercent.magnitude, unrealizedRevenue24hChangePercent: unrealizedRevenue24hChangePercent.magnitude, ...partialAggregatedTradingPosition, }; function sum(tps: Array<TradingPosition>, field: (tp: TradingPosition) => BigNumber): BigNumber { let s = 
ZERO; for (const tp of tps) { s = s.plus(field(tp)); } return s; } }
the_stack
import { arrForEach, arrIndexOf, hasOwnProperty, isFunction, isObject, isString, isSymbol, objKeys } from "@microsoft/applicationinsights-core-js"; import { Util } from "@microsoft/applicationinsights-common"; import { strShimPrototype } from "@microsoft/applicationinsights-shims"; const strConstructor = "constructor"; const strGetOwnPropertyNames = "getOwnPropertyNames"; export const MAX_DEPTH = 16; export function makeRegex(value: string) { if (value && value.length > 0) { value = value.replace(/\\/g, "\\\\"); // eslint-disable-next-line security/detect-non-literal-regexp value = value.replace(/([\+\?\|\{\[\(\)\^\$\#\.]}])/g, "\\$1"); value = value.replace(/\*/g, ".*"); return new RegExp("(" + value + ")"); } return null; } export function toggleClassName(el: HTMLElement, className: string) { const idx = el.className.indexOf(className); if (idx === -1) { el.className += className; } else { el.className = el.className.substring(0, idx) + el.className.substring(idx + className.length); } } export function traverseAndReplace(target: Object, maxDepth: number, currentDepth: number, thingsReferenced: any[], excludedKeys: string[], includeFunctions: boolean): Object { const out = {}; if (!thingsReferenced) { thingsReferenced = []; } if (isObject(target)) { for (const key of getTargetKeys(target, excludedKeys, includeFunctions)) { let targetValue = target[key]; if (isSymbol(targetValue)) { targetValue = targetValue.toString(); } if (targetValue !== null && arrIndexOf(thingsReferenced, targetValue) !== -1) { out[key] = `<circular (${key} - "${getTargetName(targetValue)}")>`; } else if (targetValue !== null && isObject(targetValue)) { if (currentDepth >= maxDepth) { out[key] = "<max allowed depth reached>"; } else { thingsReferenced.push(target); out[key] = traverseAndReplace(targetValue, maxDepth, currentDepth + 1, thingsReferenced, excludedKeys, includeFunctions); thingsReferenced.pop(); } } else { out[key] = targetValue; } } } return out; } function _sanitizeText(value: 
string) { if (value) { value = value.replace(/&/g, "&amp;"); value = value.replace(/>/g, "&gt;"); value = value.replace(/</g, "&lt;"); } return value; } function _setInnerText(elm: HTMLElement, theText: string, textFilter: string): boolean { let innerText = theText; let matchPos = -1; let matchLen = 0; let rg = makeRegex(textFilter); if (rg) { let matchTxt = rg.exec(innerText); if (matchTxt && matchTxt[1]) { matchPos = theText.indexOf(matchTxt[1]); matchLen = matchTxt[1].length; } } if (matchPos !== -1) { let innerHtml = _sanitizeText(theText.substring(0, matchPos)) + "<span class=\"matched-text-filter\">" + _sanitizeText(theText.substring(matchPos, matchPos + matchLen)) + "</span>" + theText.substring(matchPos + matchLen); elm.innerHTML = innerHtml; return true; } elm.innerText = theText; return false; } let lastSelectedElement: HTMLElement; let selectedObject: object; export function copySelectedTree() { const toCopy: Object = selectedObject; if (!toCopy) { return; } const textArea = document.createElement("textarea"); textArea.innerText = JSON.stringify(toCopy); textArea.style.opacity = "0"; document.body.appendChild(textArea); textArea.select(); document.execCommand("copy"); textArea.parentElement.removeChild(textArea); } export function focusHandler(evt: Event, target: Object, level: number, excludeKeys: string[], includeFunctions: boolean) { if (lastSelectedElement) { toggleClassName(lastSelectedElement, " last-selected-element"); } lastSelectedElement = (evt.target as HTMLElement); for (let i = 0; i < 10; i++) { if (lastSelectedElement.tagName === "DIV") { break; } lastSelectedElement = lastSelectedElement.parentElement; } lastSelectedElement.className += " last-selected-element"; selectedObject = traverseAndReplace(target, MAX_DEPTH, level, null, excludeKeys, includeFunctions); } function _navHandler(evt: KeyboardEvent, openHandler?: (evt: Event, forceState?: boolean) => void, currentState?: boolean) { const el = evt.target as HTMLElement; switch 
(evt.which) { // Enter case 13: (openHandler) ? openHandler(evt) : void 0; break; // ArrowUp case 38: evt.preventDefault(); const prev = el.previousElementSibling as HTMLElement; if (prev && prev.tagName !== "BUTTON") { prev.focus(); } break; // ArrowDown case 40: evt.preventDefault(); const next = el.nextElementSibling as HTMLElement; if (next) { next.focus(); } break; // ArrowRight case 39: if (openHandler) { openHandler(evt, true); if (currentState) { (el.firstElementChild.nextSibling as HTMLElement).focus(); } } break; // ArrowLeft case 37: if (openHandler) { openHandler(evt, false); } if (!currentState) { (el.parentElement as HTMLElement).focus(); } break; // c case 67: if (evt.ctrlKey) { copySelectedTree(); (evt.target as HTMLElement).focus(); } break; } } export function getTargetName(target: any) { if (target) { if (isString(target.identifier)) { return target.identifier; } if (isString(target.name)) { return target.name; } if (hasOwnProperty(target, strShimPrototype)) { // Look like a prototype return target.name || ""; } return ((target[strConstructor]) || {}).name || ""; } return ""; } function _toString(value: any) { if (isString(value)) { return value; } if (isSymbol(value)) { return value.toString(); } if (isFunction(value["toString"])) { return (value["toString"] as any)() || ""; } return ""; } export function getTargetKeys(target: any, excludedKeys: string[], includeFunctions: boolean) { let keys: string[] = objKeys(target); if (!Util.isArray(target)) { try { if (Object[strGetOwnPropertyNames]) { // We need to use this for built in objects such as Error which don't return their values via objKeys because they are not enumerable for example let propKeys = Object[strGetOwnPropertyNames](target); if (propKeys) { arrForEach(propKeys, (key) => { const theKey = _toString(key); if (theKey && keys.indexOf(theKey) === -1) { keys.push(key); } }); } } } catch (ex) { // getOwnPropertyNames can fail in ES5, if the argument to this method is not an object (a 
primitive), // then it will cause a TypeError. In ES2015, a non-object argument will be coerced to an object. } } let theKeys: string[] = []; arrForEach(keys, (key) => { if (!includeFunctions && isFunction(target[key])) { return; } const theKey = _toString(key); if (theKey && excludedKeys.indexOf(theKey) === -1) { theKeys.push(theKey); } }); return theKeys; } export function formatLogElements(target: Object, tmLabel: string, key: string, level: number, textFilter: string, excludeKeys: string[], thingsReferenced?: any[], includeFunctions?:boolean): any { let openState = false; if (!level) { level = 0; } if (!thingsReferenced) { thingsReferenced = []; } let isObj = isObject(target) || Util.isError(target); let isErr = target["baseType"] === "ExceptionData" || Util.isError(target); const children: HTMLElement[] = []; function _openNode(currentLine: HTMLElement) { openState = true; arrForEach(children, (child) => { rootDiv.appendChild(child); }); currentLine.className = "obj-key expandable open" } function _collapseNode(currentLine: HTMLElement) { // rootDiv.innerHTML = ''; arrForEach(children, (child) => { rootDiv.removeChild(child); }); // rootDiv.appendChild(currentLine); openState = false; currentLine.className = "obj-key expandable closed" } let matched = false; let childOpened = false; const keys = getTargetKeys(target, excludeKeys, includeFunctions); if (keys.length === 0) { keys.push("<empty>"); } if (level >= MAX_DEPTH) { keys.unshift("<maxdepth>"); } for (const key of keys) { if (excludeKeys.indexOf(key) !== -1) { continue; } let targetValue = target[key]; if (isSymbol(targetValue)) { targetValue = targetValue.toString(); } if (key === "<maxdepth>") { const builder = document.createElement("div"); builder.className = "empty"; builder.innerText = "<max allowed depth reached>"; children.push(builder); break; } else if (key === "<empty>") { const builder = document.createElement("div"); builder.className = "empty"; builder.innerText = "<empty>"; 
children.push(builder); } else if (targetValue !== null && arrIndexOf(thingsReferenced, targetValue) !== -1) { const builder = document.createElement("div"); builder.className = "empty"; builder.innerText = `<circular (${key}) - "${getTargetName(targetValue)}">`; children.push(builder); } else if (targetValue !== null && (isObject(targetValue) || Util.isError(targetValue))) { thingsReferenced.push(target); let formatted = formatLogElements(targetValue, null, key, level + 1, textFilter, excludeKeys, thingsReferenced, includeFunctions); thingsReferenced.pop(); if (formatted.matched) { childOpened = true; } if (formatted.isErr) { isErr = true; } children.push(formatted.root); } else { const builder = document.createElement("div"); builder.setAttribute("tabindex", "0"); builder.onclick = (evt: MouseEvent) => { evt.stopPropagation(); } builder.ontouchend = (evt: TouchEvent) => { evt.stopPropagation(); } builder.onkeydown = (evt: KeyboardEvent) => { evt.stopPropagation(); _navHandler(evt); } builder.onfocus = (evt: Event) => { focusHandler(evt, target, level, excludeKeys, includeFunctions); } const outerSpan = document.createElement("span"); const keySpan = document.createElement("span"); keySpan.className = "key"; if (_setInnerText(keySpan, `${key}: `, textFilter)) { childOpened = true; } outerSpan.appendChild(keySpan); const valueSpan = document.createElement("span"); if (isFunction(targetValue)) { const fnStr = targetValue.toString(); const fnHead = fnStr.match(/^([^{]+)/)[1]; valueSpan.textContent = `${fnHead}{...}`; } else { if (_setInnerText(valueSpan, `${targetValue}`, textFilter)) { childOpened = true; } } valueSpan.className = `${typeof targetValue}`; outerSpan.appendChild(valueSpan); builder.appendChild(outerSpan); children.push(builder); } } const rootDiv = document.createElement("div"); let innerText = ""; let currentLine = document.createElement("span"); if (isObj || children.length) { innerText = `${key ? 
key : "obj"}: `; if (Util.isArray(target)) { innerText += `[${getTargetKeys(target, excludeKeys, includeFunctions).length}]`; } else { let targetName = getTargetName(target); if (targetName) { innerText += ` <"${targetName}"> ` } innerText += `{${getTargetKeys(target, excludeKeys, includeFunctions).length}}`; } matched = _setInnerText(currentLine, innerText, textFilter); if (tmLabel) { const tmWrapper = document.createElement("span"); const tmDetails = document.createElement("span"); tmDetails.className = "obj-time"; tmDetails.innerText = tmLabel; tmWrapper.appendChild(tmDetails); tmWrapper.appendChild(currentLine); currentLine = tmWrapper; } currentLine.className = "obj-key expandable closed" } else { innerText = `${key ? key : "obj"}: ${target.toString()}`; matched = _setInnerText(currentLine, innerText, textFilter); currentLine.className = "obj-key"; } rootDiv.appendChild(currentLine); rootDiv.setAttribute("tabindex", "0"); if (childOpened) { // A child node matched so auto-expand _openNode(currentLine); } if (isObj) { if (isErr) { rootDiv.className = "exception" } const openHandler = (evt: Event, forceState?: boolean) => { evt.stopPropagation(); if (Util.getIEVersion()) { focusHandler(evt, target, level, excludeKeys, includeFunctions); } if (forceState !== undefined && openState === forceState) { return; } if (lastSelectedElement === rootDiv) { if (openState) { _collapseNode(currentLine); } else { _openNode(currentLine); } } } rootDiv.onkeydown = (evt: KeyboardEvent) => { _navHandler(evt, openHandler, openState); } rootDiv.onclick = (evt: MouseEvent) => { openHandler(evt); } rootDiv.ontouchend = (evt: TouchEvent) => { openHandler(evt); } rootDiv.onfocus = (evt: Event) => { focusHandler(evt, target, level, excludeKeys, includeFunctions); } } return { root: rootDiv, isErr: isErr, matched: matched || childOpened }; }
the_stack
import { Mat4, Vec3, Vec4, EPSILON } from '../mol-math/linear-algebra';
import { Viewport, cameraProject, cameraUnproject } from './camera/util';
import { CameraTransitionManager } from './camera/transition';
import { BehaviorSubject } from 'rxjs';
import { Scene } from '../mol-gl/scene';

export { ICamera, Camera };

// Read-only view of a camera's matrices and clipping/fog parameters.
interface ICamera {
    readonly viewport: Viewport,
    readonly view: Mat4,
    readonly projection: Mat4,
    readonly projectionView: Mat4,
    readonly inverseProjectionView: Mat4,
    readonly state: Readonly<Camera.Snapshot>,
    readonly viewOffset: Camera.ViewOffset,
    readonly far: number,
    readonly near: number,
    readonly fogFar: number,
    readonly fogNear: number,
}

// Module-level scratch buffers reused by getPixelSize to avoid allocation.
const tmpPos1 = Vec3();
const tmpPos2 = Vec3();
const tmpClip = Vec4();

class Camera implements ICamera {
    readonly view: Mat4 = Mat4.identity();
    readonly projection: Mat4 = Mat4.identity();
    readonly projectionView: Mat4 = Mat4.identity();
    readonly inverseProjectionView: Mat4 = Mat4.identity();

    private pixelScale: number
    // Device pixel ratio scaled by the configured pixelScale; 1 when no window (e.g. workers/tests).
    get pixelRatio() {
        const dpr = (typeof window !== 'undefined') ? window.devicePixelRatio : 1;
        return dpr * this.pixelScale;
    }

    readonly viewport: Viewport;
    readonly state: Readonly<Camera.Snapshot> = Camera.createDefaultSnapshot();
    readonly viewOffset = Camera.ViewOffset();

    // Clip planes and fog range; recomputed by updateClip() on every update().
    near = 1
    far = 10000
    fogNear = 5000
    fogFar = 10000
    zoom = 1

    readonly transition: CameraTransitionManager = new CameraTransitionManager(this);
    readonly stateChanged = new BehaviorSubject<Partial<Camera.Snapshot>>(this.state);

    // Accessors copy into the existing state vectors rather than replacing them.
    get position() { return this.state.position; }
    set position(v: Vec3) { Vec3.copy(this.state.position, v); }

    get up() { return this.state.up; }
    set up(v: Vec3) { Vec3.copy(this.state.up, v); }

    get target() { return this.state.target; }
    set target(v: Vec3) { Vec3.copy(this.state.target, v); }

    // Previous frame's matrices, kept to detect changes and to roll back on a
    // non-invertible projection*view.
    private prevProjection = Mat4.identity();
    private prevView = Mat4.identity();
    // Scratch vectors reused by getFocus/getInvariantFocus.
    private deltaDirection = Vec3();
    private newPosition = Vec3();

    // Recomputes zoom, clip planes and all matrices from the current state.
    // Returns true when view or projection actually changed.
    update() {
        const snapshot = this.state as Camera.Snapshot;
        if (snapshot.radiusMax === 0) {
            return false;
        }
        // Frustum height at the target distance determines the orthographic zoom.
        const height = 2 * Math.tan(snapshot.fov / 2) * Vec3.distance(snapshot.position, snapshot.target);
        this.zoom = this.viewport.height / height;
        updateClip(this);
        switch (this.state.mode) {
            case 'orthographic': updateOrtho(this); break;
            case 'perspective': updatePers(this); break;
            default: throw new Error('unknown camera mode');
        }
        const changed = !Mat4.areEqual(this.projection, this.prevProjection, EPSILON) || !Mat4.areEqual(this.view, this.prevView, EPSILON);
        if (changed) {
            Mat4.mul(this.projectionView, this.projection, this.view);
            if (!Mat4.tryInvert(this.inverseProjectionView, this.projectionView)) {
                // Singular matrix: restore the previous frame's matrices and report no change.
                Mat4.copy(this.view, this.prevView);
                Mat4.copy(this.projection, this.prevProjection);
                Mat4.mul(this.projectionView, this.projection, this.view);
                return false;
            }
            Mat4.copy(this.prevView, this.view);
            Mat4.copy(this.prevProjection, this.projection);
        }
        return changed;
    }

    // Applies a (possibly animated) partial snapshot and notifies subscribers.
    setState(snapshot: Partial<Camera.Snapshot>, durationMs?: number) {
        this.transition.apply(snapshot, durationMs);
        this.stateChanged.next(snapshot);
    }

    // Returns a fresh copy of the current state.
    getSnapshot() {
        return Camera.copySnapshot(Camera.createDefaultSnapshot(), this.state);
    }

    // Camera distance needed to frame a sphere of `radius` in this viewport.
    getTargetDistance(radius: number) {
        return Camera.targetDistance(radius, this.state.fov, this.viewport.width, this.viewport.height);
    }

    // Computes (without applying) a snapshot that frames `target` at `radius`,
    // keeping the current viewing direction unless `dir`/`up` override it.
    getFocus(target: Vec3, radius: number, up?: Vec3, dir?: Vec3): Partial<Camera.Snapshot> {
        const r = Math.max(radius, 0.01);
        const targetDistance = this.getTargetDistance(r);
        Vec3.sub(this.deltaDirection, this.target, this.position);
        if (dir) Vec3.matchDirection(this.deltaDirection, dir, this.deltaDirection);
        Vec3.setMagnitude(this.deltaDirection, this.deltaDirection, targetDistance);
        Vec3.sub(this.newPosition, target, this.deltaDirection);
        const state = Camera.copySnapshot(Camera.createDefaultSnapshot(), this.state);
        state.target = Vec3.clone(target);
        state.radius = r;
        state.position = Vec3.clone(this.newPosition);
        if (up) Vec3.matchDirection(state.up, up, state.up);
        return state;
    }

    // Like getFocus, but uses exactly the given `dir`/`up`, independent of the
    // camera's current orientation.
    getInvariantFocus(target: Vec3, radius: number, up: Vec3, dir: Vec3): Partial<Camera.Snapshot> {
        const r = Math.max(radius, 0.01);
        const targetDistance = this.getTargetDistance(r);
        Vec3.copy(this.deltaDirection, dir);
        Vec3.setMagnitude(this.deltaDirection, this.deltaDirection, targetDistance);
        Vec3.sub(this.newPosition, target, this.deltaDirection);
        const state = Camera.copySnapshot(Camera.createDefaultSnapshot(), this.state);
        state.target = Vec3.clone(target);
        state.radius = r;
        state.position = Vec3.clone(this.newPosition);
        Vec3.copy(state.up, up);
        return state;
    }

    // Applies getFocus() as a (possibly animated) state change; no-op for radius <= 0.
    focus(target: Vec3, radius: number, durationMs?: number, up?: Vec3, dir?: Vec3) {
        if (radius > 0) {
            this.setState(this.getFocus(target, radius, up, dir), durationMs);
        }
    }

    /** Transform point into 2D window coordinates. */
    project(out: Vec4, point: Vec3) {
        return cameraProject(out, point, this.viewport, this.projectionView);
    }

    /**
     * Transform point from screen space to 3D coordinates.
     * The point must have `x` and `y` set to 2D window coordinates
     * and `z` between 0 (near) and 1 (far); the optional `w` is not used.
     */
    unproject(out: Vec3, point: Vec3 | Vec4) {
        return cameraUnproject(out, point, this.viewport, this.inverseProjectionView);
    }

    /** World space pixel size at given `point` */
    getPixelSize(point: Vec3) {
        // project -> unproject of `point` does not exactly return the same
        // to get a sufficiently accurate measure we unproject the original
        // clip position in addition to the one shifted by one pixel
        this.project(tmpClip, point);
        this.unproject(tmpPos1, tmpClip);
        tmpClip[0] += 1;
        this.unproject(tmpPos2, tmpClip);
        return Vec3.distance(tmpPos1, tmpPos2);
    }

    constructor(state?: Partial<Camera.Snapshot>, viewport = Viewport.create(0, 0, 128, 128), props: Partial<{ pixelScale: number }> = {}) {
        this.viewport = viewport;
        this.pixelScale = props.pixelScale || 1;
        Camera.copySnapshot(this.state, state);
    }
}

namespace Camera {
    export type Mode = 'perspective' | 'orthographic'
    export type SnapshotProvider = Partial<Snapshot> | ((scene: Scene, camera: Camera) => Partial<Snapshot>)

    /**
     * Sets an offset view in a larger frustum. This is useful for
     * - multi-window or multi-monitor/multi-machine setups
     * - jittering the camera position for sampling
     */
    export interface ViewOffset {
        enabled: boolean,
        fullWidth: number,
        fullHeight: number,
        offsetX: number,
        offsetY: number,
        width: number,
        height: number
    }

    // Default (disabled) view offset covering the full 1x1 frustum.
    export function ViewOffset(): ViewOffset {
        return { enabled: false, fullWidth: 1, fullHeight: 1, offsetX: 0, offsetY: 0, width: 1, height: 1 };
    }

    // Writes the given sub-frustum parameters into `out` (does not toggle `enabled`).
    export function setViewOffset(out: ViewOffset, fullWidth: number, fullHeight: number, offsetX: number, offsetY: number, width: number, height: number) {
        out.fullWidth = fullWidth;
        out.fullHeight = fullHeight;
        out.offsetX = offsetX;
        out.offsetY = offsetY;
        out.width = width;
        out.height = height;
    }

    // Field-by-field copy of a ViewOffset.
    export function copyViewOffset(out: ViewOffset, view: ViewOffset) {
        out.enabled = view.enabled;
        out.fullWidth = view.fullWidth;
        out.fullHeight = view.fullHeight;
        out.offsetX = view.offsetX;
        out.offsetY = view.offsetY;
        out.width = view.width;
        out.height = view.height;
    }

    // Distance from which a sphere of `radius` fills the viewport for the given
    // vertical field of view; accounts for portrait vs landscape aspect.
    export function targetDistance(radius: number, fov: number, width: number, height: number) {
        const r = Math.max(radius, 0.01);
        const aspect = width / height;
        const aspectFactor = (height < width ? 1 : aspect);
        return Math.abs((r / aspectFactor) / Math.sin(fov / 2));
    }

    export function createDefaultSnapshot(): Snapshot {
        return {
            mode: 'perspective',
            fov: Math.PI / 4,
            position: Vec3.create(0, 0, 100),
            up: Vec3.create(0, 1, 0),
            target: Vec3.create(0, 0, 0),
            radius: 0,
            radiusMax: 10,
            fog: 50,
            clipFar: true
        };
    }

    // Full serializable camera state.
    export interface Snapshot {
        mode: Mode
        fov: number
        position: Vec3
        up: Vec3
        target: Vec3
        radius: number
        radiusMax: number
        fog: number
        clipFar: boolean
    }

    // Copies only the fields present in `source` into `out`; vectors are copied
    // in place so existing references stay valid. Returns `out`.
    export function copySnapshot(out: Snapshot, source?: Partial<Snapshot>) {
        if (!source) return out;
        if (typeof source.mode !== 'undefined') out.mode = source.mode;
        if (typeof source.fov !== 'undefined') out.fov = source.fov;
        if (typeof source.position !== 'undefined') Vec3.copy(out.position, source.position);
        if (typeof source.up !== 'undefined') Vec3.copy(out.up, source.up);
        if (typeof source.target !== 'undefined') Vec3.copy(out.target, source.target);
        if (typeof source.radius !== 'undefined') out.radius = source.radius;
        if (typeof source.radiusMax !== 'undefined') out.radiusMax = source.radiusMax;
        if (typeof source.fog !== 'undefined') out.fog = source.fog;
        if (typeof source.clipFar !== 'undefined') out.clipFar = source.clipFar;
        return out;
    }

    export function areSnapshotsEqual(a: Snapshot, b: Snapshot) {
        return a.mode === b.mode
            && a.fov === b.fov
            && a.radius === b.radius
            && a.radiusMax === b.radiusMax
            && a.fog === b.fog
            && a.clipFar === b.clipFar
            && Vec3.exactEquals(a.position, b.position)
            && Vec3.exactEquals(a.up, b.up)
            && Vec3.exactEquals(a.target, b.target);
    }
}

// Rebuilds projection + view matrices for orthographic mode, applying the
// current zoom and, when enabled, the sub-frustum view offset.
function updateOrtho(camera: Camera) {
    const { viewport, zoom, near, far, viewOffset } = camera;

    const fullLeft = -viewport.width / 2;
    const fullRight = viewport.width / 2;
    const fullTop = viewport.height / 2;
    const fullBottom = -viewport.height / 2;

    const dx = (fullRight - fullLeft) / (2 * zoom);
    const dy = (fullTop - fullBottom) / (2 * zoom);
    const cx = (fullRight + fullLeft) / 2;
    const cy = (fullTop + fullBottom) / 2;

    let left = cx - dx;
    let right = cx + dx;
    let top = cy + dy;
    let bottom = cy - dy;

    if (viewOffset.enabled) {
        const zoomW = zoom / (viewOffset.width / viewOffset.fullWidth);
        const zoomH = zoom / (viewOffset.height / viewOffset.fullHeight);
        const scaleW = (fullRight - fullLeft) / viewOffset.width;
        const scaleH = (fullTop - fullBottom) / viewOffset.height;
        left += scaleW * (viewOffset.offsetX / zoomW);
        right = left + scaleW * (viewOffset.width / zoomW);
        top -= scaleH * (viewOffset.offsetY / zoomH);
        bottom = top - scaleH * (viewOffset.height / zoomH);
    }

    // build projection matrix
    Mat4.ortho(camera.projection, left, right, top, bottom, near, far);

    // build view matrix
    Mat4.lookAt(camera.view, camera.position, camera.target, camera.up);
}

// Rebuilds projection + view matrices for perspective mode, applying the
// field of view and, when enabled, the sub-frustum view offset.
function updatePers(camera: Camera) {
    const aspect = camera.viewport.width / camera.viewport.height;

    const { near, far, viewOffset } = camera;

    let top = near * Math.tan(0.5 * camera.state.fov);
    let height = 2 * top;
    let width = aspect * height;
    let left = -0.5 * width;

    if (viewOffset.enabled) {
        left += viewOffset.offsetX * width / viewOffset.fullWidth;
        top -= viewOffset.offsetY * height / viewOffset.fullHeight;
        width *= viewOffset.width / viewOffset.fullWidth;
        height *= viewOffset.height / viewOffset.fullHeight;
    }

    // build projection matrix
    Mat4.perspective(camera.projection, left, left + width, top, top - height, near, far);

    // build view matrix
    Mat4.lookAt(camera.view, camera.position, camera.target, camera.up);
}

// Derives near/far clip planes and fog range from the camera-to-target
// distance and the state's radius/fog/clipFar settings.
function updateClip(camera: Camera) {
    let { radius, radiusMax, mode, fog, clipFar } = camera.state;
    if (radius < 0.01) radius = 0.01;

    // With clipFar the far plane hugs the focused sphere; otherwise it spans radiusMax.
    const normalizedFar = clipFar ? radius : radiusMax;
    const cameraDistance = Vec3.distance(camera.position, camera.target);
    let near = cameraDistance - radius;
    let far = cameraDistance + normalizedFar;

    const fogNearFactor = -(50 - fog) / 50;
    const fogNear = cameraDistance - (normalizedFar * fogNearFactor);
    const fogFar = far;

    if (mode === 'perspective') {
        // set at least to 5 to avoid slow sphere impostor rendering
        near = Math.max(Math.min(radiusMax, 5), near);
        far = Math.max(5, far);
    } else {
        // not too close to 0 as it causes issues with outline rendering
        near = Math.max(Math.min(radiusMax, 5), near);
        far = Math.max(5, far);
    }

    if (near === far) {
        // make sure near and far are not identical to avoid Infinity in the projection matrix
        far = near + 0.01;
    }

    camera.near = near;
    camera.far = 2 * far; // avoid precision issues distinguishing far objects from background
    camera.fogNear = fogNear;
    camera.fogFar = fogFar;
}
the_stack
import { default as axios } from 'axios'; import BigNumber from 'bignumber.js'; import queryString from 'query-string'; import { address, Integer, SigningMethod, ApiOrderQueryV2, ApiOrderV2, ApiOrder, ApiAccount, ApiFillQueryV2, ApiFillV2, ApiTradeQueryV2, ApiTradeV2, ApiMarket, ApiOrderOnOrderbook, ApiMarketName, CanonicalOrder, ApiSide, MarketId, BigNumberable, SignedCanonicalOrder, ApiMarketMessageV2, RequestMethod, } from '../types'; import { CanonicalOrders } from './CanonicalOrders'; import _ from 'lodash'; const FOUR_WEEKS_IN_SECONDS = 60 * 60 * 24 * 28; const DEFAULT_API_ENDPOINT = 'https://api.dydx.exchange'; const DEFAULT_API_TIMEOUT = 10000; export class Api { private endpoint: String; private canonicalOrders: CanonicalOrders; private timeout: number; constructor( canonicalOrders: CanonicalOrders, endpoint: string = DEFAULT_API_ENDPOINT, timeout: number = DEFAULT_API_TIMEOUT, ) { this.endpoint = endpoint; this.canonicalOrders = canonicalOrders; this.timeout = timeout; } public async placeCanonicalOrder({ order: { side, market, amount, price, makerAccountOwner, expiration = new BigNumber(FOUR_WEEKS_IN_SECONDS), limitFee, }, fillOrKill, postOnly, clientId, cancelId, cancelAmountOnRevert, }: { order: { side: ApiSide, market: ApiMarketName, amount: BigNumberable, price: BigNumberable, makerAccountOwner: address, expiration: BigNumberable, limitFee?: BigNumberable, }, fillOrKill?: boolean, postOnly?: boolean, clientId?: string, cancelId?: string, cancelAmountOnRevert?: boolean, }): Promise<{ order: ApiOrder }> { const order: SignedCanonicalOrder = await this.createCanonicalOrder({ side, market, amount, price, makerAccountOwner, expiration, limitFee, postOnly, }); return this.submitCanonicalOrder({ order, fillOrKill, postOnly, cancelId, clientId, cancelAmountOnRevert, }); } /** * Creates but does not place a signed canonicalOrder */ async createCanonicalOrder({ side, market, amount, price, makerAccountOwner, expiration, limitFee, postOnly, }: { side: ApiSide, 
market: ApiMarketName, amount: BigNumberable, price: BigNumberable, makerAccountOwner: address, expiration: BigNumberable, limitFee?: BigNumberable, postOnly?: boolean, }): Promise<SignedCanonicalOrder> { if (!Object.values(ApiSide).includes(side)) { throw new Error(`side: ${side} is invalid`); } if (!Object.values(ApiMarketName).includes(market)) { throw new Error(`market: ${market} is invalid`); } const amountNumber: BigNumber = new BigNumber(amount); const isTaker: boolean = !postOnly; const markets: string[] = market.split('-'); const baseMarket: BigNumber = MarketId[markets[0]]; const limitFeeNumber: BigNumber = limitFee ? new BigNumber(limitFee) : this.canonicalOrders.getFeeForOrder(baseMarket, amountNumber, isTaker); const realExpiration: BigNumber = getRealExpiration(expiration); const order: CanonicalOrder = { baseMarket, makerAccountOwner, quoteMarket: MarketId[markets[1]], isBuy: side === ApiSide.BUY, isDecreaseOnly: false, amount: amountNumber, limitPrice: new BigNumber(price), triggerPrice: new BigNumber('0'), limitFee: limitFeeNumber, makerAccountNumber: new BigNumber('0'), expiration: realExpiration, salt: generatePseudoRandom256BitNumber(), }; const typedSignature: string = await this.canonicalOrders.signOrder( order, SigningMethod.Hash, ); return { ...order, typedSignature, }; } /** * Submits an already signed canonicalOrder */ public async submitCanonicalOrder({ order, fillOrKill = false, postOnly = false, cancelId, clientId, cancelAmountOnRevert, }: { order: SignedCanonicalOrder, fillOrKill: boolean, postOnly: boolean, cancelId: string, clientId?: string, cancelAmountOnRevert?: boolean, }): Promise<{ order: ApiOrder }> { const jsonOrder = jsonifyCanonicalOrder(order); const data: any = { fillOrKill, postOnly, clientId, cancelId, cancelAmountOnRevert, order: jsonOrder, }; return this.axiosRequest({ data, url: `${this.endpoint}/v2/orders`, method: RequestMethod.POST, }); } public async cancelOrderV2({ orderId, makerAccountOwner, }: { orderId: 
string, makerAccountOwner: address, }): Promise<{ order: ApiOrder }> { const signature = await this.canonicalOrders.signCancelOrderByHash( orderId, makerAccountOwner, SigningMethod.Hash, ); return this.axiosRequest({ url: `${this.endpoint}/v2/orders/${orderId}`, method: RequestMethod.DELETE, headers: { authorization: `Bearer ${signature}`, }, }); } public async getOrdersV2({ accountOwner, accountNumber, side, status, orderType, market, limit, startingBefore, }: ApiOrderQueryV2): Promise<{ orders: ApiOrderV2[] }> { const queryObj: any = { side, orderType, limit, market, status, accountOwner, accountNumber: accountNumber && new BigNumber(accountNumber).toFixed(0), startingBefore: startingBefore && startingBefore.toISOString(), }; const query: string = queryString.stringify(queryObj, { skipNull: true, arrayFormat: 'comma' }); return this.axiosRequest({ url: `${this.endpoint}/v2/orders?${query}`, method: RequestMethod.GET, }); } public async getOrderV2({ id, }: { id: string, }): Promise<{ order: ApiOrderV2 }> { return this.axiosRequest({ url: `${this.endpoint}/v2/orders/${id}`, method: RequestMethod.GET, }); } public async getMarketV2({ market, }: { market: string, }): Promise<{ market: ApiMarketMessageV2 }> { return this.axiosRequest({ url: `${this.endpoint}/v2/markets/${market}`, method: RequestMethod.GET, }); } public async getMarketsV2(): Promise<{ markets: { [market: string]: ApiMarketMessageV2 } }> { return this.axiosRequest({ url: `${this.endpoint}/v2/markets`, method: RequestMethod.GET, }); } public async getFillsV2({ orderId, side, market, transactionHash, accountOwner, accountNumber, startingBefore, limit, }: ApiFillQueryV2): Promise<{ fills: ApiFillV2[] }> { const queryObj: any = { orderId, side, limit, market, transactionHash, accountOwner, accountNumber: accountNumber && new BigNumber(accountNumber).toFixed(0), startingBefore: startingBefore && startingBefore.toISOString(), }; const query: string = queryString.stringify(queryObj, { skipNull: true, 
arrayFormat: 'comma' }); return this.axiosRequest({ url: `${this.endpoint}/v2/fills?${query}`, method: RequestMethod.GET, }); } public async getTradesV2({ orderId, side, market, transactionHash, accountOwner, accountNumber, startingBefore, limit, }: ApiTradeQueryV2): Promise<{ trades: ApiTradeV2[] }> { const queryObj: any = { orderId, side, limit, market, transactionHash, accountOwner, accountNumber: accountNumber && new BigNumber(accountNumber).toFixed(0), startingBefore: startingBefore && startingBefore.toISOString(), }; const query: string = queryString.stringify(queryObj, { skipNull: true, arrayFormat: 'comma' }); return this.axiosRequest({ url: `${this.endpoint}/v2/trades?${query}`, method: RequestMethod.GET, }); } public async getAccountBalances({ accountOwner, accountNumber = new BigNumber(0), }: { accountOwner: address, accountNumber: Integer | string, }): Promise<ApiAccount> { const numberStr = new BigNumber(accountNumber).toFixed(0); return this.axiosRequest({ url: `${this.endpoint}/v1/accounts/${accountOwner}?number=${numberStr}`, method: RequestMethod.GET, }); } public async getOrderbookV2({ market, }: { market: ApiMarketName, }): Promise<{ bids: ApiOrderOnOrderbook[], asks: ApiOrderOnOrderbook[] }> { return this.axiosRequest({ url: `${this.endpoint}/v1/orderbook/${market}`, method: RequestMethod.GET, }); } public async getMarkets(): Promise<{ markets: ApiMarket[] }> { return this.axiosRequest({ url: `${this.endpoint}/v1/markets`, method: RequestMethod.GET, }); } private async axiosRequest( { url, method, headers, data, }: { url: string, method: RequestMethod, headers?: any, data?: any, }): Promise<any> { try { const response = await axios({ url, method, headers, data, timeout: this.timeout, }); return response.data; } catch (error) { const message = _.get(error, 'response.data.errors[0].msg') || error.message; const newError = new Error(message); newError.stack = error.stack; if (error.response) { // Include selected data from the response. 
Don't include the whole response, which may // contain circular references. (newError as any).response = _.pick(error.response, ['data', 'status', 'statusText']); } throw newError; } } } function generatePseudoRandom256BitNumber(): BigNumber { const MAX_DIGITS_IN_UNSIGNED_256_INT = 78; // BigNumber.random returns a pseudo-random number between 0 & 1 with a passed in number of // decimal places. // Source: https://mikemcl.github.io/bignumber.js/#random const randomNumber = BigNumber.random(MAX_DIGITS_IN_UNSIGNED_256_INT); const factor = new BigNumber(10).pow(MAX_DIGITS_IN_UNSIGNED_256_INT - 1); const randomNumberScaledTo256Bits = randomNumber.times(factor).integerValue(); return randomNumberScaledTo256Bits; } function jsonifyCanonicalOrder(order: SignedCanonicalOrder) { return { isBuy: order.isBuy, isDecreaseOnly: order.isDecreaseOnly, baseMarket: order.baseMarket.toFixed(0), quoteMarket: order.quoteMarket.toFixed(0), amount: order.amount.toFixed(0), limitPrice: order.limitPrice.toString(), triggerPrice: order.triggerPrice.toString(), limitFee: order.limitFee.toString(), makerAccountNumber: order.makerAccountNumber.toFixed(0), makerAccountOwner: order.makerAccountOwner, expiration: order.expiration.toFixed(0), typedSignature: order.typedSignature, salt: order.salt.toFixed(0), }; } function getRealExpiration(expiration: BigNumberable): BigNumber { return new BigNumber(expiration).eq(0) ? new BigNumber(0) : new BigNumber(Math.round(new Date().getTime() / 1000)).plus( new BigNumber(expiration), ); }
the_stack
import { EncodedString } from '../../elements'; import { EditCrm } from '../edit-crm/edit-crm'; import { DefaultLink } from '../default-link/default-link'; import { CodeEditBehaviorInstance } from '../editpages/code-edit-pages/code-edit-behavior'; import { Polymer, ElementTagNameMaps } from '../../../../tools/definitions/polymer'; import { CenterElement } from '../../util/center-element/center-element'; import { PaperToggleOption } from '../inputs/paper-toggle-option/paper-toggle-option'; import { SCRIPT_CONVERSION_TYPE } from '../../../js/background/sharedTypes'; import { I18NKeys } from '../../../_locales/i18n-keys'; declare const browserAPI: browserAPI; declare const BrowserAPI: BrowserAPI; namespace CRMAppElement { interface JQContextMenuObj { name: string; callback(): void; type?: 'checkbox'; selected?: boolean; items?: { [key: number]: JQContextMenuItem }; } type JQContextMenuItem = JQContextMenuObj | string; interface JQueryContextMenu extends JQueryStatic { contextMenu(settings: { selector: string; items: JQContextMenuItem[]; } | 'destroy'): void; bez(curve: number[]): string; } type TypeCheckTypes = 'string' | 'function' | '' | 'object' | 'array' | 'boolean'; interface TypeCheckConfig { val: string; type: TypeCheckTypes | TypeCheckTypes[]; optional?: boolean; forChildren?: { val: string; type: TypeCheckTypes | TypeCheckTypes[]; optional?: boolean; }[]; dependency?: string; min?: number; max?: number; } type TypeCheckErrors = { err: string; storageType?: 'local'|'sync'; }[]; window.runOrAddAsCallback = function (toRun: Function, thisElement: HTMLElement, params: any[]): void { if (window.app.settings) { toRun.apply(thisElement, params); } else { window.app.addSettingsReadyCallback(toRun, thisElement, params); } }; (() => { const animateExists = !!document.createElement('div').animate; const animatePolyFill = function (this: HTMLElement, properties: { [key: string]: any; }[], options: { duration?: number; easing?: string|'bez'; fill?: 
'forwards'|'backwards'|'both'; }): Animation { if (!properties[1]) { var skippedAnimation: Animation = { currentTime: null, play: function () { }, reverse: function () { }, cancel: function() { }, finish: function() {}, pause: function() {}, updatePlaybackRate(_playbackRate: number) {}, addEventListener(_type: string, _listener: EventListenerOrEventListenerObject) {}, removeEventListener(_type: string, _listener: EventListenerOrEventListenerObject) {}, dispatchEvent(_event: Event) { return true }, effect: null, finished: Promise.resolve(skippedAnimation), pending: false, startTime: Date.now(), id: '', ready: Promise.resolve(skippedAnimation), playState: 'finished', playbackRate: 1.0, timeline: { currentTime: Date.now() }, oncancel: null, onfinish: null }; return skippedAnimation; } const element = this; let direction: 'forwards' | 'backwards' = 'forwards'; const state: { isPaused: boolean; currentProgress: number; msRemaining: number; finishedPromise: Promise<Animation>; finishPromise: (animation: Animation) => void; playbackRate: number; playState: 'idle'|'running'|'paused'|'finished'; iterations: number; } = { isPaused: false, currentProgress: 0, msRemaining: 0, finishedPromise: null, finishPromise: null, playbackRate: 1.0, playState: 'idle', iterations: 0 }; var returnVal: Animation = { play() { state.playState = 'running'; state.iterations++; state.finishedPromise = new Promise<Animation>((resolve) => { state.finishPromise = resolve; }); let duration = (options && options.duration) || 500; if (state.isPaused) { duration = state.msRemaining; } duration = duration / state.playbackRate; $(element).stop().animate(properties[~~(direction === 'forwards')], { duration: duration, complete() { state.playState = 'finished'; state.isPaused = false; state.finishPromise && state.finishPromise(returnVal); if (returnVal.onfinish) { returnVal.onfinish.apply(returnVal, { currentTime: Date.now(), timelineTime: null }); } }, progress(_animation, progress, remainingMs) { 
state.currentProgress = progress; state.msRemaining = remainingMs; } }); state.isPaused = false; }, reverse() { direction = 'backwards'; this.play(); }, cancel() { state.playState = 'idle'; $(element).stop(); state.isPaused = false; //Reset to start const props = properties[~~(direction !== 'forwards')]; for (const prop in props) { element.style[prop as any] = props[prop]; } returnVal.oncancel && returnVal.oncancel.apply(returnVal, { currentTime: Date.now(), timelineTime: null }); }, finish() { state.isPaused = false; $(element).stop().animate(properties[~~(direction === 'forwards')], { duration: 0, complete() { state.playState = 'finished'; state.finishPromise && state.finishPromise(returnVal); if (returnVal.onfinish) { returnVal.onfinish.apply(returnVal, { currentTime: Date.now(), timelineTime: null }); } } }); }, pause() { state.playState = 'paused'; $(element).stop(); state.isPaused = true; }, id: '', pending: false, currentTime: null, effect: { getTiming(): EffectTiming { const duration = ((options && options.duration) || 500) / state.playbackRate; return { delay: 0, direction: direction === 'forwards' ? 'normal' : 'reverse', duration: duration, easing: options.easing, fill: options.fill } }, updateTiming(_timing?: OptionalEffectTiming) { }, getComputedTiming() { const duration = ((options && options.duration) || 500) / state.playbackRate; return { endTime: duration, activeDuration: duration, localTime: state.playState === 'running' ? duration - state.msRemaining : null, progress: state.playState === 'running' ? state.currentProgress : null, currentIteration: state.playState === 'running' ? 
state.iterations : null } } }, updatePlaybackRate(_playbackRate: number) {}, addEventListener(_type: string, _listener: EventListenerOrEventListenerObject) {}, removeEventListener(_type: string, _listener: EventListenerOrEventListenerObject) {}, dispatchEvent(_event: Event) { return true }, timeline: { currentTime: null }, startTime: Date.now(), ready: Promise.resolve(returnVal), playbackRate: null, playState: null, finished: null, oncancel: null, onfinish: null }; Object.defineProperty(returnVal.timeline, 'currentTime', { get() { return Date.now(); } }); Object.defineProperties(returnVal, { playbackRate: { get() { return state.playbackRate; } }, playState: { get() { return state.playState; } }, finished: { get() { return state.finishedPromise; } } }); $(this).animate(properties[1], options.duration, function () { if (returnVal.onfinish) { returnVal.onfinish.apply({ effect: { target: element } }); } }); return returnVal; }; if (!animateExists) { HTMLElement.prototype.animate = animatePolyFill as any; HTMLElement.prototype.__isAnimationJqueryPolyfill = true; } })(); const crmAppProperties: { settings: CRM.SettingsStorage; onSettingsReadyCallbacks: { callback: Function; thisElement: HTMLElement; params: any[]; }[]; crmTypes: boolean[]; settingsJsonLength: number; globalExcludes: string[]; versionUpdateTab: number; } = { settings: { type: Object, notify: true }, onSettingsReadyCallbacks: { type: Array, value: [] }, crmTypes: Array, settingsJsonLength: { type: Number, notify: true, value: 0 }, globalExcludes: { type: Array, notify: true, value: [] }, versionUpdateTab: { type: Number, notify: true, value: 0, observer: 'versionUpdateChanged' } } as any; interface PersistentData { lineSeperators: { start: number; end: number; }[]; script: string; lines: string[]; siblingExpr?: Tern.Expression; isObj?: boolean; } interface ChromePersistentData { persistent: { passes: number; diagnostic: boolean; lineSeperators: { start: number; end: number; }[]; script: string; lines: 
string[]; }; parentExpressions: Tern.Expression[]; functionCall: string[]; isReturn: boolean; isValidReturn: boolean; returnExpr: Tern.Expression; returnName: string; expression: Tern.Expression; } type TransferOnErrorError = { from: { line: number; } to: { line: number; } }; type TransferOnError = (position: TransferOnErrorError, passes: number) => void; type ScriptUpgradeErrorHandler = (oldScriptErrors: CursorPosition[], newScriptErrors: CursorPosition[], parseError: boolean) => void; class CA { static is = 'crm-app'; static _log: any[] = []; /** * Whether to show the item-edit-page */ static show: boolean = false; /** * What item to show in the item-edit-page */ static item: CRM.Node = null; /** * The item to show, if it is a script */ static scriptItem: CRM.ScriptNode; /** * The item to show, if it is a stylesheet */ static stylesheetItem: CRM.StylesheetNode; /** * The last-used unique ID */ private static _latestId: CRM.GenericNodeId = -1 as CRM.GenericNodeId; /** * The value of the storage.local */ static storageLocal: CRM.StorageLocal; /** * A copy of the storage.local to compare when calling upload */ private static _storageLocalCopy: CRM.StorageLocal; /** * A copy of the settings to compare when calling upload */ private static _settingsCopy: CRM.SettingsStorage; /** * The nodes in an object where the key is the ID and the * value is the node */ static nodesById: CRMStore = new window.Map(); /** * The column index of the "shadow" node, if any */ static shadowStart: number; /** * The global variables for the jsLint linter */ static jsLintGlobals: string[] = []; /** * The tern server used for key bindings */ static ternServer: Tern.ServerInstance; /** * The monaco theme style element */ static monacoStyleElement: HTMLStyleElement = null; static properties = crmAppProperties; private static _getRegisteredListener(this: CrmApp, element: Polymer.PolymerElement|HTMLElement|DocumentFragment, eventType: string) { const listeners = this.listeners; if (!element || 
!('getAttribute' in element)) { return null; } return (element as Polymer.PolymerElement) .getAttribute(`data-on-${eventType}`) as keyof typeof listeners; } static domListener(this: CrmApp, event: Polymer.CustomEvent) { const listeners = this.listeners; const fnName: keyof typeof listeners = window.app.util.iteratePath(event, (element) => { return this._getRegisteredListener(element, event.type); }); if (fnName) { if (fnName !== 'prototype' && fnName !== 'parent' && listeners[fnName]) { const listener = this.listeners[fnName]; (listener as (this: typeof listener, event: Polymer.CustomEvent, eDetail: Polymer.CustomEvent['detail']) => void).bind(listeners)(event, event.detail); } else { console.warn.apply(console, this._logf(`_createEventHandler`, `listener method ${fnName as any} not defined`)); } } else { console.warn.apply(console, this._logf(`_createEventHandler`, `property data-on${event.type} not defined`)); } } static getKeyBindingValue(binding: { name: string; defaultKey: string; monacoKey: string; storageKey: keyof CRM.KeyBindings; }) { return (window.app.settings && window.app.settings.editor.keyBindings[binding.storageKey]) || binding.defaultKey; } static _currentItemIsCss(_item: CRM.ScriptNode|CRM.StylesheetNode) { return (this.item && this.item.type === 'stylesheet'); } private static _isDemo() { return location.href.indexOf('demo') > -1; } private static _onIsTest() { return new Promise((resolve) => { if (location.href.indexOf('test') > -1) { resolve(null); } else { if (window.onIsTest === true) { resolve(null); } else { window.onIsTest = () => { resolve(null); }; } } }) } static _getPageTitle(this: CrmApp): string { return this._isDemo() ? 
'Demo, actual right-click menu does NOT work in demo' : this.___(I18NKeys.generic.appTitle); } static _isOldChrome() { return this.getChromeVersion() < 30; } static _getChromeAge() { return new Date().getUTCFullYear() - 2013; } static _getString(str: string | null): string { return str || ''; } static _isOfType<T extends { type: string; }>(option: T, type: T['type']): boolean { return option.type === type; } private static getChromeVersion() { if (BrowserAPI.getBrowser() === 'chrome') { return parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10); } return 1000; } static generateCodeOptionsArray<T extends CRM.Options>(this: CrmApp, settings: T|string): { key: keyof T; value: T[keyof T] }[] { if (!settings || typeof settings === 'string') { return []; } return Object.getOwnPropertyNames(settings).map((key: keyof T) => { if (key === '$schema') { return null; } return { key: key, value: JSON.parse(JSON.stringify(settings[key])) }; }).filter(item => item !== null).map(({ key, value }) => { if (value.type === 'choice') { //If nothing is selected, select the first item const choice = value as CRM.OptionChoice; if (typeof choice.selected !== 'number' || choice.selected > choice.values.length || choice.selected < 0) { choice.selected = 0; } } return { key, value } }); } static _isOnlyGlobalExclude(this: CrmApp): boolean { return this.globalExcludes.length === 1; }; static _isVersionUpdateTabX(this: CrmApp, currentTab: number, desiredTab: number) { return currentTab === desiredTab; }; private static _getUpdatedScriptString(this: CrmApp, updatedScript: { name: string; oldVersion: string; newVersion: string; }): string { if (!updatedScript) { return 'Please ignore'; } return this.___(I18NKeys.crmApp.code.nodeUpdated, updatedScript.name, updatedScript.oldVersion, updatedScript.newVersion); }; static _getPermissionDescription(this: CrmApp): (permission: string) => string { return this.templates.getPermissionDescription; }; static _getNodeName(this: CrmApp, 
nodeId: CRM.GenericNodeId) { return window.app.nodesById.get(nodeId).name; }; static _getNodeVersion(this: CrmApp, nodeId: CRM.GenericNodeId) { return (window.app.nodesById.get(nodeId).nodeInfo && window.app.nodesById.get(nodeId).nodeInfo.version) || '1.0'; }; static _placeCommas(this: CrmApp, num: number): string { const split = this._reverseString(num.toString()).match(/[0-9]{1,3}/g); return this._reverseString(split.join(',')); }; static _supportsStorageSync() { return 'sync' in BrowserAPI.getSrc().storage && 'get' in BrowserAPI.getSrc().storage.sync; } static _getCRMInRMDisabledReason(this: CrmApp) { return this.___(I18NKeys.crmApp.options.chromeLow, ~~/Chrome\/([0-9.]+)/.exec(navigator.userAgent) ? (~~/Chrome\/([0-9.]+)/.exec(navigator.userAgent)[1].split('.')[0] + '') : this.___(I18NKeys.crmApp.options.notChrome)); } static _getStorageSyncDisabledReason(this: CrmApp) { if (!this._supportsStorageSync()) { return this.___(I18NKeys.crmApp.options.useStorageSyncDisabledUnavailable); } else { return this.___(I18NKeys.crmApp.options.useStorageSyncDisabledTooBig); } } static _getSettingsJsonLengthColor(this: CrmApp): string { let red; let green; if (this.settingsJsonLength <= 51200) { //Green to yellow, increase red green = 255; red = (this.settingsJsonLength / 51200) * 255; } else { //Yellow to red, reduce green red = 255; green = 255 - (((this.settingsJsonLength - 51200) / 51200) * 255); } //Darken a bit red = Math.floor(red * 0.7); green = Math.floor(green * 0.7); return 'color: rgb(' + red + ', ' + green + ', 0);'; }; private static _findScriptsInSubtree(this: CrmApp, toFind: CRM.Node, container: CRM.Node[]) { if (toFind.type === 'script') { container.push(toFind); } else if (toFind.children) { for (let i = 0; i < toFind.children.length; i++) { this._findScriptsInSubtree(toFind.children[i], container); } } }; private static async _runDialogsForImportedScripts(this: CrmApp, nodesToAdd: CRM.Node[], dialogs: CRM.ScriptNode[]) { if (dialogs[0]) { const script = 
dialogs.splice(0, 1)[0]; await window.scriptEdit.openPermissionsDialog(script); await this._runDialogsForImportedScripts(nodesToAdd, dialogs); } else { this._addImportedNodes(nodesToAdd); } }; private static _addImportedNodes(this: CrmApp, nodesToAdd: CRM.Node[]): boolean { if (!nodesToAdd[0]) { return false; } const toAdd = nodesToAdd.splice(0, 1)[0]; this.util.treeForEach(toAdd, (node) => { node.id = this.generateItemId(); node.nodeInfo.source = 'local'; }); this.crm.add(toAdd); const scripts: CRM.ScriptNode[] = []; this._findScriptsInSubtree(toAdd, scripts); this._runDialogsForImportedScripts(nodesToAdd, scripts); return true; }; private static _reverseString(this: CrmApp, string: string): string { return string.split('').reverse().join(''); }; private static _genRequestPermissionsHandler(this: CrmApp, overlayContainer: { overlay: HTMLPaperDialogElement }, toRequest: CRM.Permission[]) { const fn = () => { let el: HTMLElement & { animation?: { reverse?(): void; } }, svg; const overlay = overlayContainer.overlay; overlay.style.maxHeight = 'initial!important'; overlay.style.top = 'initial!important'; overlay.removeEventListener('iron-overlay-opened', fn); $(window.app.util.getQuerySlot()(overlay, '.requestPermissionsShowBot')).off('click').on('click', function (this: HTMLElement) { el = $(this).parent().parent().children('.requestPermissionsPermissionBotCont')[0]; svg = $(this).find('.requestPermissionsSvg')[0]; if ((svg as any).__rotated) { window.setTransform(svg, 'rotate(90deg)'); (svg as any).rotated = false; } else { window.setTransform(svg, 'rotate(270deg)'); (svg as any).rotated = true; } if (el.animation && el.animation.reverse) { el.animation.reverse(); } else { el.animation = el.animate([{ height: '0' }, { height: el.scrollHeight + 'px' }], { duration: 250, easing: 'linear', fill: 'both' }); } }); $(this.shadowRoot.querySelectorAll('#requestPermissionsShowOther')).off('click').on('click', function (this: HTMLElement) { const showHideSvg = this; const 
otherPermissions = $(this).parent().parent().parent().children('#requestPermissionsOther')[0]; if (!otherPermissions.style.height || otherPermissions.style.height === '0px') { $(otherPermissions).animate({ height: otherPermissions.scrollHeight + 'px' }, 350, function () { (<unknown>showHideSvg.children[0] as HTMLElement).style.display = 'none'; (<unknown>showHideSvg.children[1] as HTMLElement).style.display = 'block'; }); } else { $(otherPermissions).animate({ height: 0 }, 350, function () { (<unknown>showHideSvg.children[0] as HTMLElement).style.display = 'block'; (<unknown>showHideSvg.children[1] as HTMLElement).style.display = 'none'; }); } }); let permission: string; $(this.shadowRoot.querySelectorAll('.requestPermissionButton')).off('click').on('click', function (this: HTMLPaperCheckboxElement) { permission = this.previousElementSibling.previousElementSibling.textContent; const slider = this; if (this.checked) { try { browserAPI.permissions.request({ permissions: [permission as _browser.permissions.Permission] }).then((accepted) => { if (!accepted) { //The user didn't accept, don't pretend it's active when it's not, turn it off slider.checked = false; } else { //Accepted, remove from to-request permissions browserAPI.storage.local.get<CRM.StorageLocal>().then((e) => { const permissionsToRequest = e.requestPermissions; permissionsToRequest.splice(permissionsToRequest.indexOf(permission), 1); browserAPI.storage.local.set({ requestPermissions: permissionsToRequest }); }); } }); } catch (e) { //Accepted, remove from to-request permissions browserAPI.storage.local.get<CRM.StorageLocal>().then((e) => { const permissionsToRequest = e.requestPermissions; permissionsToRequest.splice(permissionsToRequest.indexOf(permission), 1); browserAPI.storage.local.set({ requestPermissions: permissionsToRequest }); }); } } else { browserAPI.permissions.remove({ permissions: [permission as _browser.permissions.Permission] }).then((removed) => { if (!removed) { //It didn't get 
removed slider.checked = true; } }); } }); $(this.shadowRoot.querySelectorAll('#requestPermissionsAcceptAll')).off('click').on('click', function () { browserAPI.permissions.request({ permissions: toRequest as _browser.permissions.Permission[] }).then((accepted) => { if (accepted) { browserAPI.storage.local.set({ requestPermissions: [] }); $('.requestPermissionButton.required').each(function (this: HTMLPaperCheckboxElement) { this.checked = true; }); } }); }); } return fn; } /** * Shows the user a dialog and asks them to allow/deny those permissions */ private static async _requestPermissions(this: CrmApp, toRequest: CRM.Permission[], force: boolean = false) { let i; let index; const allPermissions = this.templates.getPermissions(); for (i = 0; i < toRequest.length; i++) { index = allPermissions.indexOf(toRequest[i]); if (index === -1) { toRequest.splice(index, 1); i--; } else { allPermissions.splice(index, 1); } } browserAPI.storage.local.set({ requestPermissions: toRequest }); if (toRequest.length > 0 || force) { const allowed = browserAPI.permissions ? 
await browserAPI.permissions.getAll() : { permissions: [] }; const requested: { name: string; description: string; toggled: boolean; }[] = []; for (i = 0; i < toRequest.length; i++) { requested.push({ name: toRequest[i], description: this.templates.getPermissionDescription(toRequest[i]), toggled: false }); } const other: { name: string; description: string; toggled: boolean; }[] = []; for (i = 0; i < allPermissions.length; i++) { other.push({ name: allPermissions[i], description: this.templates.getPermissionDescription(allPermissions[i]), toggled: (allowed.permissions.indexOf((allPermissions as _browser.permissions.Permission[])[i]) > -1) }); } const requestPermissionsOther = this.$$('#requestPermissionsOther'); const overlayContainer: { overlay: HTMLPaperDialogElement; } = { overlay: null }; const handler = this._genRequestPermissionsHandler(overlayContainer, toRequest); const interval = window.setInterval(() => { try { const centerer = window.doc.requestPermissionsCenterer as CenterElement; const overlay = overlayContainer.overlay = window.app.util.getQuerySlot()(centerer)[0] as HTMLPaperDialogElement; if (overlay.open) { window.clearInterval(interval); const innerOverlay = window.app.util.getQuerySlot()(overlay)[0] as HTMLElement; window.app.$.requestedPermissionsTemplate.items = requested; window.app.$.requestedPermissionsOtherTemplate.items = other; overlay.addEventListener('iron-overlay-opened', handler); setTimeout(function () { const requestedPermissionsCont = innerOverlay.querySelector('#requestedPermissionsCont'); const requestedPermissionsAcceptAll = innerOverlay.querySelector('#requestPermissionsAcceptAll'); const requestedPermissionsType = innerOverlay.querySelector('.requestPermissionsType'); if (requested.length === 0) { requestedPermissionsCont.style.display = 'none'; requestPermissionsOther.style.height = (31 * other.length) + 'px'; requestedPermissionsAcceptAll.style.display = 'none'; requestedPermissionsType.style.display = 'none'; } else { 
requestedPermissionsCont.style.display = 'block'; requestPermissionsOther.style.height = '0'; requestedPermissionsAcceptAll.style.display = 'block'; requestedPermissionsType.style.display = 'block'; } overlay.open(); }, 0); } } catch (e) { //Somehow the element doesn't exist yet } }, 100); } }; private static async _transferCRMFromOld(this: CrmApp, openInNewTab: boolean, storageSource: { getItem(index: string | number): any; } = localStorage, method: SCRIPT_CONVERSION_TYPE = SCRIPT_CONVERSION_TYPE.BOTH): Promise<CRM.Tree> { return await this._transferFromOld.transferCRMFromOld(openInNewTab, storageSource, method); }; static initCodeOptions(this: CrmApp, node: CRM.ScriptNode | CRM.StylesheetNode) { this.$.codeSettingsDialog.item = node; this.$.codeSettingsNodeName.innerText = node.name; this.$.codeSettingsRepeat.items = this.generateCodeOptionsArray(node.value.options); this.$.codeSettingsNoItems.if = this.$.codeSettingsRepeat.items.length === 0; this.$.codeSettingsRepeat.render(); this.async(() => { this.$.codeSettingsDialog.fit(); Array.prototype.slice.apply(this.$.codeSettingsDialog.querySelectorAll('paper-dropdown-menu')) .forEach((el: HTMLPaperDropdownMenuElement) => { el.init(); el.updateSelectedContent(); }); this.$.codeSettingsDialog.open(); }, 250); } static async versionUpdateChanged(this: CrmApp) { if (this._isVersionUpdateTabX(this.versionUpdateTab, 1)) { const versionUpdateDialog = this.$.versionUpdateDialog; if (!versionUpdateDialog.editorManager) { versionUpdateDialog.editorManager = await this.$.tryOutEditor.create(this.$.tryOutEditor.EditorMode.JS, { value: '//some javascript code\nvar body = document.getElementById(\'body\');\nbody.style.color = \'red\';\n\n', language: 'javascript', theme: window.app.settings.editor.theme === 'dark' ? 
/**
 * Hands out the next unique node ID, persisting the counter to synced
 * settings (when they are loaded) so IDs stay unique across sessions.
 *
 * @returns The freshly allocated ID.
 */
static generateItemId(this: CrmApp) {
	// Initialize the counter on first use.
	if (!this._latestId) {
		this._latestId = 0 as CRM.GenericNodeId;
	}
	this._latestId++;
	const settings = this.settings;
	if (settings) {
		settings.latestId = this._latestId;
		window.app.upload();
	}
	return this._latestId;
}
/**
 * Repairs the shape of a single CRM node in place: menu nodes must always
 * carry a `children` array and that array must contain no dead (falsy)
 * entries. Recurses into all children.
 *
 * @param node - The node to normalize (mutated in place).
 * @returns Whether anything about this node or its descendants was changed.
 */
private static _assertCRMNodeShape(this: CrmApp, node: CRM.Node): boolean {
	// Only menu nodes own children; everything else is already well-shaped.
	if (node.type !== 'menu') {
		return false;
	}
	let didChange = false;
	if (!node.children) {
		node.children = [];
		didChange = true;
	}
	// Walk backwards so splicing out dead entries doesn't skip elements.
	for (let idx = node.children.length - 1; idx >= 0; idx--) {
		if (!node.children[idx]) {
			node.children.splice(idx, 1);
			didChange = true;
		}
	}
	// The recursive call comes first so it always executes, even after
	// didChange has already become true.
	for (const childNode of node.children) {
		didChange = this._assertCRMNodeShape(childNode) || didChange;
	}
	return didChange;
}
PaperToggleOption).setCheckboxDisabledValue(!storageLocal.CRMOnPage); this.pageDemo.create(); } }); }; static async refreshPage(this: CrmApp) { //Reset dialog if (window.app.item) { const dialog = window[window.app.item.type + 'Edit' as 'scriptEdit' | 'stylesheetEdit' | 'linkEdit' | 'dividerEdit' | 'menuEdit']; dialog && dialog.cancel(); } window.app.item = null; //Reset storages window.app.settings = window.app.storageLocal = null; window.app._settingsCopy = window.app._storageLocalCopy = null; if (window.Storages) { window.Storages.clearStorages(); await window.Storages.loadStorages(); } else { await browserAPI.runtime.sendMessage({ type: '_resetSettings' }); } //On a demo or test page right now, use background page to init settings await this._setup.setupStorages(); //Reset checkboxes this._setup.initCheckboxes(window.app.storageLocal); //Reset default links and searchengines Array.prototype.slice.apply(this.shadowRoot.querySelectorAll('default-link')).forEach(function (link: DefaultLink) { link.reset(); }); //Reset regedit part window.doc.URISchemeFilePath.value = 'C:\\files\\my_file.exe'; window.doc.URISchemeSchemeName.value = await this.__async(I18NKeys.crmApp.uriScheme.example); //Hide all open dialogs Array.prototype.slice.apply(this.shadowRoot.querySelectorAll('paper-dialog')).forEach((dialog: HTMLPaperDialogElement) => { dialog.opened && dialog.close(); }); this.upload(true); await window.onExistsChain(window, 'app', 'settings', 'crm'); }; private static _codeStr(code: string): { content: string; isCode: true; } { return { content: code, isCode: true } } private static _logCode(...args: ({ content: string; isCode: true; }|string)[]) { let currentWord: string = ''; const logArgs: string[] = []; const styleArgs: string[] = []; const isEdge = BrowserAPI.getBrowser() === 'edge'; for (const arg of args) { if (typeof arg === 'string') { currentWord += arg; } else { const { content } = arg; if (isEdge) { currentWord += arg; } else { 
logArgs.push(`${currentWord}%c${content}`); styleArgs.push('color: grey;font-weight: bold;'); currentWord = '%c'; styleArgs.push('color: white; font-weight: regular'); } } } if (currentWord.length > 0) { logArgs.push(currentWord); } console.log.apply(console, [logArgs.join(' ')].concat(styleArgs)); } private static _getDotValue<T extends { [key: string]: T | U }, U>(this: CrmApp, source: T, index: string): U { const indexes = index.split('.'); let currentValue: T | U = source; for (let i = 0; i < indexes.length; i++) { if (indexes[i] in (currentValue as any)) { currentValue = (currentValue as T)[indexes[i]]; } else { return undefined; } } return currentValue as U; } private static dependencyMet(this: CrmApp, data: TypeCheckConfig, optionals: { [key: string]: any; [key: number]: any; }): boolean { if (data.dependency && !optionals[data.dependency]) { optionals[data.val] = false; return false; } return true; } private static _isDefined(this: CrmApp, data: TypeCheckConfig, value: any, optionals: { [key: string]: any; [key: number]: any; }, errors: TypeCheckErrors): boolean | 'continue' { //Check if it's defined if (value === undefined || value === null) { if (data.optional) { optionals[data.val] = false; return 'continue'; } else { errors.push({ err: `Value for ${data.val} is not set` }); return false; } } return true; } private static _typesMatch(this: CrmApp, data: TypeCheckConfig, value: any, errors: TypeCheckErrors): string { const types = Array.isArray(data.type) ? 
/**
 * Validates a numeric value against the optional `min`/`max` bounds of a
 * type-check config entry, recording a descriptive error on violation.
 *
 * @param data   - The config entry (its `min`/`max`/`val` fields are read).
 * @param value  - The number being checked.
 * @param errors - Accumulator that a failure message is pushed onto.
 * @returns True when the value is within bounds (or no bounds are set).
 */
private static _checkNumberConstraints(this: CrmApp, data: TypeCheckConfig, value: number, errors: TypeCheckErrors): boolean {
	const { min, max, val } = data;
	if (min !== undefined && value < min) {
		errors.push({
			err: `Value for ${val} is smaller than ${min}`
		});
		return false;
	}
	if (max !== undefined && value > max) {
		errors.push({
			err: `Value for ${val} is bigger than ${max}`
		});
		return false;
	}
	return true;
}
/**
 * Validates every element of an array value against the `forChildren`
 * sub-schema of a type-check config entry: each required child property
 * must be present and every present property must match its child type.
 *
 * @param data   - The config entry whose `forChildren` schema is applied.
 * @param value  - The array whose elements are checked.
 * @param errors - Accumulator for failure messages.
 * @returns True when all elements satisfy the child schema.
 */
private static _checkArrayChildrenConstraints<T extends { [key: string]: any; }>(this: CrmApp, data: TypeCheckConfig, value: T[], errors: TypeCheckErrors): boolean {
	for (const entry of value) {
		for (const forChild of data.forChildren) {
			const childValue = entry[forChild.val];
			// == null catches both undefined and null.
			if (childValue == null) {
				// A missing property only fails when it is required.
				if (!forChild.optional) {
					errors.push({
						err: `For not all values in the array ${data.val} is the property ${forChild.val} defined`
					});
					return false;
				}
				continue;
			}
			if (!this._checkArrayChildType(data, childValue, forChild, errors)) {
				return false;
			}
		}
	}
	return true;
}
/**
 * Runs the declarative type-check schema against the *local* (unsynced)
 * storage object and returns any violations found.
 *
 * NOTE(review): some entries use `type: ''` (e.g. `addedPermissions`
 * children's `node`) — `_typesMatch` compares against `typeof`, which never
 * yields ''; presumably this means "any type", verify against `_typesMatch`.
 *
 * @returns The list of type-check errors (empty when the format is valid).
 */
private static _checkLocalFormat(this: CrmApp) {
	const storage = window.app.storageLocal;
	const errors: TypeCheckErrors = [];
	this.typeCheck(storage, [{
		// Installed libraries with their code and optional origin URL.
		val: 'libraries',
		type: 'array',
		forChildren: [{
			val: 'code',
			type: 'string'
		}, {
			val: 'name',
			type: 'string',
			optional: true
		}, {
			val: 'url',
			type: 'string',
			optional: true
		}, {
			val: 'ts',
			type: 'object'
		}]
	}, {
		val: 'requestPermissions',
		type: 'array'
	}, {
		val: 'selectedCrmType',
		type: 'array',
	}, {
		val: 'jsLintGlobals',
		type: 'array'
	}, {
		val: 'globalExcludes',
		type: 'array'
	}, {
		val: 'resources',
		type: 'object'
	}, {
		val: 'nodeStorage',
		type: 'object'
	}, {
		val: 'resourceKeys',
		type: 'array'
	}, {
		val: 'urlDataPairs',
		type: 'object'
	}, {
		// Boolean feature flags / UI state.
		val: 'notFirstTime',
		type: 'boolean'
	}, {
		val: 'lastUpdatedAt',
		type: 'string'
	}, {
		val: 'authorName',
		type: 'string'
	}, {
		val: 'recoverUnsavedData',
		type: 'boolean'
	}, {
		val: 'CRMOnPage',
		type: 'boolean'
	}, {
		val: 'editCRMInRM',
		type: 'boolean'
	}, {
		val: 'useAsUserscriptInstaller',
		type: 'boolean'
	}, {
		val: "useAsUserstylesInstaller",
		type: "boolean"
	}, {
		val: 'hideToolsRibbon',
		type: 'boolean'
	}, {
		val: 'shrinkTitleRibbon',
		type: 'boolean'
	}, {
		val: 'showOptions',
		type: 'boolean'
	}, {
		val: 'catchErrors',
		type: 'boolean'
	}, {
		val: 'useStorageSync',
		type: 'boolean'
	}, {
		val: 'settingsVersionData',
		type: 'object'
	}, {
		// Permissions added per node; child `node` accepts any type.
		val: 'addedPermissions',
		type: 'array',
		forChildren: [{
			val: 'node',
			type: ''
		}, {
			val: 'permissions',
			type: 'array'
		}]
	}, {
		// Version-bump records shown in the "scripts updated" toast.
		val: 'updatedScripts',
		type: 'array',
		forChildren: [{
			val: 'name',
			type: 'string'
		}, {
			val: 'oldVersion',
			type: 'string'
		}, {
			val: 'newVersion',
			type: 'string'
		}]
	}, {
		val: 'isTransfer',
		type: 'boolean'
	}, {
		val: 'upgradeErrors',
		type: 'object',
		optional: true
	}], errors);
	return errors;
}
/**
 * Validates both storage areas and returns all violations, each tagged
 * with the storage area ('local' or 'sync') it came from.
 *
 * @returns Combined list of tagged type-check errors.
 */
private static _checkFormat(this: CrmApp) {
	type TaggedError = {
		err: string;
		storageType: 'local'|'sync';
	};
	// Tag each error in place with its originating storage area.
	const localErrors = this._checkLocalFormat().map((err) => {
		err.storageType = 'local';
		return err;
	}) as TaggedError[];
	const syncErrors = this._checkSyncFormat().map((err) => {
		err.storageType = 'sync';
		return err;
	}) as TaggedError[];
	return localErrors.concat(syncErrors);
}
this._codeStr('window.getLocalFormat();')); this._logCode(' To read the current settings just call ', this._codeStr('window.app.storageLocal;')); this._logCode(''); this._logCode('Edit synchronized settings as follows:'); this._logCode(' ', this._codeStr('window.app.settings.<setting> = <value>')); this._logCode(' For example: ', this._codeStr('window.app.settings.rootName = "ROOT";')); this._logCode(' Or: ', this._codeStr('window.app.settings.editor.theme = "white";')); this._logCode(' To get the type formatting of local settings call ', this._codeStr('window.getSyncFormat();')); this._logCode(' To read the current settings just call ', this._codeStr('window.app.settings;')); this._logCode(''); this._logCode('Edit the CRM as follows:'); this._logCode(' ', this._codeStr('window.app.settings.crm[<index>].<property> = <value>')); this._logCode(' For example: ', this._codeStr('window.app.settings.crm[0].name = "MyName";')); this._logCode(' To find the index either call ', this._codeStr('window.app.settings.crm;'), ' or ', this._codeStr('window.getIndexByName("<name>");')); this._logCode(' To get the type formatting of a CRM node call ', this._codeStr('window.getCRMFormat();')); this._logCode(''); this._logCode('To force upload any changes you made call ', this._codeStr('window.upload();')); this._logCode('To look at the changes that were made call ', this._codeStr('window.getChanges();')); this._logCode('To check the format of your changes call ', this._codeStr('window.checkFormat();')); this._logCode('To upload changes you made if the format is correct call ', this._codeStr('window.uploadIfCorrect();')); }; window.getLocalFormat = () => { this._logCode('Format can be found here https://github.com/SanderRonde/CustomRightClickMenu/blob/polymer-2/tools/definitions/crm.d.ts#L1148'); }; window.getSyncFormat = () => { this._logCode('Format can be found here https://github.com/SanderRonde/CustomRightClickMenu/blob/polymer-2/tools/definitions/crm.d.ts#L1091'); }; 
window.getCRMFormat = () => { this._logCode('Format can be found here https://github.com/SanderRonde/CustomRightClickMenu/blob/polymer-2/tools/definitions/crm.d.ts#L1103'); }; window.upload = window.app.upload; window.getChanges = () => { this._logCode('Here are the changes that have been made. Keep in mind that this includes unuploaded changes the extension made.'); this._logCode(''); const { hasLocalChanged, haveSettingsChanged, localChanges, settingsChanges } = this.uploading.getChanges(false); if (!hasLocalChanged) { this._logCode('No changes to local storage were made'); } else { this._logCode('The following changes to local storage were made'); for (const change of localChanges) { this._logCode('Key ', this._codeStr(change.key), ' had value ', this._codeStr(change.oldValue), ' and was changed to ', this._codeStr(change.newValue)); } } this._logCode(''); if (!haveSettingsChanged) { this._logCode('No changes to synced storage were made'); } else { this._logCode('The following changes to synced storage were made'); for (const change of settingsChanges) { this._logCode('Key ', this._codeStr(change.key), ' had value ', this._codeStr(change.oldValue), ' and was changed to ', this._codeStr(change.newValue)); } } } window.checkFormat = () => { const errors = this._checkFormat(); if (errors.length === 0) { this._logCode('Format is correct!'); } else { for (const err of errors) { this._logCode('Storage type: ', err.storageType, this._codeStr(err.err)); } } } window.uploadIfCorrect = () => { if (this._checkFormat().length === 0) { window.app.upload(); this._logCode('Successfully uploaded'); } else { this._logCode('Did not upload because errors were found.'); } } } static ready(this: CrmApp) { window.app = this; window.doc = window.app.$; this._setupConsoleInterface(); browserAPI.runtime.onInstalled.addListener(async (details) => { if (details.reason === 'update') { //Show a little message this.$.messageToast.text = this.___(I18NKeys.crmApp.code.extensionUpdated, (await 
/**
 * Snapshots the entire `localStorage` contents into an IndexedDB database
 * ('localStorageBackup') before the legacy-CRM transfer touches it.
 *
 * NOTE(review): the snapshot is only written inside `onupgradeneeded`,
 * i.e. the first time the database is created. If the database already
 * exists, this call opens it and writes nothing — presumably intentional
 * ("keep the original backup"), but confirm.
 */
private static _backupLocalStorage() {
	// Bail out when there is nothing to back up or nowhere to put it.
	if (typeof localStorage === 'undefined' ||
		(typeof window.indexedDB === 'undefined' && typeof (window as any).webkitIndexedDB === 'undefined')) {
		return;
	}
	const data = JSON.stringify(localStorage);
	// Fall back to the prefixed implementation on older webkit browsers.
	const idb: IDBFactory = window.indexedDB || (window as any).webkitIndexedDB;
	const req = idb.open('localStorageBackup', 1);
	req.onerror = () => {
		console.log('Error backing up localStorage data');
	};
	req.onupgradeneeded = (event) => {
		const db: IDBDatabase = (event.target as any).result;
		const objectStore = db.createObjectStore('data', {
			keyPath: 'id'
		});
		// Single record keyed 0 holding the serialized snapshot.
		objectStore.add({
			id: 0,
			data: data
		});
	}
}
/**
 * Loads an extension-bundled script by injecting a `<script>` tag.
 *
 * Resolves as soon as the tag is appended — not when the script has
 * finished loading; callers (see `_loadTernFiles`) poll for the script's
 * globals themselves.
 *
 * @param path - Extension-relative path of the script file.
 */
private static async _execFile(path: string): Promise<void> {
	const el = document.createElement('script');
	// FIX: resolve the path exactly once. The previous code called
	// getURL(getURL(path)), wrapping an already-absolute extension URL
	// in a second resolution.
	el.src = browserAPI.runtime.getURL(path);
	document.body.appendChild(el);
}
/**
 * Accessor for the owning CrmApp element; the static methods in this
 * helper class have no `this` binding to the app and reach it via the
 * `window.app` global instead.
 */
static parent() {
	return window.app;
}
'script' : 'backgroundScript')] = editingObj.val; } else { crmItem.value.stylesheet = editingObj.val; } window.app.upload(); browserAPI.storage.local.set({ editing: null }); window.setTimeout(function () { //Remove the CodeMirror instances for performance editor.destroy(); }, 500); }); this.parent().$.discardButton.addEventListener('click', () => { browserAPI.storage.local.set({ editing: null }); window.setTimeout(function () { //Remove the CodeMirror instances for performance editor.destroy(); }, 500); }); const isTs = crmItem.type === 'script' && crmItem.value.ts && crmItem.value.ts.enabled; const stopHighlighting = (element: HTMLEditCrmItemElement) => { const item = element.$$('.item'); item.animate([ { opacity: '1' }, { opacity: '0.6' } ], { duration: 250, easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)' }).onfinish = () => { item.style.opacity = '0.6'; $(this.parent().$$('.pageCont')).animate({ backgroundColor: 'white' }, 200); Array.prototype.slice.apply(this.parent().shadowRoot.querySelectorAll('.crmType')).forEach((crmType: HTMLElement) => { crmType.classList.add('dim'); }); const editCrmItems = window.app.editCRM.getItems(); editCrmItems.forEach((el) => { el.$$('.item').animate([{ opacity: '0' }, { opacity: '1' }], { duration: 200 }).onfinish = () => { document.body.style.pointerEvents = 'all'; } }); window.setTimeout(() => { window.doc.restoreChangesDialog.style.display = 'block'; }, 200); }; }; const path = this.parent().nodesById.get(editingObj.id).path; const highlightItem = () => { document.body.style.pointerEvents = 'none'; const columnConts = this.parent().editCRM.$.CRMEditColumnsContainer.children; const columnCont = columnConts[(path.length - 1)]; const paperMaterial = columnCont.querySelector('.paper-material'); const crmEditColumn = paperMaterial.querySelector('.CRMEditColumn'); const editCRMItems = crmEditColumn.querySelectorAll('edit-crm-item'); const crmElement = editCRMItems[path[path.length - 1]]; //Just in case the item doesn't exist 
(anymore) if (crmElement.$$('.item')) { crmElement.$$('.item').animate([{ opacity: '0.6' }, { opacity: '1' }], { duration: 250, easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)' }).onfinish = function (this: Animation) { crmElement.$$('.item').style.opacity = '1'; }; setTimeout(function () { stopHighlighting(crmElement); }, 2000); } else { window.doc.restoreChangesDialog.style.display = 'block'; $(this.parent().shadowRoot.querySelectorAll('.pageCont')).animate({ backgroundColor: 'white' }, 200); Array.prototype.slice.apply(this.parent().shadowRoot.querySelectorAll('.crmType')).forEach((crmType: HTMLElement) => { crmType.classList.remove('dim'); }); const crmeditItemItems = window.app.editCRM.getItems().map((element) => { return element.$$('.item'); }); $(crmeditItemItems).animate({ opacity: 1 }, 200, function () { document.body.style.pointerEvents = 'all'; }); } }; window.doc.highlightChangedScript.addEventListener('click', () => { //Find the element first //Check if the element is already visible window.doc.restoreChangesDialog.style.display = 'none'; this.parent().$$('.pageCont').style.backgroundColor = 'rgba(0,0,0,0.4)'; window.app.editCRM.getItems().forEach((element) => { const item = element.$$('.item'); item.style.opacity = '0.6'; }); Array.prototype.slice.apply(this.parent().shadowRoot.querySelectorAll('.crmType')).forEach((crmType: HTMLElement) => { crmType.classList.add('dim'); }); setTimeout(function () { if (path.length === 1) { //Always visible highlightItem(); } else { let visible = true; for (let i = 1; i < path.length; i++) { if (window.app.editCRM.crm[i].indent.length !== path[i - 1]) { visible = false; break; } } if (!visible) { //Make it visible const popped = JSON.parse(JSON.stringify(path)); popped.pop(); window.app.editCRM.build({ setItems: popped }); setTimeout(highlightItem, 700); } else { highlightItem(); } } }, 500); }); window.doc.restoreChangesDialog.open(); let editor: any = null; window.setTimeout(async () => { const me = 
/**
 * Converts the legacy single-number CRM-type selection into the newer
 * six-slot boolean-array form (one slot per content type).
 *
 * @param crmType - Index of the previously selected content type.
 * @returns A six-element boolean array with only that index set.
 */
private static _crmTypeNumberToArr(crmType: number): boolean[] {
	const flags = new Array<boolean>(6).fill(false);
	flags[crmType] = true;
	return flags;
}
parent._latestId = 0 as CRM.GenericNodeId; } window.doc.editCRMInRM.setCheckboxDisabledValue(!storageLocal .CRMOnPage); if (parent._isDemo()) { window.doc.CRMOnPage.toggled = true; window.app.setLocal('CRMOnPage', true); window.doc.CRMOnPage.setCheckboxDisabledValue(true); parent.pageDemo.create() } else { storageLocal.CRMOnPage && parent.pageDemo.create(); } } Array.prototype.slice.apply(parent.shadowRoot.querySelectorAll('paper-toggle-option')).forEach(function (setting: PaperToggleOption) { setting.init(storageLocal); }); parent._setup._bindListeners(); delete storageLocal.nodeStorage; if (storageLocal.requestPermissions && storageLocal.requestPermissions.length > 0) { if (browserAPI.permissions) { await parent._requestPermissions(storageLocal.requestPermissions as CRM.Permission[]); } } if (storageLocal.editing) { const editing = storageLocal.editing as { val: string; id: CRM.GenericNodeId; mode: string; crmType: boolean[]; }; setTimeout(function () { //Check out if the code is actually different const node = parent.nodesById.get(editing.id) as CRM.ScriptNode | CRM.StylesheetNode; const nodeCurrentCode = (node.type === 'script' ? node.value.script : node.value.stylesheet); if (nodeCurrentCode.trim() !== editing.val.trim()) { parent._setup._restoreUnsavedInstances(editing); } else { browserAPI.storage.local.set({ editing: null }); } }, 2500); } if (storageLocal.selectedCrmType !== undefined) { const selected = Array.isArray(storageLocal.selectedCrmType) ? 
// (continuation of setupStorages: CRM types, linter globals, global excludes)
			storageLocal.selectedCrmType :
			this._crmTypeNumberToArr(storageLocal.selectedCrmType);
		parent.crmTypes = selected;
		parent._setup.switchToIcons(selected);
	} else {
		// No stored selection yet — default to every CRM type enabled.
		browserAPI.storage.local.set({
			selectedCrmType: [true, true, true, true, true, true]
		});
		parent.crmTypes = [true, true, true, true, true, true];
		parent._setup.switchToIcons([true, true, true, true, true, true]);
	}
	if (storageLocal.jsLintGlobals) {
		parent.jsLintGlobals = storageLocal.jsLintGlobals;
	} else {
		parent.jsLintGlobals = ['window', '$', 'jQuery', 'crmapi'];
		browserAPI.storage.local.set({
			jsLintGlobals: parent.jsLintGlobals
		});
	}
	if (storageLocal.globalExcludes && storageLocal.globalExcludes.length > 1) {
		parent.globalExcludes = storageLocal.globalExcludes;
	} else {
		parent.globalExcludes = [''];
		browserAPI.storage.local.set({
			globalExcludes: parent.globalExcludes
		});
	}
	if (storageLocal.addedPermissions && storageLocal.addedPermissions.length > 0) {
		// Show the "permissions were added" dialog after the page settles.
		window.setTimeout(function () {
			(window.doc.addedPermissionsTabContainer as AddedPermissionsTabContainer).tab = 0;
			(window.doc.addedPermissionsTabContainer as AddedPermissionsTabContainer).maxTabs =
				storageLocal.addedPermissions.length;
			window.doc.addedPermissionsTabRepeater.items =
				storageLocal.addedPermissions;
			if (storageLocal.addedPermissions.length === 1) {
				(window.doc.addedPermissionNextButton.querySelector('.next') as HTMLElement)
					.style.display = 'none';
			} else {
				(window.doc.addedPermissionNextButton.querySelector('.close') as HTMLElement)
					.style.display = 'none';
			}
			window.doc.addedPermissionPrevButton.style.display = 'none';
			window.doc.addedPermissionsTabRepeater.render();
			window.doc.addedPermissionsDialog.open();
			browserAPI.storage.local.set({
				addedPermissions: null
			});
		}, 2500);
	}
	if (storageLocal.updatedNodes && storageLocal.updatedNodes.length > 0) {
		// Announce node updates through the script-updates toast.
		parent.$.scriptUpdatesToast.text = parent._getUpdatedScriptString(
			storageLocal.updatedNodes[0]);
		parent.$.scriptUpdatesToast.scripts = storageLocal.updatedNodes;
// (continuation of setupStorages: updated-nodes toast, version data, transfer flag)
		parent.$.scriptUpdatesToast.index = 0;
		parent.$.scriptUpdatesToast.show();
		if (storageLocal.updatedNodes.length > 1) {
			parent.$.nextScriptUpdateButton.style.display = 'inline';
		} else {
			parent.$.nextScriptUpdateButton.style.display = 'none';
		}
		browserAPI.storage.local.set({
			updatedScripts: []
		});
		storageLocal.updatedNodes = [];
	}
	if (storageLocal.settingsVersionData && storageLocal.settingsVersionData.wasUpdated) {
		// Clear the "was updated" flag and show a localized settings-updated toast.
		const versionData = storageLocal.settingsVersionData;
		versionData.wasUpdated = false;
		browserAPI.storage.local.set({
			settingsVersionData: versionData
		});
		const toast = window.doc.updatedSettingsToast;
		toast.text = this.parent().___(I18NKeys.crmApp.code.settingsUpdated,
			new Date(versionData.latest.date).toLocaleDateString());
		toast.show();
	}
	if (storageLocal.isTransfer) {
		browserAPI.storage.local.set({
			isTransfer: false
		});
		//Lazyload the image
		window.app.$.stylesheetGif.src =
			window.app.$.stylesheetGif.getAttribute('data-src');
		window.doc.versionUpdateDialog.open();
	}
	parent.storageLocal = storageLocal;
	parent._storageLocalCopy = JSON.parse(JSON.stringify(storageLocal));
	if (storageLocal.useStorageSync && parent._supportsStorageSync()) {
		//Parse the data before sending it to the callback
		browserAPI.storage.sync.get().then((storageSync: any) => {
			const sync = storageSync as {
				[key: string]: string
			} & {
				indexes: number|string[];
			};
			let indexes = sync.indexes;
			// NOTE(review): `indexes === undefined` is already covered by
			// `indexes == null`; `-1` is presumably a legacy sentinel — verify.
			if (indexes == null || indexes === -1 || indexes === undefined) {
				browserAPI.storage.local.set({
					useStorageSync: false
				});
				callback(storageLocal.settings);
			} else {
				// Settings are chunked into `section${i}` keys; rejoin and parse.
				const settingsJsonArray: string[] = [];
				const indexesLength = typeof indexes === 'number' ?
					indexes : (Array.isArray(indexes) ?
// (continuation of setupStorages: reading settings back out of chunked storage.sync)
						indexes.length : 0);
				window.app.util.createArray(indexesLength).forEach((_, index) => {
					settingsJsonArray.push(sync[`section${index}`]);
				});
				const jsonString = settingsJsonArray.join('');
				parent.settingsJsonLength = jsonString.length;
				const settings = JSON.parse(jsonString);
				if (parent.settingsJsonLength >= 102400) {
					// 100KB — presumably the storage.sync size cap; TODO confirm
					window.app.$.useStorageSync.setCheckboxDisabledValue(true);
				}
				callback(settings);
			}
		});
	} else {
		//Send the "settings" object on the storage.local to the callback
		parent.settingsJsonLength = JSON.stringify(storageLocal.settings || {}).length;
		if (!storageLocal.settings) {
			// No local settings — fall back to (re-)reading the synced chunks.
			browserAPI.storage.local.set({
				useStorageSync: true
			});
			browserAPI.storage.sync.get().then((storageSync: any) => {
				const sync = storageSync as {
					[key: string]: string
				} & {
					indexes: string[];
				};
				const indexes = sync.indexes;
				const settingsJsonArray: string[] = [];
				const indexesLength = typeof indexes === 'number' ?
					indexes : (Array.isArray(indexes) ?
						indexes.length : 0);
				window.app.util.createArray(indexesLength).forEach((_, index) => {
					settingsJsonArray.push(sync[`section${index}`]);
				});
				const jsonString = settingsJsonArray.join('');
				parent.settingsJsonLength = jsonString.length;
				const settings = JSON.parse(jsonString);
				callback(settings);
			});
		} else {
			callback(storageLocal.settings);
		}
		if (!parent._supportsStorageSync() || parent.settingsJsonLength >= 102400) {
			window.app.$.useStorageSync.setCheckboxDisabledValue(true);
		}
	}
};
// Resolves once the splash screen is done and polymer elements are registered;
// also prints the console greeting (plain text on Edge, styled elsewhere).
static setupLoadingBar(): Promise<void> {
	return new Promise(async (resolve) => {
		window.splashScreen.done.then(() => {
			//Wait until the element is actually registered to the DOM
			window.setTimeout(() => {
				//All elements have been loaded, unhide them all
				window.setTimeout(() => {
					window.setTimeout(() => {
						//Wait for the fade to pass
						window.polymerElementsLoaded = true;
					}, 500);
					if (BrowserAPI.getBrowser() === 'edge') {
						console.log(this.parent().___(I18NKeys.crmApp.code.hiMessage));
					} else {
						console.log(`%c${this.parent().___(I18NKeys.crmApp.code.hiMessage)}`,
// (continuation of setupLoadingBar's console greeting)
							'font-size:120%;font-weight:bold;');
					}
					console.log(this.parent().___(I18NKeys.crmApp.code.consoleInfo));
				}, 200);
				window.CRMLoaded = window.CRMLoaded || {
					listener: null,
					register(fn) {
						fn();
					}
				}
				window.CRMLoaded.listener && window.CRMLoaded.listener();
				resolve(null);
			}, 25);
		});
	});
};
// Runs every <paper-toggle-option>'s init with the default local storage values.
static initCheckboxes(defaultLocalStorage: CRM.StorageLocal) {
	Array.prototype.slice.apply(this.parent().shadowRoot.querySelectorAll('paper-toggle-option')).forEach(function (setting: PaperToggleOption) {
		setting.init && setting.init(defaultLocalStorage);
	});
};
// Rebuilds the id -> node map for the whole CRM tree (recursing into children).
static orderNodesById(tree: CRM.Tree, root: boolean = true) {
	if (root) {
		this.parent().nodesById.clear();
	}
	for (let i = 0; i < tree.length; i++) {
		const node = tree[i];
		this.parent().nodesById.set(node.id, node);
		node.children && this.orderNodesById(node.children, false);
	}
};
// Highlights the icons for every selected CRM type and fires crmTypeChanged.
// Accepts a legacy single-number selection at runtime despite the boolean[] signature.
static switchToIcons(indexes: boolean[]) {
	if (typeof indexes === 'number') {
		const arr = [false, false, false, false, false, false];
		arr[indexes] = true;
		indexes = arr;
	}
	let i;
	let element;
	const crmTypes = this.parent().shadowRoot.querySelectorAll('.crmType');
	for (i = 0; i < 6; i++) {
		if (indexes[i]) {
			element = <unknown>crmTypes[i] as HTMLElement;
			element.classList.add('toggled');
		}
	}
	this.parent().crmTypes = [...indexes];
	this.parent().fire('crmTypeChanged', {});
};
static parent() {
	return window.app;
}
}

/**
 * Functions related to uploading the data to the backgroundpage
 */
static uploading = class CRMAppUploading {
	// Returns true when the two values differ. Arrays/objects are delegated to
	// util.compareArray/compareObj; primitives use strict inequality.
	private static _areValuesDifferent(val1: any[] | Object, val2: any[] | Object): boolean {
		//Array or object
		const obj1ValIsArray = Array.isArray(val1);
		let obj2ValIsArray = Array.isArray(val2);
		const obj1ValIsObjOrArray = typeof val1 === 'object';
		let obj2ValIsObjOrArray = typeof val2 === 'object';
		if (obj1ValIsObjOrArray) {
			//Array or object
			if (!obj2ValIsObjOrArray) {
				return true;
			} else {
				//Both objects or arrays
				//1 is an array
				if (obj1ValIsArray) {
					//2 is not an array
					if (!obj2ValIsArray) {
						return true;
					} else {
						//Both are arrays, compare them
						if (!this._parent().util.compareArray(val1 as any[], val2 as any[])) {
							//Changes have been found, also say the container arrays have changed
							return true;
						}
					}
				} else {
					//1 is not an array, check if 2 is
					if (obj2ValIsArray) {
						//2 is an array, changes
						return true;
					} else {
						//2 is also not an array, they are both objects
						if (!this._parent().util.compareObj(val1, val2)) {
							//Changes have been found, also say the container arrays have changed
							return true;
						}
					}
				}
			}
		} else if (val1 !== val2) {
			//They are both normal string//bool values, do a normal comparison
			return true;
		}
		return false;
	};
	// Collects key-level differences between obj1 and obj2 into `changes`;
	// returns whether any difference was found.
	private static _getObjDifferences<T, S>(obj1: {
		[key: string]: T
		[key: number]: T
	}, obj2: {
		[key: string]: S
		[key: number]: S
	}, changes: {
		oldValue: S;
		newValue: T;
		key: any;
	}[]): boolean {
		for (let key in obj1) {
			if (obj1.hasOwnProperty(key)) {
				if (this._areValuesDifferent(obj1[key], obj2[key])) {
					changes.push({
						oldValue: obj2[key],
						newValue: obj1[key],
						key: key
					});
				}
			}
		}
		return changes.length > 0;
	};
	// Diffs current local/synced settings against their last-uploaded copies.
	// With force=true everything counts as changed (diff against {}).
	static getChanges(force: boolean, {
		local = this._parent().storageLocal,
		localCopy = this._parent()._storageLocalCopy,
		sync = this._parent().settings,
		syncCopy = this._parent()._settingsCopy,
	}: {
		local?: CRM.StorageLocal;
		localCopy?: CRM.StorageLocal;
		sync?: CRM.SettingsStorage;
		syncCopy?: CRM.SettingsStorage;
	} = {
		local: this._parent().storageLocal,
		localCopy: this._parent()._storageLocalCopy,
		sync: this._parent().settings,
		syncCopy: this._parent()._settingsCopy,
	}): {
		hasLocalChanged: boolean;
		haveSettingsChanged: boolean;
		localChanges: any;
		settingsChanges: any;
	} {
		const localChanges: {
			oldValue: any;
			newValue: any;
			key: any;
		}[] = [];
		const settingsChanges: {
			oldValue: any;
			newValue: any;
			key: any;
		}[] = [];
		const hasLocalChanged = this._getObjDifferences(local, force ? {} : localCopy, localChanges);
		const haveSettingsChanged = this._getObjDifferences(sync, force ?
// (continuation of getChanges: second _getObjDifferences argument continues)
			{} : syncCopy, settingsChanges);
		return {
			hasLocalChanged, haveSettingsChanged, localChanges, settingsChanges
		}
	}
	// Re-snapshots the current state as "last uploaded" copies for future diffs.
	private static _updateCopies() {
		this._parent()._storageLocalCopy = JSON.parse(JSON.stringify(this._parent().storageLocal));
		this._parent()._settingsCopy = JSON.parse(JSON.stringify(this._parent().settings));
	}
	// Sends computed changes to the background page, refreshes the page demo and
	// updates the snapshots.
	private static _uploadChanges({ hasLocalChanged, haveSettingsChanged, localChanges, settingsChanges }: {
		hasLocalChanged: boolean;
		haveSettingsChanged: boolean;
		localChanges: any;
		settingsChanges: any;
	}) {
		if (hasLocalChanged || haveSettingsChanged) {
			//Changes occured
			browserAPI.runtime.sendMessage({
				type: 'updateStorage',
				data: {
					type: 'optionsPage',
					localChanges: hasLocalChanged && localChanges,
					settingsChanges: haveSettingsChanged && settingsChanges
				}
			});
		}
		this._parent().pageDemo.create();
		this._updateCopies();
	}
	static upload(force: boolean) {
		//Send changes to background-page, background-page uploads everything
		//Compare storageLocal objects
		this._uploadChanges(this.getChanges(force));
	};
	// Snapshot used by revert(); null until a revert point is created.
	private static _lastRevertPoint: {
		local: CRM.StorageLocal;
		sync: CRM.SettingsStorage;
	} = null;
	// Deep-copies the current local+synced state and (optionally) shows an undo toast.
	static createRevertPoint(showToast: boolean = true, toastTime: number = 10000) {
		if (showToast) {
			window.app.util.showToast('Undo');
			window.app.$.undoToast.duration = toastTime;
			window.app.$.undoToast.show();
		}
		const revertPoint = {
			local: JSON.parse(JSON.stringify(window.app.storageLocal)),
			sync: JSON.parse(JSON.stringify(window.app.settings))
		};
		this._lastRevertPoint = revertPoint;
		return revertPoint;
	}
	// Shows the undo toast for an externally-created revert point and remembers it.
	static showRevertPointToast(revertPoint: {
		local: CRM.StorageLocal;
		sync: CRM.SettingsStorage;
	}, toastTime: number = 10000) {
		window.app.util.showToast('Undo');
		window.app.$.undoToast.duration = toastTime;
		window.app.$.undoToast.show();
		this._lastRevertPoint = revertPoint;
	}
	// Restores state to the given (default: last) revert point and re-uploads.
	static revert(revertPoint: {
		local: CRM.StorageLocal;
		sync: CRM.SettingsStorage;
	} = this._lastRevertPoint) {
		// Hide the toast if it isn't hidden already
		window.app.$.undoToast.hide();
// (continuation of revert)
		if (!this._lastRevertPoint) return;
		this._uploadChanges(this.getChanges(false, {
			local: revertPoint.local,
			localCopy: this._parent().storageLocal,
			sync: revertPoint.sync,
			syncCopy: this._parent().settings
		}));
		window.app.settings = revertPoint.sync;
		window.app.updateCrmRepresentation(window.app.settings.crm);
		window.app.editCRM.build();
	}
	private static _parent() {
		return window.app;
	}
}

/**
 * Functions for transferring an old version of a script to a new version
 */
static legacyScriptReplace = class LegacyScriptReplace {
	// NOTE(review): class name is spelled "Logal"; kept as-is since it is code.
	static localStorageReplace = class LogalStorageReplace {
		// Recursively walks a Tern AST expression looking for an identifier named
		// strToFind; calls onFind on the first match and returns whether one was found.
		static findExpression(expression: Tern.Expression, data: PersistentData, strToFind: string, onFind: (data: PersistentData, expression: Tern.Expression) => void): boolean {
			if (!expression) {
				return false;
			}
			switch (expression.type) {
				case 'Identifier':
					if (expression.name === strToFind) {
						onFind(data, expression);
						return true;
					}
					break;
				case 'VariableDeclaration':
					for (let i = 0; i < expression.declarations.length; i++) {
						//Check if it's an actual chrome assignment
						const declaration = expression.declarations[i];
						if (declaration.init) {
							if (this.findExpression(declaration.init, data, strToFind, onFind)) {
								return true;
							}
						}
					}
					break;
				case 'MemberExpression':
					data.isObj = true;
					if (this.findExpression(expression.object, data, strToFind, onFind)) {
						return true;
					}
					data.siblingExpr = expression.object;
					data.isObj = false;
					return this.findExpression(expression.property as Tern.Identifier, data, strToFind, onFind);
				case 'CallExpression':
					if (expression.arguments && expression.arguments.length > 0) {
						for (let i = 0; i < expression.arguments.length; i++) {
							if (this.findExpression(expression.arguments[i], data, strToFind, onFind)) {
								return true;
							}
						}
					}
					if (expression.callee) {
						return this.findExpression(expression.callee, data, strToFind, onFind);
					}
					break;
				case 'AssignmentExpression':
					return this.findExpression(expression.right, data, strToFind, onFind);
				case 'FunctionExpression': case
// (continuation of findExpression: remaining AST node kinds)
				'FunctionDeclaration':
					for (let i = 0; i < expression.body.body.length; i++) {
						if (this.findExpression(expression.body.body[i], data, strToFind, onFind)) {
							return true;
						}
					}
					break;
				case 'ExpressionStatement':
					return this.findExpression(expression.expression, data, strToFind, onFind);
				case 'SequenceExpression':
					for (let i = 0; i < expression.expressions.length; i++) {
						if (this.findExpression(expression.expressions[i], data, strToFind, onFind)) {
							return true;
						}
					}
					break;
				// NOTE(review): 'UnaryExpression' falls through to the conditional
				// handling and reads .consequent/.alternate, which unary nodes do not
				// have — presumably should have its own branch; verify.
				case 'UnaryExpression':
				case 'ConditionalExpression':
					if (this.findExpression(expression.consequent, data, strToFind, onFind)) {
						return true;
					}
					return this.findExpression(expression.alternate, data, strToFind, onFind);
				case 'IfStatement':
					if (this.findExpression(expression.consequent, data, strToFind, onFind)) {
						return true;
					}
					if (expression.alternate) {
						return this.findExpression(expression.alternate, data, strToFind, onFind);
					}
					break;
				case 'LogicalExpression':
				case 'BinaryExpression':
					if (this.findExpression(expression.left, data, strToFind, onFind)) {
						return true;
					}
					return this.findExpression(expression.right, data, strToFind, onFind);
				case 'BlockStatement':
					for (let i = 0; i < expression.body.length; i++) {
						if (this.findExpression(expression.body[i], data, strToFind, onFind)) {
							return true;
						}
					}
					break;
				case 'ReturnStatement':
					return this.findExpression(expression.argument, data, strToFind, onFind);
				// NOTE(review): 'ObjectExpressions' looks like a typo for
				// 'ObjectExpression' — this branch presumably never matches; verify.
				case 'ObjectExpressions':
					for (let i = 0; i < expression.properties.length; i++) {
						if (this.findExpression(expression.properties[i].value, data, strToFind, onFind)) {
							return true;
						}
					}
					break;
			}
			return false;
		}
		// Maps each line to its [start, end) character offsets within the joined text.
		static getLineSeperators(lines: string[]): {
			start: number;
			end: number;
		}[] {
			let index = 0;
			const lineSeperators = [];
			for (let i = 0; i < lines.length; i++) {
				lineSeperators.push({
					start: index,
					end: index += lines[i].length + 1
				});
			}
			return lineSeperators;
		}
		// Parses the script with Tern and rewrites every `localStorage` identifier to
		// `localStorageProxy`, re-parsing after each replacement since offsets shift.
		static replaceCalls(lines: string[]): string {
			//Analyze the file
			const file = new window.app._TernFile('[doc]');
			file.text =
// (continuation of replaceCalls)
				lines.join('\n');
			const srv = new window.CodeMirror.TernServer({
				defs: []
			});
			window.tern.withContext(srv.cx, () => {
				file.ast = window.tern.parse(file.text, srv.passes, {
					directSourceFile: file,
					allowReturnOutsideFunction: true,
					allowImportExportEverywhere: true,
					ecmaVersion: srv.ecmaVersion
				});
			});
			const scriptExpressions = file.ast.body;
			let script = file.text;
			//Check all expressions for chrome calls
			const persistentData: PersistentData = {
				lines: lines,
				lineSeperators: this.getLineSeperators(lines),
				script: script
			};
			for (let i = 0; i < scriptExpressions.length; i++) {
				const expression = scriptExpressions[i];
				if (this.findExpression(expression, persistentData, 'localStorage', (data, expression) => {
					data.script = data.script.slice(0, expression.start) +
						'localStorageProxy' +
						data.script.slice(expression.end);
					data.lines = data.script.split('\n');
				})) {
					//Margins may have changed, redo tern stuff
					return this.replaceCalls(persistentData.lines);
				}
			}
			return persistentData.script;
		}
	}
	static chromeCallsReplace = class ChromeCallsReplace {
		// True when toCheck equals prop, ignoring surrounding quote/backtick chars.
		private static _isProperty(toCheck: string, prop: string): boolean {
			if (toCheck === prop) {
				return true;
			}
			return toCheck.replace(/['|"|`]/g, '') === prop;
		}
		// Finds the source lines containing the [start, end) character range.
		private static _getCallLines(lineSeperators: {
			start: number;
			end: number;
		}[], start: number, end: number): {
			from: {
				index: number;
				line: number;
			};
			to: {
				index: number;
				line: number;
			}
		} {
			const line: {
				from: {
					index: number,
					line: number;
				},
				to: {
					index: number,
					line: number;
				};
			} = {} as any;
			for (let i = 0; i < lineSeperators.length; i++) {
				const sep = lineSeperators[i];
				if (sep.start <= start) {
					line.from = {
						index: sep.start,
						line: i
					};
				}
				if (sep.end >= end) {
					line.to = {
						index: sep.end,
						line: i
					};
					break;
				}
			}
			return line;
		}
		// Walks up the recorded parent expressions to the nearest CallExpression.
		private static _getFunctionCallExpressions(data: ChromePersistentData): Tern.Expression {
			//Keep looking through the parent expressions untill a CallExpression or MemberExpression is found
			let index = data.parentExpressions.length - 1;
			let expr =
// (continuation of _getFunctionCallExpressions)
				data.parentExpressions[index];
			while (expr && expr.type !== 'CallExpression') {
				expr = data.parentExpressions[--index];
			}
			return data.parentExpressions[index];
		}
		// Rebuilds the dotted chrome API path (minus the leading "chrome") and grabs
		// the raw argument text from the original script.
		private static _getChromeAPI(expr: Tern.Expression, data: ChromePersistentData): {
			call: string;
			args: string;
		} {
			data.functionCall = data.functionCall.map((prop) => {
				return prop.replace(/['|"|`]/g, '');
			});
			let functionCall = data.functionCall;
			functionCall = functionCall.reverse();
			if (functionCall[0] === 'chrome') {
				functionCall.splice(0, 1);
			}
			const argsStart = expr.callee.end;
			const argsEnd = expr.end;
			const args = data.persistent.script.slice(argsStart, argsEnd);
			return {
				call: functionCall.join('.'),
				args: args
			};
		}
		// Converts a whole-script character index into an index within line `line`.
		private static _getLineIndexFromTotalIndex(lines: string[], line: number, index: number): number {
			for (let i = 0; i < line; i++) {
				index -= lines[i].length + 1;
			}
			return index;
		}
		// Rewrites a single chrome.* call into the window.crmAPI.chrome('...') form,
		// splicing the replacement back into the per-line script representation.
		private static _replaceChromeFunction(data: ChromePersistentData, expr: Tern.Expression, callLine: {
			from: {
				line: number;
			}
			to: {
				line: number;
			}
		}) {
			if (data.isReturn && !data.isValidReturn) {
				return;
			}
			var lines = data.persistent.lines;
			//Get chrome API
			let i;
			var chromeAPI = this._getChromeAPI(expr, data);
			var firstLine = data.persistent.lines[callLine.from.line];
			var lineExprStart = this._getLineIndexFromTotalIndex(data.persistent.lines,
				callLine.from.line,
				((data.returnExpr && data.returnExpr.start) || expr.callee.start));
			var lineExprEnd = this._getLineIndexFromTotalIndex(data.persistent.lines,
				callLine.from.line, expr.callee.end);
			var newLine = firstLine.slice(0, lineExprStart) +
				`window.crmAPI.chrome('${chromeAPI.call}')`;
			var lastChar = null;
			// Trim trailing spaces and a trailing semicolon off the rebuilt line.
			while (newLine[(lastChar = newLine.length - 1)] === ' ') {
				newLine = newLine.slice(0, lastChar);
			}
			if (newLine[(lastChar = newLine.length - 1)] === ';') {
				newLine = newLine.slice(0, lastChar);
			}
			if (chromeAPI.args !== '()') {
				var argsLines = chromeAPI.args.split('\n');
				newLine += argsLines[0];
				for (i = 1; i < argsLines.length; i++) {
					lines[callLine.from.line +
// (continuation of _replaceChromeFunction: splice multi-line args / return handling)
						i] = argsLines[i];
				}
			}
			if (data.isReturn) {
				var lineRest = firstLine.slice(lineExprEnd +
					chromeAPI.args.split('\n')[0].length);
				while (lineRest.indexOf(';') === 0) {
					lineRest = lineRest.slice(1);
				}
				newLine += `.return(function(${data.returnName}) {` + lineRest;
				var usesTabs = true;
				var spacesAmount = 0;
				//Find out if the writer uses tabs or spaces
				// NOTE(review): the indexOf/split arguments below appear whitespace-
				// mangled in this copy — the first was presumably a tab character;
				// verify against the repository before relying on this detection.
				for (let i = 0; i < data.persistent.lines.length; i++) {
					if (data.persistent.lines[i].indexOf(' ') === 0) {
						usesTabs = true;
						break;
					} else if (data.persistent.lines[i].indexOf(' ') === 0) {
						var split = data.persistent.lines[i].split(' ');
						for (var j = 0; j < split.length; j++) {
							if (split[j] === ' ') {
								spacesAmount++;
							} else {
								break;
							}
						}
						usesTabs = false;
						break;
					}
				}
				var indent;
				if (usesTabs) {
					indent = ' ';
				} else {
					indent = [];
					indent[spacesAmount] = ' ';
					indent = indent.join(' ');
				}
				//Only do this for the current scope
				var scopeLength = null;
				var idx = null;
				for (i = data.parentExpressions.length - 1; scopeLength === null && i !== 0; i--) {
					if (data.parentExpressions[i].type === 'BlockStatement' ||
						(data.parentExpressions[i].type === 'FunctionExpression' &&
							(data.parentExpressions[i].body as Tern.BlockStatement).type === 'BlockStatement')) {
						scopeLength = this._getLineIndexFromTotalIndex(data.persistent.lines,
							callLine.from.line, data.parentExpressions[i].end);
						idx = 0;
						//Get the lowest possible scopeLength as to stay on the last line of the scope
						while (scopeLength > 0) {
							scopeLength = this._getLineIndexFromTotalIndex(data.persistent.lines,
								callLine.from.line + (++idx),
								data.parentExpressions[i].end);
						}
						scopeLength = this._getLineIndexFromTotalIndex(data.persistent.lines,
							callLine.from.line + (idx - 1),
							data.parentExpressions[i].end);
					}
				}
				if (idx === null) {
					idx = (lines.length - callLine.from.line) + 1;
				}
				var indents = 0;
				var newLineData = lines[callLine.from.line];
				while (newLineData.indexOf(indent) === 0) {
					newLineData = newLineData.replace(indent, '');
					indents++;
				}
				//Push in one extra line at the end of the expression
				var prevLine;
				var indentArr = [];
				indentArr[indents] = '';
				var prevLine2 = indentArr.join(indent) + '}).send();';
				var max = data.persistent.lines.length + 1;
				for (i = callLine.from.line; i < callLine.from.line + (idx - 1); i++) {
					lines[i] = indent + lines[i];
				}
				//If it's going to add a new line, indent the last line as well
				// if (idx === (lines.length - callLines.from.line) + 1) {
				// lines[i] = indent + lines[i];
				// }
				for (i = callLine.from.line + (idx - 1); i < max; i++) {
					prevLine = lines[i];
					lines[i] = prevLine2;
					prevLine2 = prevLine;
				}
			} else {
				lines[callLine.from.line + (i - 1)] =
					lines[callLine.from.line + (i - 1)] + '.send();';
				if (i === 1) {
					newLine += '.send();';
				}
			}
			lines[callLine.from.line] = newLine;
			return;
		}
		// Recursively determines whether a callee chains down to chrome.* (or
		// window.chrome.*); on a match either records an error (invalid return use)
		// or performs the rewrite, returning true when a chrome call was found.
		private static _callsChromeFunction(callee: Tern.FunctionCallExpression, data: ChromePersistentData, onError: TransferOnError): boolean {
			data.parentExpressions.push(callee);
			//Check if the function has any arguments and check those first
			if (callee.arguments && callee.arguments.length > 0) {
				for (let i = 0; i < callee.arguments.length; i++) {
					if (this._findChromeExpression(callee.arguments[i],
						this._removeObjLink(data), onError)) {
						return true;
					}
				}
			}
			if (callee.type !== 'MemberExpression') {
				//This is a call upon something (like a call in crmAPI.chrome), check the previous expression first
				return this._findChromeExpression(callee, this._removeObjLink(data), onError);
			}
			//Continue checking the call itself
			if (callee.property) {
				data.functionCall = data.functionCall || [];
				data.functionCall.push(callee.property.name || (callee.property as any).raw);
			}
			if (callee.object && callee.object.name) {
				//First object
				const isWindowCall = (this._isProperty(callee.object.name, 'window') &&
					this._isProperty(callee.property.name || (callee.property as any).raw, 'chrome'));
				if (isWindowCall || this._isProperty(callee.object.name, 'chrome')) {
					data.expression = callee;
					const expr = this._getFunctionCallExpressions(data);
					const callLines =
// (continuation of _callsChromeFunction: a chrome.* call was found)
						this._getCallLines(data.persistent.lineSeperators, expr.start, expr.end);
					if (data.isReturn && !data.isValidReturn) {
						// Return value used in a way that can't be rewritten — report it.
						callLines.from.index =
							this._getLineIndexFromTotalIndex(data.persistent.lines,
								callLines.from.line, callLines.from.index);
						callLines.to.index =
							this._getLineIndexFromTotalIndex(data.persistent.lines,
								callLines.to.line, callLines.to.index);
						onError(callLines, data.persistent.passes);
						return false;
					}
					if (!data.persistent.diagnostic) {
						this._replaceChromeFunction(data, expr, callLines);
					}
					return true;
				}
			} else if (callee.object) {
				return this._callsChromeFunction(callee.object as any, data, onError);
			}
			return false;
		}
		// Shallow-clones the traversal state with a fresh parentExpressions array so
		// sibling branches don't share mutations; `persistent` stays shared by design.
		private static _removeObjLink(data: ChromePersistentData): ChromePersistentData {
			const parentExpressions = data.parentExpressions || [];
			const newObj: ChromePersistentData = {} as any;
			for (let key in data) {
				if (data.hasOwnProperty(key) &&
					key !== 'parentExpressions' &&
					key !== 'persistent') {
					(newObj as any)[key] = (data as any)[key];
				}
			}
			const newParentExpressions = [];
			for (let i = 0; i < parentExpressions.length; i++) {
				newParentExpressions.push(parentExpressions[i]);
			}
			newObj.persistent = data.persistent;
			newObj.parentExpressions = newParentExpressions;
			return newObj;
		}
		// Recursive AST walk that finds (and, via _callsChromeFunction, rewrites)
		// chrome.* calls; tracks whether the current position can validly consume a
		// return value (isReturn/isValidReturn) while descending.
		private static _findChromeExpression(expression: Tern.Expression, data: ChromePersistentData, onError: TransferOnError): boolean {
			data.parentExpressions = data.parentExpressions || [];
			data.parentExpressions.push(expression);
			switch (expression.type) {
				case 'VariableDeclaration':
					data.isValidReturn = expression.declarations.length === 1;
					for (let i = 0; i < expression.declarations.length; i++) {
						//Check if it's an actual chrome assignment
						var declaration = expression.declarations[i];
						if (declaration.init) {
							var decData = this._removeObjLink(data);
							var returnName = declaration.id.name;
							decData.isReturn = true;
							decData.returnExpr = expression;
							decData.returnName = returnName;
							if (this._findChromeExpression(declaration.init, decData, onError)) {
								return true;
							}
						}
					}
// (continuation of _findChromeExpression: remaining AST node kinds, part 1)
					break;
				case 'CallExpression':
				case 'MemberExpression':
					const argsTocheck: Tern.Expression[] = [];
					if (expression.arguments && expression.arguments.length > 0) {
						for (let i = 0; i < expression.arguments.length; i++) {
							if (expression.arguments[i].type !== 'MemberExpression' &&
								expression.arguments[i].type !== 'CallExpression') {
								//It's not a direct call to chrome, just handle this later after the function has been checked
								argsTocheck.push(expression.arguments[i]);
							} else {
								if (this._findChromeExpression(expression.arguments[i],
									this._removeObjLink(data), onError)) {
									return true;
								}
							}
						}
					}
					data.functionCall = [];
					if (expression.callee) {
						if (this._callsChromeFunction(expression.callee, data, onError)) {
							return true;
						}
					}
					for (let i = 0; i < argsTocheck.length; i++) {
						if (this._findChromeExpression(argsTocheck[i],
							this._removeObjLink(data), onError)) {
							return true;
						}
					}
					break;
				case 'AssignmentExpression':
					data.isReturn = true;
					data.returnExpr = expression;
					data.returnName = expression.left.name;
					return this._findChromeExpression(expression.right, data, onError);
				case 'FunctionExpression':
				case 'FunctionDeclaration':
					data.isReturn = false;
					for (let i = 0; i < expression.body.body.length; i++) {
						if (this._findChromeExpression(expression.body.body[i],
							this._removeObjLink(data), onError)) {
							return true;
						}
					}
					break;
				case 'ExpressionStatement':
					return this._findChromeExpression(expression.expression, data, onError);
				case 'SequenceExpression':
					data.isReturn = false;
					var lastExpression = expression.expressions.length - 1;
					for (let i = 0; i < expression.expressions.length; i++) {
						if (i === lastExpression) {
							data.isReturn = true;
						}
						if (this._findChromeExpression(expression.expressions[i],
							this._removeObjLink(data), onError)) {
							return true;
						}
					}
					break;
				// NOTE(review): 'UnaryExpression' shares the conditional handling and
				// reads .consequent/.alternate, which unary nodes lack — verify.
				case 'UnaryExpression':
				case 'ConditionalExpression':
					data.isValidReturn = false;
					data.isReturn = true;
					if (this._findChromeExpression(expression.consequent,
						this._removeObjLink(data), onError)) {
						return true;
					}
					if
// (continuation of _findChromeExpression: remaining AST node kinds, part 2)
					(this._findChromeExpression(expression.alternate,
						this._removeObjLink(data), onError)) {
						return true;
					}
					break;
				case 'IfStatement':
					data.isReturn = false;
					if (this._findChromeExpression(expression.consequent,
						this._removeObjLink(data), onError)) {
						return true;
					}
					if (expression.alternate &&
						this._findChromeExpression(expression.alternate,
							this._removeObjLink(data), onError)) {
						return true;
					}
					break;
				case 'LogicalExpression':
				case 'BinaryExpression':
					data.isReturn = true;
					data.isValidReturn = false;
					if (this._findChromeExpression(expression.left,
						this._removeObjLink(data), onError)) {
						return true;
					}
					if (this._findChromeExpression(expression.right,
						this._removeObjLink(data), onError)) {
						return true;
					}
					break;
				case 'BlockStatement':
					data.isReturn = false;
					for (let i = 0; i < expression.body.length; i++) {
						if (this._findChromeExpression(expression.body[i],
							this._removeObjLink(data), onError)) {
							return true;
						}
					}
					break;
				case 'ReturnStatement':
					data.isReturn = true;
					data.returnExpr = expression;
					data.isValidReturn = false;
					return this._findChromeExpression(expression.argument, data, onError);
				// NOTE(review): 'ObjectExpressions' is presumably a typo for
				// 'ObjectExpression', making this branch dead; verify.
				case 'ObjectExpressions':
					data.isReturn = true;
					data.isValidReturn = false;
					for (let i = 0; i < expression.properties.length; i++) {
						if (this._findChromeExpression(expression.properties[i].value,
							this._removeObjLink(data), onError)) {
							return true;
						}
					}
					break;
			}
			return false;
		}
		// Returns a collector that files each reported position under its pass index.
		private static _generateOnError(container: TransferOnErrorError[][]): (
			position: TransferOnErrorError, passes: number) => void {
			return (position: TransferOnErrorError, passes: number) => {
				if (!container[passes]) {
					container[passes] = [position];
				} else {
					container[passes].push(position);
				}
			};
		}
		// Parses the script with Tern and rewrites chrome.* calls, recursing with
		// passes+1 after each rewrite (offsets shift); pass 0 first does a
		// diagnostic-only sweep to collect pre-existing errors.
		private static _replaceChromeCalls(lines: string[], passes: number, onError: TransferOnError): string {
			//Analyze the file
			var file = new window.app._TernFile('[doc]');
			file.text = lines.join('\n');
			var srv = new window.CodeMirror.TernServer({
				defs: []
			});
			window.tern.withContext(srv.cx, () => {
				file.ast =
// (continuation of _replaceChromeCalls)
					window.tern.parse(file.text, srv.passes, {
						directSourceFile: file,
						allowReturnOutsideFunction: true,
						allowImportExportEverywhere: true,
						ecmaVersion: srv.ecmaVersion
					});
			});
			const scriptExpressions = file.ast.body;
			let index = 0;
			const lineSeperators = [];
			for (let i = 0; i < lines.length; i++) {
				lineSeperators.push({
					start: index,
					end: index += lines[i].length + 1
				});
			}
			let script = file.text;
			//Check all expressions for chrome calls
			const persistentData: {
				lines: any[],
				lineSeperators: any[],
				script: string,
				passes: number,
				diagnostic?: boolean;
			} = {
				lines: lines,
				lineSeperators: lineSeperators,
				script: script,
				passes: passes
			};
			let expression;
			if (passes === 0) {
				//Do one check, not replacing anything, to find any possible errors already
				persistentData.diagnostic = true;
				for (let i = 0; i < scriptExpressions.length; i++) {
					expression = scriptExpressions[i];
					this._findChromeExpression(expression, {
						persistent: persistentData
					} as ChromePersistentData, onError);
				}
				persistentData.diagnostic = false;
			}
			for (let i = 0; i < scriptExpressions.length; i++) {
				expression = scriptExpressions[i];
				if (this._findChromeExpression(expression, {
					persistent: persistentData
				} as ChromePersistentData, onError)) {
					script = this._replaceChromeCalls(persistentData.lines.join('\n')
						.split('\n'), passes + 1, onError);
					break;
				}
			}
			return script;
		}
		// De-duplicates error positions by their JSON representation.
		private static _removePositionDuplicates(arr: TransferOnErrorError[]): TransferOnErrorError[] {
			var jsonArr: EncodedString<TransferOnErrorError>[] = [];
			arr.forEach((item, index) => {
				jsonArr[index] = JSON.stringify(item);
			});
			jsonArr = jsonArr.filter((item, pos) => {
				return jsonArr.indexOf(item) === pos;
			});
			return jsonArr.map((item) => {
				return JSON.parse(item);
			});
		}
		// Entry point: strips the /*execute locally*/ marker, rewrites chrome.* calls
		// and reports first-pass vs final-pass errors through onError.
		static replace(script: string, onError: (
			oldScriptErrors: TransferOnErrorError[],
			newScriptErrors: TransferOnErrorError[],
			parseError?: boolean
		) => void): string {
			//Remove execute locally
			const lineIndex = script.indexOf('/*execute locally*/');
			if (lineIndex !== -1) {
				script =
// (continuation of replace)
					script.replace('/*execute locally*/\n', '');
				if (lineIndex === script.indexOf('/*execute locally*/')) {
					script = script.replace('/*execute locally*/', '');
				}
			}
			const errors: TransferOnErrorError[][] = [];
			try {
				script = this._replaceChromeCalls(script.split('\n'), 0,
					this._generateOnError(errors));
			} catch (e) {
				// Parse failure — report it and return the script unmodified.
				onError(null, null, true);
				return script;
			}
			const firstPassErrors = errors[0];
			const finalPassErrors = errors[errors.length - 1];
			if (finalPassErrors) {
				onError(this._removePositionDuplicates(firstPassErrors),
					this._removePositionDuplicates(finalPassErrors));
			}
			return script;
		}
	}
	// Builds an error handler that persists conversion errors for `id` into
	// storage.local (and the in-memory storageLocal) for later display.
	static generateScriptUpgradeErrorHandler(id: CRM.GenericNodeId): ScriptUpgradeErrorHandler {
		return function (oldScriptErrors, newScriptErrors, parseError) {
			browserAPI.storage.local.get<CRM.StorageLocal>().then((keys) => {
				if (!keys.upgradeErrors) {
					var val: {
						[key: number]: {
							oldScript: CursorPosition[];
							newScript: CursorPosition[];
							generalError: boolean;
						}
					} = {};
					val[id] = {
						oldScript: oldScriptErrors,
						newScript: newScriptErrors,
						generalError: parseError
					};
					keys.upgradeErrors = val;
					window.app.storageLocal.upgradeErrors = val;
				}
				keys.upgradeErrors[id] = window.app.storageLocal.upgradeErrors[id] = {
					oldScript: oldScriptErrors,
					newScript: newScriptErrors,
					generalError: parseError
				};
				browserAPI.storage.local.set({
					upgradeErrors: keys.upgradeErrors
				} as any);
			});
		};
	};
	// Converts a legacy script: strips /*execute locally*/ and, depending on
	// `method`, rewrites localStorage and/or chrome.* usage. Returns the input
	// unchanged if conversion throws.
	static convertScriptFromLegacy(script: string, id: CRM.GenericNodeId, method: SCRIPT_CONVERSION_TYPE): string {
		//Remove execute locally
		let usedExecuteLocally = false;
		const lineIndex = script.indexOf('/*execute locally*/');
		if (lineIndex !== -1) {
			script = script.replace('/*execute locally*/\n', '');
			if (lineIndex === script.indexOf('/*execute locally*/')) {
				script = script.replace('/*execute locally*/', '');
			}
			usedExecuteLocally = true;
		}
		try {
			switch (method) {
				case SCRIPT_CONVERSION_TYPE.CHROME:
					script = this.chromeCallsReplace.replace(script,
						this.generateScriptUpgradeErrorHandler(id));
					break;
				case
SCRIPT_CONVERSION_TYPE.LOCAL_STORAGE:
				// Only rewrite localStorage calls when the script opted into
				// local execution via the marker comment.
				script = usedExecuteLocally ?
					this.localStorageReplace.replaceCalls(script.split('\n')) : script;
				break;
			case SCRIPT_CONVERSION_TYPE.BOTH:
				const localStorageConverted = usedExecuteLocally ?
					this.localStorageReplace.replaceCalls(script.split('\n')) : script;
				script = this.chromeCallsReplace.replace(localStorageConverted,
					this.generateScriptUpgradeErrorHandler(id)
				);
				break;
		}
	} catch (e) {
		// Conversion failed; fall back to the (marker-stripped) input.
		return script;
	}
	return script;
}
};

/**
 * Dom listeners for this node
 */
static listeners = class CRMAppListeners {
	// Reverts the last change via the uploading helper.
	static undo() {
		window.app.uploading.revert();
	}
	static _toggleBugReportingTool() {
		window.errorReportingTool.toggleVisibility();
	};
	// Toggles typescript mode in the script editor and its toolbar button state.
	static toggleTypescript() {
		window.scriptEdit.toggleTypescript();
		window.app.$.editorTypescript.classList.toggle('active');
	}
	// Slides the tools ribbon in/out, resizing the fullscreen editor to match.
	static toggleToolsRibbon() {
		const horizontalCenterer = window.crmEditPage.$.horizontalCenterer;
		const bcr = horizontalCenterer.getBoundingClientRect();
		const viewportWidth = bcr.width + 20;
		$(window.doc.editorToolsRibbonContainer).animate({
			marginLeft: window.app.storageLocal.hideToolsRibbon ? 
'0' : '-200px'
		}, {
			duration: 250,
			easing: ($ as JQueryContextMenu).bez([0.215, 0.610, 0.355, 1.000]),
			step: (now: number) => {
				window.doc.fullscreenEditorEditor.style.width = `${viewportWidth - 200 - now}px`;
				window.doc.fullscreenEditorEditor.style.marginLeft = `${now + 200}px`;
				// NOTE(review): both operands of || are window.scriptEdit — one
				// side was probably meant to be another editor (e.g. the
				// stylesheet editor); verify against the original intent.
				(window.scriptEdit || window.scriptEdit).getEditorInstance().editor.layout();
			}
		});
		window.app.storageLocal.hideToolsRibbon = !window.app.storageLocal.hideToolsRibbon;
		window.app.upload();
	};
	// Opens the "search website" dialog in script-output mode, but only while
	// a script node is being edited.
	static launchSearchWebsiteToolScript() {
		if (this.parent().item && this.parent().item.type === 'script' &&
			window.scriptEdit) {
			const paperSearchWebsiteDialog = this.parent().$.paperSearchWebsiteDialog;
			paperSearchWebsiteDialog.init();
			paperSearchWebsiteDialog.setOutputType('script');
			paperSearchWebsiteDialog.show();
		}
	};
	// Opens the "search website" dialog in link-output mode.
	static launchSearchWebsiteToolLink() {
		const paperSearchWebsiteDialog = this.parent().$.paperSearchWebsiteDialog;
		paperSearchWebsiteDialog.init();
		paperSearchWebsiteDialog.setOutputType('link');
		paperSearchWebsiteDialog.show();
	};
	// Starts external-editor integration for the active item unless the
	// trigger element is disabled.
	static launchExternalEditorDialog() {
		if (!(window.doc.externalEditorDialogTrigger as HTMLElement & {
			disabled: boolean;
		}).disabled) {
			window.externalEditor.init();
			window.externalEditor.editingCRMItem =
				window.codeEditBehavior.getActive().item as any;
			window.externalEditor.setupExternalEditing();
		}
	};
	static runLint() {
		window.app.util.getDialog().getEditorInstance().runLinter();
	};
	static showCssTips() {
		window.doc.cssEditorInfoDialog.open();
	};
	// Opens the permission-management flow, or a toast when the permissions
	// API is unavailable on this browser.
	static async showManagePermissions() {
		if (browserAPI.permissions) {
			await this.parent()._requestPermissions([], true);
		} else {
			window.app.util.showToast(this.parent().___(I18NKeys.crmApp.code.permissionsNotSupported));
		}
	};
	// Toggles the six CRM content-type icons; `types` is either a full
	// boolean mask or the click event's payload.
	static iconSwitch(e: Polymer.ClickEvent, types: {
		x?: any;
	}|boolean[]) {
		let parentCrmTypes = this.parent().crmTypes;
		// Normalize a single numeric type index into a boolean[6] mask.
		if (typeof parentCrmTypes === 'number') {
			const arr = [false, false, false, false, false, false];
			arr[parentCrmTypes] = true;
			parentCrmTypes = arr;
		} else {
			parentCrmTypes = 
[...parentCrmTypes];
		}
		let selectedTypes = parentCrmTypes;
		if (Array.isArray(types)) {
			// Explicit mask passed in; mirror it onto the toggle UI.
			for (let i = 0; i < 6; i++) {
				let crmEl = <unknown>this.parent().shadowRoot.querySelectorAll('.crmType')[i] as HTMLElement;
				if (types[i]) {
					crmEl.classList.add('toggled');
				} else {
					crmEl.classList.remove('toggled');
				}
			}
			selectedTypes = [...types];
		} else {
			// A single type icon was clicked; flip just that one.
			const element = this.parent().util.findElementWithClassName(e, 'crmType');
			const crmTypes = this.parent().shadowRoot.querySelectorAll('.crmType');
			for (let i = 0; i < 6; i++) {
				let crmEl = <unknown>crmTypes[i] as HTMLElement;
				if (crmEl === element) {
					//Toggle this element
					if (!selectedTypes[i]) {
						//Toggle it on
						crmEl.classList.add('toggled');
					} else {
						//Toggle it off
						crmEl.classList.remove('toggled');
					}
					selectedTypes[i] = !selectedTypes[i];
				}
			}
		}
		browserAPI.storage.local.set({
			selectedCrmType: selectedTypes
		});
		// Only fire a change event when the selection actually differs.
		for (let i = 0; i < 6; i++) {
			if (this.parent().crmTypes[i] !== selectedTypes[i]) {
				this.parent().fire('crmTypeChanged', {});
				break;
			}
		}
		this.parent().crmTypes = selectedTypes;
	};
	/**
	 * Invokes `callback` with whether downloading is allowed, requesting
	 * the 'downloads' permission from the user when needed.
	 */
	private static _getDownloadPermission(callback: (allowed: boolean) => void) {
		if (BrowserAPI.getSrc().downloads && BrowserAPI.getSrc().downloads.download) {
			callback(true);
			return;
		}
		if (!(BrowserAPI.getSrc().permissions)) {
			window.app.util.showToast(this.parent().___(I18NKeys.crmApp.code.downloadNotSupported));
			callback(false);
			return;
		}
		browserAPI.permissions.contains({
			permissions: ['downloads']
		}).then(async (granted) => {
			if (granted) {
				callback(true);
			} else {
				browserAPI.permissions.request({
					permissions: ['downloads']
				}).then((granted) => {
					//Refresh browserAPI object
					browserAPI.downloads = browserAPI.downloads || BrowserAPI.getDownloadAPI();
					callback(granted);
				});
			}
		});
	}
	/**
	 * Builds a Windows .reg file registering a custom URI scheme, then
	 * offers it as a download. (Name says "Regex" but it generates a
	 * registry file.)
	 */
	static async _generateRegexFile() {
		// Double every backslash for the registry file format.
		const filePath = this.parent().$.URISchemeFilePath.$$('input').value.replace(/\\/g, '\\\\');
		const schemeName = this.parent().$.URISchemeSchemeName.$$('input').value;
		const regFile = [
			'Windows Registry Editor Version 5.00',
			'',
			
'[HKEY_CLASSES_ROOT\\' + schemeName + ']',
			'@="URL:' + schemeName + ' Protocol"',
			'"URL Protocol"=""',
			'',
			'[HKEY_CLASSES_ROOT\\' + schemeName + '\\shell]',
			'',
			'[HKEY_CLASSES_ROOT\\' + schemeName + '\\shell\\open]',
			'',
			'[HKEY_CLASSES_ROOT\\' + schemeName + '\\shell\\open\\command]',
			'@="\\"' + filePath + '\\""'
		].join('\n');
		this._getDownloadPermission((allowed) => {
			if (allowed) {
				if (browserAPI.downloads) {
					// Serve the registry file as a base64 data-URI download.
					browserAPI.downloads.download({
						url: 'data:text/plain;charset=utf-8;base64,' + window.btoa(regFile),
						filename: schemeName + '.reg'
					});
				} else {
					window.app.util.showToast(this.parent().___(I18NKeys.crmApp.code.downloadNotSupported));
				}
			}
		});
	};
	/**
	 * Persists an edited global exclude pattern after locating which row's
	 * input fired the event.
	 */
	static globalExcludeChange(e: Polymer.ClickEvent) {
		const input = this.parent().util.findElementWithTagname(e, 'paper-input');
		let excludeIndex = null;
		const allExcludes = document.getElementsByClassName('globalExcludeContainer');
		for (let i = 0; i < allExcludes.length; i++) {
			if (allExcludes[i] === input.parentNode) {
				excludeIndex = i;
				break;
			}
		}
		if (excludeIndex === null) {
			return;
		}
		const value = input.$$('input').value;
		this.parent().globalExcludes[excludeIndex] = value;
		this.parent().set('globalExcludes', this.parent().globalExcludes);
		browserAPI.storage.local.set({
			globalExcludes: this.parent().globalExcludes
		} as any);
	};
	/**
	 * Removes the global-exclude row whose delete button was clicked.
	 */
	static removeGlobalExclude(e: Polymer.ClickEvent) {
		const node = this.parent().util.findElementWithTagname(e, 'paper-icon-button');
		let excludeIndex = null;
		const allExcludes = document.getElementsByClassName('globalExcludeContainer');
		for (let i = 0; i < allExcludes.length; i++) {
			if (allExcludes[i] === node.parentNode) {
				excludeIndex = i;
				break;
			}
		}
		if (excludeIndex === null) {
			return;
		}
		this.parent().splice('globalExcludes', excludeIndex, 1);
	};
	/**
	 * Imports settings/CRM data pasted into the import dialog; supports the
	 * current JSON format and the legacy '%146%'-separated format.
	 */
	static async importData() {
		const dataString = this.parent().$.importSettingsInput.value as EncodedString<{
			local?: CRM.StorageLocal;
			storageLocal?: CRM.StorageLocal;
			settings: CRM.SettingsStorage;
		}>;
		if (!this.parent().$.oldCRMImport.checked) {
			let 
data: {
				crm?: CRM.Tree;
				local?: CRM.StorageLocal;
				nonLocal?: CRM.SettingsStorage;
				storageLocal?: CRM.StorageLocal;
			};
			try {
				data = JSON.parse(dataString);
				this.parent().$.importSettingsError.style.display = 'none';
			} catch (e) {
				// Invalid JSON: surface the inline error and bail.
				this.parent().$.importSettingsError.style.display = 'block';
				return;
			}
			window.app.uploading.createRevertPoint();
			const overWriteImport = this.parent().$.overWriteImport;
			if (overWriteImport.checked && (data.local || data.storageLocal)) {
				this.parent().settings = data.nonLocal || this.parent().settings;
				this.parent().storageLocal = data.local || this.parent().storageLocal;
			}
			if (data.crm) {
				if (overWriteImport.checked) {
					// Overwrite: take the imported tree wholesale, assigning
					// fresh node ids along the way.
					this.parent().settings.crm = this.parent().util.crmForEach(data.crm, (node) => {
						node.id = this.parent().generateItemId();
					});
				} else {
					this.parent()._addImportedNodes(data.crm);
				}
				this.parent().editCRM.build();
			}
			//Apply settings
			this.parent()._setup.initCheckboxes(this.parent().storageLocal);
			this.parent().upload();
		} else {
			try {
				// Legacy format: '%146%'-separated values with CRM rows at the tail.
				const settingsArr: any[] = dataString.split('%146%');
				if (settingsArr[0] === 'all') {
					this.parent().storageLocal.showOptions = settingsArr[2];
					const rows = settingsArr.slice(6);
					// Adapter exposing the legacy rows through the old
					// localStorage-style getItem() interface.
					class LocalStorageWrapper {
						getItem(index: 'numberofrows' | number): string {
							if (index === 'numberofrows') {
								return '' + (rows.length - 1);
							}
							return rows[index];
						}
					}
					window.app.uploading.createRevertPoint();
					const crm = await this.parent()._transferCRMFromOld(settingsArr[4],
						new LocalStorageWrapper());
					this.parent().settings.crm = crm;
					this.parent().editCRM.build();
					this.parent()._setup.initCheckboxes(this.parent().storageLocal);
					this.parent().upload();
				} else {
					alert('This method of importing no longer works, please export all your settings instead');
				}
			} catch (e) {
				this.parent().$.importSettingsError.style.display = 'block';
				return;
			}
		}
		this.parent().util.showToast(this.parent().___(I18NKeys.crmApp.code.importSuccess));
	};
	/**
	 * Serializes the CRM and/or settings into the export output field.
	 */
	static exportData() {
		const toExport: {
			crm?: CRM.SafeTree;
			local?: CRM.StorageLocal;
			nonLocal?: 
CRM.SettingsStorage;
		} = {} as any;
		if (this.parent().$.exportCRM.checked) {
			// Deep-copy the tree, then strip each node of unsafe/private data.
			toExport.crm = JSON.parse(JSON.stringify(this.parent().settings.crm));
			for (let i = 0; i < toExport.crm.length; i++) {
				toExport.crm[i] = this.parent().editCRM.makeNodeSafe(toExport.crm[i] as CRM.Node);
			}
		}
		if (this.parent().$.exportSettings.checked) {
			toExport.local = this.parent().storageLocal;
			toExport.nonLocal = JSON.parse(JSON.stringify(this.parent().settings));
			// The CRM tree is exported separately (or not at all).
			delete toExport.nonLocal.crm;
		}
		window.doc.exportSettingsSpinner.hide = false;
		// Defer the (possibly large) stringify so the spinner can paint first.
		window.setTimeout(() => {
			this.parent().$.exportSettingsOutput.value = JSON.stringify(toExport);
			window.requestAnimationFrame(() => {
				window.doc.exportSettingsSpinner.hide = true;
			});
		}, 100);
	};
	static addGlobalExcludeField() {
		this.parent().push('globalExcludes', '');
	};
	static _openLogging() {
		window.open(browserAPI.runtime.getURL('html/logging.html'), '_blank');
	};
	static hideGenericToast() {
		this.parent().$.messageToast.hide();
	};
	// Advances the script-updates toast to the next updated script, hiding the
	// "next" button on the last entry.
	static nextUpdatedScript() {
		let index = this.parent().$.scriptUpdatesToast.index;
		this.parent().$.scriptUpdatesToast.text = this.parent()._getUpdatedScriptString(
			this.parent().$.scriptUpdatesToast.scripts[++index]);
		this.parent().$.scriptUpdatesToast.index = index;
		if (this.parent().$.scriptUpdatesToast.scripts.length - index > 1) {
			this.parent().$.nextScriptUpdateButton.style.display = 'inline';
		} else {
			this.parent().$.nextScriptUpdateButton.style.display = 'none';
		}
	};
	static hideScriptUpdatesToast() {
		this.parent().$.scriptUpdatesToast.hide();
	};
	/**
	 * Copies the contents of `target` to the clipboard, reflecting
	 * success/failure in the button's icon.
	 */
	private static _copyFromElement(target: HTMLTextAreaElement, button: HTMLPaperIconButtonElement) {
		const snipRange = document.createRange();
		snipRange.selectNode(target);
		const selection = window.getSelection();
		selection.removeAllRanges();
		selection.addRange(snipRange);
		try {
			document.execCommand('copy');
			button.icon = 'done';
		} catch (err) {
			// Copy command is not available
			console.error(err);
			button.icon = 'error';
		}
		// Return to the copy button after a second. 
this.parent().async(function () {
			button.icon = 'content-copy';
		}, 1000);
		selection.removeAllRanges();
	}
	static copyExportDialogToClipboard() {
		this._copyFromElement(this.parent().$.exportJSONData, this.parent().$.dialogCopyButton);
	};
	static copyExportToClipboard() {
		this._copyFromElement(this.parent().$.exportSettingsOutput, this.parent().$.exportCopyButton);
	}
	// Advances the version-update dialog to its next tab, animating the
	// container height to the new tab's natural height.
	static goNextVersionUpdateTab() {
		if (this.parent().versionUpdateTab === 4) {
			// Last tab: close the dialog instead of advancing.
			this.parent().$.versionUpdateDialog.close();
		} else {
			const nextTabIndex = this.parent().versionUpdateTab + 1;
			const tabs = (document.getElementsByClassName('versionUpdateTab') as any) as HTMLElement[];
			const selector = tabs[nextTabIndex];
			selector.style.height = 'auto';
			let i;
			// Measure the target tab's natural height with the others hidden.
			for (i = 0; i < tabs.length; i++) {
				tabs[i].style.display = 'none';
			}
			const newHeight = $(selector).innerHeight();
			for (i = 0; i < tabs.length; i++) {
				tabs[i].style.display = 'block';
			}
			selector.style.height = '0';
			const newHeightPx = newHeight + 'px';
			const tabCont = this.parent().$.versionUpdateTabSlider;
			const currentHeight = tabCont.getBoundingClientRect().height;
			if (newHeight > currentHeight) {
				// Growing: animate first, then reveal the tab.
				tabCont.animate([
					{
						height: currentHeight + 'px'
					}, {
						height: newHeightPx
					}
				], {
					duration: 500,
					easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)'
				}).onfinish = () => {
					tabCont.style.height = newHeightPx;
					selector.style.height = 'auto';
					this.parent().versionUpdateTab = nextTabIndex;
				};
			} else {
				// Shrinking: reveal the tab first, then animate after a delay.
				selector.style.height = 'auto';
				this.parent().versionUpdateTab = nextTabIndex;
				setTimeout(function () {
					tabCont.animate([
						{
							height: currentHeight + 'px'
						}, {
							height: newHeightPx
						}
					], {
						duration: 500,
						easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)'
					}).onfinish = function () {
						tabCont.style.height = newHeightPx;
					};
				}, 500);
			}
		}
	}
	// Mirror of goNextVersionUpdateTab for moving one tab back.
	static goPrevVersionUpdateTab() {
		if (this.parent().versionUpdateTab !== 0) {
			const prevTabIndex = this.parent().versionUpdateTab - 1;
			const tabs = (document.getElementsByClassName('versionUpdateTab') as any) as HTMLElement[];
			const selector = 
tabs[prevTabIndex];
			selector.style.height = 'auto';
			let i;
			// Measure the target tab's natural height with the others hidden.
			for (i = 0; i < tabs.length; i++) {
				tabs[i].style.display = 'none';
			}
			const newHeight = $(selector).innerHeight();
			for (i = 0; i < tabs.length; i++) {
				tabs[i].style.display = 'block';
			}
			selector.style.height = '0';
			const newHeightPx = newHeight + 'px';
			const tabCont = this.parent().$.versionUpdateTabSlider;
			const currentHeight = tabCont.getBoundingClientRect().height;
			if (newHeight > currentHeight) {
				// Growing: animate first, then reveal the tab.
				tabCont.animate([{
					height: currentHeight + 'px'
				}, {
					height: newHeightPx
				}], {
					duration: 500,
					easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)'
				}).onfinish = () => {
					tabCont.style.height = newHeightPx;
					selector.style.height = 'auto';
					this.parent().versionUpdateTab = prevTabIndex;
				};
			} else {
				// Shrinking: reveal the tab first, then animate after a delay.
				selector.style.height = 'auto';
				this.parent().versionUpdateTab = prevTabIndex;
				setTimeout(function () {
					tabCont.animate([{
						height: currentHeight + 'px'
					}, {
						height: newHeightPx
					}], {
						duration: 500,
						easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)'
					}).onfinish = function () {
						tabCont.style.height = newHeightPx;
					};
				}, 500);
			}
		}
	};
	/**
	 * Collects the checked permission checkboxes from every node panel in the
	 * added-permissions dialog and appends them to each node's permissions.
	 */
	private static _applyAddedPermissions() {
		const panels = Array.prototype.slice.apply(
			window.doc.addedPermissionsTabContainer
				.querySelectorAll('.nodeAddedPermissionsCont'));
		panels.forEach((panel: HTMLElement) => {
			// Each panel carries its node's id in a data attribute.
			const node = this.parent().nodesById
				.get(~~(panel.getAttribute('data-id') as any) as CRM.GenericNodeId) as CRM.ScriptNode;
			const permissions = Array.prototype.slice.apply(panel.querySelectorAll('paper-checkbox'))
				.map(function (checkbox: HTMLPaperCheckboxElement) {
					if (checkbox.checked) {
						return checkbox.getAttribute('data-permission');
					}
					return null;
				}).filter(function (permission: string) {
					return !!permission;
				});
			if (!Array.isArray(node.permissions)) {
				node.permissions = [];
			}
			permissions.forEach(function (addedPermission: CRM.Permission) {
				if (node.permissions.indexOf(addedPermission) === -1) {
					node.permissions.push(addedPermission);
				}
			});
		});
		this.parent().upload();
	};
	static 
addedPermissionNext() {
		const cont = window.doc.addedPermissionsTabContainer as AddedPermissionsTabContainer;
		if (cont.tab === cont.maxTabs - 1) {
			// Last tab: close and apply the collected permissions.
			window.doc.addedPermissionsDialog.close();
			this._applyAddedPermissions();
			return;
		}
		// Swap the button label to "close" when the upcoming tab is the last.
		if (cont.tab + 2 !== cont.maxTabs) {
			(window.doc.addedPermissionNextButton.querySelector('.close') as HTMLElement).style.display = 'none';
			(window.doc.addedPermissionNextButton.querySelector('.next') as HTMLElement).style.display = 'block';
		} else {
			(window.doc.addedPermissionNextButton.querySelector('.close') as HTMLElement).style.display = 'block';
			(window.doc.addedPermissionNextButton.querySelector('.next') as HTMLElement).style.display = 'none';
		}
		// Each tab is 800px wide; slide to the next one.
		cont.style.marginLeft = (++cont.tab * -800) + 'px';
		window.doc.addedPermissionPrevButton.style.display = 'block';
	};
	static addedPermissionPrev() {
		const cont = window.doc.addedPermissionsTabContainer as AddedPermissionsTabContainer;
		cont.style.marginLeft = (--cont.tab * -800) + 'px';
		// Hide the "previous" button once back on the first tab.
		window.doc.addedPermissionPrevButton.style.display = (cont.tab === 0 ? 
'none' : 'block');
	};
	/**
	 * Reads every .codeSettingSetting control in the code-settings dialog and
	 * merges each value into the item's current options, keyed by data-key.
	 */
	private static _getCodeSettingsFromDialog(): CRM.Options {
		const obj: CRM.Options = {};
		Array.prototype.slice.apply(this.parent().shadowRoot.querySelectorAll('.codeSettingSetting'))
			.forEach((element: HTMLElement) => {
				let value: CRM.OptionsValue;
				const key = element.getAttribute('data-key');
				const type = element.getAttribute('data-type') as CRM.OptionsValue['type'];
				const currentVal = (this.parent().$.codeSettingsDialog.item.value.options as CRM.Options)[key];
				switch (type) {
					case 'number':
						// ~~ coerces the input string to an integer.
						value = this.parent().templates.mergeObjects(currentVal, {
							value: ~~element.querySelector('paper-input').value
						});
						break;
					case 'string':
						value = this.parent().templates.mergeObjects(currentVal, {
							value: element.querySelector('paper-input').value
						});
						break;
					case 'color':
						value = this.parent().templates.mergeObjects(currentVal, {
							value: element.querySelector('input').value
						});
						break;
					case 'boolean':
						value = this.parent().templates.mergeObjects(currentVal, {
							value: element.querySelector('paper-checkbox').checked
						});
						break;
					case 'choice':
						value = this.parent().templates.mergeObjects(currentVal, {
							selected: element.querySelector('paper-dropdown-menu').selected
						});
						break;
					case 'array':
						const arrayInput = element.querySelector('paper-array-input');
						arrayInput.saveSettings();
						let values = arrayInput.values;
						// Coerce every entry to the declared item type.
						if ((currentVal as CRM.OptionArray).items === 'string') {
							//Strings
							values = values.map(value => value + '');
						} else {
							//Numbers
							values = values.map(value => ~~value);
						}
						value = this.parent().templates.mergeObjects(currentVal, {
							value: values
						});
						break;
				}
				obj[key] = value;
			});
		return obj;
	}
	static confirmCodeSettings() {
		this.parent().$.codeSettingsDialog.item.value.options = this._getCodeSettingsFromDialog();
		this.parent().upload();
	}
	static exitFullscreen() {
		window.app.util.getDialog().exitFullScreen();
	}
	static toggleFullscreenOptions() {
		const dialog = window.app.util.getDialog();
		dialog.toggleOptions();
	}
	static setThemeWhite() {
		
window.app.util.getDialog().setThemeWhite();
	}
	static setThemeDark() {
		window.app.util.getDialog().setThemeDark();
	}
	// async(..., 0) defers to the next Polymer microtask turn.
	static fontSizeChange() {
		window.app.async(() => {
			window.app.util.getDialog().fontSizeChange();
		}, 0);
	}
	static jsLintGlobalsChange() {
		window.app.async(() => {
			window.scriptEdit.jsLintGlobalsChange();
		}, 0);
	}
	static onKeyBindingKeyDown(e: Polymer.PolymerKeyDownEvent) {
		if (this.parent().item.type === 'script') {
			window.scriptEdit.onKeyBindingKeyDown(e);
		}
	}
	static parent() {
		return window.app;
	}
}

/**
 * Any templates
 */
static templates = class CRMAppTemplates {
	/**
	 * Merges two arrays
	 * (recursively, in place; `additionArray` entries win on conflicts)
	 */
	static mergeArrays<T extends T[] | U[], U>(mainArray: T, additionArray: T): T {
		for (let i = 0; i < additionArray.length; i++) {
			if (mainArray[i] && typeof additionArray[i] === 'object' &&
				mainArray[i] !== undefined && mainArray[i] !== null) {
				if (Array.isArray(additionArray[i])) {
					mainArray[i] = this.mergeArrays<T, U>(mainArray[i] as T, additionArray[i] as T);
				} else {
					mainArray[i] = this.mergeObjects(mainArray[i], additionArray[i]);
				}
			} else {
				// Non-object (or missing main entry): plain overwrite.
				mainArray[i] = additionArray[i];
			}
		}
		return mainArray;
	};
	/**
	 * Merges two objects
	 * (recursively, in place; own keys of `additions` win on conflicts)
	 */
	static mergeObjects<T extends {
		[key: string]: any;
		[key: number]: any;
	}, Y extends Partial<T>>(mainObject: T, additions: Y): T & Y {
		for (let key in additions) {
			if (additions.hasOwnProperty(key)) {
				if (typeof additions[key] === 'object' &&
					typeof mainObject[key] === 'object' &&
					mainObject[key] !== undefined &&
					mainObject[key] !== null) {
					if (Array.isArray(additions[key])) {
						mainObject[key] = this.mergeArrays(mainObject[key], (additions as any)[key]);
					} else {
						mainObject[key] = this.mergeObjects(mainObject[key], additions[key]);
					}
				} else {
					(mainObject as any)[key] = (additions[key] as any) as T[keyof T];
				}
			}
		}
		return mainObject as T & Y;
	};
	// Like mergeArrays but never replaces the top-level references of nested
	// containers (mutates them in place instead).
	static mergeArraysWithoutAssignment<T extends T[] | U[], U>(mainArray: T, additionArray: T) {
		for (let i = 0; i < additionArray.length; i++) {
			if (mainArray[i] && typeof additionArray[i] === 
'object' &&
				mainArray[i] !== undefined && mainArray[i] !== null) {
				if (Array.isArray(additionArray[i])) {
					this.mergeArraysWithoutAssignment<T, U>(mainArray[i] as T, additionArray[i] as T);
				} else {
					this.mergeObjectsWithoutAssignment(mainArray[i], additionArray[i]);
				}
			} else {
				mainArray[i] = additionArray[i];
			}
		}
	}
	// Like mergeObjects but mutates nested containers in place rather than
	// reassigning them.
	static mergeObjectsWithoutAssignment<T extends {
		[key: string]: any;
		[key: number]: any;
	}, Y extends Partial<T>>(mainObject: T, additions: Y) {
		for (let key in additions) {
			if (additions.hasOwnProperty(key)) {
				if (typeof additions[key] === 'object' &&
					mainObject[key] !== undefined &&
					mainObject[key] !== null) {
					if (Array.isArray(additions[key])) {
						this.mergeArraysWithoutAssignment(mainObject[key], (additions as any)[key]);
					} else {
						this.mergeObjectsWithoutAssignment(mainObject[key], additions[key]);
					}
				} else {
					(mainObject as any)[key] = additions[key];
				}
			}
		}
	}
	// Default nodeInfo (fresh local node), with `options` merged on top.
	static getDefaultNodeInfo(options: Partial<CRM.NodeInfo> = {}): CRM.NodeInfo {
		const defaultNodeInfo: Partial<CRM.NodeInfo> = {
			permissions: [],
			installDate: new Date().toLocaleDateString(),
			lastUpdatedAt: Date.now(),
			version: '1.0',
			isRoot: false,
			source: 'local'
		};
		return this.mergeObjects(defaultNodeInfo, options) as CRM.NodeInfo;
	};
	/**
	 * Gets the default link node object with given options applied
	 */
	static getDefaultLinkNode(options: Partial<CRM.LinkNode> = {}): CRM.LinkNode {
		const defaultNode: Partial<CRM.LinkNode> = {
			name: this.parent().___(I18NKeys.crm.exampleLinkName),
			onContentTypes: [true, true, true, false, false, false],
			type: 'link',
			showOnSpecified: false,
			nodeInfo: this.getDefaultNodeInfo(options.nodeInfo),
			triggers: [{
				url: '*://*.example.com/*',
				not: false
			}],
			isLocal: true,
			value: [
				{
					newTab: true,
					url: 'https://www.example.com'
				}
			]
		};
		return this.mergeObjects(defaultNode, options) as CRM.LinkNode;
	};
	/**
	 * Gets the default stylesheet value object with given options applied
	 */
	static getDefaultStylesheetValue(options: Partial<CRM.StylesheetVal> = {}): CRM.StylesheetVal {
		const 
value: CRM.StylesheetVal = {
			stylesheet: [].join('\n'),
			launchMode: CRMLaunchModes.RUN_ON_CLICKING,
			toggle: false,
			defaultOn: false,
			options: {},
			convertedStylesheet: null
		};
		return this.mergeObjects(value, options) as CRM.StylesheetVal;
	};
	/**
	 * Gets the default script value object with given options applied
	 */
	static getDefaultScriptValue(options: Partial<CRM.ScriptVal> = {}): CRM.ScriptVal {
		const value: CRM.ScriptVal = {
			launchMode: CRMLaunchModes.RUN_ON_CLICKING,
			backgroundLibraries: [],
			libraries: [],
			script: [].join('\n'),
			backgroundScript: '',
			metaTags: {},
			options: {},
			ts: {
				enabled: false,
				backgroundScript: {},
				script: {}
			}
		};
		return this.mergeObjects(value, options) as CRM.ScriptVal;
	};
	/**
	 * Gets the default script node object with given options applied
	 */
	static getDefaultScriptNode(options: CRM.PartialScriptNode = {}): CRM.ScriptNode {
		const defaultNode: CRM.PartialScriptNode = {
			name: this.parent().___(I18NKeys.crm.exampleScriptName),
			onContentTypes: [true, true, true, false, false, false],
			type: 'script',
			isLocal: true,
			nodeInfo: this.getDefaultNodeInfo(options.nodeInfo),
			triggers: [
				{
					url: '*://*.example.com/*',
					not: false
				}
			],
			value: this.getDefaultScriptValue(options.value)
		};
		return this.mergeObjects(defaultNode, options) as CRM.ScriptNode;
	};
	/**
	 * Gets the default stylesheet node object with given options applied
	 */
	static getDefaultStylesheetNode(options: CRM.PartialStylesheetNode = {}): CRM.StylesheetNode {
		const defaultNode: CRM.PartialStylesheetNode = {
			name: this.parent().___(I18NKeys.crm.exampleStylesheetName),
			onContentTypes: [true, true, true, false, false, false],
			type: 'stylesheet',
			isLocal: true,
			nodeInfo: this.getDefaultNodeInfo(options.nodeInfo),
			triggers: [
				{
					url: '*://*.example.com/*',
					not: false
				}
			],
			value: this.getDefaultStylesheetValue(options.value)
		};
		return this.mergeObjects(defaultNode, options) as CRM.StylesheetNode;
	};
	/**
	 * Gets the default divider or menu node object with given options applied
	 */
	static 
getDefaultDividerOrMenuNode(options: Partial<CRM.PassiveNode>, type: 'divider' | 'menu'): CRM.DividerNode | CRM.MenuNode;
	static getDefaultDividerOrMenuNode(options: Partial<CRM.PassiveNode>, type: 'divider'): CRM.DividerNode;
	static getDefaultDividerOrMenuNode(options: Partial<CRM.PassiveNode>, type: 'menu'): CRM.MenuNode;
	static getDefaultDividerOrMenuNode(options: Partial<CRM.PassiveNode> = {}, type: 'divider' | 'menu'): CRM.DividerNode | CRM.MenuNode {
		const defaultNode: Partial<CRM.PassiveNode> = {
			name: type === 'menu' ?
				this.parent().___(I18NKeys.crm.exampleMenuName) :
				this.parent().___(I18NKeys.crm.exampleDividerName),
			type: type,
			nodeInfo: this.getDefaultNodeInfo(options.nodeInfo),
			onContentTypes: [true, true, true, false, false, false],
			isLocal: true,
			value: null,
			// Only menus hold children; dividers are leaves.
			children: type === 'menu' ? [] : undefined
		};
		return this.mergeObjects(defaultNode, options) as CRM.DividerNode | CRM.MenuNode;
	};
	/**
	 * Gets the default divider node object with given options applied
	 */
	static getDefaultDividerNode(options: Partial<CRM.DividerNode> = {}): CRM.DividerNode {
		return this.getDefaultDividerOrMenuNode(options, 'divider');
	};
	/**
	 * Gets the default menu node object with given options applied
	 */
	static getDefaultMenuNode(options: Partial<CRM.MenuNode> = {}): CRM.MenuNode {
		return this.getDefaultDividerOrMenuNode(options, 'menu');
	};
	/**
	 * Gets the default node of given type
	 */
	static getDefaultNodeOfType(type: CRM.NodeType, options: Partial<CRM.Node> = {}): CRM.Node {
		switch (type) {
			case 'link':
				return this.getDefaultLinkNode(options as Partial<CRM.LinkNode>);
			case 'script':
				return this.getDefaultScriptNode(options as Partial<CRM.ScriptNode>);
			case 'divider':
				return this.getDefaultDividerNode(options as Partial<CRM.DividerNode>);
			case 'menu':
				return this.getDefaultMenuNode(options as Partial<CRM.MenuNode>);
			case 'stylesheet':
				return this.getDefaultStylesheetNode(options as Partial<CRM.StylesheetNode>);
		}
	}
	/** * Gets all permissions that can be requested by this 
extension */
	static getPermissions(): CRM.Permission[] {
		// NOTE(review): 'contentSettings' appears twice in this list — likely
		// a copy/paste duplicate; harmless but worth cleaning up.
		return [
			'alarms',
			'activeTab',
			'background',
			'bookmarks',
			'browsingData',
			'clipboardRead',
			'clipboardWrite',
			'contentSettings',
			'cookies',
			'contentSettings',
			'contextMenus',
			'declarativeContent',
			'desktopCapture',
			'downloads',
			'history',
			'identity',
			'idle',
			'management',
			'pageCapture',
			'power',
			'privacy',
			'printerProvider',
			'sessions',
			'system.cpu',
			'system.memory',
			'system.storage',
			'topSites',
			'tabs',
			'tabCapture',
			'tts',
			'webNavigation',
			'webRequest',
			'webRequestBlocking'
		];
	};
	/**
	 * Gets all permissions that can be requested by this extension including those specific to scripts
	 */
	static getScriptPermissions(): CRM.Permission[] {
		// NOTE(review): same duplicated 'contentSettings' entry as above.
		return [
			'alarms',
			'activeTab',
			'background',
			'bookmarks',
			'browsingData',
			'clipboardRead',
			'clipboardWrite',
			'contentSettings',
			'cookies',
			'contentSettings',
			'contextMenus',
			'declarativeContent',
			'desktopCapture',
			'downloads',
			'history',
			'identity',
			'idle',
			'management',
			'pageCapture',
			'power',
			'privacy',
			'printerProvider',
			'sessions',
			'system.cpu',
			'system.memory',
			'system.storage',
			'topSites',
			'tabs',
			'tabCapture',
			'tts',
			'webNavigation',
			'webRequest',
			'webRequestBlocking',
			//Script-specific permissions
			'crmGet',
			'crmWrite',
			'crmRun',
			'crmContextmenu',
			'chrome',
			'browser',
			//GM_Permissions
			'GM_info',
			'GM_deleteValue',
			'GM_getValue',
			'GM_listValues',
			'GM_setValue',
			'GM_getResourceText',
			'GM_getResourceURL',
			'GM_addStyle',
			'GM_log',
			'GM_openInTab',
			'GM_registerMenuCommand',
			'GM_setClipboard',
			'GM_xmlhttpRequest',
			'unsafeWindow'
		];
	};
	/**
	 * Gets the description for given permission
	 */
	static getPermissionDescription(permission: CRM.Permission): string {
		// Localized description per permission key; looked up at the end.
		const descriptions = {
			alarms: this.parent().___(I18NKeys.permissions.alarms),
			activeTab: this.parent().___(I18NKeys.permissions.activeTab),
			background: this.parent().___(I18NKeys.permissions.background),
			bookmarks: this.parent().___(I18NKeys.permissions.bookmarks),
			browsingData: 
this.parent().___(I18NKeys.permissions.browsingData),
			clipboardRead: this.parent().___(I18NKeys.permissions.clipboardRead),
			clipboardWrite: this.parent().___(I18NKeys.permissions.clipboardWrite),
			cookies: this.parent().___(I18NKeys.permissions.cookies),
			contentSettings: this.parent().___(I18NKeys.permissions.contentSettings),
			contextMenus: this.parent().___(I18NKeys.permissions.contextMenus),
			declarativeContent: this.parent().___(I18NKeys.permissions.declarativeContent),
			desktopCapture: this.parent().___(I18NKeys.permissions.desktopCapture),
			downloads: this.parent().___(I18NKeys.permissions.downloads),
			history: this.parent().___(I18NKeys.permissions.history),
			identity: this.parent().___(I18NKeys.permissions.identity),
			idle: this.parent().___(I18NKeys.permissions.idle),
			management: this.parent().___(I18NKeys.permissions.management),
			notifications: this.parent().___(I18NKeys.permissions.notifications),
			pageCapture: this.parent().___(I18NKeys.permissions.pageCapture),
			power: this.parent().___(I18NKeys.permissions.power),
			privacy: this.parent().___(I18NKeys.permissions.privacy),
			printerProvider: this.parent().___(I18NKeys.permissions.printerProvider),
			sessions: this.parent().___(I18NKeys.permissions.sessions),
			"system.cpu": this.parent().___(I18NKeys.permissions.systemcpu),
			"system.memory": this.parent().___(I18NKeys.permissions.systemmemory),
			"system.storage": this.parent().___(I18NKeys.permissions.systemstorage),
			topSites: this.parent().___(I18NKeys.permissions.topSites),
			tabCapture: this.parent().___(I18NKeys.permissions.tabCapture),
			tabs: this.parent().___(I18NKeys.permissions.tabs),
			tts: this.parent().___(I18NKeys.permissions.tts),
			webNavigation: this.parent().___(I18NKeys.permissions.webNavigation) +
				' (https://developer.chrome.com/extensions/webNavigation)',
			webRequest: this.parent().___(I18NKeys.permissions.webRequest),
			webRequestBlocking: this.parent().___(I18NKeys.permissions.webRequestBlocking),
			//Script-specific descriptions
			crmGet: 
this.parent().___(I18NKeys.permissions.crmGet),
			crmWrite: this.parent().___(I18NKeys.permissions.crmWrite),
			crmRun: this.parent().___(I18NKeys.permissions.crmRun),
			crmContextmenu: this.parent().___(I18NKeys.permissions.crmContextmenu),
			chrome: this.parent().___(I18NKeys.permissions.chrome),
			browser: this.parent().___(I18NKeys.permissions.browser),
			//Tampermonkey APIs
			GM_addStyle: this.parent().___(I18NKeys.permissions.GMAddStyle),
			GM_deleteValue: this.parent().___(I18NKeys.permissions.GMDeleteValue),
			GM_listValues: this.parent().___(I18NKeys.permissions.GMListValues),
			GM_addValueChangeListener: this.parent().___(I18NKeys.permissions.GMAddValueChangeListener),
			GM_removeValueChangeListener: this.parent().___(I18NKeys.permissions.GMRemoveValueChangeListener),
			GM_setValue: this.parent().___(I18NKeys.permissions.GMSetValue),
			GM_getValue: this.parent().___(I18NKeys.permissions.GMGetValue),
			GM_log: this.parent().___(I18NKeys.permissions.GMLog),
			GM_getResourceText: this.parent().___(I18NKeys.permissions.GMGetResourceText),
			GM_getResourceURL: this.parent().___(I18NKeys.permissions.GMGetResourceURL),
			GM_registerMenuCommand: this.parent().___(I18NKeys.permissions.GMRegisterMenuCommand),
			GM_unregisterMenuCommand: this.parent().___(I18NKeys.permissions.GMUnregisterMenuCommand),
			GM_openInTab: this.parent().___(I18NKeys.permissions.GMOpenInTab),
			GM_xmlhttpRequest: this.parent().___(I18NKeys.permissions.GMXmlhttpRequest),
			GM_download: this.parent().___(I18NKeys.permissions.GMDownload),
			GM_getTab: this.parent().___(I18NKeys.permissions.GMGetTab),
			GM_saveTab: this.parent().___(I18NKeys.permissions.GMSaveTab),
			GM_getTabs: this.parent().___(I18NKeys.permissions.GMGetTabs),
			GM_notification: this.parent().___(I18NKeys.permissions.GMNotification),
			GM_setClipboard: this.parent().___(I18NKeys.permissions.GMSetClipboard),
			GM_info: this.parent().___(I18NKeys.permissions.GMInfo),
			unsafeWindow: this.parent().___(I18NKeys.permissions.unsafeWindow)
		};
		return descriptions[permission as keyof 
typeof descriptions]; };
	// Resolves the enclosing app element; companion to the description helpers above.
	static parent(): CrmApp { return window.app; } };
/**
 * CRM functions.
 */
static crm = class CRMAppCRMFunctions {
	// Returns the localized display name for a CRM node type.
	static getI18NNodeType(nodeType: CRM.NodeType) { switch (nodeType) { case 'link': return this._parent().___(I18NKeys.crm.link); case 'script': return this._parent().___(I18NKeys.crm.script); case 'stylesheet': return this._parent().___(I18NKeys.crm.stylesheet); case 'menu': return this._parent().___(I18NKeys.crm.menu); case 'divider': return this._parent().___(I18NKeys.crm.divider); } }
	// Resolves a path of child indices to a node in the CRM tree.
	// With returnArray=true the containing children array is returned instead of the node itself.
	static lookup(path: number[], returnArray?: boolean): CRM.Node | CRM.Node[];
	static lookup(path: number[], returnArray: false): CRM.Node;
	static lookup(path: number[], returnArray: true): CRM.Node[];
	static lookup(path: number[]): CRM.Node;
	static lookup(path: number[], returnArray: boolean = false): CRM.Node | CRM.Node[] { const pathCopy = JSON.parse(JSON.stringify(path)); if (returnArray) { pathCopy.splice(pathCopy.length - 1, 1); } if (path.length === 0) { return window.app.settings.crm; } if (path.length === 1) { return (returnArray ? window.app.settings.crm : window.app.settings.crm[path[0]]); } let currentTree = window.app.settings.crm; let currentItem: CRM.Node = null; let parent: CRM.Node[] = null; for (let i = 0; i < path.length; i++) { parent = currentTree; if (i !== path.length - 1) { currentTree = currentTree[path[i]].children as CRM.Node[]; } else { currentItem = currentTree[path[i]]; } } return (returnArray ? parent : currentItem); };
	// Depth-first search for `id` below `node`; returns the parent node
	// (or its children array when returnArray=true), null when absent.
	private static _lookupId(id: CRM.GenericNodeId, returnArray: boolean, node: CRM.Node): CRM.Node[] | CRM.Node | void;
	private static _lookupId(id: CRM.GenericNodeId, returnArray: false, node: CRM.Node): CRM.Node;
	private static _lookupId(id: CRM.GenericNodeId, returnArray: true, node: CRM.Node): CRM.Node[];
	private static _lookupId(id: CRM.GenericNodeId, returnArray: boolean, node: CRM.Node): CRM.Node[] | CRM.Node | void { const nodeChildren = node.children; if (nodeChildren) { let el; for (let i = 0; i < nodeChildren.length; i++) { if (nodeChildren[i].id === id) { return (returnArray ? nodeChildren : node); } el = this._lookupId(id, returnArray, nodeChildren[i]); if (el) { return el; } } } return null; };
	// Finds a node by id. returnArray=false uses the nodesById index directly;
	// returnArray=true walks the tree and yields the array containing the node.
	static lookupId(id: CRM.GenericNodeId, returnArray: boolean): CRM.Node[] | CRM.Node;
	static lookupId(id: CRM.GenericNodeId, returnArray: true): CRM.Node[];
	static lookupId(id: CRM.GenericNodeId, returnArray: false): CRM.Node;
	static lookupId(id: CRM.GenericNodeId, returnArray: boolean): CRM.Node[] | CRM.Node { if (!returnArray) { return window.app.nodesById.get(id); } let el; for (let i = 0; i < window.app.settings.crm.length; i++) { if (window.app.settings.crm[i].id === id) { return window.app.settings.crm; } el = this._lookupId(id, returnArray, window.app.settings.crm[i]); if (el) { return el; } } return null; };
	/**
	 * Adds value to the CRM at 'first', 'last' (default) or appended via insertInto,
	 * then uploads the settings and rebuilds the edit view.
	 */
	static add<T extends CRM.Node>(value: T, position: string = 'last') { if (position === 'first') { this._parent().settings.crm = this._parent().util.insertInto(value, this._parent().settings.crm, 0); } else if (position === 'last' || position === undefined) { this._parent().settings.crm[this._parent().settings.crm.length] = value; } else { this._parent().settings.crm = this._parent().util.insertInto(value, this._parent().settings.crm); } window.app.upload(); window.app.editCRM.build({ setItems: window.app.editCRM.setMenus }); };
	/**
	 * Moves a value in the CRM from one place to another
	 */
	static
move(toMove: number[], target: number[]) { const toMoveContainer = this.lookup(toMove, true); let toMoveIndex = toMove[toMove.length - 1]; const toMoveItem = toMoveContainer[toMoveIndex]; const newTarget = this.lookup(target, true); const targetIndex = target[target.length - 1]; const sameColumn = toMoveContainer === newTarget; if (sameColumn && toMoveIndex > targetIndex) { this._parent().util.insertInto(toMoveItem, newTarget, targetIndex); toMoveContainer.splice((~~toMoveIndex) + 1, 1); } else { this._parent().util.insertInto(toMoveItem, newTarget, sameColumn ? targetIndex + 1 : targetIndex); toMoveContainer.splice(toMoveIndex, 1); } window.app.upload();
	//Check if setMenus are still valid
	for (let i = 1; i <= window.app.editCRM.setMenus.length - 1; i++) { const lookedup = this.lookup(window.app.editCRM.setMenus.slice(0, i), false); if (!lookedup || lookedup.type !== 'menu') { window.app.editCRM.setMenus = [-1]; break; } } window.app.editCRM.build({ setItems: window.app.editCRM.setMenus, quick: true }); };
	// Recursively assigns each node's `path` (array of child indices from the root).
	static buildNodePaths(tree: CRM.Tree, currentPath: number[] = []) { for (let i = 0; i < tree.length; i++) { const childPath = currentPath.concat([i]); const node = tree[i]; node.path = childPath; if (node.children) { this.buildNodePaths(node.children, childPath); } } };
	private static _parent(): CrmApp { return window.app; } };
/**
 * Various util functions
 */
static util = class CRMAppUtil {
	// Walks the event's element path, returning the first non-null result of `condition`.
	static iteratePath<T>(e: { path: HTMLElement[]; }|{ Aa: HTMLElement[]; }|Polymer.CustomEvent, condition: (element: Polymer.PolymerElement|DocumentFragment|HTMLElement) => T): T { let index = 0; const path = this.getPath(e); let result: T = condition(path[index]); while (path[index + 1] && result === null) { result = condition(path[++index]); } return result; }
	// True when some index i holds truthy values in BOTH arrays (pairwise check, not set intersection).
	static arraysOverlap<T>(arr1: T[], arr2: T[]): boolean { for (let i = 0; i < arr1.length; i++) { if (arr1[i] && arr2[i]) { return true; } } return false; }
	// Promise-based setTimeout.
	static wait(time: number) { return new Promise<void>((resolve)
=> { window.setTimeout(() => { resolve(null); }, time); }); }
	// Returns an array of `length` undefined slots.
	static createArray(length: number): void[] { const arr = []; for (let i = 0; i < length; i++) { arr[i] = undefined; } return arr; }
	static getChromeVersion() { return this.parent().getChromeVersion(); }
	// GETs `path` (resolved against the extension bundle when `local`),
	// resolving with the response body or rejecting with the HTTP status code.
	static xhr(path: string, local: boolean): Promise<string> { return new Promise<string>((resolve, reject) => { const xhr: XMLHttpRequest = new window.XMLHttpRequest(); xhr.open('GET', local ? browserAPI.runtime.getURL(path) : path); xhr.onreadystatechange = () => { if (xhr.readyState === XMLHttpRequest.DONE) { if (xhr.status === 200) { resolve(xhr.responseText); } else { reject(xhr.status); } } }
		xhr.send(); }); }
	// Shows `text` in the shared message toast.
	static showToast(text: string) { const toast = window.app.$.messageToast; toast.text = text; toast.show(); }
	// Creates an HTML element with optional id/classes/attributes, click/hover/blur
	// handlers, a ref callback, and children (string children become innerText).
	static createElement<K extends keyof ElementTagNameMaps, T extends ElementTagNameMaps[K]>(tagName: K, options: { id?: string; classes?: string[]; props?: { [key: string]: string|number; }
		onclick?: (el: T, event: Event) => void; onhover?: (el: T, event: Event) => void; onblur?: (el: T, event: Event) => void; ref?: (el: T) => void; }, children: (any|string)[] = []): T { const el = document.createElement(tagName) as T; options.id && (el.id = options.id); options.classes && el.classList.add.apply(el.classList, options.classes); for (const key in options.props || {}) { el.setAttribute(key, options.props[key] + ''); } options.onclick && el.addEventListener('click', (e) => { options.onclick(el, e); }); options.onhover && el.addEventListener('mouseenter', (e) => { options.onhover(el, e); }); options.onblur && el.addEventListener('mouseleave', (e) => { options.onblur(el, e); }); options.ref && options.ref(el); for (const child of children) { if (typeof child === 'string') { (el as HTMLSpanElement).innerText = child; } else { el.appendChild(child); } } return el; }
	// Creates a namespaced SVG element with optional id/classes/attributes and children.
	static createSVG<K extends keyof SVGElementTagNameMap, T extends SVGElementTagNameMap[K]>(tag: K, options: { id?: string; classes?:
string[]; props?: { [key: string]: string; } }, children: any[] = []): T { const el = document.createElementNS('http://www.w3.org/2000/svg', tag); options.id && (el.id = options.id); options.classes && el.classList.add.apply(el.classList, options.classes); for (const key in options.props || {}) { el.setAttributeNS(null, key, options.props[key] + ''); } for (const child of children) { el.appendChild(child); } return el as T; }
	// Copies an array-like into a real array.
	private static _toArray<T>(iterable: ArrayLike<T>): T[] { const arr = []; for (let i = 0; i < iterable.length; i++) { arr.push(iterable[i]); } return arr; }
	// Rebuilds an event-path-like chain by walking parentElement up from `element`,
	// then appending document.documentElement and window (mirrors native event.path).
	private static _generatePathFrom(element: HTMLElement): HTMLElement[] { const path = []; while (element) { path.push(element); element = element.parentElement; } path.push(document.documentElement, window); return path as HTMLElement[]; }
	// Normalizes an event's element path across browsers: native `path`,
	// the minified `Aa` alias, or a fallback built from the target's ancestors.
	static getPath(e: { path: HTMLElement[]; }|{ Aa: HTMLElement[]; }|{ target: HTMLElement; }|Polymer.CustomEvent) { if ('path' in e && e.path) { return this._toArray(e.path); } else if ('Aa' in e && e.Aa) { return this._toArray(e.Aa); } return this._generatePathFrom((e as { target: HTMLElement; }).target); }
	// Lazily-created scratch element appended to the app root.
	private static _dummy: HTMLElement = null;
	static getDummy(): HTMLElement { if (this._dummy) { return this._dummy; } this._dummy = document.createElement('div'); this.parent().appendChild(this._dummy); return this._dummy; }
	// First element in the event path whose tag name matches (case-insensitive).
	static findElementWithTagname<T extends keyof ElementTagNameMaps>(event: { path: HTMLElement[]; }|{ Aa: HTMLElement[]; }|Polymer.CustomEvent, tagName: T): ElementTagNameMaps[T] { return this.iteratePath(event, (node) => { if (node && 'tagName' in node && (node as Polymer.PolymerElement).tagName.toLowerCase() === tagName) { return node; } return null; }) as ElementTagNameMaps[T]; }
	// First element in the event path carrying the given class.
	static findElementWithClassName(event: { path: HTMLElement[]; }|{ Aa: HTMLElement[]; }|Polymer.CustomEvent, className: string): Polymer.PolymerElement { return this.iteratePath(event, (node) => { if (node && 'classList' in node && (node as
Polymer.PolymerElement).classList.contains(className)) { return node; } return null; }) as Polymer.PolymerElement };
	// First element in the event path with the given id.
	static findElementWithId(event: { path: HTMLElement[]; }|{ Aa: HTMLElement[]; }|Polymer.CustomEvent, id: string): Polymer.PolymerElement { return this.iteratePath(event, (node) => { if (node && 'id' in node && (node as Polymer.PolymerElement).id === id) { return node; } return null; }) as Polymer.PolymerElement; }
	/**
	 * Inserts the value into given array in place at `position`, shifting later
	 * items one slot to the right; appends when position is null.
	 */
	static insertInto<T>(toAdd: T, target: T[], position: number = null): T[] { if (position !== null) { let temp1, i; let temp2 = toAdd; for (i = position; i < target.length; i++) { temp1 = target[i]; target[i] = temp2; temp2 = temp1; } target[i] = temp2; } else { target.push(toAdd); } return target; };
	// Deep structural equality of plain objects; arrays are delegated to compareArray.
	// A falsy secondObj matches any falsy firstObj. Only firstObj's own keys are compared.
	static compareObj(firstObj: { [key: string]: any; }, secondObj: { [key: string]: any; }): boolean { if (!secondObj) { return !firstObj; } if (!firstObj) { return false; } for (let key in firstObj) { if (firstObj.hasOwnProperty(key)) { if (typeof firstObj[key] === 'object') { if (typeof secondObj[key] !== 'object') { return false; } if (Array.isArray(firstObj[key])) { if (!Array.isArray(secondObj[key])) { return false; }
		// ReSharper disable once FunctionsUsedBeforeDeclared
		if (!this.compareArray(firstObj[key], secondObj[key])) { return false; } } else if (Array.isArray(secondObj[key])) { return false; } else { if (!this.compareObj(firstObj[key], secondObj[key])) { return false; } } } else if (firstObj[key] !== secondObj[key]) { return false; } } } return true; };
	// Deep structural equality of arrays.
	// NOTE(review): two falsy arrays compare as NOT equal here, while compareObj
	// treats two falsy objects as equal — confirm this asymmetry is intended.
	static compareArray(firstArray: any[], secondArray: any[]): boolean { if (!firstArray !== !secondArray) { return false; } else if (!firstArray || !secondArray) { return false; } const firstLength = firstArray.length; if (firstLength !== secondArray.length) { return false; } let i; for (i = 0; i < firstLength; i++) { if (typeof firstArray[i] === 'object') { if (typeof secondArray[i] !== 'object') { return false; } if
(Array.isArray(firstArray[i])) { if (!Array.isArray(secondArray[i])) { return false; } if (!this.compareArray(firstArray[i], secondArray[i])) { return false; } } else if (Array.isArray(secondArray[i])) { return false; } else { if (!this.compareObj(firstArray[i], secondArray[i])) { return false; } } } else if (firstArray[i] !== secondArray[i]) { return false; } } return true; }
	// Pre-order traversal of a single node's subtree.
	static treeForEach(node: CRM.Node, fn: (node: CRM.Node) => any) { fn(node); if (node.children) { for (let i = 0; i < node.children.length; i++) { this.treeForEach(node.children[i], fn); } } }
	// Runs fn over every node in the tree; children of a menu node are visited before the menu node itself.
	static crmForEach(tree: CRM.Node[], fn: (node: CRM.Node) => void): CRM.Tree { for (let i = 0; i < tree.length; i++) { const node = tree[i]; if (node.type === 'menu' && node.children) { this.crmForEach(node.children, fn); } fn(node); } return tree; };
	static getQuerySlot() { return window.Polymer.PaperDropdownBehavior.querySlot; }
	// Returns the edit dialog matching the currently-edited item's type.
	static getDialog(): CodeEditBehaviorInstance { return this.parent().item.type === 'script' ? window.scriptEdit : window.stylesheetEdit; }
	static parent(): CrmApp { return window.app; } }
// In-page demo of the context menu, built from the current CRM tree.
static pageDemo = class CRMAppPageDemo {
	private static _active: boolean = false;
	private static _root: HTMLElement = null;
	// Window listeners registered via _listen, kept for later removal.
	private static _listeners: { event: string; handler: EventListener; }[] = [];
	// Tags the element with a `contentTypeN` class for every enabled content type.
	private static _setContentTypeClasses(el: HTMLElement, node: CRM.Node) { const contentTypes = node.onContentTypes; for (let i = 0; i < contentTypes.length; i++) { contentTypes[i] && el.classList.add(`contentType${i}`); } }
	// Opens the node's edit page unless another node is already being edited (then shows a toast).
	private static _editNodeFromClick(node: CRM.Node) { if (window.app.item) { window.app.$.messageToast.text = this.parent().___(I18NKeys.crmApp.code.alreadyEditingNode); window.app.$.messageToast.show(); } else { const elements = window.app.editCRM.shadowRoot.querySelectorAll('edit-crm-item'); for (let i = 0; i < elements.length; i++) { const element = elements[i]; if (element.item && element.item.id && element.item.id === node.id) { element.openEditPage(); break; } } } }
	private
static _genDividerNode(node: CRM.DividerNode) { return this.parent().util.createElement('div', { classes: ['contextMenuDivider'], props: { title: node.name }, onclick: () => { if (window.app.storageLocal.editCRMInRM) { this._editNodeFromClick(node); } } }); }
	// Demo element for a link node; clicking opens its URLs unless edit-from-menu is enabled.
	private static _genLinkNode(node: CRM.LinkNode) { return this.parent().util.createElement('div', { classes: ['contextMenuLink'], onclick: () => { if (window.app.storageLocal.editCRMInRM) { this._editNodeFromClick(node); } else { node.value.forEach((link) => { window.open(link.url, '_blank'); }); } }, props: { title: `Link node ${node.name}` } }, [ this.parent().util.createElement('div', { classes: ['contextMenuLinkText'] }, [node.name]) ]); }
	// Demo element for a script node; clicking only shows a "would execute" toast.
	private static _genScriptNode(node: CRM.ScriptNode) { return this.parent().util.createElement('div', { classes: ['contextMenuScript'], onclick: () => { if (window.app.storageLocal.editCRMInRM) { this._editNodeFromClick(node); } else { window.app.$.messageToast.text = this.parent().___(I18NKeys.crmApp.code.wouldExecuteScript); window.app.$.messageToast.show(); } }, props: { title: `Script node ${node.name}` } }, [ this.parent().util.createElement('div', { classes: ['contextMenuScriptText'] }, [node.name]) ]); }
	// Demo element for a stylesheet node; clicking only shows a "would execute" toast.
	private static _genStylesheetNode(node: CRM.StylesheetNode) { return this.parent().util.createElement('div', { classes: ['contextMenuStylesheet'], onclick: () => { if (window.app.storageLocal.editCRMInRM) { this._editNodeFromClick(node); } else { window.app.$.messageToast.text = this.parent().___(I18NKeys.crmApp.code.wouldExecuteStylesheet); window.app.$.messageToast.show(); } }, props: { title: `Stylesheet node ${node.name}` } }, [ this.parent().util.createElement('div', { classes: ['contextMenuStylesheetText'] }, [node.name]) ]); }
	// Demo element for a menu node: hovering reveals the submenu, clicking pins it open briefly.
	private static _genMenuNode(node: CRM.MenuNode) { let timer: number = null; let thisEl: HTMLElement = null; let container: HTMLElement = null; let childrenContainer: HTMLElement = null; return this.parent().util.createElement('div', {
classes: ['contextMenuMenu'], ref(el) { thisEl = el; }, onclick: () => { if (window.app.storageLocal.editCRMInRM) { this._editNodeFromClick(node); } else {
	// NOTE(review): this uses the wouldExecuteStylesheet message for a MENU node — confirm the key is intended.
	window.app.$.messageToast.text = this.parent().___(I18NKeys.crmApp.code.wouldExecuteStylesheet); thisEl.parentElement.classList.add('forcedVisible'); timer && window.clearTimeout(timer); timer = window.setTimeout(() => { thisEl.parentElement.classList.remove('forcedVisible'); childrenContainer.classList.add('hidden'); container.classList.remove('hover'); timer = null; }, 3000); } }, onhover(_el, e) { if (!thisEl.parentElement.classList.contains('forcedVisible')) { childrenContainer.classList.remove('hidden'); container.classList.add('hover'); e.stopPropagation(); } }, onblur(_el, e) { if (!thisEl.parentElement.classList.contains('forcedVisible')) { childrenContainer.classList.add('hidden'); container.classList.remove('hover'); e.stopPropagation(); } }, props: { title: `Menu node ${node.name}` } }, [ this.parent().util.createElement('div', { classes: ['contextMenuMenuContainer'], ref(_container) { container = _container; } }, [ this.parent().util.createElement('div', { classes: ['contextMenuMenuText'] }, [node.name]), this.parent().util.createElement('div', { classes: ['contextMenuMenuArrow'] }, [ this.parent().util.createSVG('svg', { classes: ['contextMenuMenuArrowImage'], props: { width: '48', height: '48', viewBox: '0 0 48 48' } }, [ this.parent().util.createSVG('path', { props: { d: 'M16 10v28l22-14z' } }) ]) ]) ]), this.parent().util.createElement('div', { classes: ['contextMenuMenuSubmenu', 'contextMenuMenuChildren', 'hidden'], ref(el) { childrenContainer = el; } }, (node.children || []).map(childNode => this._genNode(childNode))) ]); }
	// Builds the bare demo element for a node, dispatching on its type.
	private static _genNodeElement(node: CRM.Node) { switch (node.type) { case 'divider': return this._genDividerNode(node); case 'link': return this._genLinkNode(node); case 'script': return this._genScriptNode(node); case 'stylesheet': return this._genStylesheetNode(node);
case 'menu': return this._genMenuNode(node); } }
	// Wraps a generated element with the shared node class and content-type visibility flags.
	private static _genNode(node: CRM.Node): HTMLElement { const el = this._genNodeElement(node); el.classList.add('contextMenuNode'); if (window.app.storageLocal.editCRMInRM) { el.classList.add('clickable'); } this._setContentTypeClasses(el, node); return el; }
	// Builds the (initially hidden) root context menu from the current CRM tree.
	private static _genMenu(): HTMLElement { const root = document.createElement('div'); root.classList.add('contextMenuRoot', 'contextMenuMenuSubmenu', 'rootHidden'); const crm = window.app.settings.crm; for (const node of crm) { root.appendChild(this._genNode(node)); } return root; }
	// Adds/removes all six hiddenN classes plus rootHidden in one go.
	private static _setAllContentTypeClasses(el: HTMLElement, op: 'add'|'remove') { const arr: [undefined, undefined, undefined, undefined, undefined, undefined] = [ undefined, undefined, undefined, undefined, undefined, undefined ]; el.classList[op](...arr.map((_item: any, i: number) => `hidden${i}`)); el.classList[op]('rootHidden'); }
	// Positions the menu at the cursor, flipping above it when it would overflow the viewport bottom.
	// Temporarily un-hides the menu so getBoundingClientRect yields a real height.
	private static _setMenuPosition(menu: HTMLElement, e: Polymer.ClickEvent) { menu.style.left = `${e.clientX}px`; menu.classList.remove('rootHidden'); const bcr = menu.getBoundingClientRect(); menu.classList.add('rootHidden'); if (window.innerHeight > bcr.height + e.clientY) { menu.style.top = `${e.clientY}px`; } else { menu.style.top = `${e.clientY - bcr.height}px`; } }
	// Shows the menu for right-clicks inside #mainCont, revealing only enabled content types.
	private static _showMenu(menu: HTMLElement, e: Polymer.ClickEvent) { this._setMenuPosition(menu, e); if (window.app.util.findElementWithId(e, 'mainCont')) {
	//Get the current content type
	for (let i = 0; i < 6; i++) { if (window.app.crmTypes[i]) { menu.classList.remove(`hidden${i}`); } } menu.classList.remove('rootHidden'); } else { this._setAllContentTypeClasses(menu, 'remove'); } }
	// Registers a window listener and remembers it for later removal.
	private static _listen(event: string, handler: EventListener) { window.addEventListener(event, handler); this._listeners.push({ event, handler }); }
	// Wires contextmenu (show) plus click/scroll (hide) handlers.
	private static _setListeners(menu: HTMLElement) { this._listen('contextmenu', (e: MouseEvent) => { e.preventDefault(); this._showMenu(menu, e as any); });
this._listen('click', () => { this._setAllContentTypeClasses(menu, 'add'); }); this._listen('scroll', () => { this._setAllContentTypeClasses(menu, 'add'); }); }
	// Removes every listener registered through _listen.
	private static _unsetListeners() { this._listeners.forEach(({ event, handler }) => { window.removeEventListener(event, handler); }); }
	// Builds the demo menu, wires listeners and attaches it (hidden) to the document.
	private static _enable() { this._root = this._genMenu(); this._setListeners(this._root); this._setAllContentTypeClasses(this._root, 'add'); document.body.appendChild(this._root); }
	private static _disable() { this._root.remove(); this._unsetListeners(); this._active = false; }
	// (Re)creates the in-page demo when the CRMOnPage option is enabled, once the CRM settings exist.
	static async create() { if (this._active) { this._disable(); } if (!window.app.storageLocal.CRMOnPage) { return; } await window.onExistsChain(window, 'app', 'settings', 'crm'); this._active = true; this._enable(); }
	static parent(): CrmApp { return window.app; } } };
export type CrmApp = Polymer.El<'crm-app', typeof CA & typeof crmAppProperties & { editCRM: EditCrm; }>;
// Register the element now, or as soon as the registration framework signals readiness.
if (window.objectify) { window.register(CA); } else { window.addEventListener('RegisterReady', () => { window.register(CA); }); } }
export type CrmApp = CRMAppElement.CrmApp;
the_stack
* DescribeRechargeRecords response structure. */
export interface DescribeRechargeRecordsResponse {
  /** Account type. 1: device access; 2: cloud storage. Note: may be null, meaning no valid value was obtained. */
  AccountType?: number
  /** List of recharge records. Note: may be null, meaning no valid value was obtained. */
  Records?: Array<RechargeRecord>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * UploadOtaVersion response structure.
 */
export interface UploadOtaVersionResponse {
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * CreateGencode response structure.
 */
export interface CreateGencodeResponse {
  /** Generated source code (base64-encoded zip archive). Note: may be null, meaning no valid value was obtained. */
  ZipCode?: string
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DescribeLogs request structure.
 */
export interface DescribeLogsRequest {
  /** Device TID. */
  Tid: string
  /** Maximum number of entries per page; 0 < value <= 100. */
  Limit: number
  /** Pagination offset; value > 0. */
  Offset: number
  /** Log type. 1: online-status change; 2: ProConst change; 3: ProWritable change; 4: Action control; 5: ProReadonly change; 6: Event. */
  LogType?: number
  /** Query start time, UNIX timestamp in seconds. */
  StartTime?: number
  /** Thing-model object index for fuzzy queries; total length <= 255, each path node <= 16 characters. */
  DataObject?: string
  /** Query end time, UNIX timestamp in seconds. */
  EndTime?: number
}
/**
 * CreateIotDataType request structure.
 */
export interface CreateIotDataTypeRequest {
  /** User-defined data type, as a JSON-formatted string. */
  IotDataType: string
}
/**
 * DescribePubVersions request structure.
 */
export interface DescribePubVersionsRequest {
  /** Product ID. */
  ProductId: string
}
/**
 * RefundStorageService response structure.
 */
export interface RefundStorageServiceResponse {
  /** Cloud storage service ID. */
  ServiceId: string
  /** Region the cloud storage service lives in. */
  StorageRegion: string
  /** Device TID. */
  Tid: string
  /** Video stream channel number (for multi-stream devices such as NVRs this matches the device's actual channel number). */
  ChnNum: number
  /** End user's registration ID on the IoT Video platform. */
  AccessId: string
  /** Service start time. */
  StartTime: number
  /** Service expiry time. */
  EndTime: number
  /** Service status. 1: in normal use; 2: pending renewal (service expired but historical data not yet purged; renewing restores access); 3: expired (cloud data no longer queryable); 4: waiting to take effect. */
  Status: number
  /** List of valid cloud storage orders. */
  Data: Array<StorageOrder>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DescribeIotModel response structure.
 */
export interface DescribeIotModelResponse {
  /** Thing-model definition, JSON-formatted string. Note: may be null, meaning no valid value was obtained. */
  Data?: string
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DisableDevice response structure.
 */
export interface DisableDeviceResponse {
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DescribeRunLog request structure.
 */
export interface DescribeRunLogRequest {
  /** Device TID. */
  Tid: string
}
/**
 * DescribeProducts request structure.
 */
export interface DescribeProductsRequest {
  /** Page size — maximum entries shown on the current page; range 1-100. */
  Limit: number
  /** Pagination offset, starting from 0. */
  Offset: number
  /** Product model (for APP products, the APP package name). */
  ProductModel?: string
  /** Start time, UNIX timestamp in seconds. */
  StartTime?: number
  /** End time, UNIX timestamp in seconds. */
  EndTime?: number
}
/**
 * DeleteTraceIds request structure.
 */
export interface DeleteTraceIdsRequest {
  /** List of device TIDs. */
  Tids: Array<string>
}
/**
 * Historical thing-model version.
 */
export interface IotModelData {
  /** Version number. */
  Revision: number
  /** Release time. */
  ReleaseTime: number
}
/**
 * CreateStorageService request structure.
 */
export interface CreateStorageServiceRequest {
  /** Cloud storage package ID: yc1m3d/yc1m7d/yc1m30d: monthly packages with 3/7/30-day full-time storage; yc1y3d/yc1y7d/yc1y30d: yearly packages with 3/7/30-day full-time storage; ye1m3d/ye1m7d/ye1m30d: monthly packages with 3/7/30-day event storage; ye1y3d/ye1y7d/ye1y30d: yearly packages with 3/7/30-day event storage; yc1w7d: weekly package with 7-day full-time storage; ye1w7d: weekly package with 7-day event storage. */
  PkgId: string
  /** Device TID. */
  Tid: string
  /** Number of orders; several orders may be created at once. */
  OrderCount: number
  /** Region the cloud storage service lives in, e.g. ap-guangzhou, ap-singapore, na-siliconvalley, eu-frankfurt. */
  StorageRegion: string
  /** Video stream channel number (for multi-stream devices such as NVRs this matches the device's actual channel number). */
  ChnNum?: number
  /** Device owner's registration ID on the IoT Video platform; used to verify that the device/user relation chain is consistent between the PaaS/SaaS platforms. */
  AccessId?: string
  /** Time the service takes effect; when omitted the service takes effect immediately. */
  EnableTime?: number
}
/**
 * DescribeOsList response structure.
 */
export interface DescribeOsListResponse {
  /** System types. Note: may be null, meaning no valid value was obtained. */
  Data?: SystemType
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * ModifyProduct response structure.
 */
export interface
ModifyProductResponse {
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * ModifyProduct request structure.
 */
export interface ModifyProductRequest {
  /** Product ID. */
  ProductId: string
  /** Product name. */
  ProductName: string
  /** Product description. */
  ProductDescription: string
  /** Main chip manufacturer ID. */
  ChipManufactureId?: string
  /** Main chip ID. */
  ChipId?: string
}
/**
 * DisableDeviceStream request structure.
 */
export interface DisableDeviceStreamRequest {
  /** List of device TIDs. */
  Tids: Array<string>
}
/**
 * CreateIotModel request structure.
 */
export interface CreateIotModelRequest {
  /** Product ID. */
  ProductId: string
  /** Thing-model JSON string. */
  IotModel: string
}
/**
 * DescribeTraceStatus response structure.
 */
export interface DescribeTraceStatusResponse {
  /** List of device trace statuses. Note: may be null, meaning no valid value was obtained. */
  Data?: Array<TraceStatus>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * CreateUploadPath request structure.
 */
export interface CreateUploadPathRequest {
  /** Product ID. */
  ProductId: string
  /** Firmware file name. */
  FileName: string
}
/**
 * CreateDevToken request structure.
 */
export interface CreateDevTokenRequest {
  /** Unique ID of the customer's end user on IoT Video. */
  AccessId: string
  /** List of device TIDs; 0 < element count <= 100. */
  Tids: Array<string>
  /** Token TTL (time to live) in minutes. */
  TtlMinutes: number
}
/**
 * DescribeStorageService response structure.
 */
export interface DescribeStorageServiceResponse {
  /** Cloud storage service ID. */
  ServiceId: string
  /** Region the cloud storage service lives in. */
  StorageRegion: string
  /** Device TID. */
  Tid: string
  /** Video stream channel number (for multi-stream devices such as NVRs this matches the device's actual channel number). */
  ChnNum: number
  /** End user's registration ID on the IoT Video platform. */
  AccessId: string
  /** Service start time. */
  StartTime: number
  /** Service expiry time. */
  EndTime: number
  /** Service status. 1: in normal use; 2: pending renewal (service expired but historical data not yet purged; renewing restores access); 3: expired (cloud data no longer queryable); 4: waiting to take effect. */
  Status: number
  /** List of cloud storage orders. */
  Data: Array<StorageOrder>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DeleteIotDataType request structure.
 */
export interface DeleteIotDataTypeRequest {
  /** Identifier of the custom data type. */
  TypeId: string
}
/**
 * DescribeRechargeRecords request structure.
 */
export interface DescribeRechargeRecordsRequest {
  /** Account type. 1: device access; 2: cloud storage. */
  AccountType: number
  /** Record to start displaying from; defaults to 0. */
  Offset?: number
  /** Total number of records to query; defaults to 50. */
  Limit?: number
}
/**
 * ModifyVerContent response structure.
 */
export interface ModifyVerContentResponse {
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DescribeMessageQueue request structure.
 */
export interface DescribeMessageQueueRequest {
  /** Product ID. */
  ProductId: string
}
/**
 * DeleteAppUsr response structure.
 */
export interface DeleteAppUsrResponse {
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DescribeRegistrationStatus response structure.
 */
export interface DescribeRegistrationStatusResponse {
  /** List of end-user registration statuses. */
  Data?: Array<RegisteredStatus>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * DescribeProduct request structure.
 */
export interface DescribeProductRequest {
  /** Product ID. */
  ProductId: string
}
/**
 * DescribeProducts response structure.
 */
export interface DescribeProductsResponse {
  /** List of detailed product information. Note: may be null, meaning no valid value was obtained. */
  Data?: Array<ProductData>
  /** Total number of products. */
  TotalCount?: number
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * Operating system information.
 */
export interface OsData {
  /** Chip model. Note: may be null, meaning no valid value was obtained. */
  ChipId: string
  /** Chip manufacturer. Note: may be null, meaning no valid value was obtained. */
  ChipManufacture: string
}
/**
 * Device certificate and key.
 */
export interface DeviceCertificate {
  /** Device TID. */
  Tid: string
  /** Initial device certificate, base64-encoded. */
  Certificate: string
  /** Download URL for the device private key. */
  WhiteBoxSoUrl: string
}
/**
 * UpgradeDevice request structure.
 */
export interface UpgradeDeviceRequest {
  /** Device TID. */
  Tid: string
  /** Firmware version number. */
  OtaVersion: string
  /** Whether to upgrade immediately. */
  UpgradeNow: boolean
}
/**
 * Device log entry.
 */
export interface LogData {
  /** Occurrence time, UNIX timestamp in seconds. */
  Occurtime: number
  /** Log type. 1: online-status change; 2: FP change; 3: SP change; 4: CO control; 5: ST change; 6: EV event. */
  LogType: number
  /** Thing-model object index. Note: may be null, meaning no valid value was obtained. */
  DataObject: string
  /** Previous thing-model value, JSON string. Note: may be null, meaning no valid value was obtained. */
  OldValue: string
  /** New thing-model value, JSON string. Note: may be null, meaning no valid value was obtained. */
NewValue: string }
/**
 * DescribeModelDataRet response structure.
 */
export interface DescribeModelDataRetResponse {
  /** Device response payload. Note: may be null, meaning no valid value was obtained. */
  Data?: string
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * CreateDevToken response structure.
 */
export interface CreateDevTokenResponse {
  /** List of returned user tokens. Note: may be null, meaning no valid value was obtained. */
  Data?: Array<DevTokenInfo>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * RunIotModel response structure.
 */
export interface RunIotModelResponse {
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * RunIotModel request structure.
 */
export interface RunIotModelRequest {
  /** Product ID. */
  ProductId: string
  /** Thing-model definition, JSON-formatted string. */
  IotModel: string
}
/**
 * Every version the product has published.
 */
export interface OtaPubHistory {
  /** Version name. */
  OtaVersion: string
  /** Publish time, UNIX timestamp in seconds. */
  PublishTime: number
}
/**
 * DeleteProduct request structure.
 */
export interface DeleteProductRequest {
  /** Product ID. */
  ProductId: string
}
/**
 * DescribeBindUsr response structure.
 */
export interface DescribeBindUsrResponse {
  /** List of end users bound to the device. Note: may be null, meaning no valid value was obtained. */
  Data?: Array<BindUsrInfo>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * Device information.
 */
export interface DeviceData {
  /** Device TID. Note: may be null, meaning no valid value was obtained. */
  Tid: string
  /** Activation time; 0 means not yet activated. Note: may be null, meaning no valid value was obtained. */
  ActiveTime: number
  /** Whether the device is disabled. Note: may be null, meaning no valid value was obtained. */
  Disabled: boolean
  /** Firmware version. Note: may be null, meaning no valid value was obtained. */
  OtaVersion: string
  /** Device online state. Note: may be null, meaning no valid value was obtained. */
  Online: number
  /** Last time the device came online (time the MQTT connection succeeded), UNIX timestamp in seconds. Note: may be null, meaning no valid value was obtained. */
  LastOnlineTime: number
  /** Thing-model JSON data. Note: may be null, meaning no valid value was obtained. */
  IotModel: string
  /** Device name. Note: may be null, meaning no valid value was obtained. */
  DeviceName: string
  /** Product ID. Note: may be null, meaning no valid value was obtained. */
  ProductId: string
  /** Initial device certificate, base64-encoded. Note: may be null, meaning no valid value was obtained. */
  Certificate: string
  /** Download URL for the device private key. Note: may be null, meaning no valid value was obtained. */
  WhiteBoxSoUrl: string
  /** Device streaming state. Note: may be null, meaning no valid value was obtained. */
  StreamStatus: boolean
}
/**
 * DescribeStream request structure.
 */
export interface DescribeStreamRequest {
  /** Device TID. */
  Tid?: string
  /** End user ID. */
  AccessId?: string
  /** Live protocol; one of RTSP, RTMP, HLS, HLS-fmp4. */
  Protocol?: string
  /** Audio/video stream address. */
  Address?: string
  /** Device access token; required when accessing a device the user is not bound to. */
  AccessToken?: string
}
/**
 * End user bound to the device.
 */
export interface BindUsrInfo {
  /** User ID assigned to the end user by the IotVideo platform. */
  AccessId: string
  /** User role: owner or guest. */
  Role: string
}
/**
 * DeleteOtaVersion request structure.
 */
export interface DeleteOtaVersionRequest {
  /** Product ID. */
  ProductId: string
  /** Firmware version, formatted x.y.z where x and y are 0-63 and z is 1-524288. */
  OtaVersion: string
  /** Operator. */
  Operator?: string
}
/**
 * DescribeIotDataType response structure.
 */
export interface DescribeIotDataTypeResponse {
  /** Custom data types, JSON-formatted strings. Note: may be null, meaning no valid value was obtained. */
  Data?: Array<string>
  /** Unique request ID returned with every request; provide it when troubleshooting. */
  RequestId?: string
}
/**
 * Product information.
 */
export interface ProductData {
  /** Product ID. Note: may be null, meaning no valid value was obtained. */
  ProductId: string
  /** Product name. Note: may be null, meaning no valid value was obtained. */
  ProductName: string
  /** Product description. Note: may be null, meaning no valid value was obtained. */
  ProductDescription: string
  /** Creation time, UNIX timestamp in seconds. Note: may be null, meaning no valid value was obtained. */
  CreateTime: number
  /** Published thing-model revision; 0 means the thing model has not been published yet. Note: may be null, meaning no valid value was obtained. */
  IotModelRevision: number
  /** Product secret key. Note: may be null, meaning no valid value was obtained. */
  SecretKey: string
  /** Device feature codes. Note: may be null, meaning no valid value was obtained. */
  Features: Array<string>
  /** Product model (for APP products, the APP package name). Note: may be null, meaning no valid value was obtained. */
  ProductModel: string
  /** Main chip manufacturer ID. Note: may be null, meaning no valid value was obtained. */
  ChipManufactureId: string
  /** Main chip model. Note: may be null, meaning no valid value was obtained. */
  ChipId: string
  /** Product category. 0: regular video device; 1: NVR device. Note: may be null, meaning no valid value was obtained. */
  ProductCate: number
  /** Product region: China-Mainland, China-Hong Kong/Macao/Taiwan, America, Europe, India, Other-Overseas. Note: may be null, meaning no valid value was obtained. */
  ProductRegion: string
  /** Access mode: bit0 = 0 means the public mini-program is not connected; bit0 = 1 means it is connected. Note: may be null, meaning no valid value was obtained. */
  AccessMode: number
  /** * linux,android,liteos 注意:此字段可能返回
null,表示取不到有效值。 */ Os: string } /** * CreateBinding请求参数结构体 */ export interface CreateBindingRequest { /** * 终端用户在IoT Video上的唯一标识ID */ AccessId: string /** * 设备TID */ Tid: string /** * 用户角色,owner:主人,guest:访客 */ Role: string /** * 是否踢掉之前的主人,true:踢掉;false:不踢掉。当role为guest时,可以不填 */ ForceBind?: boolean /** * 设备昵称,最多不超过64个字符 */ Nick?: string /** * 绑定过程中的会话token,由设备通过SDK接口确认是否允许绑定的token,用于增加设备被绑定的安全性 */ BindToken?: string } /** * DeleteDevice请求参数结构体 */ export interface DeleteDeviceRequest { /** * 设备TID列表 */ Tids: Array<string> } /** * 布尔值,标识指定设备是否在白名单中 */ export interface TraceStatus { /** * 设备TID */ Tid: string /** * 设备追踪状态 */ IsExist: boolean } /** * CreateAppUsr请求参数结构体 */ export interface CreateAppUsrRequest { /** * 标识用户的唯一ID,防止同一个用户多次注册 */ CunionId: string /** * 用于小程序关联手机号 */ Mobile?: string } /** * ModifyDeviceProperty请求参数结构体 */ export interface ModifyDevicePropertyRequest { /** * 设备TID */ Tid: string /** * 如果设备处于休眠状态,是否唤醒设备 */ Wakeup: boolean /** * 物模型的分支路径 */ Branch: string /** * 写入的物模型数据,如果是json需要转义成字符串 */ Value: string /** * Value字段是否为数值(float、int) */ IsNum?: boolean } /** * RunDeviceStream请求参数结构体 */ export interface RunDeviceStreamRequest { /** * 设备TID 列表 */ Tids: Array<string> } /** * RunDevice返回参数结构体 */ export interface RunDeviceResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DeleteIotDataType返回参数结构体 */ export interface DeleteIotDataTypeResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeTraceStatus请求参数结构体 */ export interface DescribeTraceStatusRequest { /** * 设备TID列表 */ Tids: Array<string> } /** * CreateUsrToken返回参数结构体 */ export interface CreateUsrTokenResponse { /** * 终端用户在IoT Video上的唯一标识ID */ AccessId: string /** * IoT Video平台的AccessToken */ AccessToken: string /** * Token的过期时间,单位秒(UTC时间) */ ExpireTime: number /** * 终端ID */ TerminalId: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * 用于终端用户临时访问设备的token授权信息 */ export interface 
DevTokenInfo { /** * 客户的终端用户在IotVideo上的唯一标识id */ AccessId: string /** * 设备TID */ Tid: string /** * IotVideo平台的accessToken */ AccessToken: string /** * Token的过期时间,单位秒(UTC时间) */ ExpireTime: number } /** * CreateTraceIds请求参数结构体 */ export interface CreateTraceIdsRequest { /** * 设备TID列表 */ Tids: Array<string> } /** * DeleteMessageQueue返回参数结构体 */ export interface DeleteMessageQueueResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeOtaVersions请求参数结构体 */ export interface DescribeOtaVersionsRequest { /** * 分页偏移量 */ Offset: number /** * 每页数量,0<取值范围<=100 */ Limit: number /** * 产品ID,为空时查询客户所有产品的版本信息 */ ProductId?: string /** * 版本号,支持模糊匹配 */ OtaVersion?: string /** * 版本类型 1未发布 2测试发布 3正式发布 4禁用 */ PubStatus?: number } /** * DescribeTraceIds返回参数结构体 */ export interface DescribeTraceIdsResponse { /** * 设备TID列表,列表元素之间以“,”分隔 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * ClearDeviceActiveCode返回参数结构体 */ export interface ClearDeviceActiveCodeResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * CreateProduct返回参数结构体 */ export interface CreateProductResponse { /** * 产品详细信息 */ Data?: ProductBase /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeRegistrationStatus请求参数结构体 */ export interface DescribeRegistrationStatusRequest { /** * 终端用户的唯一ID列表,0<元素数量<=100 */ CunionIds: Array<string> } /** * CreateStorage请求参数结构体 */ export interface CreateStorageRequest { /** * 云存套餐ID */ PkgId: string /** * 设备TID */ Tid: string /** * 用户唯一标识,由厂商保证内部唯一性 */ UserTag: string } /** * UpgradeDevice返回参数结构体 */ export interface UpgradeDeviceResponse { /** * 设备端返回的数据 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DisableOtaVersion返回参数结构体 */ export interface DisableOtaVersionResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string 
} /** * DescribeBindDev返回参数结构体 */ export interface DescribeBindDevResponse { /** * 绑定的设备列表信息 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: Array<BindDevInfo> /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * CreateProduct请求参数结构体 */ export interface CreateProductRequest { /** * 产器型号(APP产品,为APP包名) */ ProductModel: string /** * 产品名称 仅支持中文、英文、数字、下划线,不超过32个字符 */ ProductName: string /** * 产品描述信息 不支持单引号、双引号、退格符、回车符、换行符、制表符、反斜杠、下划线、“%”、“#”、“$”,不超过128字符 */ ProductDescription: string /** * 设备功能码(ypsxth:音频双向通话 ,spdxth:视频单向通话) */ Features?: Array<string> /** * 主芯片产商ID */ ChipManufactureId?: string /** * 主芯片ID */ ChipId?: string /** * 地域: China-Mainland(中国大陆) China-Hong Kong, Macao and Taiwan(港澳台地区) America(美国) Europe(欧洲) India(印度) Other-Overseas(其他境外地区) */ ProductRegion?: string /** * 设备类型, 0-普通视频设备,1-NVR设备 */ ProductCate?: number /** * 接入模型,bit0是0:公版小程序未接入,bit0是1:公版小程序已接入 */ AccessMode?: number /** * Linux,Android,Liteos等系统 */ Os?: string /** * 芯片架构,只是针对操作系统为android的 */ ChipArch?: string } /** * 系统类型 */ export interface SystemType { /** * 安卓系统 注意:此字段可能返回 null,表示取不到有效值。 */ Android: Array<OsData> /** * linux系统 注意:此字段可能返回 null,表示取不到有效值。 */ Linux: Array<OsData> /** * LiteOs系统 注意:此字段可能返回 null,表示取不到有效值。 */ LiteOs: Array<OsData> } /** * RunTestOtaVersion返回参数结构体 */ export interface RunTestOtaVersionResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * RunTestOtaVersion请求参数结构体 */ export interface RunTestOtaVersionRequest { /** * 产品ID */ ProductId: string /** * 固件版本号,格式为x.y.z, x,y 范围0-63,z范围1~524288 */ OtaVersion: string /** * 指定可升级的设备TID */ Tids: Array<string> /** * 操作人 */ Operator?: string /** * 备注信息 */ Remark?: string } /** * DescribeDevice返回参数结构体 */ export interface DescribeDeviceResponse { /** * 设备信息 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: DeviceData /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * SetMessageQueue返回参数结构体 */ export interface SetMessageQueueResponse { /** * 唯一请求 
ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * 版本发布的描述信息,需要国际化,可以为空 */ export interface Contents { /** * 英文,长度不超过300个字符 注意:此字段可能返回 null,表示取不到有效值。 */ En?: string /** * 中文简体,长度不超过300个字符 注意:此字段可能返回 null,表示取不到有效值。 */ Cn?: string /** * 中文繁体(Traditional Chinese),长度不超过300个字符 注意:此字段可能返回 null,表示取不到有效值。 */ Tc?: string /** * 默认语言,最多不超过300个字符 注意:此字段可能返回 null,表示取不到有效值。 */ Default?: string } /** * DeleteOtaVersion返回参数结构体 */ export interface DeleteOtaVersionResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * UploadOtaVersion请求参数结构体 */ export interface UploadOtaVersionRequest { /** * 产品ID */ ProductId: string /** * 固件版本号,格式为x.y.z, x,y 范围0-63,z范围1~524288 */ OtaVersion: string /** * 固件版本URL */ VersionUrl: string /** * 文件大小,单位:byte */ FileSize?: number /** * 文件md5校验码(32字符) */ Md5?: string /** * 操作人 */ Operator?: string /** * 备注信息 */ Remark?: string /** * 版本发布的描述信息,需要国际化,可以为空 */ Contents?: Contents } /** * DescribeIotModels请求参数结构体 */ export interface DescribeIotModelsRequest { /** * 产品ID */ ProductId: string } /** * DescribeModelDataRet请求参数结构体 */ export interface DescribeModelDataRetRequest { /** * 任务ID */ TaskId: string } /** * CreateTraceIds返回参数结构体 */ export interface CreateTraceIdsResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeDevices返回参数结构体 */ export interface DescribeDevicesResponse { /** * 设备信息 列表 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: Array<DevicesData> /** * 设备总数 */ TotalCount?: number /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * ModifyVerContent请求参数结构体 */ export interface ModifyVerContentRequest { /** * 产品id */ ProductId: string /** * 需要修改的版本号 */ OtaVersion: string /** * 操作人,字符长度<=64 */ Operator?: string /** * 备注信息 */ Remark?: string /** * 版本发布的描述信息,需要国际化,可以为空 */ Contents?: Contents } /** * DeleteBinding请求参数结构体 */ export interface DeleteBindingRequest { /** * 终端用户在IoT Video上的唯一标识ID */ AccessId: string /** * 设备TID */ Tid: string /** * 
用户角色,owner:主人,guest:访客 */ Role: string } /** * DescribeOtaVersions返回参数结构体 */ export interface DescribeOtaVersionsResponse { /** * 版本数量 */ TotalCount?: number /** * 版本详细信息 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: Array<VersionData> /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DeleteProduct返回参数结构体 */ export interface DeleteProductResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * RunOtaVersion请求参数结构体 */ export interface RunOtaVersionRequest { /** * 产品ID */ ProductId: string /** * 固件版本号,格式为x.y.z, x,y 范围0-63,z范围1~524288 */ OtaVersion: string /** * 灰度值,取值范围0-100,为0时相当于暂停发布 */ GrayValue: number /** * 指定的旧版本 */ OldVersions?: Array<string> /** * 操作人 */ Operator?: string /** * 备注信息 */ Remark?: string /** * 版本发布的描述信息,需要国际化,可以为空 */ Contents?: Contents } /** * CreateDevices返回参数结构体 */ export interface CreateDevicesResponse { /** * 新创建设备的认证信息 */ Data?: Array<DeviceCertificate> /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * 设备物模型数据 */ export interface DeviceModelData { /** * 设备TID */ Tid: string /** * 物模型分支路径 注意:此字段可能返回 null,表示取不到有效值。 */ Branch: string /** * 物模型数据 注意:此字段可能返回 null,表示取不到有效值。 */ IotModel: string } /** * SetMessageQueue请求参数结构体 */ export interface SetMessageQueueRequest { /** * 产品ID */ ProductId: string /** * 消息队列类型 1-CMQ; 2-Ckafka */ MsgQueueType: number /** * 消息类型,整型值(0-31)之间以“,”分隔 0.设备在线状态变更 1.常亮属性(ProConst)变更 2.可写属性(ProWritable)变更 3.只读属性(ProReadonly)变更 4.设备控制(Action) 5.设备事件(Event) 6.系统事件(System) */ MsgType: string /** * 消息队列主题,不超过32字符 */ Topic: string /** * kafka消息队列的实例名,不超过64字符 */ Instance: string /** * 消息地域,不超过32字符 */ MsgRegion: string } /** * CreateStorageService返回参数结构体 */ export interface CreateStorageServiceResponse { /** * 标志是否为续订 */ IsRenew: boolean /** * 云存服务ID */ ServiceId: string /** * 云存服务所在的区域 */ StorageRegion: string /** * 设备TID */ Tid: string /** * 视频流通道号。(对于存在多路视频流的设备,如NVR设备,与设备实际视频流通道号对应) */ ChnNum: number /** * 终端用户在IoT Video平台的注册ID */ AccessId: 
string /** * 服务开始时间 */ StartTime: number /** * 服务失效时间 */ EndTime: number /** * 服务状态 1:正常使用中 2:待续费。设备云存服务已到期,但是历史云存数据未过期。续费后仍可查看这些历史数据。 3:已过期。查询不到设备保存在云端的数据。 4:等待服务生效。 */ Status: number /** * 新增的云存定单列表 */ Data: Array<StorageOrder> /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeIotDataType请求参数结构体 */ export interface DescribeIotDataTypeRequest { /** * 自定义数据类型的标识符,为空则返回全量自定义类型的列表 */ TypeId?: string } /** * SendOnlineMsg请求参数结构体 */ export interface SendOnlineMsgRequest { /** * 设备TID */ Tid: string /** * 如果设备处于休眠状态,是否唤醒设备 */ Wakeup: boolean /** * 等待回应类型 0:不等待设备回应直接响应请求; 1:要求设备确认消息已接收,或等待超时后返回; 2:要求设备进行响应处理,收到设备的响应数据后,将设备响应数据回应给请求方; */ WaitResp: number /** * 消息主题 */ MsgTopic: string /** * 消息内容,最大长度不超过8k字节 */ MsgContent: string } /** * 接口DescribeStream输出参数 */ export interface Data { /** * 直播协议 注意:此字段可能返回 null,表示取不到有效值。 */ Protocol: string /** * 流媒体播放地址 注意:此字段可能返回 null,表示取不到有效值。 */ URI: string /** * 流媒体地址过期时间 注意:此字段可能返回 null,表示取不到有效值。 */ ExpireTime: number /** * 视频编码 注意:此字段可能返回 null,表示取不到有效值。 */ VideoCodec: string /** * 音频编码 注意:此字段可能返回 null,表示取不到有效值。 */ AudioCodec: string } /** * DescribeDevice请求参数结构体 */ export interface DescribeDeviceRequest { /** * 设备TID */ Tid: string } /** * DescribeRunLog返回参数结构体 */ export interface DescribeRunLogResponse { /** * 设备运行日志文本信息 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DeliverStorageService返回参数结构体 */ export interface DeliverStorageServiceResponse { /** * 被转出的云存服务ID */ SrcServiceId: string /** * 被转入的云存服务ID */ ServiceId: string /** * 云存服务所在的区域 */ StorageRegion: string /** * 设备TID */ Tid: string /** * 视频流通道号。(对于存在多路视频流的设备,如NVR设备,与设备实际视频流通道号对应) */ ChnNum: number /** * 终端用户在IoT Video平台的注册ID */ AccessId: string /** * 服务开始时间 */ StartTime: number /** * 服务失效时间 */ EndTime: number /** * 服务状态 1:正常使用中 2:待续费。设备云存服务已到期,但是历史云存数据未过期。续费后仍可查看这些历史数据。 3:已过期。查询不到设备保存在云端的数据。 4:等待服务生效。 */ Status: number /** * 新增的云存定单列表 */ Data: Array<StorageOrder> 
/**
 * Unique request ID, returned with every request. Provide this RequestId
 * when reporting an issue with the request.
 */
RequestId?: string
}
/**
 * DescribeIotModel request structure.
 */
export interface DescribeIotModelRequest {
  /**
   * Product ID
   */
  ProductId: string
  /**
   * IoT model revision number; -1 means the most recently edited
   * (unpublished) revision.
   */
  Revision: number
}
/**
 * DescribeTraceIds request structure (carries no parameters).
 */
export type DescribeTraceIdsRequest = null
/**
 * CreateAppUsr response structure.
 */
export interface CreateAppUsrResponse {
  /**
   * Unique user ID in the vendor's cloud.
   */
  CunionId?: string
  /**
   * Unique ID of the customer's end user on IoT Video.
   */
  AccessId?: string
  /**
   * Whether the user was newly created by this call.
   */
  NewRegist?: boolean
  /**
   * Unique request ID, returned with every request. Provide this RequestId
   * when reporting an issue with the request.
   */
  RequestId?: string
}
/**
 * DeleteTraceIds response structure.
 */
export interface DeleteTraceIdsResponse {
  /**
   * Unique request ID, returned with every request. Provide this RequestId
   * when reporting an issue with the request.
   */
  RequestId?: string
}
/**
 * DeleteMessageQueue request structure.
 */
export interface DeleteMessageQueueRequest {
  /**
   * Product ID
   */
  ProductId: string
}
/**
 * CreateUsrToken request structure.
 */
export interface CreateUsrTokenRequest {
  /**
   * Unique ID of the end user on IoT Video.
   */
  AccessId: string
  /**
   * Unique terminal ID, used to distinguish multiple terminals belonging
   * to the same user.
   */
  UniqueId: string
  /**
   * Token TTL (time to live) in minutes.
   */
  TtlMinutes: number
  /**
   * The old AccessToken. Required when renewing a token.
   */
  OldAccessToken?: string
}
/**
 * RunDevice request structure.
 */
export interface RunDeviceRequest {
  /**
   * List of device TIDs, at most 100 entries.
   */
  Tids: Array<string>
}
/**
 * RunOtaVersion response structure.
 */
export interface RunOtaVersionResponse {
  /**
   * Unique request ID, returned with every request. Provide this RequestId
   * when reporting an issue with the request.
   */
  RequestId?: string
}
/**
 * ModifyDevice request structure.
 */
export interface ModifyDeviceRequest {
  /**
   * Device TID
   */
  Tid: string
  /**
   * User ID
   */
  AccessId: string
  /**
   * Device nickname, at most 64 characters.
   */
  Nick: string
}
/**
 * DescribeDeviceModel response structure.
 */
export interface DescribeDeviceModelResponse {
  /**
   * Device IoT model information.
   * Note: this field may return null, meaning no valid value was obtained.
   */
  Data?: DeviceModelData
  /**
   * Unique request ID, returned with every request. Provide this RequestId
   * when reporting an issue with the request.
   */
  RequestId?: string
}
/**
 * DescribeStorageService request structure.
 */
export interface DescribeStorageServiceRequest {
  /**
   * Cloud storage service ID.
   */
  ServiceId: string
  /**
   * Whether to also return finished orders (expired / unsubscribed /
   * transferred).
   */
  GetFinishedOrder?: boolean
}
/**
 * Product summary information.
 */
export interface ProductBase {
/** * 产品ID */ ProductId: string /** * 产器型号(APP产品,为APP包名) */ ProductModel: string /** * 产品名称 */ ProductName: string /** * 产品描述信息 */ ProductDescription: string /** * 创建时间,UNIX 时间戳,单位秒 */ CreateTime: number /** * 物模型发布版本号,0代表物模型尚未发布 */ IotModelRevision: number /** * 产品密钥 */ SecretKey: string /** * 设备功能码 ypsxth : 音频双向通话; spdxth : 视频单向通话(监控); NVR0824 : NVR设备,大于8路,小于等于24路; WifiKeepalive : Wifi保活(低功耗产品); Alexa : Alexa接入; Google : Google接入; 注意:此字段可能返回 null,表示取不到有效值。 */ FuncCode: Array<string> /** * 产品类别,0 : 普通视频设备;1 : NVR设备 注意:此字段可能返回 null,表示取不到有效值。 */ ProductCate: number /** * 产品地域 China-Mainland(中国大陆) China-Hong Kong, Macao and Taiwan(港澳台地区) America(美国) Europe(欧洲) India(印度) Other-Overseas(其他境外地区) 注意:此字段可能返回 null,表示取不到有效值。 */ ProductRegion: string } /** * CreateGencode请求参数结构体 */ export interface CreateGencodeRequest { /** * 产品ID */ ProductId: string /** * 物模型发布版本号,-1代表未发布的,保存的是草稿箱的版本。1代表已发布的物模型。 */ Revision: number } /** * DescribePubVersions返回参数结构体 */ export interface DescribePubVersionsResponse { /** * 历史发布的版本列表 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: Array<OtaPubHistory> /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * ClearDeviceActiveCode请求参数结构体 */ export interface ClearDeviceActiveCodeRequest { /** * 设备TID列表,0<元素数量<=100 */ Tids: Array<string> } /** * ModifyDeviceAction请求参数结构体 */ export interface ModifyDeviceActionRequest { /** * 设备TID */ Tid: string /** * 如果设备处于休眠状态,是否唤醒设备 */ Wakeup: boolean /** * 物模型的分支路径 */ Branch: string /** * 写入的物模型数据,如果是json需要转义成字符串 */ Value: string /** * Value字段的类型是否为数值(float、int) */ IsNum?: boolean } /** * CreateIotModel返回参数结构体 */ export interface CreateIotModelResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * 终端用户绑定的设备 */ export interface BindDevInfo { /** * 设备TID */ Tid: string /** * 设备名称 */ DeviceName: string /** * 设备型号 注意:此字段可能返回 null,表示取不到有效值。 */ DeviceModel: string /** * 用户角色,owner:主人,guest:访客 */ Role: string } /** * DescribeBindDev请求参数结构体 */ export interface 
DescribeBindDevRequest { /** * 终端用户在IoT Video上的唯一标识ID */ AccessId: string } /** * 云存订单信息 */ export interface StorageOrder { /** * 定单唯一性ID */ OrderId: string /** * 云存套餐ID */ PkgId: string /** * 定单服务状态 1;订单正在使用。 2:订单未开始。 3:订单已经使用过,现在暂时未开始使用(该订单从其他服务转移而来)。 4:订单已过期。 5:订单已被退订。 6:定单已被转移到其他云存服务。 */ Status: number /** * 定单服务生效时间 */ StartTime: number /** * 定单服务失效时间 */ EndTime: number } /** * 产品转发消息队列配置 */ export interface MsgQueueData { /** * 消息队列类型 1:CMQ 2:kafka */ MsgQueueType: number /** * 消息类型列表,整型值(0-31)之间以“,”分隔 */ MsgType: string /** * 主题名称 */ Topic: string /** * 实例名称 */ Instance: string /** * 消息地域 */ MsgRegion: string } /** * 终端用户注册状态 */ export interface RegisteredStatus { /** * 终端用户的唯一ID */ CunionId: string /** * 注册状态 */ IsRegisted: boolean } /** * CreateAnonymousAccessToken请求参数结构体 */ export interface CreateAnonymousAccessTokenRequest { /** * Token的TTL(time to alive)分钟数,最大值1440(即24小时) */ TtlMinutes: number /** * 设备ID。创建Token时, 此参数为必须项 */ Tid?: string /** * 旧的AccessToken。续期Token时,此参数为必须 */ OldAccessToken?: string } /** * DeliverStorageService请求参数结构体 */ export interface DeliverStorageServiceRequest { /** * 待转移的源云存服务ID */ SrcServiceId: string /** * 设备TID */ Tid: string /** * 视频流通道号。(对于存在多路视频流的设备,如NVR设备,与设备实际视频流通道号对应) */ ChnNum?: number /** * 设备主人用户在IoT Video平台的注册ID。该参数用于验证Paas/Saas平台的设备/用户关系链是否一致 */ AccessId?: string } /** * RunDeviceStream返回参数结构体 */ export interface RunDeviceStreamResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * ModifyDevice返回参数结构体 */ export interface ModifyDeviceResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeDeviceModel请求参数结构体 */ export interface DescribeDeviceModelRequest { /** * 设备TID */ Tid: string /** * 物模型的分支路径 */ Branch?: string } /** * DescribeLogs返回参数结构体 */ export interface DescribeLogsResponse { /** * 设备日志信息 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: Array<LogData> /** * Data数组所包含的信息条数 */ TotalCount?: number /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 
*/ RequestId?: string } /** * ModifyDeviceAction返回参数结构体 */ export interface ModifyDeviceActionResponse { /** * 设备端的响应结果 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: string /** * 任务ID 若设备端未能及时响应时,会返回此字段,用户可以通过DescribeModelDataRet获取设备的最终响应结果。 注意:此字段可能返回 null,表示取不到有效值。 */ TaskId?: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * CreateBinding返回参数结构体 */ export interface CreateBindingResponse { /** * 访问设备的AccessToken */ AccessToken: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeAccountBalance请求参数结构体 */ export interface DescribeAccountBalanceRequest { /** * 账户类型 1:设备接入 2:云存 */ AccountType: number } /** * CreateDevices请求参数结构体 */ export interface CreateDevicesRequest { /** * 产品ID */ ProductId: string /** * 创建设备的数量,数量范围1-100 */ Number: number /** * 设备名称前缀,支持英文、数字,不超过10字符 */ NamePrefix?: string /** * 操作人 */ Operator?: string } /** * DisableDevice请求参数结构体 */ export interface DisableDeviceRequest { /** * 设备TID ≤100 */ Tids: Array<string> } /** * DescribeStream返回参数结构体 */ export interface DescribeStreamResponse { /** * 返回参数结构 注意:此字段可能返回 null,表示取不到有效值。 */ Data: Data /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeAccountBalance返回参数结构体 */ export interface DescribeAccountBalanceResponse { /** * 账户类型 1=设备接入;2=云存。 注意:此字段可能返回 null,表示取不到有效值。 */ AccountType?: number /** * 余额, 单位 : 分(人民币)。 注意:此字段可能返回 null,表示取不到有效值。 */ Balance?: number /** * 账户状态,1=正常;8=冻结;9=销户。 注意:此字段可能返回 null,表示取不到有效值。 */ State?: number /** * 最后修改时间,UTC值。 注意:此字段可能返回 null,表示取不到有效值。 */ LastUpdateTime?: number /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DisableOtaVersion请求参数结构体 */ export interface DisableOtaVersionRequest { /** * 产品ID */ ProductId: string /** * 固件版本号,格式为x.y.z, x,y 范围0-63,z范围1~524288 */ OtaVersion: string /** * 操作人 */ Operator?: string } /** * DescribeProduct返回参数结构体 */ export interface DescribeProductResponse { /** * 产品详情 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: 
ProductData /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * CreateAnonymousAccessToken返回参数结构体 */ export interface CreateAnonymousAccessTokenResponse { /** * 终端用户在IoT Video上的唯一标识ID */ AccessId?: string /** * IoT Video平台的AccessToken */ AccessToken?: string /** * Token的过期时间,单位秒(UTC时间) */ ExpireTime?: number /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeBindUsr请求参数结构体 */ export interface DescribeBindUsrRequest { /** * 设备TID */ Tid: string /** * 设备主人的AccessId */ AccessId?: string } /** * SendOnlineMsg返回参数结构体 */ export interface SendOnlineMsgResponse { /** * 若返回此项则表明需要用户用此taskID进行查询请求是否成功(只有waitresp不等于0的情况下才可能会返回该taskID项) */ TaskId?: string /** * 设备响应信息 */ Data?: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DeleteAppUsr请求参数结构体 */ export interface DeleteAppUsrRequest { /** * 客户的终端用户在IoT Video上的唯一标识ID */ AccessId: string } /** * DescribeIotModels返回参数结构体 */ export interface DescribeIotModelsResponse { /** * 历史版本列表 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: Array<IotModelData> /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeMessageQueue返回参数结构体 */ export interface DescribeMessageQueueResponse { /** * 消息队列配置 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: MsgQueueData /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DescribeOsList请求参数结构体 */ export type DescribeOsListRequest = null /** * DescribeDevices请求参数结构体 */ export interface DescribeDevicesRequest { /** * 产品ID */ ProductId: string /** * 是否返回全量数据 当该值为false时,返回值中的设备物模型、固件版本、在线状态、最后在线时间字段等字段,都将返回数据类型的零值。 */ ReturnModel: boolean /** * 分页数量,0<取值范围<=100 */ Limit: number /** * 分页偏移,取值>0 */ Offset: number /** * 指定固件版本号,为空查询此产品下所有设备 */ OtaVersion?: string /** * 设备名称,支持左前缀模糊匹配 */ DeviceName?: string } /** * CreateIotDataType返回参数结构体 */ export interface CreateIotDataTypeResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * 
ModifyDeviceProperty返回参数结构体 */ export interface ModifyDevicePropertyResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DeleteBinding返回参数结构体 */ export interface DeleteBindingResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * RefundStorageService请求参数结构体 */ export interface RefundStorageServiceRequest { /** * 云存服务ID */ ServiceId: string /** * 云存子订单ID。如果指定子订单ID,则仅退订该子订单,如果未指定子定单ID,则退订所有子订单 */ OrderId?: string } /** * CreateStorage返回参数结构体 */ export interface CreateStorageResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DisableDeviceStream返回参数结构体 */ export interface DisableDeviceStreamResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * DeleteDevice返回参数结构体 */ export interface DeleteDeviceResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * CreateUploadPath返回参数结构体 */ export interface CreateUploadPathResponse { /** * 固件上传地址URL,用户可将本地的固件文件通过该URL以PUT的请求方式上传。 注意:此字段可能返回 null,表示取不到有效值。 */ Data?: string /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string } /** * 充值记录列表 */ export interface RechargeRecord { /** * 流水记录号。 注意:此字段可能返回 null,表示取不到有效值。 */ WaterId: number /** * 充值前的余额,单位0.01元。 注意:此字段可能返回 null,表示取不到有效值。 */ BalanceBeforeRecharge: number /** * 充值金额,单位0.01元。 注意:此字段可能返回 null,表示取不到有效值。 */ Money: number /** * 充值时间, UTC值。 注意:此字段可能返回 null,表示取不到有效值。 */ OperateTime: number } /** * 固件版本详细信息 */ export interface VersionData { /** * 产品ID 注意:此字段可能返回 null,表示取不到有效值。 */ ProductId: string /** * 固件版本号 注意:此字段可能返回 null,表示取不到有效值。 */ OtaVersion: string /** * 版本类型 1未发布 2测试发布 3正式发布 4禁用 注意:此字段可能返回 null,表示取不到有效值。 */ PubStatus: number /** * 固件版本存储路径URL 注意:此字段可能返回 null,表示取不到有效值。 */ VersionUrl: string /** * 文件大小,byte 注意:此字段可能返回 null,表示取不到有效值。 */ FileSize: number /** * 文件校验码 注意:此字段可能返回 null,表示取不到有效值。 */ Md5: string /** * 指定的允许升级的旧版本,PubStatus=3时有效 注意:此字段可能返回 null,表示取不到有效值。 */ OldVersions: string 
/** * 指定的允许升级的旧设备id,PubStatus=2时有效 注意:此字段可能返回 null,表示取不到有效值。 */ Tids: string /** * 灰度值(0-100),PubStatus=3时有效,表示n%的升级总量 注意:此字段可能返回 null,表示取不到有效值。 */ GrayValue: number /** * 最近一次发布时间,UNIX时间戳,单位秒 注意:此字段可能返回 null,表示取不到有效值。 */ PublishTime: number /** * 此版本激活的设备总数 注意:此字段可能返回 null,表示取不到有效值。 */ ActiveCount: number /** * 此版本在线的设备总数 注意:此字段可能返回 null,表示取不到有效值。 */ OnlineCount: number /** * 上传固件文件的时间,UNIX时间戳,单位秒 注意:此字段可能返回 null,表示取不到有效值。 */ UpdateTime: number /** * 发布记录的最后变更时间,UNIX时间戳,单位秒 注意:此字段可能返回 null,表示取不到有效值。 */ UploadTime: number /** * 该固件版本发布的变更次数 注意:此字段可能返回 null,表示取不到有效值。 */ ModifyTimes: number /** * 备注信息 注意:此字段可能返回 null,表示取不到有效值。 */ Remark: string /** * 版本发布的描述信息,需要国际化,可以为空 注意:此字段可能返回 null,表示取不到有效值。 */ Contents: Contents /** * 月活设备数,当月第一天开始有上线的设备数量。 注意:此字段可能返回 null,表示取不到有效值。 */ AliveInMonthCnt: number } /** * 设备列表元素所包含的设备基本信息 */ export interface DevicesData { /** * 设备TID */ Tid: string /** * 设备名称 */ DeviceName: string /** * 激活时间 0代表未激活 */ ActiveTime: number /** * 设备是否被禁用 */ Disabled: boolean /** * 设备推流状态 */ StreamStatus: boolean /** * 固件版本 */ OtaVersion: string /** * 设备在线状态 */ Online: number /** * 设备最后上线时间(mqtt连接成功时间),UNIX时间戳,单位秒 */ LastOnlineTime: number /** * 物模型json数据 */ IotModel: string /** * 设备固件最新更新时间,UNIX时间戳,单位秒 */ LastUpdateTime: number }
// NOTE(review): "the_stack" — stray concatenation artifact (not valid TypeScript).
// The generated SDK declarations above and the module below are unrelated files.
// Device command helpers: thin wrappers that send commands to the FarmBot
// device, with toast feedback for success/failure and demo-mode notices.
import axios from "axios";
import { success, warning, info, error } from "../toast/toast";
import { getDevice } from "../device";
import { Everything } from "../interfaces";
import {
  MoveRelProps, MinOsFeatureLookup, SourceFwConfig, Axis, MoveProps,
} from "./interfaces";
import { Thunk } from "../redux/interfaces";
import {
  McuParams, TaggedFirmwareConfig, ParameterApplication,
  ALLOWED_PIN_MODES,
  FirmwareHardware, Pair, rpcRequest, SafeZ, MoveBodyItem, SpeedOverwrite,
  Xyz, AxisOverwrite,
} from "farmbot";
import { oneOf, versionOK, trim } from "../util";
import { Actions, Content } from "../constants";
import { mcuParamValidator } from "./update_interceptor";
import { edit, save as apiSave } from "../api/crud";
import { CONFIG_DEFAULTS } from "farmbot/dist/config";
import { Log } from "farmbot/dist/resources/api_resources";
import { FbosConfig } from "farmbot/dist/resources/configs/fbos";
import { FirmwareConfig } from "farmbot/dist/resources/configs/firmware";
import { getFirmwareConfig, getFbosConfig } from "../resources/getters";
import { isObject, isString, get, noop } from "lodash";
import { t } from "../i18next_wrapper";
import { ExternalUrl } from "../external_urls";
import { goToFbosSettings } from "../settings/maybe_highlight";
import { ToastOptions } from "../toast/interfaces";
import { forceOnline } from "./must_be_online";
import { store } from "../redux/store";

// Numeric values written when toggling a boolean firmware (MCU) setting.
const ON = 1, OFF = 0;
export type ConfigKey = keyof McuParams;
// Already filtering messages in FarmBot OS and the API- this is just for
// an additional layer of safety.
const BAD_WORDS = ["WPA", "PSK", "PASSWORD", "NERVES"];
const MESSAGE: keyof Log = "message";

/**
 * Type guard: `x` is a displayable Log.
 * Rejects non-objects, objects without a string `message`, and — as a
 * defense-in-depth measure — messages containing credential-related words.
 */
export function isLog(x: unknown): x is Log {
  const msg = get(x, MESSAGE);
  const yup = isObject(x) && isString(msg);
  if (yup) {
    if (oneOf(BAD_WORDS, msg.toUpperCase())) { // SECURITY CRITICAL CODE.
      console.error("Refusing to display log: " + JSON.stringify(x));
      return false;
    }
    return true;
  } else {
    return false;
  }
}

/** Toast message upon request error. */
export const commandErr =
  (noun = "Command") => () => error(t(`${noun} failed`));

/** Toast message upon request success. */
export const commandOK = (noun = "Command", message?: string) => () => {
  // In demo mode, show the demo notice instead of a success toast.
  if (forceOnline()) { return maybeNoop(); }
  const msg = message || (t(noun) + t(" request sent to device."));
  success(msg, { title: t("Request sent") });
};

// In demo (forced-online) accounts, show an "unavailable" notice.
// Evaluates to false (and shows nothing) for real accounts. Note that the
// callers below still issue the device command after calling this.
const maybeNoop = () =>
  forceOnline() &&
  info(t("Sorry, that feature is unavailable in demo accounts."),
    { title: t("Unavailable") });

// Warn when the bot is E-stopped; does not block the command itself.
const maybeAlertLocked = () =>
  store.getState().bot.hardware.informational_settings.locked &&
  error(t("Command not available while locked."),
    { title: t("Emergency stop active") });

/** Update FBOS. */
export function checkControllerUpdates() {
  const noun = t("Check for Updates");
  // Optimistic toast: shown before the request resolves.
  commandOK(noun)();
  getDevice()
    .checkUpdates()
    .catch(commandErr(noun));
}

/** Shutdown FBOS. */
export function powerOff() {
  const noun = t("Power Off Bot");
  maybeNoop();
  getDevice()
    .powerOff()
    .then(commandOK(noun), commandErr(noun));
}

/** Soft reset FBOS. */
export function softReset() {
  // Destructive action: require explicit user confirmation first.
  if (!confirm(t(Content.SOFT_RESET_ALERT))) {
    return;
  }
  maybeNoop();
  getDevice().resetOS();
}

/** Reboot FBOS. */
export function reboot() {
  const noun = t("Reboot Bot");
  maybeNoop();
  getDevice()
    .reboot()
    .then(commandOK(noun), commandErr(noun));
}

/** Restart Farmduino firmware serial connection.
*/
export function restartFirmware() {
  const noun = t("Restart Firmware");
  maybeNoop();
  const device = getDevice();
  // Reboot the firmware, then lock and immediately unlock the device —
  // presumably to leave it in a known E-stop-cleared state after the
  // serial restart; confirm against farmbot-js docs.
  return device
    .rebootFirmware()
    .then(device.emergencyLock)
    .then(device.emergencyUnlock)
    .then(commandOK(noun), commandErr(noun));
}

/** Flash the named firmware variant onto the device's microcontroller. */
export function flashFirmware(firmwareName: FirmwareHardware) {
  const noun = t("Flash Firmware");
  maybeNoop();
  getDevice()
    .flashFirmware(firmwareName)
    .then(commandOK(noun), commandErr(noun));
}

/** E-stop: send an emergency lock command to the device. */
export function emergencyLock() {
  const noun = t("Emergency stop");
  maybeNoop();
  getDevice()
    .emergencyLock()
    .then(commandOK(noun), commandErr(noun));
}

/** Release E-stop. Prompts for confirmation unless `force` is true. */
export function emergencyUnlock(force = false) {
  const noun = t("Emergency unlock");
  if (force || confirm(t("Are you sure you want to unlock the device?"))) {
    maybeNoop();
    getDevice()
      .emergencyUnlock()
      .then(commandOK(noun), commandErr(noun));
  }
}

/**
 * Thunk: sync the device, but only when the reported FBOS version passes
 * `versionOK`. Otherwise either warn about the version (`badVersion` is
 * defined elsewhere in this file) or report that the bot is disconnected.
 */
export function sync(): Thunk {
  const noun = t("Sync");
  return function (_dispatch, getState) {
    const currentFBOSversion =
      getState().bot.hardware.informational_settings.controller_version;
    const IS_OK = versionOK(currentFBOSversion);
    if (IS_OK) {
      maybeNoop();
      getDevice()
        .sync()
        .catch(commandErr(noun));
    } else {
      if (currentFBOSversion) {
        badVersion();
      } else {
        // No version at all means we never heard from the bot.
        info(t("FarmBot is not connected."), {
          title: t("Disconnected"),
          color: "red",
        });
      }
    }
  };
}

/**
 * Execute a saved sequence on the device.
 * Throws when `sequenceId` is missing (unsaved sequence). On rejection,
 * surfaces the device's error message when one is available.
 */
export function execSequence(
  sequenceId: number | undefined,
  bodyVariables?: ParameterApplication[],
) {
  const noun = t("Sequence execution");
  if (sequenceId) {
    commandOK(noun)();
    return getDevice()
      .execSequence(sequenceId, bodyVariables)
      .catch((x: Error) => {
        if (x && (typeof x == "object") && (typeof x.message == "string")) {
          error(x.message);
        } else {
          commandErr(noun)();
        }
      });
  } else {
    throw new Error(t("Can't execute unsaved sequences"));
  }
}

/** Ask the device to take a photo. */
export function takePhoto() {
  maybeNoop();
  getDevice().takePhoto()
    .then(commandOK("", Content.PROCESSING_PHOTO))
    .catch(() => error(t("Error taking photo")));
}

/** Run a Farmware script on the device, with optional argument pairs. */
export function runFarmware(
  farmwareName: string,
  pairs?: Pair[],
  errorMsg?: string,
) {
  maybeNoop();
  getDevice().execScript(farmwareName, pairs)
    // Errors are silently ignored unless a custom message was provided.
    .then(maybeNoop, errorMsg ? commandErr(errorMsg) : noop);
}

/** Ask the device to update an installed Farmware. */
export function updateFarmware(farmwareName: string) {
  maybeNoop();
  getDevice()
    .updateFarmware(farmwareName)
    .then(maybeNoop)
    .catch(commandErr("Update"));
}

/**
 * Structure and type checks for fetched minimum FBOS version feature object.
 * Accepts only objects whose every entry maps a string feature name to a
 * "x.y.z"-shaped version string.
 * @param x axios response data
 */
function validMinOsFeatureLookup(x: MinOsFeatureLookup): boolean {
  return isObject(x) &&
    Object.entries(x).every(([key, val]) =>
      typeof key === "string" && // feature name
      typeof val === "string" && // version string
      val.split(".").length > 2); // "0.0.0"
}

/**
 * Fetch and save minimum FBOS version data for UI feature display.
 * Dispatches FETCH_MIN_OS_FEATURE_INFO_OK on valid data, _ERROR on
 * network failure, and only logs a warning on malformed data.
 */
export const fetchMinOsFeatureData = () => (dispatch: Function) => {
  axios
    .get<MinOsFeatureLookup>(ExternalUrl.featureMinVersions)
    .then(resp => {
      const data = resp.data;
      if (validMinOsFeatureLookup(data)) {
        dispatch({
          type: Actions.FETCH_MIN_OS_FEATURE_INFO_OK,
          payload: data
        });
      } else {
        console.log(`Warning! Got '${JSON.stringify(data)}', ` +
          "expected min OS feature data.");
      }
    })
    .catch((ferror) => {
      dispatch({
        type: Actions.FETCH_MIN_OS_FEATURE_INFO_ERROR,
        payload: ferror
      });
    });
};

/**
 * Fetch and save FBOS release notes.
 */
export const fetchOsReleaseNotes = () => (dispatch: Function) => {
  axios
    .get<string>(ExternalUrl.osReleaseNotes)
    .then(resp => {
      dispatch({
        type: Actions.FETCH_OS_RELEASE_NOTES_OK,
        payload: resp.data
      });
    })
    .catch((ferror) => {
      dispatch({
        type: Actions.FETCH_OS_RELEASE_NOTES_ERROR,
        payload: ferror
      });
    });
};

/** Factory reset all firmware settings. */
export function MCUFactoryReset() {
  // Destructive action: require explicit user confirmation first.
  if (!confirm(t(Content.MCU_RESET_ALERT))) {
    return;
  }
  maybeNoop();
  return getDevice().resetMCU().catch(commandErr("MCU Reset"));
}

/** Toggle a firmware setting.
*/
export function settingToggle(
  key: ConfigKey,
  sourceFwConfig: SourceFwConfig,
  displayAlert?: string | undefined,
) {
  // Thunk: flips the keyed firmware value between ON and OFF and persists it
  // to the FirmwareConfig API resource. Optionally shows an alert first.
  return function (dispatch: Function, getState: () => Everything) {
    if (displayAlert) { alert(trim(displayAlert)); }
    const update = { [key]: (sourceFwConfig(key).value === 0) ? ON : OFF };
    const firmwareConfig = getFirmwareConfig(getState().resources.index);
    const toggleFirmwareConfig = (fwConfig: TaggedFirmwareConfig) => {
      dispatch(edit(fwConfig, update));
      dispatch(apiSave(fwConfig.uuid));
    };
    // Silently a no-op when no FirmwareConfig resource exists.
    if (firmwareConfig) {
      return toggleFirmwareConfig(firmwareConfig);
    }
  };
}

/** Move the bot by a relative offset. */
export function moveRelative(props: MoveRelProps) {
  maybeNoop();
  maybeAlertLocked();
  return getDevice()
    .moveRelative(props)
    .then(maybeNoop, commandErr("Relative movement"));
}

/** Move the bot to an absolute coordinate. */
export function moveAbsolute(props: MoveRelProps) {
  const noun = t("Absolute movement");
  maybeNoop();
  maybeAlertLocked();
  return getDevice()
    .moveAbsolute(props)
    .then(maybeNoop, commandErr(noun));
}

/** Issue a CeleryScript `move` command built from per-axis overwrites,
 * plus optional speed overwrites and an optional safe-Z rise. */
export function move(props: MoveProps) {
  const noun = t("Movement");
  maybeNoop();
  maybeAlertLocked();
  const safeZ: SafeZ = { kind: "safe_z", args: {} };
  const speedOverwrite = (axis: Xyz, speed: number): SpeedOverwrite => ({
    kind: "speed_overwrite",
    args: {
      axis,
      speed_setting: { kind: "numeric", args: { number: speed } }
    },
  });
  const positionOverwrite = (axis: Xyz): AxisOverwrite => ({
    kind: "axis_overwrite",
    args: {
      axis,
      // NOTE: every axis_overwrite carries the full (x, y, z) coordinate;
      // only the `axis` field selects which component applies.
      axis_operand: {
        kind: "coordinate",
        args: { x: props.x, y: props.y, z: props.z, }
      },
    }
  });
  const body: MoveBodyItem[] = [
    positionOverwrite("x"),
    positionOverwrite("y"),
    positionOverwrite("z"),
    ...(props.speed ? [speedOverwrite("x", props.speed)] : []),
    ...(props.speed ? [speedOverwrite("y", props.speed)] : []),
    ...(props.speed ? [speedOverwrite("z", props.speed)] : []),
    ...(props.safeZ ? [safeZ] : []),
  ];
  return getDevice()
    .send(rpcRequest([{ kind: "move", args: {}, body }]))
    .then(maybeNoop, commandErr(noun));
}

/** Toggle a digital pin. */
export function pinToggle(pin_number: number) {
  const noun = t("Setting toggle");
  maybeNoop();
  maybeAlertLocked();
  return getDevice()
    .togglePin({ pin_number })
    .then(maybeNoop, commandErr(noun));
}

/** Read a pin in the given mode. */
export function readPin(
  pin_number: number, label: string, pin_mode: ALLOWED_PIN_MODES,
) {
  const noun = t("Read pin");
  maybeNoop();
  return getDevice()
    .readPin({ pin_number, label, pin_mode })
    .then(maybeNoop, commandErr(noun));
}

/** Write a value to a pin in the given mode. */
export function writePin(
  pin_number: number, pin_value: number, pin_mode: ALLOWED_PIN_MODES,
) {
  const noun = t("Write pin");
  maybeNoop();
  maybeAlertLocked();
  return getDevice()
    .writePin({ pin_number, pin_mode, pin_value })
    .then(maybeNoop, commandErr(noun));
}

/** Move one axis to its home position at the default speed. */
export function moveToHome(axis: Axis) {
  const noun = t("'Move To Home' command");
  maybeNoop();
  maybeAlertLocked();
  getDevice()
    .home({ axis, speed: CONFIG_DEFAULTS.speed })
    .catch(commandErr(noun));
}

/** Run the find-home routine for one axis. */
export function findHome(axis: Axis) {
  const noun = t("'Find Home' command");
  maybeNoop();
  maybeAlertLocked();
  getDevice()
    .findHome({ axis, speed: CONFIG_DEFAULTS.speed })
    .catch(commandErr(noun));
}

/** Declare the current position as zero for one axis. */
export function setHome(axis: Axis) {
  const noun = t("'Set Home' command");
  maybeNoop();
  getDevice()
    .setZero(axis)
    .catch(commandErr(noun));
}

/** Run axis-length calibration for one axis. */
export function findAxisLength(axis: Axis) {
  const noun = t("'Find Axis Length' command");
  maybeNoop();
  maybeAlertLocked();
  getDevice()
    .calibrate({ axis })
    .catch(commandErr(noun));
}

/** Update firmware setting.
*/ export function updateMCU(key: ConfigKey, val: string) { return function (dispatch: Function, getState: () => Everything) { const firmwareConfig = getFirmwareConfig(getState().resources.index); const getParams = () => { if (firmwareConfig) { return firmwareConfig.body; } else { return getState().bot.hardware.mcu_params; } }; function proceed() { if (firmwareConfig) { dispatch(edit(firmwareConfig, { [key]: val } as Partial<FirmwareConfig>)); dispatch(apiSave(firmwareConfig.uuid)); } } const dont = (err: string) => warning(err); const validate = mcuParamValidator(key, parseInt(val, 10), getParams()); validate(proceed, dont); }; } /** Update FBOS setting. */ export function updateConfig(config: Partial<FbosConfig>) { return function (dispatch: Function, getState: () => Everything) { const fbosConfig = getFbosConfig(getState().resources.index); if (fbosConfig) { dispatch(edit(fbosConfig, config)); dispatch(apiSave(fbosConfig.uuid)); } }; } /** Change jog button movement amount. */ export function changeStepSize(integer: number) { return { type: Actions.CHANGE_STEP_SIZE, payload: integer }; } export function badVersion(options: ToastOptions = { noDismiss: true }) { goToFbosSettings(); error(t(Content.OLD_FBOS_UNSUPPORTED), { title: t("Please Update"), noTimer: true, idPrefix: "EOL", ...options, }); }
the_stack
import { Injectable } from '@angular/core'; import { Observable } from 'rxjs'; import { HttpClient } from '@angular/common/http'; import { Dictionary } from '../collections'; import { environment } from '../../../environments/environment'; import { HTTPMethod } from '../util'; // we can now access environment.apiUrl const API_URL = environment.apiUrl; @Injectable() export class ApplicationServiceFacade { private static readonly LOGIN = 'login'; private static readonly LOGOUT = 'logout'; private static readonly AUTHORIZE = 'authorize'; private static readonly REFRESH_TOKEN = 'refresh_token'; private static readonly OAUTH = 'oauth'; private static readonly ACCESS_KEY = 'access_key'; private static readonly ACTIVE_LIST = 'active_list'; private static readonly FULL_ACTIVE_LIST = 'full_active_list'; private static readonly ENV = 'environment'; private static readonly PROJECT_KEY_GENERATE = 'access_key_generate'; private static readonly PROVISIONED_RESOURCES = 'provisioned_resources'; private static readonly EXPLORATORY_ENVIRONMENT = 'exploratory_environment'; private static readonly IMAGE = 'image'; private static readonly SCHEDULER = 'scheduler'; private static readonly TEMPLATES = 'templates'; private static readonly COMPUTATION_TEMPLATES = 'computation_templates'; private static readonly COMPUTATIONAL_RESOURCES_TEMLATES = 'computational_templates'; private static readonly COMPUTATIONAL_RESOURCES = 'computational_resources'; private static readonly COMPUTATIONAL_RESOURCES_DATAENGINE = 'computational_resources_dataengine'; private static readonly COMPUTATIONAL_RESOURCES_DATAENGINESERVICE = 'computational_resources_dataengineservice'; private static readonly BUCKET = 'bucket'; private static readonly USER_PREFERENCES = 'user_preferences'; private static readonly BUDGET = 'budget'; private static readonly ENVIRONMENT_HEALTH_STATUS = 'environment_health_status'; private static readonly META_DATA = 'meta'; private static readonly ROLES = 'roles'; private static readonly 
GROUPS = 'groups'; private static readonly GROUP_ROLE = 'group_role'; private static readonly GROUP_USER = 'group_user'; private static readonly BACKUP = 'backup'; private static readonly EDGE_NODE_START = 'edge_node_start'; private static readonly EDGE_NODE_STOP = 'edge_node_stop'; private static readonly EDGE_NODE_RECREATE = 'edge_node_recreate'; private static readonly SNN_MONITOR = 'ssn_monitor'; private static readonly LIB_GROUPS = 'lib_groups'; private static readonly LIB_LIST = 'lib_list'; private static readonly LIB_INSTALL = 'lib_install'; private static readonly INSTALLED_LIBS_FORMAT = 'installed_libs_format'; private static readonly INSTALLED_LIBS = 'installed_libs'; private static readonly GIT_CREDS = 'git_creds'; private static readonly BILLING = 'billing'; private static readonly DOWNLOAD_REPORT = 'download_report'; private static readonly SETTINGS = 'settings'; private static readonly PROJECT = 'project'; private static readonly ODAHU = 'odahu'; private static readonly ENDPOINT = 'endpoint'; private static readonly ENDPOINT_CONNECTION = 'endpoint_connection'; private static readonly AUDIT = 'audit'; private static readonly CONFIG = 'config'; private static readonly QUOTA = 'quota'; private requestRegistry: Dictionary<string>; constructor(private http: HttpClient) { this.setupRegistry(); } public buildLoginRequest(body: any): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.LOGIN), body, { responseType: 'text', observe: 'response' }); } public buildLogoutRequest(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.LOGOUT), '', { observe: 'response' }); } public buildAuthorizeRequest(body: any): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.AUTHORIZE), body, { responseType: 'text', headers: { 'Content-Type': 'text/plain' }, observe: 'response' }); } public 
buildRefreshToken(param: any): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.REFRESH_TOKEN) + param, null); } public buildLocationCheck(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.OAUTH), null, { responseType: 'text' }); } public buildGetAuthToken(body: any): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.OAUTH) + body, null, { observe: 'response' }); } public buildCheckUserAccessKeyRequest(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ACCESS_KEY), null, { observe: 'response' }); } public buildGenerateAccessKey(): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT_KEY_GENERATE), null, { observe: 'response', responseType: 'text' }); } public buildRegenerateAccessKey(option): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT_KEY_GENERATE) + option, null, { observe: 'response', responseType: 'text' }); } public buildUploadUserAccessKeyRequest(body: any): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.ACCESS_KEY), body, { observe: 'response', headers: { 'Upload': 'true' } }); } public buildReuploadUserAccessKeyRequest(body: any, option: string): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.ACCESS_KEY) + option, body, { observe: 'response', headers: { 'Upload': 'true' } }); } public buildGetUserProvisionedResourcesRequest(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.PROVISIONED_RESOURCES), null); } public buildGetTemplatesRequest(params): Observable<any> { return 
this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.TEMPLATES) + params, null); } public buildGetComputationTemplatesRequest(params, provider): Observable<any> { return this.buildRequest(HTTPMethod.GET, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATION_TEMPLATES) + params, null); } public buildCreateExploratoryEnvironmentRequest(data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT), data, { responseType: 'text', observe: 'response' }); } public buildGetExploratoryEnvironmentRequest(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT), null, { observe: 'response' }); } public buildRunExploratoryEnvironmentRequest(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT), data, { responseType: 'text', observe: 'response' }); } public buildSuspendExploratoryEnvironmentRequest(data): Observable<any> { return this.buildRequest(HTTPMethod.DELETE, this.requestRegistry.Item(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT), data, { responseType: 'text', observe: 'response' }); } public buildCreateComputationalResources_DataengineServiceRequest(data, provider): Observable<any> { return this.buildRequest(HTTPMethod.PUT, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES_DATAENGINESERVICE), data, { observe: 'response' }); } public buildCreateComputationalResources_DataengineRequest(data, provider): Observable<any> { return this.buildRequest(HTTPMethod.PUT, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES_DATAENGINE), data, { observe: 'response' }); } public buildDeleteComputationalResourcesRequest(data, provider): Observable<any> { return 
this.buildRequest(HTTPMethod.DELETE, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES), data); } public buildStopSparkClusterAction(data, provider): Observable<any> { return this.buildRequest(HTTPMethod.DELETE, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES), data); } public buildStartSparkClusterAction(params, provider): Observable<any> { return this.buildRequest(HTTPMethod.PUT, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES) + params, null); } public buildGetUserPreferences(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.USER_PREFERENCES), null); } public buildGetBucketData(data): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.BUCKET), data); } public buildUploadFileToBucket(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.BUCKET) + '/upload', data, { reportProgress: true, observe: 'events' }); } public buildCreateFolderInBucket(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.BUCKET) + '/folder/upload', data); } public buildDownloadFileFromBucket(data) { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.BUCKET), data, { dataType : 'binary', processData : false, responseType : 'arraybuffer', reportProgress: true, observe: 'events' } ); } public buildDeleteFileFromBucket(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.BUCKET) + '/objects/delete', data ); } public buildUpdateUserPreferences(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.USER_PREFERENCES), data); } public 
buildGetEnvironmentHealthStatus(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ENVIRONMENT_HEALTH_STATUS), null, { observe: 'response' }); } public buildGetEnvironmentStatuses(data): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ENVIRONMENT_HEALTH_STATUS), data); } public buildGetQuotaStatus(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.QUOTA), null ); } public buildRunEdgeNodeRequest(): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.EDGE_NODE_START), null, { responseType: 'text' }); } public buildSuspendEdgeNodeRequest(): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.EDGE_NODE_STOP), null, { responseType: 'text', observe: 'response' }); } public buildRecreateEdgeNodeRequest(): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.EDGE_NODE_RECREATE), null, { responseType: 'text' }); } public buildGetGroupsList(data): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.LIB_GROUPS), data); } public buildGetAvailableLibrariesList(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.LIB_LIST), data); } public buildGetAvailableDependenciest(params): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.LIB_LIST) + params, null); } public buildInstallLibraries(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.LIB_INSTALL), data, { observe: 'response', responseType: 'text' }); } public buildGetInstalledLibrariesList(data): Observable<any> { return 
this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.INSTALLED_LIBS_FORMAT), data); } public buildGetInstalledLibsByResource(data): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.INSTALLED_LIBS), data); } public buildGetGitCreds(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.GIT_CREDS), null); } public buildUpdateGitCredentials(data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.GIT_CREDS), data); } public buildGetGeneralBillingData(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.BILLING), data); } public buildDownloadReportData(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.DOWNLOAD_REPORT), data, { observe: 'response', responseType: 'text' }); } public buildCreateBackupRequest(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.BACKUP), data, { responseType: 'text', observe: 'response' }); } public buildGetBackupStatusRequest(uuid): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.BACKUP), uuid); } public buildGetUserImages(image): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.IMAGE), image); } public buildGetImagesList(param): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.IMAGE) + param, null); } public buildCreateAMI(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.IMAGE), data, { observe: 'response', responseType: 'text' }); } public buildGetExploratorySchedule(data): 
Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.SCHEDULER), data); } public buildSetExploratorySchedule(param, data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.SCHEDULER) + param, data, { observe: 'response' }); } public buildResetScheduleSettings(data): Observable<any> { return this.buildRequest(HTTPMethod.DELETE, this.requestRegistry.Item(ApplicationServiceFacade.SCHEDULER), data); } public BuildGetActiveSchcedulersData(param): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.SCHEDULER) + param, null); } public buildGetActiveUsers(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ACTIVE_LIST), null); } public buildGetAllEnvironmentData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.FULL_ACTIVE_LIST), null); } public buildEnvironmentManagement(param, data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.ENV) + param, data, { observe: 'response', headers: { 'Content-Type': 'text/plain' } }); } public buildGetSsnMonitorData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.SNN_MONITOR), null); } public buildGetTotalBudgetData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.SETTINGS), null); } public buildUpdateTotalBudgetData(param, method: number): Observable<any> { return this.buildRequest(method, this.requestRegistry.Item(ApplicationServiceFacade.SETTINGS) + param, null, { observe: 'response' }); } public buildGetGroupsData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.GROUPS), null); } public 
buildGetRolesData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ROLES), null); } public buildSetupNewGroup(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.GROUPS), data); } public buildUpdateGroupData(data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.GROUPS), data); } public buildSetupRolesForGroup(data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.GROUP_ROLE), data); } public buildSetupUsersForGroup(data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.GROUP_USER), data); } public buildRemoveUsersForGroup(data): Observable<any> { return this.buildRequest(HTTPMethod.DELETE, this.requestRegistry.Item(ApplicationServiceFacade.GROUP_USER), data); } public buildRemoveGroupById(data): Observable<any> { return this.buildRequest(HTTPMethod.DELETE, this.requestRegistry.Item(ApplicationServiceFacade.GROUPS), data); } public buildGetClusterConfiguration(param, provider): Observable<any> { return this.buildRequest(HTTPMethod.GET, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES) + param, null); } public buildEditClusterConfiguration(param, data, provider): Observable<any> { return this.buildRequest(HTTPMethod.PUT, '/api/' + provider + this.requestRegistry.Item(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES) + param, data); } public buildGetExploratorySparkConfiguration(param): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT) + param, null); } public buildEditExploratorySparkConfiguration(param, data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, 
this.requestRegistry.Item(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT) + param, data); } public buildGetAppMetaData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.META_DATA), null); } public buildCreateProject(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT), data); } public buildUpdateProject(data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT), data); } public buildGetProjectsList(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT), null); } public buildGetUserProjectsList(params?): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT) + params, null); } public buildToggleProjectStatus(param, data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT) + param, data); } public buildUpdateProjectsBudget(param, data): Observable<any> { return this.buildRequest(HTTPMethod.PUT, this.requestRegistry.Item(ApplicationServiceFacade.PROJECT) + param, data, { observe: 'response' }); } public buildGetEndpointsData(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ENDPOINT), null); } public getEndpointsResource(endpoint): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ENDPOINT) + `/${endpoint}/resources`, null); } public buildCreateEndpoint(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.ENDPOINT), data); } public buildDeleteEndpoint(param): Observable<any> { return this.buildRequest(HTTPMethod.DELETE, this.requestRegistry.Item(ApplicationServiceFacade.ENDPOINT) + 
param, null); } public getEndpointConnectionStatus(endpointUrl): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ENDPOINT_CONNECTION) + endpointUrl, null); } public getAuditList(data): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.AUDIT), data); } public postActionToAudit(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.AUDIT), data); } public createOdahuCluster(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.ODAHU), data); } public getOdahuList(): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.ODAHU), null); } public odahuStartStop(data, params): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.ODAHU) + `/${params}`, data); } public buildGetServiceConfig(data): Observable<any> { return this.buildRequest(HTTPMethod.GET, this.requestRegistry.Item(ApplicationServiceFacade.CONFIG), data ); } public buildSetServiceConfig(data, body): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.CONFIG) + '/' + data, body); } public buildRestartServices(data): Observable<any> { return this.buildRequest(HTTPMethod.POST, this.requestRegistry.Item(ApplicationServiceFacade.CONFIG) + '/restart', data ); } private setupRegistry(): void { this.requestRegistry = new Dictionary<string>(); // Security this.requestRegistry.Add(ApplicationServiceFacade.LOGIN, '/api/user/login'); this.requestRegistry.Add(ApplicationServiceFacade.LOGOUT, '/api/oauth/logout'); this.requestRegistry.Add(ApplicationServiceFacade.AUTHORIZE, '/api/oauth/authorize'); this.requestRegistry.Add(ApplicationServiceFacade.REFRESH_TOKEN, '/api/oauth/refresh'); 
this.requestRegistry.Add(ApplicationServiceFacade.ACTIVE_LIST, '/api/environment/user'); this.requestRegistry.Add(ApplicationServiceFacade.FULL_ACTIVE_LIST, '/api/environment/all'); this.requestRegistry.Add(ApplicationServiceFacade.ENV, '/api/environment'); this.requestRegistry.Add(ApplicationServiceFacade.OAUTH, '/api/oauth'); this.requestRegistry.Add(ApplicationServiceFacade.ACCESS_KEY, '/api/user/access_key'); this.requestRegistry.Add(ApplicationServiceFacade.PROJECT_KEY_GENERATE, '/api/project/keys'); // Exploratory Environment this.requestRegistry.Add(ApplicationServiceFacade.PROVISIONED_RESOURCES, '/api/infrastructure/info'); this.requestRegistry.Add(ApplicationServiceFacade.EXPLORATORY_ENVIRONMENT, '/api/infrastructure_provision/exploratory_environment'); this.requestRegistry.Add(ApplicationServiceFacade.TEMPLATES, '/api/infrastructure_templates'); this.requestRegistry.Add(ApplicationServiceFacade.COMPUTATION_TEMPLATES, '/infrastructure_provision/computational_resources'); this.requestRegistry.Add(ApplicationServiceFacade.IMAGE, '/api/infrastructure_provision/exploratory_environment/image'); this.requestRegistry.Add(ApplicationServiceFacade.SCHEDULER, '/api/infrastructure_provision/exploratory_environment/scheduler'); // Computational Resources this.requestRegistry.Add(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES, '/infrastructure_provision/computational_resources'); this.requestRegistry.Add(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES_DATAENGINESERVICE, '/infrastructure_provision/computational_resources/dataengine-service'); // emr(aws) this.requestRegistry.Add(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES_DATAENGINE, '/infrastructure_provision/computational_resources/dataengine'); // spark (azure|aws) this.requestRegistry.Add(ApplicationServiceFacade.COMPUTATIONAL_RESOURCES_TEMLATES, '/api/infrastructure_templates/computational_templates'); // Bucket browser this.requestRegistry.Add(ApplicationServiceFacade.BUCKET, '/api/bucket'); // Filtering 
Configuration this.requestRegistry.Add(ApplicationServiceFacade.USER_PREFERENCES, '/api/user/settings'); this.requestRegistry.Add(ApplicationServiceFacade.BUDGET, '/api/user/settings/budget'); // Environment Health Status this.requestRegistry.Add(ApplicationServiceFacade.ENVIRONMENT_HEALTH_STATUS, '/api/infrastructure/status'); this.requestRegistry.Add(ApplicationServiceFacade.META_DATA, '/api/infrastructure/meta'); this.requestRegistry.Add(ApplicationServiceFacade.EDGE_NODE_START, '/api/infrastructure/edge/start'); this.requestRegistry.Add(ApplicationServiceFacade.EDGE_NODE_STOP, '/api/infrastructure/edge/stop'); this.requestRegistry.Add(ApplicationServiceFacade.EDGE_NODE_RECREATE, '/api/user/access_key/recover'); this.requestRegistry.Add(ApplicationServiceFacade.BACKUP, '/api/infrastructure/backup'); this.requestRegistry.Add(ApplicationServiceFacade.SNN_MONITOR, '/api/sysinfo'); this.requestRegistry.Add(ApplicationServiceFacade.ROLES, '/api/role'); this.requestRegistry.Add(ApplicationServiceFacade.GROUPS, '/api/group'); this.requestRegistry.Add(ApplicationServiceFacade.GROUP_ROLE, 'api/group/role'); this.requestRegistry.Add(ApplicationServiceFacade.GROUP_USER, '/api/group/user'); this.requestRegistry.Add(ApplicationServiceFacade.SETTINGS, '/api/settings'); // Libraries Installation this.requestRegistry.Add(ApplicationServiceFacade.LIB_GROUPS, '/api/infrastructure_provision/exploratory_environment/lib-groups'); this.requestRegistry.Add(ApplicationServiceFacade.LIB_LIST, '/api/infrastructure_provision/exploratory_environment/search/lib_list'); this.requestRegistry.Add(ApplicationServiceFacade.LIB_INSTALL, '/api/infrastructure_provision/exploratory_environment/lib_install'); this.requestRegistry.Add(ApplicationServiceFacade.INSTALLED_LIBS_FORMAT, '/api/infrastructure_provision/exploratory_environment/lib_list/formatted'); this.requestRegistry.Add(ApplicationServiceFacade.INSTALLED_LIBS, '/api/infrastructure_provision/exploratory_environment/lib_list'); // UnGit 
credentials this.requestRegistry.Add(ApplicationServiceFacade.GIT_CREDS, '/api/user/git_creds'); // billing report this.requestRegistry.Add(ApplicationServiceFacade.BILLING, '/api/billing/report'); this.requestRegistry.Add(ApplicationServiceFacade.DOWNLOAD_REPORT, '/api/billing/report/download'); this.requestRegistry.Add(ApplicationServiceFacade.QUOTA, '/api/billing/quota'); // project this.requestRegistry.Add(ApplicationServiceFacade.PROJECT, '/api/project'); this.requestRegistry.Add(ApplicationServiceFacade.ENDPOINT, '/api/endpoint'); this.requestRegistry.Add(ApplicationServiceFacade.ENDPOINT_CONNECTION, '/api/endpoint/url/'); // Odahu this.requestRegistry.Add(ApplicationServiceFacade.ODAHU, '/api/odahu'); // audit this.requestRegistry.Add(ApplicationServiceFacade.AUDIT, '/api/audit'); // configuration this.requestRegistry.Add(ApplicationServiceFacade.CONFIG, '/api/config/multiple'); } private buildRequest(method: HTTPMethod, url_path: string, body: any, opt?) { // added to simplify development process const url = environment.production ? url_path : API_URL + url_path; // if (url_path.indexOf('/api/bucket') !== -1) { // url = 'https://35.233.183.55' + url_path; // } if (method === HTTPMethod.POST) { return this.http.post(url, body, opt); } else if (method === HTTPMethod.DELETE) { return this.http.delete(body ? url + JSON.parse(body) : url, opt); } else if (method === HTTPMethod.PUT) { return this.http.put(url, body, opt); } else { return this.http.get(body ? (url + body) : url, opt); } } }
the_stack
import { Injectable } from '@angular/core';
import { CoreError } from '@classes/errors/error';
import { CoreSite, CoreSiteWSPreSets } from '@classes/site';
import { CoreCourseCommonModWSOptions } from '@features/course/services/course';
import { CoreCourseLogHelper } from '@features/course/services/log-helper';
import { CoreTagItem } from '@features/tag/services/tag';
import { CoreApp } from '@services/app';
import { CoreNavigator } from '@services/navigator';
import { CoreSites, CoreSitesCommonWSOptions, CoreSitesReadingStrategy } from '@services/sites';
import { CoreUtils } from '@services/utils/utils';
import { CoreWSExternalFile, CoreWSExternalWarning, CoreWSFile } from '@services/ws';
import { makeSingleton, Translate } from '@singletons';
import { CoreEvents } from '@singletons/events';
import { AddonModWikiPageDBRecord } from './database/wiki';
import { AddonModWikiOffline } from './wiki-offline';
import { AddonModWikiAutoSyncData, AddonModWikiManualSyncData, AddonModWikiSyncProvider } from './wiki-sync';

// Prefix shared by every WS cache key written by this service.
const ROOT_CACHE_KEY = 'mmaModWiki:';

/**
 * Service that provides some features for wikis.
 */
@Injectable({ providedIn: 'root' })
export class AddonModWikiProvider {

    static readonly COMPONENT = 'mmaModWiki';
    static readonly PAGE_CREATED_EVENT = 'addon_mod_wiki_page_created';
    static readonly RENEW_LOCK_TIME = 30000; // Milliseconds.

    // In-memory cache of subwiki lists, keyed by wiki ID.
    protected subwikiListsCache: {[wikiId: number]: AddonModWikiSubwikiListData} = {};
    // First wiki page opened per main-menu tab: tab name -> wiki ID -> page path.
    protected wikiFirstViewedPage: Record<string, Record<number, string>> = {};
    // Data about the page last edited, consumed once via consumeEditedPageData().
    protected editedPage?: AddonModWikiEditedPageData;

    constructor() {
        // Clear subwiki lists cache on logout.
        // NOTE(review): the handler is registered for the LOGIN event, while the
        // comment above (kept from the original) says "on logout" — confirm which
        // event is intended.
        CoreEvents.on(CoreEvents.LOGIN, () => {
            this.clearSubwikiList();
        });
    }

    /**
     * Clear subwiki list cache for a certain wiki or all of them.
     *
     * @param wikiId wiki Id, if not provided all will be cleared.
*/ clearSubwikiList(wikiId?: number): void { if (typeof wikiId == 'undefined') { this.subwikiListsCache = {}; } else { delete this.subwikiListsCache[wikiId]; } } /** * Delete and return the edited page data if any. * * @return Edited page data, undefined if no data. */ consumeEditedPageData(): AddonModWikiEditedPageData | undefined { const editedPage = this.editedPage; delete this.editedPage; return editedPage; } /** * Save wiki contents on a page or section. * * @param pageId Page ID. * @param content content to be saved. * @param section section to get. * @return Promise resolved with the page ID. */ async editPage(pageId: number, content: string, section?: string, siteId?: string): Promise<number> { const site = await CoreSites.getSite(siteId); const params: AddonModWikiEditPageWSParams = { pageid: pageId, content: content, }; if (section) { params.section = section; } const response = await site.write<AddonModWikiEditPageWSResponse>('mod_wiki_edit_page', params); return response.pageid; } /** * Get the first page opened for a wiki in the app if it isn't the current one. * * @param wikiId Wiki ID. * @param path Path. */ getFirstWikiPageOpened(wikiId: number, path: string): string | undefined { const tab = CoreNavigator.getMainMenuTabFromPath(path); if (!tab) { return; } if (this.wikiFirstViewedPage[tab] && this.wikiFirstViewedPage[tab][wikiId] !== path) { return this.wikiFirstViewedPage[tab][wikiId]; } } /** * Get a wiki page contents. * * @param pageId Page ID. * @param options Other options. * @return Promise resolved with the page data. 
     */
    async getPageContents(pageId: number, options: CoreCourseCommonModWSOptions = {}): Promise<AddonModWikiPageContents> {
        const site = await CoreSites.getSite(options.siteId);

        const params: AddonModWikiGetPageContentsWSParams = {
            pageid: pageId,
        };
        const preSets: CoreSiteWSPreSets = {
            cacheKey: this.getPageContentsCacheKey(pageId),
            updateFrequency: CoreSite.FREQUENCY_SOMETIMES,
            component: AddonModWikiProvider.COMPONENT,
            componentId: options.cmId,
            ...CoreSites.getReadingStrategyPreSets(options.readingStrategy), // Include reading strategy preSets.
        };

        const response = await site.read<AddonModWikiGetPageContentsWSResponse>('mod_wiki_get_page_contents', params, preSets);

        return response.page;
    }

    /**
     * Get cache key for wiki Pages Contents WS calls.
     *
     * @param pageId Wiki Page ID.
     * @return Cache key.
     */
    protected getPageContentsCacheKey(pageId: number): string {
        return ROOT_CACHE_KEY + 'page:' + pageId;
    }

    /**
     * Get a wiki page contents for editing. It does not cache calls.
     *
     * @param pageId Page ID.
     * @param section Section to get.
     * @param lockOnly Just renew lock and not return content.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved with page contents.
     */
    async getPageForEditing(
        pageId: number,
        section?: string,
        lockOnly?: boolean,
        siteId?: string,
    ): Promise<AddonModWikiWSEditPageSection> {
        const site = await CoreSites.getSite(siteId);

        const params: AddonModWikiGetPageForEditingWSParams = {
            pageid: pageId,
        };
        if (section) {
            params.section = section;
        }

        // This parameter requires Moodle 3.2. It saves network usage.
        if (lockOnly && site.isVersionGreaterEqualThan('3.2')) {
            params.lockonly = true;
        }

        // Uses site.write (not read) on purpose: editing locks must not be cached.
        const response = await site.write<AddonModWikiGetPageForEditingWSResponse>('mod_wiki_get_page_for_editing', params);

        return response.pagesection;
    }

    /**
     * Gets the list of files from a specific subwiki.
     *
     * @param wikiId Wiki ID.
     * @param options Other options.
     * @return Promise resolved with subwiki files.
     */
    async getSubwikiFiles(wikiId: number, options: AddonModWikiGetSubwikiFilesOptions = {}): Promise<CoreWSFile[]> {
        const site = await CoreSites.getSite(options.siteId);

        // Defaults match the WS conventions: -1 = current group, 0 = current user.
        const groupId = options.groupId || -1;
        const userId = options.userId || 0;

        const params: AddonModWikiGetSubwikiFilesWSParams = {
            wikiid: wikiId,
            groupid: groupId,
            userid: userId,
        };
        const preSets: CoreSiteWSPreSets = {
            cacheKey: this.getSubwikiFilesCacheKey(wikiId, groupId, userId),
            updateFrequency: CoreSite.FREQUENCY_SOMETIMES,
            component: AddonModWikiProvider.COMPONENT,
            componentId: options.cmId,
            ...CoreSites.getReadingStrategyPreSets(options.readingStrategy), // Include reading strategy preSets.
        };

        const response = await site.read<AddonModWikiGetSubwikiFilesWSResponse>('mod_wiki_get_subwiki_files', params, preSets);

        return response.files;
    }

    /**
     * Get cache key for wiki Subwiki Files WS calls.
     *
     * @param wikiId Wiki ID.
     * @param groupId Group ID.
     * @param userId User ID.
     * @return Cache key.
     */
    protected getSubwikiFilesCacheKey(wikiId: number, groupId: number, userId: number): string {
        return this.getSubwikiFilesCacheKeyPrefix(wikiId) + ':' + groupId + ':' + userId;
    }

    /**
     * Get cache key for all wiki Subwiki Files WS calls.
     *
     * @param wikiId Wiki ID.
     * @return Cache key.
     */
    protected getSubwikiFilesCacheKeyPrefix(wikiId: number): string {
        return ROOT_CACHE_KEY + 'subwikifiles:' + wikiId;
    }

    /**
     * Get a list of subwikis and related data for a certain wiki from the cache.
     *
     * NOTE(review): despite the non-optional return type, this returns undefined
     * for a wiki that was never stored with setSubwikiList — callers should guard.
     *
     * @param wikiId wiki Id
     * @return Subwiki list and related data.
     */
    getSubwikiList(wikiId: number): AddonModWikiSubwikiListData {
        return this.subwikiListsCache[wikiId];
    }

    /**
     * Get the list of Pages of a SubWiki.
     *
     * @param wikiId Wiki ID.
     * @param options Other options.
     * @return Promise resolved with wiki subwiki pages.
*/ async getSubwikiPages(wikiId: number, options: AddonModWikiGetSubwikiPagesOptions = {}): Promise<AddonModWikiSubwikiPage[]> { const site = await CoreSites.getSite(options.siteId); const groupId = options.groupId || -1; const userId = options.userId || 0; const sortBy = options.sortBy || 'title'; const sortDirection = options.sortDirection || 'ASC'; const params: AddonModWikiGetSubwikiPagesWSParams = { wikiid: wikiId, groupid: groupId, userid: userId, options: { sortby: sortBy, sortdirection: sortDirection, includecontent: options.includeContent ? 1 : 0, }, }; const preSets: CoreSiteWSPreSets = { cacheKey: this.getSubwikiPagesCacheKey(wikiId, groupId, userId), updateFrequency: CoreSite.FREQUENCY_SOMETIMES, component: AddonModWikiProvider.COMPONENT, componentId: options.cmId, ...CoreSites.getReadingStrategyPreSets(options.readingStrategy), // Include reading strategy preSets. }; const response = await site.read<AddonModWikiGetSubwikiPagesWSResponse>('mod_wiki_get_subwiki_pages', params, preSets); return response.pages; } /** * Get cache key for wiki Subwiki Pages WS calls. * * @param wikiId Wiki ID. * @param groupId Group ID. * @param userId User ID. * @return Cache key. */ protected getSubwikiPagesCacheKey(wikiId: number, groupId: number, userId: number): string { return this.getSubwikiPagesCacheKeyPrefix(wikiId) + ':' + groupId + ':' + userId; } /** * Get cache key for all wiki Subwiki Pages WS calls. * * @param wikiId Wiki ID. * @return Cache key. */ protected getSubwikiPagesCacheKeyPrefix(wikiId: number): string { return ROOT_CACHE_KEY + 'subwikipages:' + wikiId; } /** * Get all the subwikis of a wiki. * * @param wikiId Wiki ID. * @param options Other options. * @return Promise resolved with subwikis. 
*/ async getSubwikis(wikiId: number, options: CoreCourseCommonModWSOptions = {}): Promise<AddonModWikiSubwiki[]> { const site = await CoreSites.getSite(options.siteId); const params: AddonModWikiGetSubwikisWSParams = { wikiid: wikiId, }; const preSets = { cacheKey: this.getSubwikisCacheKey(wikiId), updateFrequency: CoreSite.FREQUENCY_RARELY, component: AddonModWikiProvider.COMPONENT, componentId: options.cmId, ...CoreSites.getReadingStrategyPreSets(options.readingStrategy), // Include reading strategy preSets. }; const response = await site.read<AddonModWikiGetSubwikisWSResponse>('mod_wiki_get_subwikis', params, preSets); return response.subwikis; } /** * Get cache key for get wiki subWikis WS calls. * * @param wikiId Wiki ID. * @return Cache key. */ protected getSubwikisCacheKey(wikiId: number): string { return ROOT_CACHE_KEY + 'subwikis:' + wikiId; } /** * Get a wiki by module ID. * * @param courseId Course ID. * @param cmId Course module ID. * @param options Other options. * @return Promise resolved when the wiki is retrieved. */ getWiki(courseId: number, cmId: number, options: CoreSitesCommonWSOptions = {}): Promise<AddonModWikiWiki> { return this.getWikiByField(courseId, 'coursemodule', cmId, options); } /** * Get a wiki with key=value. If more than one is found, only the first will be returned. * * @param courseId Course ID. * @param key Name of the property to check. * @param value Value to search. * @param options Other options. * @return Promise resolved when the wiki is retrieved. 
     */
    protected async getWikiByField(
        courseId: number,
        key: string,
        value: unknown,
        options: CoreSitesCommonWSOptions = {},
    ): Promise<AddonModWikiWiki> {
        const site = await CoreSites.getSite(options.siteId);

        const params: AddonModWikiGetWikisByCoursesWSParams = {
            courseids: [courseId],
        };
        const preSets: CoreSiteWSPreSets = {
            cacheKey: this.getWikiDataCacheKey(courseId),
            updateFrequency: CoreSite.FREQUENCY_RARELY,
            component: AddonModWikiProvider.COMPONENT,
            ...CoreSites.getReadingStrategyPreSets(options.readingStrategy), // Include reading strategy preSets.
        };

        const response = await site.read<AddonModWikiGetWikisByCoursesWSResponse>('mod_wiki_get_wikis_by_courses', params, preSets);

        // Loose == presumably on purpose (WS values may come back as strings) — confirm.
        const currentWiki = response.wikis.find((wiki) => wiki[key] == value);
        if (currentWiki) {
            return currentWiki;
        }

        throw new CoreError('Wiki not found.');
    }

    /**
     * Get a wiki by wiki ID.
     *
     * @param courseId Course ID.
     * @param id Wiki ID.
     * @param options Other options.
     * @return Promise resolved when the wiki is retrieved.
     */
    getWikiById(courseId: number, id: number, options: CoreSitesCommonWSOptions = {}): Promise<AddonModWikiWiki> {
        return this.getWikiByField(courseId, 'id', id, options);
    }

    /**
     * Get cache key for wiki data WS calls.
     *
     * @param courseId Course ID.
     * @return Cache key.
     */
    protected getWikiDataCacheKey(courseId: number): string {
        return ROOT_CACHE_KEY + 'wiki:' + courseId;
    }

    /**
     * Gets a list of files to download for a wiki, using a format similar to module.contents from get_course_contents.
     *
     * @param wiki Wiki.
     * @param options Other options.
     * @return Promise resolved with the list of files.
     */
    async getWikiFileList(wiki: AddonModWikiWiki, options: CoreSitesCommonWSOptions = {}): Promise<CoreWSFile[]> {
        options.siteId = options.siteId || CoreSites.getCurrentSiteId();

        let files: CoreWSFile[] = [];
        const modOptions = {
            cmId: wiki.coursemodule,
            ...options, // Include all options.
        };

        const subwikis = await this.getSubwikis(wiki.id, modOptions);

        // Fetch every subwiki's files in parallel and collect them.
        await Promise.all(subwikis.map(async (subwiki) => {
            const subwikiOptions = {
                groupId: subwiki.groupid,
                userId: subwiki.userid,
                ...modOptions, // Include all options.
            };

            const subwikiFiles = await this.getSubwikiFiles(subwiki.wikiid, subwikiOptions);

            files = files.concat(subwikiFiles);
        }));

        return files;
    }

    /**
     * Gets a list of all pages for a Wiki.
     *
     * @param wiki Wiki.
     * @param options Other options.
     * @return Page list.
     */
    async getWikiPageList(wiki: AddonModWikiWiki, options: CoreSitesCommonWSOptions = {}): Promise<AddonModWikiSubwikiPage[]> {
        options.siteId = options.siteId || CoreSites.getCurrentSiteId();

        let pages: AddonModWikiSubwikiPage[] = [];
        const modOptions = {
            cmId: wiki.coursemodule,
            ...options, // Include all options.
        };

        const subwikis = await this.getSubwikis(wiki.id, modOptions);

        // Fetch every subwiki's pages in parallel and collect them.
        await Promise.all(subwikis.map(async (subwiki) => {
            const subwikiPages = await this.getSubwikiPages(subwiki.wikiid, {
                groupId: subwiki.groupid,
                userId: subwiki.userid,
                ...modOptions, // Include all options.
            });

            pages = pages.concat(subwikiPages);
        }));

        return pages;
    }

    /**
     * Invalidate the prefetched content except files.
     * To invalidate files, use invalidateFiles.
     *
     * @param moduleId The module ID.
     * @param courseId Course ID.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved when done.
     */
    async invalidateContent(moduleId: number, courseId: number, siteId?: string): Promise<void> {
        siteId = siteId || CoreSites.getCurrentSiteId();

        // The wiki ID is needed to invalidate its subwiki caches.
        const wiki = await this.getWiki(courseId, moduleId, { siteId });

        await Promise.all([
            this.invalidateWikiData(courseId, siteId),
            this.invalidateSubwikis(wiki.id, siteId),
            this.invalidateSubwikiPages(wiki.id, siteId),
            this.invalidateSubwikiFiles(wiki.id, siteId),
        ]);
    }

    /**
     * Invalidates page content WS call for a certain page.
     *
     * @param pageId Wiki Page ID.
     * @param siteId Site ID. If not defined, current site.
* @return Promise resolved when the data is invalidated. */ async invalidatePage(pageId: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.invalidateWsCacheForKey(this.getPageContentsCacheKey(pageId)); } /** * Invalidates all the subwiki files WS calls for a certain wiki. * * @param wikiId Wiki ID. * @param siteId Site ID. If not defined, current site. * @return Promise resolved when the data is invalidated. */ async invalidateSubwikiFiles(wikiId: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.invalidateWsCacheForKeyStartingWith(this.getSubwikiFilesCacheKeyPrefix(wikiId)); } /** * Invalidates all the subwiki pages WS calls for a certain wiki. * * @param wikiId Wiki ID. * @param siteId Site ID. If not defined, current site. * @return Promise resolved when the data is invalidated. */ async invalidateSubwikiPages(wikiId: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.invalidateWsCacheForKeyStartingWith(this.getSubwikiPagesCacheKeyPrefix(wikiId)); } /** * Invalidates all the get subwikis WS calls for a certain wiki. * * @param wikiId Wiki ID. * @param siteId Site ID. If not defined, current site. * @return Promise resolved when the data is invalidated. */ async invalidateSubwikis(wikiId: number, siteId?: string): Promise<void> { this.clearSubwikiList(wikiId); const site = await CoreSites.getSite(siteId); await site.invalidateWsCacheForKey(this.getSubwikisCacheKey(wikiId)); } /** * Invalidates wiki data. * * @param courseId Course ID. * @param siteId Site ID. If not defined, current site. * @return Promise resolved when the data is invalidated. */ async invalidateWikiData(courseId: number, siteId?: string): Promise<void> { const site = await CoreSites.getSite(siteId); await site.invalidateWsCacheForKey(this.getWikiDataCacheKey(courseId)); } /** * Check if a page title is already used. * * @param wikiId Wiki ID. 
     * @param subwikiId Subwiki ID.
     * @param title Page title.
     * @param options Other options.
     * @return Promise resolved with true if used, resolved with false if not used or cannot determine.
     */
    async isTitleUsed(
        wikiId: number,
        subwikiId: number,
        title: string,
        options: CoreCourseCommonModWSOptions = {},
    ): Promise<boolean> {
        try {
            // First get the subwiki.
            const subwikis = await this.getSubwikis(wikiId, options);

            // Search the subwiki.
            const subwiki = subwikis.find((subwiki) => subwiki.id == subwikiId);
            if (!subwiki) {
                return false;
            }

            // Now get all the pages of the subwiki.
            const pages = await this.getSubwikiPages(wikiId, {
                groupId: subwiki.groupid,
                userId: subwiki.userid,
                ...options, // Include all options.
            });

            // Check if there's any page with the same title.
            const page = pages.find((page) => page.title == title);

            return !!page;
        } catch {
            // Deliberately best-effort: any WS failure means "cannot determine" -> false.
            return false;
        }
    }

    /**
     * Report a wiki page as being viewed.
     *
     * @param id Page ID.
     * @param wikiId Wiki ID.
     * @param name Name of the wiki.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved when the WS call is successful.
     */
    logPageView(id: number, wikiId: number, name?: string, siteId?: string): Promise<void> {
        const params: AddonModWikiViewPageWSParams = {
            pageid: id,
        };

        // NOTE(review): params is passed twice (as WS data and as the 7th argument),
        // while logView passes {} in the same position — confirm both are intended.
        return CoreCourseLogHelper.logSingle(
            'mod_wiki_view_page',
            params,
            AddonModWikiProvider.COMPONENT,
            wikiId,
            name,
            'wiki',
            params,
            siteId,
        );
    }

    /**
     * Report the wiki as being viewed.
     *
     * @param id Wiki ID.
     * @param name Name of the wiki.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved when the WS call is successful.
     */
    logView(id: number, name?: string, siteId?: string): Promise<void> {
        const params: AddonModWikiViewWikiWSParams = {
            wikiid: id,
        };

        return CoreCourseLogHelper.logSingle(
            'mod_wiki_view_wiki',
            params,
            AddonModWikiProvider.COMPONENT,
            id,
            name,
            'wiki',
            {},
            siteId,
        );
    }

    /**
     * Create a new page on a subwiki.
     *
     * @param title Title to create the page.
* @param content Content to save on the page. * @param options Other options. * @return Promise resolved with page ID if page was created in server, -1 if stored in device. */ async newPage(title: string, content: string, options: AddonModWikiNewPageOptions = {}): Promise<number> { options.siteId = options.siteId || CoreSites.getCurrentSiteId(); // Convenience function to store a new page to be synchronized later. const storeOffline = async (): Promise<number> => { if (options.wikiId && options.subwikiId) { // We have wiki ID, check if there's already an online page with this title and subwiki. const used = await CoreUtils.ignoreErrors(this.isTitleUsed(options.wikiId, options.subwikiId, title, { cmId: options.cmId, readingStrategy: CoreSitesReadingStrategy.PREFER_CACHE, siteId: options.siteId, })); if (used) { throw new CoreError(Translate.instant('addon.mod_wiki.pageexists')); } } await AddonModWikiOffline.saveNewPage( title, content, options.subwikiId, options.wikiId, options.userId, options.groupId, options.siteId, ); return -1; }; if (!CoreApp.isOnline()) { // App is offline, store the action. return storeOffline(); } // Discard stored content for this page. If it exists it means the user is editing it. await AddonModWikiOffline.deleteNewPage( title, options.subwikiId, options.wikiId, options.userId, options.groupId, options.siteId, ); try { // Try to create it in online. return this.newPageOnline(title, content, options); } catch (error) { if (CoreUtils.isWebServiceError(error)) { // The WebService has thrown an error, this means that the page cannot be added. throw error; } // Couldn't connect to server, store in offline. return storeOffline(); } } /** * Create a new page on a subwiki. It will fail if offline or cannot connect. * * @param title Title to create the page. * @param content Content to save on the page. * @param options Other options. * @return Promise resolved with the page ID if created, rejected otherwise. 
     */
    async newPageOnline(title: string, content: string, options: AddonModWikiNewPageOnlineOptions = {}): Promise<number> {
        const site = await CoreSites.getSite(options.siteId);

        const params: AddonModWikiNewPageWSParams = {
            title: title,
            content: content,
            contentformat: 'html',
        };

        // Normalize possibly-negative local IDs (offline records) to positive/undefined.
        const subwikiId = AddonModWikiOffline.convertToPositiveNumber(options.subwikiId);
        const wikiId = AddonModWikiOffline.convertToPositiveNumber(options.wikiId);

        if (subwikiId && subwikiId > 0) {
            params.subwikiid = subwikiId;
        } else if (wikiId) {
            // No subwiki: let the WS create it from wiki + user + group.
            params.wikiid = wikiId;
            params.userid = AddonModWikiOffline.convertToPositiveNumber(options.userId);
            params.groupid = AddonModWikiOffline.convertToPositiveNumber(options.groupId);
        }

        const response = await site.write<AddonModWikiNewPageWSResponse>('mod_wiki_new_page', params);

        return response.pageid;
    }

    /**
     * Set edited page data.
     *
     * @param data Data.
     */
    setEditedPageData(data: AddonModWikiEditedPageData): void {
        this.editedPage = data;
    }

    /**
     * Save subwiki list for a wiki to the cache.
     *
     * @param wikiId Wiki Id.
     * @param subwikis List of subwikis.
     * @param count Number of subwikis in the subwikis list.
     * @param subwikiId Subwiki Id currently selected.
     * @param userId User Id currently selected.
     * @param groupId Group Id currently selected.
     */
    setSubwikiList(
        wikiId: number,
        subwikis: AddonModWikiSubwikiListGrouping[],
        count: number,
        subwikiId: number,
        userId: number,
        groupId: number,
    ): void {
        this.subwikiListsCache[wikiId] = {
            count: count,
            subwikiSelected: subwikiId,
            userSelected: userId,
            groupSelected: groupId,
            subwikis: subwikis,
        };
    }

    /**
     * Sort an array of wiki pages by title.
     *
     * NOTE(review): sorts the given array in place and returns it; the comparator
     * never returns 0, so equal titles compare as "greater" — confirm both are
     * acceptable to callers.
     *
     * @param pages Pages to sort.
     * @param desc True to sort in descendent order, false to sort in ascendent order. Defaults to false.
     * @return Sorted pages.
     */
    sortPagesByTitle<T extends AddonModWikiSubwikiPage | AddonModWikiPageDBRecord>(
        pages: T[],
        desc?: boolean,
    ): T[] {
        return pages.sort((a, b) => {
            let result = a.title >= b.title ? 1 : -1;

            if (desc) {
                result = -result;
            }

            return result;
        });
    }

    /**
     * Check if a wiki has a certain subwiki.
     *
     * @param wikiId Wiki ID.
     * @param subwikiId Subwiki ID to search.
     * @param options Other options.
     * @return Promise resolved with true if it has subwiki, resolved with false otherwise.
     */
    async wikiHasSubwiki(wikiId: number, subwikiId: number, options: CoreCourseCommonModWSOptions = {}): Promise<boolean> {
        try {
            // Get the subwikis to check if any of them matches the one passed as param.
            const subwikis = await this.getSubwikis(wikiId, options);

            const subwiki = subwikis.find((subwiki) => subwiki.id == subwikiId);

            return !!subwiki;
        } catch {
            // Not found, return false.
            return false;
        }
    }

    /**
     * If this page is the first opened page for a wiki, remove the stored path so it's no longer the first viewed page.
     *
     * @param wikiId Wiki ID.
     * @param path Path.
     */
    wikiPageClosed(wikiId: number, path: string): void {
        const tab = CoreNavigator.getMainMenuTabFromPath(path);
        if (!tab) {
            return;
        }

        this.wikiFirstViewedPage[tab] = this.wikiFirstViewedPage[tab] || {};

        // Only clear the entry when this exact path was the stored first page.
        if (this.wikiFirstViewedPage[tab][wikiId] === path) {
            delete this.wikiFirstViewedPage[tab][wikiId];
        }
    }

    /**
     * If this page is the first opened page for a wiki, save its path so we can go back to it.
     *
     * @param wikiId Wiki ID.
     * @param path Path.
     */
    wikiPageOpened(wikiId: number, path: string): void {
        const tab = CoreNavigator.getMainMenuTabFromPath(path);
        if (!tab) {
            return;
        }

        this.wikiFirstViewedPage[tab] = this.wikiFirstViewedPage[tab] || {};

        if (this.wikiFirstViewedPage[tab][wikiId]) {
            // There's already an opened page for this wiki.
            return;
        }

        this.wikiFirstViewedPage[tab][wikiId] = path;
    }

}

export const AddonModWiki = makeSingleton(AddonModWikiProvider);

declare module '@singletons/events' {

    /**
     * Augment CoreEventsData interface with events specific to this service.
     *
     * @see https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation
     */
    export interface CoreEventsData {
        [AddonModWikiProvider.PAGE_CREATED_EVENT]: AddonModWikiPageCreatedData;
        [AddonModWikiSyncProvider.AUTO_SYNCED]: AddonModWikiAutoSyncData;
        [AddonModWikiSyncProvider.MANUAL_SYNCED]: AddonModWikiManualSyncData;
    }

}

/**
 * Options to pass to getSubwikiFiles.
 */
export type AddonModWikiGetSubwikiFilesOptions = CoreCourseCommonModWSOptions & {
    userId?: number; // User to get files from.
    groupId?: number; // Group to get files from.
};

/**
 * Options to pass to getSubwikiPages.
 */
export type AddonModWikiGetSubwikiPagesOptions = CoreCourseCommonModWSOptions & {
    userId?: number; // User to get pages from.
    groupId?: number; // Group to get pages from.
    sortBy?: string; // The attribute to sort the returned list. Defaults to 'title'.
    sortDirection?: string; // Direction to sort the returned list (ASC | DESC). Defaults to 'ASC'.
    includeContent?: boolean; // Whether the pages have to include their content.
};

/**
 * Options to pass to newPageOnline.
 */
export type AddonModWikiNewPageOnlineOptions = {
    subwikiId?: number; // Subwiki ID. If not defined, wikiId, userId and groupId should be defined.
    wikiId?: number; // Wiki ID. Optional, will be used to create a new subwiki if subwikiId not supplied.
    userId?: number; // User ID. Optional, will be used to create a new subwiki if subwikiId not supplied.
    groupId?: number; // Group ID. Optional, will be used to create a new subwiki if subwikiId not supplied.
    siteId?: string; // Site ID. If not defined, current site.
};

/**
 * Options to pass to newPage.
 */
export type AddonModWikiNewPageOptions = AddonModWikiNewPageOnlineOptions & {
    cmId?: number; // Module ID.
};

/**
 * Entry of the in-memory subwiki lists cache (see subwikiListsCache).
 */
export type AddonModWikiSubwikiListData = {
    count: number; // Number of subwikis.
    subwikiSelected: number; // Subwiki ID currently selected.
    userSelected: number; // User of the subwiki currently selected.
    groupSelected: number; // Group of the subwiki currently selected.
    subwikis: AddonModWikiSubwikiListGrouping[]; // List of subwikis, grouped by a certain label.
};

/**
 * A labelled group of subwikis inside a subwiki list.
 */
export type AddonModWikiSubwikiListGrouping = {
    label: string;
    subwikis: AddonModWikiSubwikiListSubwiki[];
};

/**
 * A single subwiki entry inside a subwiki list grouping.
 */
export type AddonModWikiSubwikiListSubwiki = {
    name: string;
    id: number;
    userid: number;
    groupid: number;
    groupLabel: string;
    canedit: boolean;
};

/**
 * Params of mod_wiki_edit_page WS.
 */
export type AddonModWikiEditPageWSParams = {
    pageid: number; // Page ID.
    content: string; // Page contents.
    section?: string; // Section page title.
};

/**
 * Data returned by mod_wiki_edit_page WS.
 */
export type AddonModWikiEditPageWSResponse = {
    pageid: number; // Edited page id.
    warnings?: CoreWSExternalWarning[];
};

/**
 * Params of mod_wiki_get_page_contents WS.
 */
export type AddonModWikiGetPageContentsWSParams = {
    pageid: number; // Page ID.
};

/**
 * Data returned by mod_wiki_get_page_contents WS.
 */
export type AddonModWikiGetPageContentsWSResponse = {
    page: AddonModWikiPageContents; // Page.
    warnings?: CoreWSExternalWarning[];
};

/**
 * Page data returned by mod_wiki_get_page_contents WS.
 */
export type AddonModWikiPageContents = {
    id: number; // Page ID.
    wikiid: number; // Page's wiki ID.
    subwikiid: number; // Page's subwiki ID.
    groupid: number; // Page's group ID.
    userid: number; // Page's user ID.
    title: string; // Page title.
    cachedcontent: string; // Page contents.
    contentformat?: number; // Cachedcontent format (1 = HTML, 0 = MOODLE, 2 = PLAIN or 4 = MARKDOWN).
    caneditpage: boolean; // True if user can edit the page.
    version?: number; // Latest version of the page.
    tags?: CoreTagItem[]; // Tags.
};

/**
 * Params of mod_wiki_get_page_for_editing WS.
 */
export type AddonModWikiGetPageForEditingWSParams = {
    pageid: number; // Page ID to edit.
    section?: string; // Section page title.
    lockonly?: boolean; // Just renew lock and not return content.
};

/**
 * Data returned by mod_wiki_get_page_for_editing WS.
 */
export type AddonModWikiGetPageForEditingWSResponse = {
    pagesection: AddonModWikiWSEditPageSection;
};

/**
 * Page section data returned by mod_wiki_get_page_for_editing WS.
 */
export type AddonModWikiWSEditPageSection = {
    content?: string; // The contents of the page-section to be edited.
    contentformat?: string; // Format of the original content of the page.
    version: number; // Latest version of the page.
    warnings?: CoreWSExternalWarning[];
};

/**
 * Params of mod_wiki_get_subwiki_files WS.
 */
export type AddonModWikiGetSubwikiFilesWSParams = {
    wikiid: number; // Wiki instance ID.
    groupid?: number; // Subwiki's group ID, -1 means current group. It will be ignored if the wiki doesn't use groups.
    userid?: number; // Subwiki's user ID, 0 means current user. It will be ignored in collaborative wikis.
};

/**
 * Data returned by mod_wiki_get_subwiki_files WS.
 */
export type AddonModWikiGetSubwikiFilesWSResponse = {
    files: CoreWSExternalFile[];
    warnings?: CoreWSExternalWarning[];
};

/**
 * Params of mod_wiki_get_subwiki_pages WS.
 */
export type AddonModWikiGetSubwikiPagesWSParams = {
    wikiid: number; // Wiki instance ID.
    groupid?: number; // Subwiki's group ID, -1 means current group. It will be ignored if the wiki doesn't use groups.
    userid?: number; // Subwiki's user ID, 0 means current user. It will be ignored in collaborative wikis.
    options?: {
        sortby?: string; // Field to sort by (id, title, ...).
        sortdirection?: string; // Sort direction: ASC or DESC.
        includecontent?: number; // Include each page contents or just the contents size.
    }; // Options.
};

/**
 * Data returned by mod_wiki_get_subwiki_pages WS.
 */
export type AddonModWikiGetSubwikiPagesWSResponse = {
    pages: AddonModWikiSubwikiPage[];
    warnings?: CoreWSExternalWarning[];
};

/**
 * Page data returned by mod_wiki_get_subwiki_pages WS.
 */
export type AddonModWikiSubwikiPage = {
    id: number; // Page ID.
    subwikiid: number; // Page's subwiki ID.
    title: string; // Page title.
    timecreated: number; // Time of creation.
    timemodified: number; // Time of last modification.
    timerendered: number; // Time of last renderization.
    userid: number; // ID of the user that last modified the page.
    pageviews: number; // Number of times the page has been viewed.
    readonly: number; // 1 if readonly, 0 otherwise.
    caneditpage: boolean; // True if user can edit the page.
    firstpage: boolean; // True if it's the first page.
    cachedcontent?: string; // Page contents.
    contentformat?: number; // Cachedcontent format (1 = HTML, 0 = MOODLE, 2 = PLAIN or 4 = MARKDOWN).
    contentsize?: number; // Size of page contents in bytes (doesn't include size of attached files).
    tags?: CoreTagItem[]; // Tags.
};

/**
 * Params of mod_wiki_get_subwikis WS.
 */
export type AddonModWikiGetSubwikisWSParams = {
    wikiid: number; // Wiki instance ID.
};

/**
 * Data returned by mod_wiki_get_subwikis WS.
 */
export type AddonModWikiGetSubwikisWSResponse = {
    subwikis: AddonModWikiSubwiki[];
    warnings?: CoreWSExternalWarning[];
};

/**
 * Subwiki data returned by mod_wiki_get_subwikis WS.
 */
export type AddonModWikiSubwiki = {
    id: number; // Subwiki ID.
    wikiid: number; // Wiki ID.
    groupid: number; // Group ID.
    userid: number; // User ID.
    canedit: boolean; // True if user can edit the subwiki.
};

/**
 * Params of mod_wiki_get_wikis_by_courses WS.
 */
export type AddonModWikiGetWikisByCoursesWSParams = {
    courseids?: number[]; // Array of course ids.
};

/**
 * Data returned by mod_wiki_get_wikis_by_courses WS.
 */
export type AddonModWikiGetWikisByCoursesWSResponse = {
    wikis: AddonModWikiWiki[];
    warnings?: CoreWSExternalWarning[];
};

/**
 * Wiki data returned by mod_wiki_get_wikis_by_courses WS.
 */
export type AddonModWikiWiki = {
    id: number; // Wiki ID.
    coursemodule: number; // Course module ID.
    course: number; // Course ID.
    name: string; // Wiki name.
    intro?: string; // Wiki intro.
    introformat?: number; // Wiki intro format (1 = HTML, 0 = MOODLE, 2 = PLAIN or 4 = MARKDOWN).
    introfiles?: CoreWSExternalFile[];
    timecreated?: number; // Time of creation.
    timemodified?: number; // Time of last modification.
    firstpagetitle?: string; // First page title.
    wikimode?: string; // Wiki mode (individual, collaborative).
    defaultformat?: string; // Wiki's default format (html, creole, nwiki).
    forceformat?: number; // 1 if format is forced, 0 otherwise.
    editbegin?: number; // Edit begin.
    editend?: number; // Edit end.
    section?: number; // Course section ID.
    visible?: number; // 1 if visible, 0 otherwise.
    groupmode?: number; // Group mode.
    groupingid?: number; // Group ID.
    cancreatepages: boolean; // True if user can create pages.
};

/**
 * Params of mod_wiki_view_page WS.
 */
export type AddonModWikiViewPageWSParams = {
    pageid: number; // Wiki page ID.
};

/**
 * Params of mod_wiki_view_wiki WS.
 */
export type AddonModWikiViewWikiWSParams = {
    wikiid: number; // Wiki instance ID.
};

/**
 * Params of mod_wiki_new_page WS.
 */
export type AddonModWikiNewPageWSParams = {
    title: string; // New page title.
    content: string; // Page contents.
    contentformat?: string; // Page contents format. If an invalid format is provided, default wiki format is used.
    subwikiid?: number; // Page's subwiki ID.
    wikiid?: number; // Page's wiki ID. Used if subwiki does not exists.
    userid?: number; // Subwiki's user ID. Used if subwiki does not exists.
    groupid?: number; // Subwiki's group ID. Used if subwiki does not exists.
};

/**
 * Data returned by mod_wiki_new_page WS.
 */
export type AddonModWikiNewPageWSResponse = {
    pageid: number; // New page id.
    warnings?: CoreWSExternalWarning[];
};

/**
 * Data passed to PAGE_CREATED event.
 */
export type AddonModWikiPageCreatedData = {
    pageId: number;
    subwikiId: number;
    pageTitle: string;
};

/**
 * Data about a page that was just edited.
 */
export type AddonModWikiEditedPageData = {
    cmId?: number;
    courseId?: number;
    wikiId: number;
    pageTitle: string;
    subwikiId?: number;
    userId?: number;
    groupId?: number;
    pageId?: number;
};
the_stack
import { Action } from "./Action";
import { TypedActionString } from "./TypedActionString";

/**
 * A central concept to Redoodle, a TypedAction is a stricter flavor of
 * Action that associates a specific Action type string with a matching payload.
 *
 * Usage:
 *
 * 1. Create a Definition, usually through `TypedAction.define()`:
 *
 *    `export const RemoveBarAction = TypedAction.define("myapp::remove_bar")<{bar: string}>();`
 *
 * 2. Create an Action through `Definition.create()`:
 *
 *    `const action = RemoveBarAction.create({bar: "three"});`
 *
 * 3. Dispatch the action as usual, e.g. to a Redux `Store.dispatch`.
 *
 * The true benefit of TypedActions comes on the Reducer side; see the
 * TypedReducer class for creating a TypedAction-savvy Reducer for Redux.
 *
 * Conforms to Flux Standard Action recommendations.
 *
 * @see TypedActionDef#create
 */
export interface TypedAction<T, E extends string = string> {
  /**
   * The type string of the action, used to uniquely identify the Action with
   * its Definition. Refined to the actual string given to `TypedAction.define()`.
   */
  type: E;

  /**
   * The payload associated with the action, whose shape is dictated by the Definition
   * that generated this action. Depending on the payload restrictions chosen by the
   * consumer, this payload could possibly be `undefined` or `null`.
   *
   * N.B. A NoPayloadDefinition doesn't actually define this key for generated actions,
   * so such actions are usually just `{type: "..."}`.
   */
  payload: T;

  /**
   * Optional metadata assigned to this action, which has no restrictions.
   * Interesting usages of metadata:
   *
   * 1. To add a timestamp for when the action was first created.
   * 1. To correlate a set of actions behind a single user event (such as a clickId).
   * 1. To track progression of an async task, such as a {loading => success} or {loading => error} set.
   * 1. To identify which actions are being triggered by a continually running job.
   */
  meta?: any;
}

export namespace TypedAction {
  /**
   * **DEPRECATED**: As of Redoodle 2.5.0, consumers should prefer `defineAction()`
   * over `TypedAction.define()`. See https://github.com/palantir/redoodle/issues/35
   *
   * Options to TypedAction.define().
   *
   * @deprecated
   */
  export interface DefineOptions<T> {
    /**
     * A function used to validate the (runtime) correctness of payloads attached to a
     * Definition's actions. Useful to track down a noncompliant _source_ of actions.
     *
     * Not run in production.
     */
    validate?: (payload: T) => boolean;
  }

  /**
   * **DEPRECATED**: As of Redoodle 2.5.0, consumers should prefer `defineAction()`
   * over `TypedAction.define()`. See https://github.com/palantir/redoodle/issues/35
   *
   * Creates a Definition that manages all Redux actions of the given type string,
   * associating a payload type `T` with them. Note the extra `()` after the payload
   * type declaration:
   *
   * ```
   * export const SetFooValueAction = TypedAction.define("myapp::set_foo_value")<{
   *   foo: string;
   *   value: number;
   * }>();
   * ```
   *
   * All Definitions for a Redux-enabled application MUST have unique strings.
   *
   * @deprecated
   */
  export function define<E extends string>(
    type: E,
  ): <T>(options?: DefineOptions<T>) => Definition<E, T> {
    return <T>(options?: DefineOptions<T>) => {
      // Payload validation is a dev-time aid only: skip it entirely in production,
      // or when no validator was supplied.
      if (
        process.env.NODE_ENV !== "production" &&
        options !== undefined &&
        options.validate !== undefined
      ) {
        return createDefinition<E, T>(type, options.validate);
      }
      return createDefinition<E, T>(type);
    };
  }

  /**
   * **DEPRECATED**: As of Redoodle 2.5.0, consumers should prefer `defineAction()`
   * over `TypedAction.define()`. See https://github.com/palantir/redoodle/issues/35
   *
   * Similar to TypedAction.define, creates a NoPayloadDefinition for the given Action
   * type string, like `"example::clear_foo"`. In practice, actions without payloads
   * are usually of the "clear" or "invalidate" variety. Note the extra pair of `()`,
   * for consistency with `define` and better future-compatibility of options:
   *
   * ```
   * export const SetFooValueAction = TypedAction.defineWithoutPayload("myapp::set_foo_value")();
   * ```
   *
   * All Definitions for a Redux-enabled application MUST have unique strings.
   *
   * @deprecated
   */
  export function defineWithoutPayload<E extends string>(
    type: E,
  ): () => NoPayloadDefinition<E> {
    return () => createNoPayloadDefinition<E>(type);
  }

  /**
   * A central type of Redoodle, the TypedAction.Definition manages all Redux Actions
   * of a specific type string, such as `"myapp::set_foo_value"`.
   *
   * - Definitions should be used to create Actions.
   * - Definitions can be used to identify an Action, based on its own `type`.
   *
   * All Definitions for a Redux-enabled application MUST have unique strings.
   */
  export interface Definition<E extends string, T> {
    /**
     * Creates an Action of this type with the given payload.
     * Functionally equivalent to the explicit Definition.create().
     */
    (payload: T): { type: E; payload: T };

    /**
     * The Type of a TypedAction refers to the physical `{type}` string
     * given to matching Actions. This TypedActionString is branded
     * with the payload type as well for e.g. TypedReducer type inferencing.
     */
    TYPE: TypedActionString<T, E>;

    /**
     * Hidden field used for some workflows that need to extract the payload type back
     * out of a TypedAction definition, e.g. `const payload: typeof MyAction.__PAYLOAD = ...;`.
     *
     * This value should only be used for constructing Types in TypeScript; it never
     * holds a real value. Future versions of Redoodle may throw on runtime access.
     */
    __PAYLOAD: T;

    /** Creates an Action of this type with the given payload. */
    create(payload: T): { type: E; payload: T };

    /** Creates an Action of this type with the given payload and meta. */
    createWithMeta<M>(payload: T, meta: M): { type: E; payload: T; meta: M };

    /**
     * Checks whether the given Action matches this Definition, based on its own `type`.
     * If so, we can safely narrow the Action's payload type based on this Definition.
     */
    is(action: Action): action is TypedAction<T, E>;
  }

  /** Extracts the payload type of a Definition. */
  export type PayloadOf<
    D extends Definition<any, any>
  > = D["TYPE"]["__type__"]["withPayload"];

  /**
   * A TypedAction.NoPayloadDefinition manages all Redux actions of a specific type
   * string, such as `"myapp::clear_foo"`. Unlike the sibling TypedAction.Definition,
   * actions matching this Definition are associated with no payload data.
   *
   * All Definitions for a Redux-enabled application MUST have unique strings.
   */
  export interface NoPayloadDefinition<E extends string> {
    /**
     * Creates an Action of this type (and no payload).
     * Functionally equivalent to the explicit NoPayloadDefinition.create().
     */
    (): { type: E; payload: never };

    /**
     * The Type of a TypedAction refers to the physical `{type}` string
     * given to matching Actions, branded with the payload type for inferencing.
     */
    TYPE: TypedActionString<never, E>;

    /** Creates an Action of this type (and no payload). */
    create(): { type: E; payload: never };

    /** Creates an Action of this type with the given meta (and no payload). */
    createWithMeta<M>(meta: M): { type: E; payload: never; meta: M };

    /**
     * Checks whether the given Action matches this Definition, based on its own `type`.
     * If so, we can safely narrow the Action's payload type based on this Definition.
     */
    is(action: Action): action is TypedAction<never, E>;
  }

  /**
   * Single factory behind `define()`. When `validate` is supplied, every
   * `create`/`createWithMeta` call runs it and throws on failure; otherwise the
   * factory builds actions unconditionally. (Previously this logic was duplicated
   * across two near-identical factory functions.)
   */
  function createDefinition<E extends string, T>(
    type: E,
    validate?: (payload: T) => boolean,
  ): Definition<E, T> {
    // Shared guard: no-op unless a validator was provided.
    const check = (payload: T): void => {
      if (validate !== undefined && !validate(payload)) {
        throw new Error(`'${type}' validation failed`);
      }
    };
    const create = (payload: T): { type: E; payload: T } => {
      check(payload);
      return { type, payload };
    };
    const createWithMeta = <M>(
      payload: T,
      meta: M,
    ): { type: E; payload: T; meta: M } => {
      check(payload);
      return { type, payload, meta };
    };
    const is = (action: Action): action is TypedAction<T, E> => {
      return action.type === type;
    };
    // The Definition is itself callable: start from `create` and attach the rest.
    const def = create as Definition<E, T>;
    def.create = create;
    def.createWithMeta = createWithMeta;
    def.is = is;
    def.TYPE = type as TypedActionString<T, E>;
    return def;
  }

  /** Factory behind `defineWithoutPayload()`: actions carry `type` (and optional `meta`) only. */
  function createNoPayloadDefinition<E extends string>(
    type: E,
  ): NoPayloadDefinition<E> {
    const create = (): { type: E; payload: never } => {
      return { type } as { type: E; payload: never };
    };
    const createWithMeta = <M>(
      meta: M,
    ): { type: E; payload: never; meta: M } => {
      return { type, meta } as { type: E; payload: never; meta: M };
    };
    const is = (action: Action): action is TypedAction<never, E> => {
      return action.type === type;
    };
    const def = create as NoPayloadDefinition<E>;
    def.create = create;
    def.createWithMeta = createWithMeta;
    def.is = is;
    def.TYPE = type as TypedActionString<never, E>;
    return def;
  }
}
the_stack
import { exec } from 'child_process';
import { promisify } from 'util';

import merge from 'lodash/merge';
import find from 'lodash/find';
import flatten from 'lodash/flatten';
import { luImportResolverGenerator, ResolverResource, DialogSetting } from '@bfc/shared';
import extractMemoryPaths from '@bfc/indexers/lib/dialogUtils/extractMemoryPaths';
import { UserIdentity } from '@bfc/extension';
import { ensureDir, existsSync, remove } from 'fs-extra';
import { Request } from 'express';
import formatMessage from 'format-message';

import AssetService from '../services/asset';
import { BotProject } from '../models/bot/botProject';
import { LocationRef } from '../models/bot/interface';
import { Store } from '../store/store';
import log from '../logger';
import { ExtensionContext } from '../models/extension/extensionContext';
import { getLocationRef, getNewProjRef, ejectAndMerge } from '../utility/project';
import { isSchema } from '../models/bot/botStructure';
import { getLatestGeneratorVersion } from '../controllers/asset';

import StorageService from './storage';
import { Path } from './../utility/path';
import { BackgroundProcessManager } from './backgroundProcessManager';
import { TelemetryService } from './telemetry';

const execAsync = promisify(exec);

// Maximum number of bot projects kept in both the in-memory LRU and the persisted recents list.
const MAX_RECENT_BOTS = 7;

/** Metadata stored by Composer and associated by internal bot project id */
export type BotProjectMetadata = {
  alias?: string;
  eTag?: string;
  path: string;
};

// Maps internal project id -> project metadata. Persisted via Store under 'projectLocationMap'.
export type BotProjectLocationMap = Record<string, BotProjectMetadata>;

/** Converts old bot project location maps (id -> path string) to the new shape (id -> metadata object). */
function fixOldBotProjectMapEntries(
  projectMap: BotProjectLocationMap | { [key: string]: string }
): BotProjectLocationMap {
  const map: BotProjectLocationMap = {};
  for (const botId in projectMap) {
    const entry = projectMap[botId];
    if (typeof entry === 'string') {
      // Legacy entry: only the path was stored. Upgrade it with an empty eTag.
      map[botId] = {
        path: entry,
        eTag: '',
      };
    } else {
      map[botId] = entry;
    }
  }
  return map;
}

// Detects the Azure Functions Core Tools CLI by shelling out to `func -v`.
// Any stderr output or a thrown error (e.g. command not found) counts as "not installed".
const isFunctionsRuntimeInstalled = async (): Promise<boolean> => {
  try {
    const { stderr: funcErr } = await execAsync(`func -v`);
    return !funcErr;
  } catch (err) {
    return false;
  }
};

/**
 * Static service that tracks bot projects: an in-memory cache of opened BotProject
 * instances, a persisted recents list, and a persisted id -> location map.
 */
export class BotProjectService {
  // LRU cache of fully initialized BotProject instances (most recent first).
  private static currentBotProjects: BotProject[] = [];
  // Recently opened root bots, persisted under 'recentBotProjects'.
  private static recentBotProjects: LocationRef[] = [];
  // Project id -> metadata map, persisted under 'projectLocationMap'.
  private static projectLocationMap: BotProjectLocationMap;

  /** Lazily hydrates the in-memory caches from the persisted Store on first use. */
  private static initialize() {
    if (!BotProjectService.recentBotProjects || BotProjectService.recentBotProjects.length === 0) {
      BotProjectService.recentBotProjects = Store.get('recentBotProjects', []);
    }

    if (!BotProjectService.projectLocationMap || Object.keys(BotProjectService.projectLocationMap).length === 0) {
      // Upgrade any legacy (string-valued) entries while loading.
      BotProjectService.projectLocationMap = fixOldBotProjectMapEntries(Store.get('projectLocationMap', {}));
    }
  }

  /**
   * Returns the LG file contents of an indexed (already opened) project,
   * keyed by file basename without the '.lg' extension.
   * Throws if the project is not in the in-memory cache.
   */
  public static getLgResources(projectId?: string): ResolverResource[] {
    BotProjectService.initialize();
    const project = BotProjectService.getIndexedProjectById(projectId);
    if (!project) throw new Error('project not found');
    const resources = project.lgFiles.map((file) => {
      const { name, content } = file;
      return { id: Path.basename(name, '.lg'), content };
    });
    return resources;
  }

  /**
   * Resolves an LU import against the project's LU files.
   * Throws if the project is not in the in-memory cache.
   */
  public static luImportResolver(source: string, id: string, projectId: string): ResolverResource {
    BotProjectService.initialize();
    const project = BotProjectService.getIndexedProjectById(projectId);
    if (!project) throw new Error('project not found');
    const resource = project.luFiles.map((file) => {
      const { name, content } = file;
      return { id: Path.basename(name, '.lu'), content };
    });
    const resolver = luImportResolverGenerator(resource, '.lu');
    return resolver(source, id);
  }

  /**
   * Returns the list of memory paths available for IntelliSense: a fixed set of
   * built-in adaptive-runtime properties plus paths extracted from the project's
   * dialog files.
   */
  public static staticMemoryResolver(projectId: string): string[] {
    const defaultProperties = [
      'this.value',
      'this.turnCount',
      'this.options',
      'dialog.eventCounter',
      'dialog.expectedProperties',
      'dialog.lastEvent',
      'dialog.requiredProperties',
      'dialog.retries',
      'dialog.lastIntent',
      'dialog.lastTriggerEvent',
      'turn.lastresult',
      'turn.recognized',
      'turn.recognized.intent',
      'turn.recognized.score',
      'turn.recognized.text',
      'turn.recognized.alteredText',
      'turn.recognized.entities',
      'turn.recognized.intents',
      'turn.unrecognizedText',
      'turn.recognizedEntities',
      'turn.interrupted',
      'turn.dialogEvent',
      'turn.repeatedIds',
      'turn.activityProcessed',
      'turn.activity.type',
      'turn.activity.timestamp',
      'turn.activity.localTimestamp',
      'turn.activity.localTimezone',
      'turn.activity.from',
      'turn.activity.recipient',
      'turn.activity.topicName',
      'turn.activity.locale',
      'turn.activity.text',
      'turn.activity.speak',
      'turn.activity.summary',
      'turn.activity.suggestedActions',
      'turn.activity.attachments',
      'turn.activity.entities',
      'turn.activity.action',
      'turn.activity.name',
      'turn.activity.value',
      'turn.activity.importance',
      'turn.activity.semanticAction',
    ];
    const projectVariables =
      BotProjectService.getIndexedProjectById(projectId)?.dialogFiles.map(({ content }) => {
        const dialogJson = JSON.parse(content);
        return extractMemoryPaths(dialogJson);
      }) || [];

    const userDefined: string[] = flatten(projectVariables);
    return [...defaultProperties, ...userDefined];
  }

  /** Returns the raw contents of all LU files of an indexed project (undefined entries flattened away). */
  public static staticEntityResolver(projectId: string): string[] | undefined {
    const contents = BotProjectService.getIndexedProjectById(projectId)?.luFiles.map((file) => file.content);
    return flatten(contents);
  }

  /** @deprecated Always throws; projects must be addressed by id. */
  public static getCurrentBotProject(): BotProject | undefined {
    throw new Error('getCurrentBotProject is DEPRECATED');
  }

  /**
   * Fetches last-modified timestamps for a set of project locations, in parallel.
   * Lookup failures are logged and skipped, so the result may omit some paths.
   */
  public static getProjectsDateModifiedDict = async (
    projects: LocationRef[],
    user?: UserIdentity
  ): Promise<{ dateModified: string; path: string }[]> => {
    const dateModifiedDict: { dateModified: string; path: string }[] = [];
    const promises = projects.map(async (project) => {
      let dateModified = '';
      try {
        dateModified = await StorageService.getBlobDateModified(project.storageId, project.path, user);
        dateModifiedDict.push({ dateModified, path: project.path });
      } catch (err) {
        log(err);
      }
    });
    await Promise.all(promises);
    return dateModifiedDict;
  };

  /**
   * Returns the recents list enriched with names and last-modified dates,
   * pruning entries whose storage blob no longer exists and excluding
   * '.botproj' entries.
   */
  public static getRecentBotProjects = async (user?: UserIdentity) => {
    BotProjectService.initialize();
    const dateModifiedDict = await BotProjectService.getProjectsDateModifiedDict(
      BotProjectService.recentBotProjects,
      user
    );
    const allRecentBots = BotProjectService.recentBotProjects;

    // Filter the bot projects that don't exist anymore.
    for (const locationRef of allRecentBots) {
      if (!(await StorageService.checkBlob(locationRef.storageId ?? 'default', locationRef.path, user))) {
        BotProjectService.deleteRecentProject(locationRef.path);
      }
    }

    const recentBots = allRecentBots
      .filter((bot) => !Path.basename(bot.path).includes('.botproj'))
      .map((bot) => ({
        ...bot,
        name: Path.basename(bot.path),
      }));

    // Merge the dateModified info (matched by path) into each entry.
    return recentBots.map((item) => {
      return merge(item, find(dateModifiedDict, { path: item.path }));
    });
  };

  /**
   * Removes a project from the id cache and returns its path so the caller can
   * delete the files. Returns '' when the id is unknown (no-op).
   */
  public static deleteProject = async (projectId: string): Promise<string> => {
    const projectLoc = BotProjectService.projectLocationMap[projectId];
    if (!projectLoc) {
      // no-op
      return '';
    } else {
      const { path = '' } = projectLoc;
      BotProjectService.removeProjectIdFromCache(projectId);
      return path;
    }
  };

  /**
   * Opens (or re-opens) a project by location, returning its project id.
   * Reuses an existing id when the path is already registered; otherwise
   * generates a new id and persists the location map.
   * Throws if the location does not exist or (unless allowPartialBots) is not a bot folder.
   */
  public static openProject = async (
    locationRef: LocationRef,
    user?: UserIdentity,
    isRootBot?: boolean,
    options?: { allowPartialBots: boolean }
  ): Promise<string> => {
    BotProjectService.initialize();

    // TODO: this should be refactored or moved into the BotProject constructor so that it can use user auth amongst other things
    if (!(await StorageService.checkBlob(locationRef.storageId, locationRef.path, user))) {
      BotProjectService.deleteRecentProject(locationRef.path);
      throw new Error(`file ${locationRef.path} does not exist`);
    }

    if (
      !options?.allowPartialBots &&
      !(await StorageService.checkIsBotFolder(locationRef.storageId, locationRef.path, user))
    ) {
      throw new Error(`${locationRef.path} is not a bot project folder`);
    }

    // Reuse the existing id if this path was opened before.
    for (const key in BotProjectService.projectLocationMap) {
      const projectLoc = BotProjectService.projectLocationMap[key];
      if (projectLoc && projectLoc.path === locationRef.path) {
        // TODO: this should probably move to getProjectById
        if (isRootBot) BotProjectService.addRecentProject(locationRef.path);

        return key;
      }
    }

    // generate an id and store it in the projectLocationMap
    const projectId = await BotProjectService.generateProjectId(locationRef.path);
    if (isRootBot) BotProjectService.addRecentProject(locationRef.path);
    Store.set('projectLocationMap', BotProjectService.projectLocationMap);
    return projectId.toString();
  };

  // Clean project registry based on path to avoid reusing the same id.
  public static cleanProject = async (location: LocationRef): Promise<void> => {
    for (const key in BotProjectService.projectLocationMap) {
      const projectLoc = BotProjectService.projectLocationMap[key];
      if (projectLoc && projectLoc.path === location.path) {
        delete BotProjectService.projectLocationMap[key];
      }
    }
    Store.set('projectLocationMap', BotProjectService.projectLocationMap);
  };

  // NOTE(review): ids are derived from Math.random() and are not guaranteed unique —
  // a collision would silently overwrite an existing map entry. Confirm this is acceptable.
  public static generateProjectId = async (path: string): Promise<string> => {
    const projectId = (Math.random() * 100000).toString();
    BotProjectService.projectLocationMap[projectId] = { eTag: '', path };
    return projectId;
  };

  /** Drops an id from the location map and persists the change. */
  private static removeProjectIdFromCache = (projectId: string): void => {
    delete BotProjectService.projectLocationMap[projectId];
    Store.set('projectLocationMap', BotProjectService.projectLocationMap);
  };

  /** Returns the already-initialized BotProject for an id, or undefined if not cached. */
  public static getIndexedProjectById(projectId): BotProject | undefined {
    // use indexed project
    const indexedCurrentProject = BotProjectService.currentBotProjects.find(({ id }) => id === projectId);
    if (indexedCurrentProject) return indexedCurrentProject;
  }

  /** Reverse lookup: returns the project id registered for a path, or null. */
  public static getProjectIdByPath = async (path: string) => {
    for (const key in BotProjectService.projectLocationMap) {
      const projectLoc = BotProjectService.projectLocationMap[key];
      if (projectLoc && projectLoc.path === path) {
        return key;
      }
    }
    return null;
  };

  /**
   * Loads and initializes the BotProject for a registered id, verifying the
   * files still exist on disk (pruning stale cache entries if not), and
   * promotes it in the in-memory LRU.
   * Throws if the id is unknown or the files are gone.
   */
  public static getProjectById = async (projectId: string, user?: UserIdentity): Promise<BotProject> => {
    BotProjectService.initialize();
    const projectLoc = BotProjectService.projectLocationMap[projectId];
    if (!projectLoc || projectLoc.path == null) {
      throw new Error(`project ${projectId} not found in cache`);
    } else {
      const { eTag, path } = projectLoc;
      // check to make sure the project is still there!
      if (!(await StorageService.checkBlob('default', path, user))) {
        BotProjectService.deleteRecentProject(path);
        BotProjectService.removeProjectIdFromCache(projectId);
        throw new Error(`${path} doesn't seem to be exist any longer`);
      }
      const project = new BotProject({ storageId: 'default', path: path }, user, eTag);
      await project.init();
      project.id = projectId;
      // update current indexed bot projects
      BotProjectService.updateCurrentProjects(project);
      return project;
    }
  };

  /**
   * Merges the given metadata fields into an existing map entry and persists it.
   * Unknown ids are ignored; undefined values are stripped so they don't clobber
   * existing fields.
   */
  public static setProjectLocationData(projectId: string, data: Partial<BotProjectMetadata>): void {
    const projectLoc = BotProjectService.projectLocationMap[projectId];
    if (projectLoc) {
      // filter out undefined values
      for (const key in data) {
        if (data[key] === undefined) {
          delete data[key];
        }
      }

      log('Updating project location data for %s: %O', projectId, data);

      BotProjectService.projectLocationMap[projectId] = {
        ...projectLoc,
        ...data,
      };
      Store.set('projectLocationMap', BotProjectService.projectLocationMap);
    }
  }

  /**
   * Finds a registered project by its import alias and loads it (same existence
   * checks and LRU update as getProjectById). Returns undefined when no entry
   * carries the alias.
   */
  public static getProjectByAlias = async (alias: string, user?: UserIdentity): Promise<BotProject | undefined> => {
    BotProjectService.initialize();
    let matchingProjectId;
    for (const projectId in BotProjectService.projectLocationMap) {
      const info = BotProjectService.projectLocationMap[projectId];
      if (info.alias && info.alias === alias) {
        matchingProjectId = projectId;
        break;
      }
    }
    if (matchingProjectId) {
      const { eTag, path } = BotProjectService.projectLocationMap[matchingProjectId];
      if (path == null) {
        throw new Error(`project ${matchingProjectId} not found in cache`);
      } else {
        // check to make sure the project is still there!
        if (!(await StorageService.checkBlob('default', path, user))) {
          BotProjectService.deleteRecentProject(path);
          BotProjectService.removeProjectIdFromCache(matchingProjectId);
          throw new Error(`${path} doesn't seem to be exist any longer`);
        }
        const project = new BotProject({ storageId: 'default', path: path }, user, eTag);
        await project.init();
        project.id = matchingProjectId;
        // update current indexed bot projects
        BotProjectService.updateCurrentProjects(project);
        return project;
      }
    } else {
      // no match found
      return undefined;
    }
  };

  /** Stores an import alias for a project id. */
  public static setProjectAlias = (projectId: string, alias: string): void => {
    BotProjectService.setProjectLocationData(projectId, { alias });
  };

  /** Moves a project to the front of the in-memory LRU, evicting beyond MAX_RECENT_BOTS. */
  private static updateCurrentProjects = (project: BotProject): void => {
    const { id } = project;
    const idx = BotProjectService.currentBotProjects.findIndex((item) => item.id === id);
    if (idx > -1) {
      BotProjectService.currentBotProjects.splice(idx, 1);
    }
    BotProjectService.currentBotProjects.unshift(project);

    if (BotProjectService.currentBotProjects.length > MAX_RECENT_BOTS) {
      BotProjectService.currentBotProjects = BotProjectService.currentBotProjects.slice(0, MAX_RECENT_BOTS);
    }
  };

  /** Moves a path to the front of the persisted recents list (deduplicated by resolved path). */
  private static addRecentProject = (path: string): void => {
    const currDir = Path.resolve(path);
    const idx = BotProjectService.recentBotProjects.findIndex((ref) => currDir === Path.resolve(ref.path));
    if (idx > -1) {
      BotProjectService.recentBotProjects.splice(idx, 1);
    }
    const toSaveRecentProject = { storageId: 'default', path: currDir };
    BotProjectService.recentBotProjects.unshift(toSaveRecentProject);

    // remove LRU bot project
    if (BotProjectService.recentBotProjects.length > MAX_RECENT_BOTS) {
      BotProjectService.recentBotProjects = BotProjectService.recentBotProjects.slice(0, MAX_RECENT_BOTS);
    }
    Store.set('recentBotProjects', BotProjectService.recentBotProjects);
  };

  /** Removes a path from the persisted recents list (matched by resolved path). */
  public static deleteRecentProject = (path: string): void => {
    const recentBotProjects = BotProjectService.recentBotProjects.filter(
      (ref) => Path.resolve(path) !== Path.resolve(ref.path)
    );
    BotProjectService.recentBotProjects = recentBotProjects;
    Store.set('recentBotProjects', recentBotProjects);
  };

  /**
   * Copies a project to a new location, registers it under a new id, and adds it
   * to the recents list. Returns the new id, or '' when no source was given.
   */
  public static saveProjectAs = async (
    sourceProject: BotProject,
    locationRef: LocationRef,
    user?: UserIdentity
  ): Promise<string> => {
    BotProjectService.initialize();
    if (typeof sourceProject !== 'undefined') {
      await sourceProject.copyTo(locationRef, user);
      const projectId = await BotProjectService.generateProjectId(locationRef.path);
      BotProjectService.addRecentProject(locationRef.path);
      return projectId;
    } else {
      return '';
    }
  };

  /**
   * Clones a project's files into a timestamped folder under COMPOSER_BACKUP_DIR
   * and returns the backup path. Any pre-existing folder of the same name is
   * removed first. Failures are wrapped into a single descriptive error.
   */
  public static backupProject = async (project: BotProject): Promise<string> => {
    try {
      // ensure there isn't an older backup directory hanging around
      const projectDirName = Path.basename(project.dir);
      const backupPath = Path.join(process.env.COMPOSER_BACKUP_DIR as string, `${projectDirName}.${Date.now()}`);
      await ensureDir(process.env.COMPOSER_BACKUP_DIR as string);
      if (existsSync(backupPath)) {
        log('%s already exists. Deleting before backing up.', backupPath);
        await remove(backupPath);
        log('Existing backup folder deleted successfully.');
      }

      // clone the bot project to the backup directory
      const location: LocationRef = {
        storageId: 'default',
        path: backupPath,
      };
      log('Backing up project at %s to %s', project.dir, backupPath);
      await project.cloneFiles(location);
      log('Project backed up successfully.');
      return location.path;
    } catch (e) {
      throw new Error(`Failed to backup project ${project.id}: ${e}`);
    }
  };

  /**
   * Background-job entry point that migrates an existing (old-format) project
   * to the adaptive runtime: scaffolds a new project from the adaptive
   * generator, copies over non-schema files, translates settings to the new
   * shape, copies boilerplate, optionally builds the runtime, and reports
   * progress/outcome through BackgroundProcessManager.
   */
  public static async migrateProjectAsync(req: Request, jobId: string) {
    const { oldProjectId, name, description, location, storageId, runtimeType, runtimeLanguage } = req.body;
    const user = await ExtensionContext.getUserFromRequest(req);
    try {
      const locationRef = getLocationRef(location, storageId, name);
      await BotProjectService.cleanProject(locationRef);
      log('Downloading adaptive generator');
      // Update status for polling
      BackgroundProcessManager.updateProcess(jobId, 202, formatMessage('Getting template'));
      const baseGenerator = '@microsoft/generator-bot-adaptive';
      const latestVersion = await getLatestGeneratorVersion(baseGenerator);
      log(`Using version ${latestVersion} of ${baseGenerator} for migration`);
      const newProjRef = await AssetService.manager.copyRemoteProjectTemplateToV2(
        baseGenerator,
        latestVersion, // use the @latest version
        name,
        locationRef,
        jobId,
        runtimeType,
        runtimeLanguage,
        {
          applicationSettingsDirectory: 'settings',
        },
        user
      );

      // update project ref to point at newly created folder
      newProjRef.path = `${newProjRef.path}/${name}`;

      BackgroundProcessManager.updateProcess(jobId, 202, formatMessage('Migrating data'));
      log('Migrating files...');
      const originalProject = await BotProjectService.getProjectById(oldProjectId, user);
      if (originalProject.settings) {
        const originalFiles = originalProject.getProject().files;

        // pass in allowPartialBots = true so that this project can be opened even though
        // it doesn't yet have a root dialog...
        const id = await BotProjectService.openProject(newProjRef, user, true, { allowPartialBots: true });
        const currentProject = await BotProjectService.getProjectById(id, user);

        // add all original files to new project
        for (let f = 0; f < originalFiles.length; f++) {
          // exclude the schema files, so we start from scratch
          if (!isSchema(originalFiles[f].name)) {
            await currentProject.migrateFile(
              originalFiles[f].name,
              originalFiles[f].content,
              originalProject.rootDialogId
            );
          }
        }

        // Translate old-format settings (feature flags, cosmosDb, blobStorage, etc.)
        // into the new adaptive-runtime settings shape.
        const newSettings: DialogSetting = {
          ...currentProject.settings,
          runtimeSettings: {
            components: [],
            features: {
              showTyping: originalProject.settings?.feature?.UseShowTypingMiddleware || false,
              useInspection: originalProject.settings?.feature?.UseInspectionMiddleware || false,
              removeRecipientMentions: originalProject.settings?.feature?.RemoveRecipientMention || false,
              setSpeak: originalProject.settings?.feature?.useSetSpeakMiddleware
                ? { voiceFontName: 'en-US-AriaNeural', fallbackToTextForSpeechIfEmpty: true }
                : undefined,
              blobTranscript: originalProject.settings?.blobStorage?.connectionString
                ? {
                    connectionString: originalProject.settings.blobStorage.connectionString,
                    containerName: originalProject.settings.blobStorage.container,
                  }
                : {},
            },
            telemetry: {
              options: { instrumentationKey: originalProject.settings?.applicationInsights?.InstrumentationKey },
            },
            skills: {
              allowedCallers: originalProject.settings?.skillConfiguration?.allowedCallers,
            },
            storage: originalProject.settings?.cosmosDb?.authKey ? 'CosmosDbPartitionedStorage' : undefined,
          },
          CosmosDbPartitionedStorage: originalProject.settings?.cosmosDb?.authKey
            ? originalProject.settings.cosmosDb
            : undefined,
          luis: { ...originalProject.settings.luis },
          luFeatures: { ...originalProject.settings.luFeatures },
          publishTargets: originalProject.settings.publishTargets?.map((target) => {
            // azureFunctionsPublish was folded into azurePublish in the new runtime.
            if (target.type === 'azureFunctionsPublish') target.type = 'azurePublish';
            return target;
          }),
          qna: { ...originalProject.settings.qna },
          downsampling: { ...originalProject.settings.downsampling },
          skill: { ...originalProject.settings.skill },
          speech: { ...originalProject.settings.speech },
          defaultLanguage: originalProject.settings.defaultLanguage,
          languages: originalProject.settings.languages,
          customFunctions: originalProject.settings.customFunctions ?? [],
          importedLibraries: [],
          MicrosoftAppId: originalProject.settings.MicrosoftAppId,
          runtime: currentProject.settings?.runtime
            ? { ...currentProject.settings.runtime }
            : {
                customRuntime: true,
                path: '../',
                key: 'adaptive-runtime-dotnet-webapp',
                command: `dotnet run --project ${name}.csproj`,
              },
        };

        log('Update settings...');
        // adjust settings from old format to new format
        await currentProject.updateEnvSettings(newSettings);

        log('Copy boilerplate...');
        await AssetService.manager.copyBoilerplate(currentProject.dataDir, currentProject.fileStorage);

        log('Update bot info...');
        await currentProject.updateBotInfo(name, description, true);

        const runtime = ExtensionContext.getRuntimeByProject(currentProject);
        const runtimePath = currentProject.getRuntimePath();
        if (runtimePath) {
          // install all dependencies and build the app
          BackgroundProcessManager.updateProcess(jobId, 202, formatMessage('Building runtime'));
          log('Build new runtime...');
          await runtime.build(runtimePath, currentProject);
        }

        await ejectAndMerge(currentProject, jobId);

        const project = currentProject.getProject();
        log('Project created successfully.');
        BackgroundProcessManager.updateProcess(jobId, 200, 'Migrated successfully', {
          id,
          ...project,
        });
      } else {
        BackgroundProcessManager.updateProcess(jobId, 500, 'Could not find source project to migrate.');
      }
    } catch (err) {
      BackgroundProcessManager.updateProcess(jobId, 500, err instanceof Error ? err.message : err, err);
      TelemetryService.trackEvent('CreateNewBotProjectCompleted', {
        template: '@microsoft/generator-microsoft-bot-adaptive',
        status: 500,
      });
    }
  }

  /**
   * Background-job entry point that creates a new project from a template
   * (remote generator or PVA template dir), opens every resulting bot
   * (a template may produce multiple), initializes each, and reports
   * progress/outcome through BackgroundProcessManager. On failure the partially
   * created folder is removed.
   */
  public static async createProjectAsync(req: Request, jobId: string) {
    const {
      templateId,
      templateVersion,
      name,
      description,
      storageId,
      location,
      preserveRoot,
      templateDir,
      eTag,
      alias,
      locale,
      schemaUrl,
      runtimeType,
      runtimeLanguage,
      isLocalGenerator,
      isRoot: creatingRootBot = true,
    } = req.body;
    // get user from request
    const user = await ExtensionContext.getUserFromRequest(req);
    const createFromPva = !!templateDir;

    // populate template if none was passed
    if (templateId === '') {
      // TODO: Replace with default template once one is determined
      throw Error('empty templateID passed');
    }

    // test for required dependencies
    if (runtimeType === 'functions') {
      if (!(await isFunctionsRuntimeInstalled())) {
        BackgroundProcessManager.updateProcess(jobId, 500, formatMessage('Azure Functions runtime not installed.'));
        TelemetryService.trackEvent('CreateNewBotProjectFailed', {
          reason: 'Azure Functions runtime not installed.',
          template: templateId,
          status: 500,
        });
        return;
      }
    }

    // location to store the bot project
    const locationRef = getLocationRef(location, storageId, name);
    try {
      await BotProjectService.cleanProject(locationRef);
      // Update status for polling
      BackgroundProcessManager.updateProcess(jobId, 202, formatMessage('Getting template'));
      const newProjRef = createFromPva
        ? await getNewProjRef(templateDir, templateId, locationRef, user, locale)
        : await AssetService.manager.copyRemoteProjectTemplateToV2(
            templateId,
            templateVersion,
            name,
            locationRef,
            jobId,
            runtimeType,
            runtimeLanguage,
            null,
            user,
            isLocalGenerator
          );
      BackgroundProcessManager.updateProcess(jobId, 202, formatMessage('Bot files created'));

      const botsToProcess: { storageId: string; path: string; name: string }[] = [];

      // The outcome of our creation might be > 1 bot! We need to determine how many bots we find in this folder.
      // is this a single bot?
      if (await StorageService.checkIsBotFolder(newProjRef.storageId, newProjRef.path, user)) {
        botsToProcess.push({ ...newProjRef, name });
      } else {
        // or multiple bots?
        const files = await StorageService.getBlob(newProjRef.storageId, newProjRef.path, user);
        const childbots = files.children.filter((f) => f.type === 'bot');
        childbots.forEach((b) => {
          botsToProcess.push({
            storageId: newProjRef.storageId,
            path: b.path,
            name: b.name,
          });
        });
      }

      // Open and initialize every discovered bot in parallel.
      await Promise.all(
        botsToProcess.map((botRef) => {
          // eslint-disable-next-line no-async-promise-executor
          return new Promise(async (resolve, reject) => {
            try {
              log('Open project', botRef);
              const id = await BotProjectService.openProject(botRef, user, false);
              // in the case of remote project, we need to update the eTag and alias used by the import mechanism
              BotProjectService.setProjectLocationData(id, { alias, eTag });
              log('Get Project by Id', id);
              const currentProject = await BotProjectService.getProjectById(id, user);
              // inject shared content into every new project. this comes from assets/shared
              !createFromPva &&
                (await AssetService.manager.copyBoilerplate(currentProject.dataDir, currentProject.fileStorage));
              if (currentProject !== undefined) {
                !createFromPva && (await ejectAndMerge(currentProject, jobId));
                BackgroundProcessManager.updateProcess(jobId, 202, formatMessage('Initializing bot project'));
                log('Updatebot info', id, preserveRoot);
                await currentProject.updateBotInfo(botRef.name, description, true);
                if (schemaUrl && !createFromPva) {
                  await currentProject.saveSchemaToProject(schemaUrl, botRef.path);
                }
                log('Init project', id);
                await currentProject.init();
              }
              resolve(id);
            } catch (err) {
              return reject(err);
            }
          });
        })
      );

      // The bot whose name matches the requested project name is the root.
      const rootBot = botsToProcess.find((b) => b.name === name);
      if (rootBot) {
        const id = await BotProjectService.openProject(
          { storageId: rootBot?.storageId, path: rootBot.path },
          user,
          creatingRootBot
        );
        const currentProject = await BotProjectService.getProjectById(id, user);
        const project = currentProject.getProject();
        log('Project created successfully.');
        BackgroundProcessManager.updateProcess(jobId, 200, 'Created Successfully', {
          id,
          ...project,
        });
        TelemetryService.trackEvent('CreateNewBotProjectCompleted', { template: templateId, status: 200 });
      } else {
        throw new Error('Could not find root bot');
      }
    } catch (err) {
      // Clean up failed projects
      log('Cleaning up failed project at ', locationRef.path);
      const storage = StorageService.getStorageClient(locationRef.storageId, user);
      await storage.rmrfDir(locationRef.path);
      BackgroundProcessManager.updateProcess(jobId, 500, err instanceof Error ? err.message : err, err);
      TelemetryService.trackEvent('CreateNewBotProjectFailed', {
        reason: err instanceof Error ? err.message : err,
        template: templateId,
        status: 500,
      });
    }
  }
}
the_stack
import { blockchainTests, constants, expect, getRandomInteger, getRandomPortion, randomAddress, } from '@0x/contracts-test-utils'; import { SignatureType } from '@0x/protocol-utils'; import { BigNumber, hexUtils, NULL_BYTES } from '@0x/utils'; import * as _ from 'lodash'; import { FillQuoteTransformerOrderType, LimitOrderFields } from '../../src'; import { SamplerCallResult, SignedNativeOrder } from '../../src/types'; import { artifacts } from '../artifacts'; import { DummyLiquidityProviderContract, TestERC20BridgeSamplerContract } from '../wrappers'; // tslint:disable: custom-no-magic-numbers const { NULL_ADDRESS } = constants; // HACK(dorothy-zbornak): Disabled because these tests are flakey and all this logic is moving to // the sampler service anyway. blockchainTests.skip('erc20-bridge-sampler', env => { let testContract: TestERC20BridgeSamplerContract; const RATE_DENOMINATOR = constants.ONE_ETHER; const MIN_RATE = new BigNumber('0.01'); const MAX_RATE = new BigNumber('100'); const MIN_DECIMALS = 4; const MAX_DECIMALS = 20; const WETH_ADDRESS = '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'; const KYBER_SALT = '0x0ff3ca9d46195c39f9a12afb74207b4970349fb3cfb1e459bbf170298d326bc7'; const UNISWAP_BASE_SALT = '0x1d6a6a0506b0b4a554b907a4c29d9f4674e461989d9c1921feb17b26716385ab'; const UNISWAP_V2_SALT = '0xadc7fcb33c735913b8635927e66896b356a53a912ab2ceff929e60a04b53b3c1'; const INVALID_TOKEN_PAIR_ERROR = 'ERC20BridgeSampler/INVALID_TOKEN_PAIR'; const MAKER_TOKEN = randomAddress(); const TAKER_TOKEN = randomAddress(); const INTERMEDIATE_TOKEN = randomAddress(); const KYBER_RESERVE_OFFSET = new BigNumber(0); let KYBER_ADDRESS = ''; let UNISWAP_ADDRESS = ''; let UNISWAP_V2_ROUTER = ''; before(async () => { testContract = await TestERC20BridgeSamplerContract.deployFrom0xArtifactAsync( artifacts.TestERC20BridgeSampler, env.provider, { ...env.txDefaults, gas: 100e6 }, {}, ); UNISWAP_V2_ROUTER = await testContract.uniswapV2Router().callAsync(); KYBER_ADDRESS = await 
testContract.kyber().callAsync(); UNISWAP_ADDRESS = await testContract.uniswap().callAsync(); }); function getPackedHash(...args: string[]): string { return hexUtils.hash(hexUtils.concat(...args.map(a => hexUtils.toHex(a)))); } function getUniswapExchangeSalt(tokenAddress: string): string { return getPackedHash(UNISWAP_BASE_SALT, tokenAddress); } function getDeterministicRate(salt: string, sellToken: string, buyToken: string): BigNumber { const hash = getPackedHash(salt, sellToken, buyToken); const _minRate = RATE_DENOMINATOR.times(MIN_RATE); const _maxRate = RATE_DENOMINATOR.times(MAX_RATE); return new BigNumber(hash) .mod(_maxRate.minus(_minRate)) .plus(_minRate) .div(RATE_DENOMINATOR); } function getDeterministicTokenDecimals(token: string): number { if (token === WETH_ADDRESS) { return 18; } // HACK(dorothy-zbornak): Linter will complain about the addition not being // between two numbers, even though they are. // tslint:disable-next-line restrict-plus-operands return new BigNumber(getPackedHash(token)).mod(MAX_DECIMALS - MIN_DECIMALS).toNumber() + MIN_DECIMALS; } function getDeterministicSellQuote( salt: string, sellToken: string, buyToken: string, sellAmount: BigNumber, ): BigNumber { const sellBase = new BigNumber(10).pow(getDeterministicTokenDecimals(sellToken)); const buyBase = new BigNumber(10).pow(getDeterministicTokenDecimals(buyToken)); const rate = getDeterministicRate(salt, sellToken, buyToken); return sellAmount .times(rate) .times(buyBase) .dividedToIntegerBy(sellBase); } function getDeterministicBuyQuote( salt: string, sellToken: string, buyToken: string, buyAmount: BigNumber, ): BigNumber { const sellBase = new BigNumber(10).pow(getDeterministicTokenDecimals(sellToken)); const buyBase = new BigNumber(10).pow(getDeterministicTokenDecimals(buyToken)); const rate = getDeterministicRate(salt, sellToken, buyToken); return buyAmount .times(sellBase) .dividedToIntegerBy(rate) .dividedToIntegerBy(buyBase); } function areAddressesEqual(a: string, b: 
string): boolean { return a.toLowerCase() === b.toLowerCase(); } function getDeterministicUniswapSellQuote(sellToken: string, buyToken: string, sellAmount: BigNumber): BigNumber { if (areAddressesEqual(buyToken, WETH_ADDRESS)) { return getDeterministicSellQuote(getUniswapExchangeSalt(sellToken), sellToken, WETH_ADDRESS, sellAmount); } if (areAddressesEqual(sellToken, WETH_ADDRESS)) { return getDeterministicSellQuote(getUniswapExchangeSalt(buyToken), buyToken, WETH_ADDRESS, sellAmount); } const ethBought = getDeterministicSellQuote( getUniswapExchangeSalt(sellToken), sellToken, WETH_ADDRESS, sellAmount, ); return getDeterministicSellQuote(getUniswapExchangeSalt(buyToken), buyToken, WETH_ADDRESS, ethBought); } function getDeterministicUniswapBuyQuote(sellToken: string, buyToken: string, buyAmount: BigNumber): BigNumber { if (areAddressesEqual(buyToken, WETH_ADDRESS)) { return getDeterministicBuyQuote(getUniswapExchangeSalt(sellToken), WETH_ADDRESS, sellToken, buyAmount); } if (areAddressesEqual(sellToken, WETH_ADDRESS)) { return getDeterministicBuyQuote(getUniswapExchangeSalt(buyToken), WETH_ADDRESS, buyToken, buyAmount); } const ethSold = getDeterministicBuyQuote(getUniswapExchangeSalt(buyToken), WETH_ADDRESS, buyToken, buyAmount); return getDeterministicBuyQuote(getUniswapExchangeSalt(sellToken), WETH_ADDRESS, sellToken, ethSold); } function getDeterministicSellQuotes( sellToken: string, buyToken: string, sources: string[], sampleAmounts: BigNumber[], ): BigNumber[][] { const quotes: BigNumber[][] = []; for (const source of sources) { const sampleOutputs = []; for (const amount of sampleAmounts) { if (source === 'Kyber') { sampleOutputs.push(getDeterministicSellQuote(KYBER_SALT, sellToken, buyToken, amount)); } else if (source === 'Uniswap') { sampleOutputs.push(getDeterministicUniswapSellQuote(sellToken, buyToken, amount)); } } quotes.push(sampleOutputs); } return quotes; } function getDeterministicBuyQuotes( sellToken: string, buyToken: string, sources: string[], 
sampleAmounts: BigNumber[], ): BigNumber[][] { const quotes: BigNumber[][] = []; for (const source of sources) { const sampleOutputs = []; for (const amount of sampleAmounts) { if (source === 'Kyber') { sampleOutputs.push(getDeterministicBuyQuote(KYBER_SALT, sellToken, buyToken, amount)); } else if (source === 'Uniswap') { sampleOutputs.push(getDeterministicUniswapBuyQuote(sellToken, buyToken, amount)); } } quotes.push(sampleOutputs); } return quotes; } function getDeterministicUniswapV2SellQuote(path: string[], sellAmount: BigNumber): BigNumber { let bought = sellAmount; for (let i = 0; i < path.length - 1; ++i) { bought = getDeterministicSellQuote(UNISWAP_V2_SALT, path[i], path[i + 1], bought); } return bought; } function getDeterministicUniswapV2BuyQuote(path: string[], buyAmount: BigNumber): BigNumber { let sold = buyAmount; for (let i = path.length - 1; i > 0; --i) { sold = getDeterministicBuyQuote(UNISWAP_V2_SALT, path[i - 1], path[i], sold); } return sold; } function getDeterministicFillableTakerAssetAmount(order: SignedNativeOrder): BigNumber { const hash = getPackedHash(hexUtils.leftPad(order.order.salt)); return new BigNumber(hash).mod(order.order.takerAmount); } function getDeterministicFillableMakerAssetAmount(order: SignedNativeOrder): BigNumber { const takerAmount = getDeterministicFillableTakerAssetAmount(order); return order.order.makerAmount .times(takerAmount) .div(order.order.takerAmount) .integerValue(BigNumber.ROUND_UP); } function getSampleAmounts(tokenAddress: string, count?: number): BigNumber[] { const tokenDecimals = getDeterministicTokenDecimals(tokenAddress); const _upperLimit = getRandomPortion(getRandomInteger(1000, 50000).times(10 ** tokenDecimals)); const _count = count || _.random(1, 16); const d = _upperLimit.div(_count); return _.times(_count, i => d.times((i + 1) / _count).integerValue()); } function createOrder(makerToken: string, takerToken: string): SignedNativeOrder { return { order: { chainId: 1337, verifyingContract: 
randomAddress(), maker: randomAddress(), taker: randomAddress(), pool: NULL_BYTES, sender: NULL_ADDRESS, feeRecipient: randomAddress(), makerAmount: getRandomInteger(1, 1e18), takerAmount: getRandomInteger(1, 1e18), takerTokenFeeAmount: getRandomInteger(1, 1e18), makerToken, takerToken, salt: new BigNumber(hexUtils.random()), expiry: getRandomInteger(0, 2 ** 32), }, signature: { v: 1, r: NULL_BYTES, s: NULL_BYTES, signatureType: SignatureType.EthSign }, type: FillQuoteTransformerOrderType.Limit, }; } function createOrders(makerToken: string, takerToken: string, count?: number): SignedNativeOrder[] { return _.times(count || _.random(1, 16), () => createOrder(makerToken, takerToken)); } async function enableFailTriggerAsync(): Promise<void> { await testContract.enableFailTrigger().awaitTransactionSuccessAsync({ value: 1 }); } function expectQuotesWithinRange( quotes: BigNumber[], expectedQuotes: BigNumber[], maxSlippage: BigNumber | number, ): void { quotes.forEach((_q, i) => { // If we're within 1 base unit of a low decimal token // then that's as good as we're going to get (and slippage is "high") if ( expectedQuotes[i].isZero() || BigNumber.max(expectedQuotes[i], quotes[i]) .minus(BigNumber.min(expectedQuotes[i], quotes[i])) .eq(1) ) { return; } const slippage = quotes[i] .dividedBy(expectedQuotes[i]) .minus(1) .decimalPlaces(4); expect(slippage, `quote[${i}]: ${slippage} ${quotes[i]} ${expectedQuotes[i]}`).to.be.bignumber.gte(0); expect(slippage, `quote[${i}] ${slippage} ${quotes[i]} ${expectedQuotes[i]}`).to.be.bignumber.lte( new BigNumber(maxSlippage), ); }); } describe('getOrderFillableTakerAssetAmounts()', () => { it('returns the expected amount for each order', async () => { const orders = createOrders(MAKER_TOKEN, TAKER_TOKEN); const expected = orders.map(getDeterministicFillableTakerAssetAmount); const actual = await testContract .getLimitOrderFillableTakerAssetAmounts( // tslint:disable-next-line:no-unnecessary-type-assertion orders.map(o => o.order as 
LimitOrderFields), orders.map(o => o.signature), NULL_ADDRESS, ) .callAsync(); expect(actual).to.deep.eq(expected); }); it('returns empty for no orders', async () => { const actual = await testContract.getLimitOrderFillableTakerAssetAmounts([], [], NULL_ADDRESS).callAsync(); expect(actual).to.deep.eq([]); }); }); describe('getOrderFillableMakerAssetAmounts()', () => { it('returns the expected amount for each order', async () => { const orders = createOrders(MAKER_TOKEN, TAKER_TOKEN); const expected = orders.map(getDeterministicFillableMakerAssetAmount); const actual = await testContract .getLimitOrderFillableMakerAssetAmounts( // tslint:disable-next-line:no-unnecessary-type-assertion orders.map(o => o.order as LimitOrderFields), orders.map(o => o.signature), NULL_ADDRESS, ) .callAsync(); expect(actual).to.deep.eq(expected); }); it('returns empty for no orders', async () => { const actual = await testContract.getLimitOrderFillableMakerAssetAmounts([], [], NULL_ADDRESS).callAsync(); expect(actual).to.deep.eq([]); }); }); blockchainTests.resets('sampleSellsFromKyberNetwork()', () => { let kyberOpts = { hintHandler: NULL_ADDRESS, networkProxy: NULL_ADDRESS, weth: WETH_ADDRESS, reserveOffset: KYBER_RESERVE_OFFSET, hint: NULL_BYTES, }; before(async () => { await testContract.createTokenExchanges([MAKER_TOKEN, TAKER_TOKEN]).awaitTransactionSuccessAsync(); kyberOpts = { ...kyberOpts, hintHandler: KYBER_ADDRESS, networkProxy: KYBER_ADDRESS, }; }); it('throws if tokens are the same', async () => { const tx = testContract.sampleSellsFromKyberNetwork(kyberOpts, MAKER_TOKEN, MAKER_TOKEN, []).callAsync(); return expect(tx).to.revertWith(INVALID_TOKEN_PAIR_ERROR); }); it('can return no quotes', async () => { const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, TAKER_TOKEN, MAKER_TOKEN, []) .callAsync(); expect(quotes).to.deep.eq([]); }); it('returns zero if token -> token fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const 
expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote token -> ETH', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicSellQuotes(TAKER_TOKEN, WETH_ADDRESS, ['Kyber'], sampleAmounts); const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, TAKER_TOKEN, WETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote token -> token', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicSellQuotes(TAKER_TOKEN, MAKER_TOKEN, ['Kyber'], sampleAmounts); const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if token -> ETH fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, TAKER_TOKEN, WETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote ETH -> token', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicSellQuotes(WETH_ADDRESS, TAKER_TOKEN, ['Kyber'], sampleAmounts); const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, WETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if ETH -> token fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = 
_.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const [, , quotes] = await testContract .sampleSellsFromKyberNetwork(kyberOpts, WETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); }); blockchainTests.resets('sampleBuysFromKyberNetwork()', () => { let kyberOpts = { hintHandler: NULL_ADDRESS, networkProxy: NULL_ADDRESS, weth: WETH_ADDRESS, reserveOffset: KYBER_RESERVE_OFFSET, hint: NULL_BYTES, }; const ACCEPTABLE_SLIPPAGE = 0.0005; before(async () => { await testContract.createTokenExchanges([MAKER_TOKEN, TAKER_TOKEN]).awaitTransactionSuccessAsync(); kyberOpts = { ...kyberOpts, hintHandler: KYBER_ADDRESS, networkProxy: KYBER_ADDRESS, }; }); it('throws if tokens are the same', async () => { const tx = testContract.sampleBuysFromKyberNetwork(kyberOpts, MAKER_TOKEN, MAKER_TOKEN, []).callAsync(); return expect(tx).to.revertWith(INVALID_TOKEN_PAIR_ERROR); }); it('can return no quotes', async () => { const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, TAKER_TOKEN, MAKER_TOKEN, []) .callAsync(); expect(quotes).to.deep.eq([]); }); it('can quote token -> token', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicBuyQuotes(TAKER_TOKEN, MAKER_TOKEN, ['Kyber'], sampleAmounts); const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expectQuotesWithinRange(quotes, expectedQuotes, ACCEPTABLE_SLIPPAGE); }); it('returns zero if token -> token fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote token -> 
ETH', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicBuyQuotes(TAKER_TOKEN, WETH_ADDRESS, ['Kyber'], sampleAmounts); const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, TAKER_TOKEN, WETH_ADDRESS, sampleAmounts) .callAsync(); expectQuotesWithinRange(quotes, expectedQuotes, ACCEPTABLE_SLIPPAGE); }); it('returns zero if token -> ETH fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, TAKER_TOKEN, WETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote ETH -> token', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicBuyQuotes(WETH_ADDRESS, TAKER_TOKEN, ['Kyber'], sampleAmounts); const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, WETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expectQuotesWithinRange(quotes, expectedQuotes, ACCEPTABLE_SLIPPAGE); }); it('returns zero if ETH -> token fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const [, , quotes] = await testContract .sampleBuysFromKyberNetwork(kyberOpts, WETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); }); blockchainTests.resets('sampleSellsFromUniswap()', () => { const UNISWAP_ETH_ADDRESS = NULL_ADDRESS; before(async () => { await testContract.createTokenExchanges([MAKER_TOKEN, TAKER_TOKEN]).awaitTransactionSuccessAsync(); }); it('throws if tokens are the same', async () => { const tx = testContract.sampleSellsFromUniswap(UNISWAP_ADDRESS, MAKER_TOKEN, MAKER_TOKEN, []).callAsync(); 
return expect(tx).to.revertWith(INVALID_TOKEN_PAIR_ERROR); }); it('can return no quotes', async () => { const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, MAKER_TOKEN, []) .callAsync(); expect(quotes).to.deep.eq([]); }); it('can quote token -> token', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicSellQuotes(TAKER_TOKEN, MAKER_TOKEN, ['Uniswap'], sampleAmounts); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if token -> token fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote token -> ETH', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = getDeterministicSellQuotes(TAKER_TOKEN, WETH_ADDRESS, ['Uniswap'], sampleAmounts); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, UNISWAP_ETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if token -> ETH fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, UNISWAP_ETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote ETH -> token', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const [expectedQuotes] = 
getDeterministicSellQuotes(WETH_ADDRESS, TAKER_TOKEN, ['Uniswap'], sampleAmounts); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, UNISWAP_ETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if ETH -> token fails', async () => { const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, UNISWAP_ETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if no exchange exists for the maker token', async () => { const nonExistantToken = randomAddress(); const sampleAmounts = getSampleAmounts(TAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, nonExistantToken, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if no exchange exists for the taker token', async () => { const nonExistantToken = randomAddress(); const sampleAmounts = getSampleAmounts(nonExistantToken); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); const quotes = await testContract .sampleSellsFromUniswap(UNISWAP_ADDRESS, nonExistantToken, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); }); blockchainTests.resets('sampleBuysFromUniswap()', () => { const UNISWAP_ETH_ADDRESS = NULL_ADDRESS; before(async () => { await testContract.createTokenExchanges([MAKER_TOKEN, TAKER_TOKEN]).awaitTransactionSuccessAsync(); }); it('throws if tokens are the same', async () => { const tx = testContract.sampleBuysFromUniswap(UNISWAP_ADDRESS, MAKER_TOKEN, MAKER_TOKEN, []).callAsync(); return expect(tx).to.revertWith(INVALID_TOKEN_PAIR_ERROR); }); 
it('can return no quotes', async () => { const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, MAKER_TOKEN, []) .callAsync(); expect(quotes).to.deep.eq([]); }); it('can quote token -> token', async () => { const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const [expectedQuotes] = getDeterministicBuyQuotes(TAKER_TOKEN, MAKER_TOKEN, ['Uniswap'], sampleAmounts); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if token -> token fails', async () => { const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote token -> ETH', async () => { const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const [expectedQuotes] = getDeterministicBuyQuotes(TAKER_TOKEN, WETH_ADDRESS, ['Uniswap'], sampleAmounts); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, UNISWAP_ETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if token -> ETH fails', async () => { const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, UNISWAP_ETH_ADDRESS, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('can quote ETH -> token', async () => { const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const [expectedQuotes] = getDeterministicBuyQuotes(WETH_ADDRESS, TAKER_TOKEN, ['Uniswap'], sampleAmounts); const quotes = await 
testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, UNISWAP_ETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if ETH -> token fails', async () => { const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); await enableFailTriggerAsync(); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, UNISWAP_ETH_ADDRESS, TAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if no exchange exists for the maker token', async () => { const nonExistantToken = randomAddress(); const sampleAmounts = getSampleAmounts(nonExistantToken); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, TAKER_TOKEN, nonExistantToken, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); it('returns zero if no exchange exists for the taker token', async () => { const nonExistantToken = randomAddress(); const sampleAmounts = getSampleAmounts(MAKER_TOKEN); const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT); const quotes = await testContract .sampleBuysFromUniswap(UNISWAP_ADDRESS, nonExistantToken, MAKER_TOKEN, sampleAmounts) .callAsync(); expect(quotes).to.deep.eq(expectedQuotes); }); }); describe('liquidity provider', () => { const xAsset = randomAddress(); const yAsset = randomAddress(); const sampleAmounts = getSampleAmounts(yAsset); let liquidityProvider: DummyLiquidityProviderContract; before(async () => { liquidityProvider = await DummyLiquidityProviderContract.deployFrom0xArtifactAsync( artifacts.DummyLiquidityProvider, env.provider, env.txDefaults, {}, ); }); it('should be able to query sells from the liquidity provider', async () => { const quotes = await testContract 
                // NOTE(review): this span is the tail of a sampler test suite whose setup
                // (testContract, liquidityProvider, token constants, sampleAmounts and the
                // getDeterministic* helpers) is defined earlier in the file, outside this
                // chunk. The first statement below continues a `const quotes = await
                // testContract` expression begun there.
                .sampleSellsFromLiquidityProvider(liquidityProvider.address, yAsset, xAsset, sampleAmounts)
                .callAsync();
            // The suite expects the test double to quote `amount - 1` for sells
            // (see the matching `plus(1)` expectation for buys below).
            quotes.forEach((value, idx) => {
                expect(value).is.bignumber.eql(sampleAmounts[idx].minus(1));
            });
        });
        it('should be able to query buys from the liquidity provider', async () => {
            const quotes = await testContract
                .sampleBuysFromLiquidityProvider(liquidityProvider.address, yAsset, xAsset, sampleAmounts)
                .callAsync();
            // Buy quotes from the test double are `amount + 1`.
            quotes.forEach((value, idx) => {
                expect(value).is.bignumber.eql(sampleAmounts[idx].plus(1));
            });
        });
        it('should just return zeros if the liquidity provider does not exist', async () => {
            // A random (nonexistent) provider address must not revert the sampler;
            // it should degrade to all-zero quotes instead.
            const quotes = await testContract
                .sampleBuysFromLiquidityProvider(randomAddress(), yAsset, xAsset, sampleAmounts)
                .callAsync();
            quotes.forEach(value => {
                expect(value).is.bignumber.eql(constants.ZERO_AMOUNT);
            });
        });
    });

    // Sell-side sampling against the mock UniswapV2 router.
    blockchainTests.resets('sampleSellsFromUniswapV2()', () => {
        // Expected quotes for the mock router, which derives deterministic
        // outputs from the swap path and the input amount.
        function predictSellQuotes(path: string[], sellAmounts: BigNumber[]): BigNumber[] {
            return sellAmounts.map(a => getDeterministicUniswapV2SellQuote(path, a));
        }
        it('can return no quotes', async () => {
            const quotes = await testContract
                .sampleSellsFromUniswapV2(UNISWAP_V2_ROUTER, [TAKER_TOKEN, MAKER_TOKEN], [])
                .callAsync();
            expect(quotes).to.deep.eq([]);
        });
        it('can quote token -> token', async () => {
            const sampleAmounts = getSampleAmounts(TAKER_TOKEN);
            const expectedQuotes = predictSellQuotes([TAKER_TOKEN, MAKER_TOKEN], sampleAmounts);
            const quotes = await testContract
                .sampleSellsFromUniswapV2(UNISWAP_V2_ROUTER, [TAKER_TOKEN, MAKER_TOKEN], sampleAmounts)
                .callAsync();
            expect(quotes).to.deep.eq(expectedQuotes);
        });
        it('returns zero if token -> token fails', async () => {
            const sampleAmounts = getSampleAmounts(TAKER_TOKEN);
            const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT);
            // Arm the mock router to revert; the sampler is expected to catch
            // the failure and report zeros rather than bubbling it up.
            await enableFailTriggerAsync();
            const quotes = await testContract
                .sampleSellsFromUniswapV2(UNISWAP_V2_ROUTER, [TAKER_TOKEN, MAKER_TOKEN], sampleAmounts)
                .callAsync();
            expect(quotes).to.deep.eq(expectedQuotes);
        });
        it('can quote token -> token -> token', async () => {
            // Multi-hop path through a random intermediate token.
            const intermediateToken = randomAddress();
            const sampleAmounts = getSampleAmounts(TAKER_TOKEN);
            const expectedQuotes = predictSellQuotes([TAKER_TOKEN, intermediateToken, MAKER_TOKEN], sampleAmounts);
            const quotes = await testContract
                .sampleSellsFromUniswapV2(
                    UNISWAP_V2_ROUTER,
                    [TAKER_TOKEN, intermediateToken, MAKER_TOKEN],
                    sampleAmounts,
                )
                .callAsync();
            expect(quotes).to.deep.eq(expectedQuotes);
        });
    });

    // Buy-side sampling: mirrors the sell-side suite above with buy amounts
    // denominated in MAKER_TOKEN.
    blockchainTests.resets('sampleBuysFromUniswapV2()', () => {
        function predictBuyQuotes(path: string[], buyAmounts: BigNumber[]): BigNumber[] {
            return buyAmounts.map(a => getDeterministicUniswapV2BuyQuote(path, a));
        }
        it('can return no quotes', async () => {
            const quotes = await testContract
                .sampleBuysFromUniswapV2(UNISWAP_V2_ROUTER, [TAKER_TOKEN, MAKER_TOKEN], [])
                .callAsync();
            expect(quotes).to.deep.eq([]);
        });
        it('can quote token -> token', async () => {
            const sampleAmounts = getSampleAmounts(MAKER_TOKEN);
            const expectedQuotes = predictBuyQuotes([TAKER_TOKEN, MAKER_TOKEN], sampleAmounts);
            const quotes = await testContract
                .sampleBuysFromUniswapV2(UNISWAP_V2_ROUTER, [TAKER_TOKEN, MAKER_TOKEN], sampleAmounts)
                .callAsync();
            expect(quotes).to.deep.eq(expectedQuotes);
        });
        it('returns zero if token -> token fails', async () => {
            const sampleAmounts = getSampleAmounts(MAKER_TOKEN);
            const expectedQuotes = _.times(sampleAmounts.length, () => constants.ZERO_AMOUNT);
            await enableFailTriggerAsync();
            const quotes = await testContract
                .sampleBuysFromUniswapV2(UNISWAP_V2_ROUTER, [TAKER_TOKEN, MAKER_TOKEN], sampleAmounts)
                .callAsync();
            expect(quotes).to.deep.eq(expectedQuotes);
        });
        it('can quote token -> token -> token', async () => {
            const intermediateToken = randomAddress();
            const sampleAmounts = getSampleAmounts(MAKER_TOKEN);
            const expectedQuotes = predictBuyQuotes([TAKER_TOKEN, intermediateToken, MAKER_TOKEN], sampleAmounts);
            const quotes = await testContract
                .sampleBuysFromUniswapV2(
                    UNISWAP_V2_ROUTER,
                    [TAKER_TOKEN, intermediateToken, MAKER_TOKEN],
                    sampleAmounts,
                )
                .callAsync();
            expect(quotes).to.deep.eq(expectedQuotes);
        });
    });

    // batchCall() ABI-encodes sampler calls, dispatches them in one transaction
    // and returns each call's encoded return data.
    describe('batchCall()', () => {
        it('can call one function', async () => {
            const orders = createOrders(MAKER_TOKEN, TAKER_TOKEN);
            const expected = orders.map(getDeterministicFillableTakerAssetAmount);
            const calls = [
                testContract
                    .getLimitOrderFillableTakerAssetAmounts(
                        // tslint:disable-next-line:no-unnecessary-type-assertion
                        orders.map(o => o.order as LimitOrderFields),
                        orders.map(o => o.signature),
                        NULL_ADDRESS,
                    )
                    .getABIEncodedTransactionData(),
            ];
            const r = await testContract.batchCall(calls).callAsync();
            expect(r).to.be.length(1);
            // Decode the nested return data back into the inner call's result type.
            const actual = testContract.getABIDecodedReturnData<BigNumber[]>(
                'getLimitOrderFillableTakerAssetAmounts',
                r[0].data,
            );
            expect(actual).to.deep.eq(expected);
        });
        it('can call two functions', async () => {
            const numOrders = _.random(1, 10);
            const orders = _.times(2, () => createOrders(MAKER_TOKEN, TAKER_TOKEN, numOrders));
            const expecteds = [
                orders[0].map(getDeterministicFillableTakerAssetAmount),
                orders[1].map(getDeterministicFillableMakerAssetAmount),
            ];
            const calls = [
                testContract
                    .getLimitOrderFillableTakerAssetAmounts(
                        // tslint:disable-next-line:no-unnecessary-type-assertion
                        orders[0].map(o => o.order as LimitOrderFields),
                        orders[0].map(o => o.signature),
                        NULL_ADDRESS,
                    )
                    .getABIEncodedTransactionData(),
                testContract
                    .getLimitOrderFillableMakerAssetAmounts(
                        // tslint:disable-next-line:no-unnecessary-type-assertion
                        orders[1].map(o => o.order as LimitOrderFields),
                        orders[1].map(o => o.signature),
                        NULL_ADDRESS,
                    )
                    .getABIEncodedTransactionData(),
            ];
            const r = await testContract.batchCall(calls).callAsync();
            expect(r).to.be.length(2);
            expect(
                testContract.getABIDecodedReturnData('getLimitOrderFillableTakerAssetAmounts', r[0].data),
            ).to.deep.eq(expecteds[0]);
            expect(
                testContract.getABIDecodedReturnData('getLimitOrderFillableMakerAssetAmounts', r[1].data),
            ).to.deep.eq(expecteds[1]);
        });
        it('can make recursive calls', async () => {
            // batchCall wrapping a batchCall: decode twice to reach the inner result.
            const numOrders = _.random(1, 10);
            const orders = createOrders(MAKER_TOKEN, TAKER_TOKEN, numOrders);
            const expected = orders.map(getDeterministicFillableTakerAssetAmount);
            let r = await testContract
                .batchCall([
                    testContract
                        .batchCall([
                            testContract
                                .getLimitOrderFillableTakerAssetAmounts(
                                    // tslint:disable-next-line:no-unnecessary-type-assertion
                                    orders.map(o => o.order as LimitOrderFields),
                                    orders.map(o => o.signature),
                                    NULL_ADDRESS,
                                )
                                .getABIEncodedTransactionData(),
                        ])
                        .getABIEncodedTransactionData(),
                ])
                .callAsync();
            expect(r).to.be.length(1);
            r = testContract.getABIDecodedReturnData<SamplerCallResult[]>('batchCall', r[0].data);
            expect(r).to.be.length(1);
            expect(
                testContract.getABIDecodedReturnData('getLimitOrderFillableTakerAssetAmounts', r[0].data),
            ).to.deep.eq(expected);
        });
    });

    // Two-hop sampling: picks the best first-hop/second-hop source pair.
    blockchainTests.resets('TwoHopSampler', () => {
        before(async () => {
            await testContract
                .createTokenExchanges([MAKER_TOKEN, TAKER_TOKEN, INTERMEDIATE_TOKEN])
                .awaitTransactionSuccessAsync();
        });
        it('sampleTwoHopSell', async () => {
            // tslint:disable-next-line no-unnecessary-type-assertion
            const sellAmount = _.last(getSampleAmounts(TAKER_TOKEN))!;
            const uniswapV2FirstHopPath = [TAKER_TOKEN, INTERMEDIATE_TOKEN];
            const uniswapV2FirstHop = testContract
                .sampleSellsFromUniswapV2(UNISWAP_V2_ROUTER, uniswapV2FirstHopPath, [constants.ZERO_AMOUNT])
                .getABIEncodedTransactionData();
            const uniswapV2SecondHopPath = [INTERMEDIATE_TOKEN, randomAddress(), MAKER_TOKEN];
            const uniswapV2SecondHop = testContract
                .sampleSellsFromUniswapV2(UNISWAP_V2_ROUTER, uniswapV2SecondHopPath, [constants.ZERO_AMOUNT])
                .getABIEncodedTransactionData();
            // Selling: the sampler should pick the hop quotes that MAXIMIZE output.
            const firstHopQuotes = [getDeterministicUniswapV2SellQuote(uniswapV2FirstHopPath, sellAmount)];
            const expectedIntermediateAssetAmount = BigNumber.max(...firstHopQuotes);
            const secondHopQuotes = [
                getDeterministicUniswapV2SellQuote(uniswapV2SecondHopPath, expectedIntermediateAssetAmount),
            ];
            const expectedBuyAmount = BigNumber.max(...secondHopQuotes);
            const [firstHop, secondHop, buyAmount] = await testContract
                .sampleTwoHopSell([uniswapV2FirstHop], [uniswapV2SecondHop], sellAmount)
                .callAsync();
            expect(firstHop.sourceIndex, 'First hop source index').to.bignumber.equal(
                firstHopQuotes.findIndex(quote => quote.isEqualTo(expectedIntermediateAssetAmount)),
            );
            expect(secondHop.sourceIndex, 'Second hop source index').to.bignumber.equal(
                secondHopQuotes.findIndex(quote => quote.isEqualTo(expectedBuyAmount)),
            );
            expect(buyAmount, 'Two hop buy amount').to.bignumber.equal(expectedBuyAmount);
        });
        it('sampleTwoHopBuy', async () => {
            // tslint:disable-next-line no-unnecessary-type-assertion
            const buyAmount = _.last(getSampleAmounts(MAKER_TOKEN))!;
            const uniswapV2FirstHopPath = [TAKER_TOKEN, INTERMEDIATE_TOKEN];
            const uniswapV2FirstHop = testContract
                .sampleBuysFromUniswapV2(UNISWAP_V2_ROUTER, uniswapV2FirstHopPath, [constants.ZERO_AMOUNT])
                .getABIEncodedTransactionData();
            const uniswapV2SecondHopPath = [INTERMEDIATE_TOKEN, randomAddress(), MAKER_TOKEN];
            const uniswapV2SecondHop = testContract
                .sampleBuysFromUniswapV2(UNISWAP_V2_ROUTER, uniswapV2SecondHopPath, [constants.ZERO_AMOUNT])
                .getABIEncodedTransactionData();
            // Buying: quotes are derived backwards (second hop first) and the
            // sampler should pick the hops that MINIMIZE the required input.
            const secondHopQuotes = [getDeterministicUniswapV2BuyQuote(uniswapV2SecondHopPath, buyAmount)];
            const expectedIntermediateAssetAmount = BigNumber.min(...secondHopQuotes);
            const firstHopQuotes = [
                getDeterministicUniswapV2BuyQuote(uniswapV2FirstHopPath, expectedIntermediateAssetAmount),
            ];
            const expectedSellAmount = BigNumber.min(...firstHopQuotes);
            const [firstHop, secondHop, sellAmount] = await testContract
                .sampleTwoHopBuy([uniswapV2FirstHop], [uniswapV2SecondHop], buyAmount)
                .callAsync();
            expect(firstHop.sourceIndex, 'First hop source index').to.bignumber.equal(
                firstHopQuotes.findIndex(quote => quote.isEqualTo(expectedSellAmount)),
            );
            expect(secondHop.sourceIndex, 'Second hop source index').to.bignumber.equal(
                secondHopQuotes.findIndex(quote => quote.isEqualTo(expectedIntermediateAssetAmount)),
            );
            expect(sellAmount, 'Two hop sell amount').to.bignumber.equal(expectedSellAmount);
        });
    });
});
the_stack
import { expect } from "chai";
import { it } from "mocha";
import * as moq from "typemoq";
import { IModelConnection } from "@itwin/core-frontend";
import { ITwinLocalization } from "@itwin/core-i18n";
import { Node, RegisteredRuleset, RulesetVariable, StandardNodeTypes, VariableValueTypes } from "@itwin/presentation-common";
import { Presentation, PresentationManager, RulesetManager, RulesetVariablesManager } from "@itwin/presentation-frontend";
import { PrimitiveValue } from "@itwin/appui-abstract";
import {
  computeVisibleNodes, MutableTreeModel, TreeModel, TreeModelNode, TreeModelNodeEditingInfo, TreeModelNodeInput, UiComponents,
} from "@itwin/components-react";
import { act, cleanup, renderHook } from "@testing-library/react-hooks";
import { IPresentationTreeDataProvider } from "../../../presentation-components";
import {
  applyHierarchyChanges, PresentationTreeNodeLoaderProps, reloadVisibleHierarchyParts, usePresentationTreeNodeLoader,
} from "../../../presentation-components/tree/controlled/TreeHooks";
import { createTreeNodeItem } from "../../../presentation-components/tree/Utils";
import { mockPresentationManager } from "../../_helpers/UiComponents";

// Tests for the `usePresentationTreeNodeLoader` hook.
// NOTE(review): the describe label says "usePresentationNodeLoader" while the
// hook under test is `usePresentationTreeNodeLoader` — looks like a stale name;
// confirm before renaming.
describe("usePresentationNodeLoader", () => {
  // Event objects captured from the mocked managers so tests can raise them directly.
  let onIModelHierarchyChanged: PresentationManager["onIModelHierarchyChanged"];
  let onRulesetModified: RulesetManager["onRulesetModified"];
  let onRulesetVariableChanged: RulesetVariablesManager["onVariableChanged"];
  let presentationManagerMock: moq.IMock<PresentationManager>;
  let rulesetVariablesManagerMock: moq.IMock<RulesetVariablesManager>;
  const imodelMock = moq.Mock.ofType<IModelConnection>();
  const rulesetId = "test-ruleset-id";
  const imodelKey = "test-imodel-key";
  // Baseline hook props; individual tests spread-and-override these.
  const initialProps: PresentationTreeNodeLoaderProps = {
    imodel: imodelMock.object,
    ruleset: rulesetId,
    pagingSize: 5,
  };

  before(async () => {
    // UiComponents must be initialized once for the tree components to render.
    await UiComponents.initialize(new ITwinLocalization());
  });

  after(() => {
    UiComponents.terminate();
  });

  beforeEach(() => {
    imodelMock.reset();
    imodelMock.setup((x) => x.key).returns(() => imodelKey);
    // Fresh mocked presentation stack per test; capture its event objects so
    // each test can raise hierarchy/ruleset/variable events on demand.
    const mocks = mockPresentationManager();
    presentationManagerMock = mocks.presentationManager;
    rulesetVariablesManagerMock = mocks.rulesetVariablesManager;
    onIModelHierarchyChanged = mocks.presentationManager.object.onIModelHierarchyChanged;
    onRulesetModified = mocks.rulesetsManager.object.onRulesetModified;
    onRulesetVariableChanged = mocks.rulesetVariablesManager.object.onVariableChanged;
    mocks.presentationManager.setup((x) => x.stateTracker).returns(() => undefined);
    // Default: any node request resolves to an empty hierarchy.
    mocks.presentationManager
      .setup(async (x) => x.getNodesAndCount(moq.It.isAny()))
      .returns(async () => ({ count: 0, nodes: [] }));
    Presentation.setPresentationManager(mocks.presentationManager.object);
  });

  afterEach(async () => {
    await cleanup();
    Presentation.terminate();
  });

  it("creates node loader", () => {
    const { result } = renderHook(
      (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
      { initialProps },
    );
    expect(result.current.nodeLoader).to.not.be.undefined;
  });

  it("creates new nodeLoader when imodel changes", () => {
    const { result, rerender } = renderHook(
      (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
      { initialProps },
    );
    const oldNodeLoader = result.current.nodeLoader;
    const newImodelMock = moq.Mock.ofType<IModelConnection>();
    rerender({ ...initialProps, imodel: newImodelMock.object });
    expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
  });

  it("creates new nodeLoader when ruleset changes", () => {
    const { result, rerender } = renderHook(
      (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
      { initialProps },
    );
    const oldNodeLoader = result.current.nodeLoader;
    rerender({ ...initialProps, ruleset: "changed" });
    expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
  });

  it("creates new nodeLoader when pagingSize changes", () => {
    const { result, rerender } = renderHook(
      (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
      { initialProps },
    );
    const oldNodeLoader = result.current.nodeLoader;
    rerender({ ...initialProps, pagingSize: 20 });
    expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
  });

  // Behavior with `enableHierarchyAutoUpdate`: the hook listens to presentation
  // events and recreates the node loader only for related changes.
  describe("auto-updating model source", () => {
    beforeEach(() => {
      initialProps.enableHierarchyAutoUpdate = true;
    });

    it("doesn't create a new nodeLoader when `PresentationManager` raises `onIModelHierarchyChanged` event with unrelated ruleset", () => {
      const { result } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      onIModelHierarchyChanged.raiseEvent({ rulesetId: "unrelated", updateInfo: "FULL", imodelKey });
      expect(result.current.nodeLoader).to.eq(oldNodeLoader);
    });

    it("doesn't create a new nodeLoader when `PresentationManager` raises `onIModelHierarchyChanged` event with unrelated imodel", () => {
      const { result } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      onIModelHierarchyChanged.raiseEvent({ rulesetId, updateInfo: "FULL", imodelKey: "unrelated" });
      expect(result.current.nodeLoader).to.eq(oldNodeLoader);
    });

    it("creates a new nodeLoader when `PresentationManager` raises a related `onIModelHierarchyChanged` event with FULL hierarchy update", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps: { ...initialProps, ruleset: rulesetId } },
      );
      const oldNodeLoader = result.current.nodeLoader;
      onIModelHierarchyChanged.raiseEvent({ rulesetId, updateInfo: "FULL", imodelKey });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
    });

    it("creates a new nodeLoader when `PresentationManager` raises a related `onIModelHierarchyChanged` event with partial hierarchy updates", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      onIModelHierarchyChanged.raiseEvent({ rulesetId, updateInfo: [{ parent: undefined, nodesCount: 2 }], imodelKey });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
    });

    it("doesn't create a new nodeLoader when `RulesetsManager` raises an unrelated `onRulesetModified` event", () => {
      const { result } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      const currRuleset = new RegisteredRuleset({ id: "unrelated", rules: [] }, "", () => { });
      onRulesetModified.raiseEvent(currRuleset, { ...currRuleset.toJSON() });
      expect(result.current.nodeLoader).to.eq(oldNodeLoader);
    });

    it("creates a new nodeLoader when `RulesetsManager` raises a related `onRulesetModified` event", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      const currRuleset = new RegisteredRuleset({ id: rulesetId, rules: [] }, "", () => { });
      // eslint-disable-next-line @typescript-eslint/no-floating-promises
      act(() => { onRulesetModified.raiseEvent(currRuleset, currRuleset.toJSON()); });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
    });

    it("creates a new nodeLoader when `RulesetVariablesManager` raises an `onRulesetVariableChanged` event with a new value", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      const variables: RulesetVariable[] = [{
        id: "var-id",
        type: VariableValueTypes.String,
        value: "curr",
      }, {
        id: "other-var",
        type: VariableValueTypes.Int,
        value: 123,
      }];
      rulesetVariablesManagerMock.setup((x) => x.getAllVariables()).returns(() => variables);
      // prev value `undefined` => the variable was just added.
      // eslint-disable-next-line @typescript-eslint/no-floating-promises
      act(() => { onRulesetVariableChanged.raiseEvent("var-id", undefined, "curr"); });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
    });

    it("creates a new nodeLoader when `RulesetVariablesManager` raises an `onRulesetVariableChanged` event with a changed value", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      const variables: RulesetVariable[] = [{
        id: "var-id",
        type: VariableValueTypes.String,
        value: "curr",
      }, {
        id: "other-var",
        type: VariableValueTypes.Int,
        value: 123,
      }];
      rulesetVariablesManagerMock.setup((x) => x.getAllVariables()).returns(() => variables);
      // eslint-disable-next-line @typescript-eslint/no-floating-promises
      act(() => { onRulesetVariableChanged.raiseEvent("var-id", "prev", "curr"); });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
    });

    it("creates a new nodeLoader when `RulesetVariablesManager` raises an `onRulesetVariableChanged` event with a removed value", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      // "var-id" intentionally absent: it has been removed.
      const variables: RulesetVariable[] = [{
        id: "other-var",
        type: VariableValueTypes.Int,
        value: 123,
      }];
      rulesetVariablesManagerMock.setup((x) => x.getAllVariables()).returns(() => variables);
      // eslint-disable-next-line @typescript-eslint/no-floating-promises
      act(() => { onRulesetVariableChanged.raiseEvent("var-id", "prev", undefined); });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
    });

    it("does not create a new nodeLoader when `onRulesetModified` event is raised but there are no changes", () => {
      const { result } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      // Same id, same rules => no effective change.
      const currRuleset = new RegisteredRuleset({ id: rulesetId, rules: [] }, "", () => { });
      onRulesetModified.raiseEvent(currRuleset, currRuleset.toJSON());
      expect(result.current.nodeLoader).to.eq(oldNodeLoader);
    });

    it("does not create a new nodeLoader when 'onIModelHierarchyChanged' event is raised but there are no changes", () => {
      const { result } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      // Empty updateInfo array => nothing to apply.
      onIModelHierarchyChanged.raiseEvent({ rulesetId, updateInfo: [], imodelKey });
      expect(result.current.nodeLoader).to.eq(oldNodeLoader);
    });

    it("creates a fresh `TreeModelSource` when nodeLoader changes", async () => {
      const { result, rerender, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps: { ...initialProps, ruleset: "initial" } },
      );
      const initialModelSource = result.current.nodeLoader.modelSource;
      expectTree(initialModelSource.getModel(), []);
      // Dirty the current model so we can tell whether it leaks into the new loader.
      initialModelSource.modifyModel((treeModel) => treeModel.insertChild(undefined, createNodeInput("test"), 0));
      // Update tree so that `info.treeModel` is not undefined
      onRulesetModified.raiseEvent(
        new RegisteredRuleset({ id: "initial", rules: [] }, "", () => { }),
        { id: "initial", rules: [] },
      );
      await waitForNextUpdate();
      rerender({ ...initialProps, ruleset: "updated" });
      const newModelSource = result.current.nodeLoader.modelSource;
      // The new loader must start from an empty model, not the dirtied one.
      expectTree(newModelSource.getModel(), []);
    });

    it("reloads nodes and creates a new nodeLoader when 'onIModelHierarchyChanged' event is raised", async () => {
      const { result, waitForNextUpdate } = renderHook(
        (props: PresentationTreeNodeLoaderProps) => usePresentationTreeNodeLoader(props),
        { initialProps },
      );
      const oldNodeLoader = result.current.nodeLoader;
      // Report a visible range so the hook knows which part of the tree to reload.
      result.current.onItemsRendered({ overscanStartIndex: 0, overscanStopIndex: 1, visibleStartIndex: 0, visibleStopIndex: 1 });
      // Expect exactly one root-node page request of size 1 after the update.
      presentationManagerMock.setup(async (x) => x.getNodesAndCount(
        moq.It.is(({ paging, parentKey }) => paging?.start === 0 && paging.size === 1 && !parentKey))
      )
        .returns(async () => ({ count: 1, nodes: [createNode("root1")] }))
        .verifiable(moq.Times.once());
      void act(() => {
        onIModelHierarchyChanged.raiseEvent({ rulesetId, updateInfo: [{ parent: undefined, nodesCount: 1 }], imodelKey });
      });
      await waitForNextUpdate();
      expect(result.current.nodeLoader).to.not.eq(oldNodeLoader);
      presentationManagerMock.verifyAll();
    });
  });
});

// Builds a presentation `Node` whose key and label are both derived from `label`.
function createNode(label: string): Node {
  return Node.fromJSON({
    key: { type: StandardNodeTypes.ECInstancesNode, instanceKeys: [], pathFromRoot: [label] },
    labelDefinition: { displayValue: label, rawValue: label, typeName: "string" },
  });
}

// Wraps `createNode` output into the input shape `MutableTreeModel` expects.
function createNodeInput(label: string): TreeModelNodeInput {
  const node = createNode(label);
  const item = createTreeNodeItem(node, undefined);
  return {
    id: label,
    item,
    label: item.label,
    isExpanded: false,
    isLoading: false,
    isSelected: false,
  };
}

// Compact literal description of a tree: a plain string for a leaf, a
// `{ label: children }` map for a plain parent, or an object form when node
// flags (selected/expanded/loading/editingInfo) matter.
type TreeHierarchy = string | {
  [label: string]: TreeHierarchy[];
} | {
  label: string;
  selected?: true;
  expanded?: true;
  loading?: true;
  editingInfo?: TreeModelNodeEditingInfo;
  children?: TreeHierarchy[];
};

// Asserts that `model` matches `expectedHierarchy` by rebuilding the compact
// TreeHierarchy representation from the model and deep-comparing.
function expectTree(model: TreeModel, expectedHierarchy: TreeHierarchy[]): void {
  const actualHierarchy = buildActualHierarchy(undefined);
  expect(actualHierarchy).to.deep.equal(expectedHierarchy);

  function buildActualHierarchy(parentId: string | undefined): TreeHierarchy[] {
    const result: TreeHierarchy[] = [];
    for (const childId of model.getChildren(parentId) ?? []) {
      const node = model.getNode(childId) as TreeModelNode;
      if (!node)
        continue;
      const label = (node.label.value as PrimitiveValue).displayValue!;
      const children = buildActualHierarchy(childId);
      // Collect only the flags that are set, so the output stays in the most
      // compact TreeHierarchy form possible.
      const additionalProperties: Partial<TreeHierarchy> = {};
      if (node.isSelected) {
        additionalProperties.selected = true;
      }
      if (node.isExpanded) {
        additionalProperties.expanded = true;
      }
      if (node.isLoading) {
        additionalProperties.loading = true;
      }
      if (node.editingInfo) {
        additionalProperties.editingInfo = node.editingInfo;
      }
      if (Object.keys(additionalProperties).length > 0) {
        result.push({ label, ...additionalProperties, ...(children.length > 0 && { children }) });
      } else if (children.length > 0) {
        result.push({ [label]: children });
      } else {
        result.push(label);
      }
    }
    return result;
  }
}

// Builds a `MutableTreeModel` from the compact hierarchy description and
// sanity-checks the round trip via `expectTree`.
function createTreeModel(hierarchy: TreeHierarchy[]): MutableTreeModel {
  const treeModel = new MutableTreeModel();
  insertNodes(undefined, hierarchy);
  expectTree(treeModel, hierarchy);
  return treeModel;

  function insertNodes(parentId: string | undefined, childNodes: TreeHierarchy[]): void {
    for (let i = 0; i < childNodes.length; ++i) {
      const node = childNodes[i];
      if (typeof node === "string") {
        // Leaf form.
        treeModel.insertChild(parentId, createNodeInput(node), i);
      } else if (typeof node.label === "string") {
        // Object form with flags.
        treeModel.insertChild(parentId, createNodeInput(node.label), i);
        const insertedNode = treeModel.getNode(node.label)!;
        if (node.selected) {
          insertedNode.isSelected = true;
        }
        if (node.expanded) {
          insertedNode.isExpanded = true;
        }
        if (node.loading) {
          insertedNode.isLoading = true;
        }
        if (node.editingInfo) {
          insertedNode.editingInfo = node.editingInfo as TreeModelNodeEditingInfo;
        }
        insertNodes(node.label, node.children ?? []);
      } else {
        // `{ label: children }` map form.
        const nodeLabel = Object.keys(node)[0];
        treeModel.insertChild(parentId, createNodeInput(nodeLabel), i);
        insertNodes(nodeLabel, (node as any)[nodeLabel] as TreeHierarchy[]);
      }
    }
  }
}

// NOTE(review): the describe label says "applyHierarchyUpdateRecords" while the
// function under test is `applyHierarchyChanges` — looks like a stale name.
describe("applyHierarchyUpdateRecords", () => {
  it("returns same model if node was not present in model", () => {
    const nonExistingNode = createNode("non-existing");
    const initialTree = createTreeModel(["root1", "root2"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: nonExistingNode.key,
        nodesCount: 2,
      }],
      [],
      {}
    );
    // Identity equality: no copy should be made when nothing applies.
    expect(updatedTree).to.be.eq(initialTree);
  });

  it("updates children count of updated root node", () => {
    const initialTree = createTreeModel(["root1", "root2"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: createNode("root1").key,
        nodesCount: 2,
      }],
      [],
      {}
    );
    expectTree(updatedTree, ["root1", "root2"]);
    expect(updatedTree.getNode("root1")?.numChildren).to.be.eq(2);
  });

  it("updates expanded root node and removes siblings", () => {
    const initialTree = createTreeModel(["root1", "root2"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: undefined,
        nodesCount: 1,
        expandedNodes: [{
          node: { ...createNode("root1"), description: "updated-description" },
          position: 0,
        }],
      }],
      [],
      {}
    );
    expectTree(updatedTree, ["root1"]);
    expect(updatedTree.getNode("root1")?.description).to.be.eq("updated-description");
  });

  it("replaces root node with new", () => {
    const initialTree = createTreeModel(["root1", "root2"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: undefined,
        nodesCount: 1,
        expandedNodes: [{
          node: createNode("updated-root"),
          position: 0,
        }],
      }],
      [],
      {}
    );
    expectTree(updatedTree, ["updated-root"]);
  });

  it("replaces child nodes with new if parent is expanded", () => {
    const initialTree = createTreeModel([{ label: "root1", expanded: true, children: ["child1", "child2"] }, "root2"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: createNode("root1").key,
        nodesCount: 1,
        expandedNodes: [{
          node: createNode("updated-child"),
          position: 0,
        }],
      }],
      [],
      {}
    );
    expectTree(updatedTree, [{ label: "root1", expanded: true, children: ["updated-child"] }, "root2"]);
  });

  it("removes child nodes if parent is not expanded", () => {
    const initialTree = createTreeModel([{ ["root1"]: ["child1", "child2"] }, "root2"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: createNode("root1").key,
        nodesCount: 1,
        expandedNodes: [{
          node: createNode("updated-child"),
          position: 0,
        }],
      }],
      [],
      {}
    );
    // Collapsed parent: children are dropped but the new count is recorded.
    expectTree(updatedTree, ["root1", "root2"]);
    expect(updatedTree.getNode("root1")?.numChildren).to.be.eq(1);
  });

  it("updates parent node and persists it's subtree", () => {
    const initialTree = createTreeModel([
      {
        label: "root1",
        expanded: true,
        children: [
          "child1",
          {
            label: "child2",
            expanded: true,
            children: ["grandChild1"],
          },
        ],
      },
      "root2",
    ]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: undefined,
        nodesCount: 2,
        expandedNodes: [{
          node: { ...createNode("root1"), description: "updated-description" },
          position: 0,
        }],
      }],
      [],
      {}
    );
    expectTree(updatedTree, [
      {
        label: "root1",
        expanded: true,
        children: [
          "child1",
          {
            label: "child2",
            expanded: true,
            children: ["grandChild1"],
          },
        ],
      },
    ]);
    expect(updatedTree.getNode("root1")?.description).to.be.eq("updated-description");
  });

  it("updates root node and adds reloaded siblings", () => {
    const initialTree = createTreeModel(["root1", "root2", "root3"]);
    const updatedTree = applyHierarchyChanges(
      initialTree,
      [{
        parent: undefined,
        nodesCount: 3,
        expandedNodes: [{
          node: { ...createNode("root2"), description: "updated-description" },
          position: 1,
        }],
      }],
      // Reloaded items supply the siblings that the update record alone would drop.
      [{
        parentId: undefined,
        nodeItems: [createTreeNodeItem(createNode("root1")), createTreeNodeItem(createNode("root2")), createTreeNodeItem(createNode("root3"))],
        offset: 0,
      }],
      {}
    );
    expectTree(updatedTree, ["root1", "root2", "root3"]);
    expect(updatedTree.getNode("root2")?.description).to.be.eq("updated-description");
  });
});
// Tests for `reloadVisibleHierarchyParts`: given the currently visible node
// range, it should request from the data provider only the pages of nodes that
// are visible but not yet loaded.
describe("reloadVisibleHierarchyParts", () => {
  // Compact description of a (possibly partially loaded) hierarchy:
  // `childCount` may exceed `children.length` to model unloaded placeholders.
  interface HierarchyItem {
    label: string;
    position: number;
    children?: HierarchyItem[];
    childCount?: number;
  }

  function addNodes(model: MutableTreeModel, parentId: string | undefined, items?: HierarchyItem[], itemsCount?: number) {
    model.setNumChildren(parentId, itemsCount ?? items?.length);
    for (const item of items ?? []) {
      // Nodes are created expanded so their children count as visible.
      model.setChildren(parentId, [{ ...createNodeInput(item.label), isExpanded: true }], item.position);
      addNodes(model, item.label, item.children, item.childCount);
    }
  }

  function createVisibleNodes(rootNodesCount: number, hierarchy: HierarchyItem[],) {
    const model = new MutableTreeModel();
    addNodes(model, undefined, hierarchy, rootNodesCount);
    return computeVisibleNodes(model);
  }

  const dataProviderMock = moq.Mock.ofType<IPresentationTreeDataProvider>();

  beforeEach(() => {
    dataProviderMock.reset();
  });

  it("does not load nodes if they are already loaded", async () => {
    const visibleNodes = createVisibleNodes(2, [{ label: "root1", position: 0 }, { label: "root2", position: 1 }]);
    await reloadVisibleHierarchyParts(visibleNodes, { overscanStartIndex: 0, overscanStopIndex: 1, visibleStartIndex: 0, visibleStopIndex: 1 }, dataProviderMock.object);
    dataProviderMock.verify(async (x) => x.getNodes(moq.It.isAny(), moq.It.isAny()), moq.Times.never());
  });

  it("does not load nodes if there are no visible nodes", async () => {
    const visibleNodes = createVisibleNodes(0, []);
    await reloadVisibleHierarchyParts(visibleNodes, { overscanStartIndex: 0, overscanStopIndex: 4, visibleStartIndex: 0, visibleStopIndex: 4 }, dataProviderMock.object);
    dataProviderMock.verify(async (x) => x.getNodes(moq.It.isAny(), moq.It.isAny()), moq.Times.never());
  });

  it("reloads visible root nodes", async () => {
    // 4 root slots but only positions 1 and 2 loaded => whole root page reloads.
    const visibleNodes = createVisibleNodes(4, [{ label: "root2", position: 1 }, { label: "root3", position: 2 }]);
    dataProviderMock.setup(async (x) => x.getNodes(undefined, moq.It.isObjectWith({ start: 0, size: 4 })))
      .returns(async () => [])
      .verifiable(moq.Times.once());
    await reloadVisibleHierarchyParts(visibleNodes, { overscanStartIndex: 0, overscanStopIndex: 3, visibleStartIndex: 0, visibleStopIndex: 3 }, dataProviderMock.object);
    dataProviderMock.verifyAll();
  });

  it("reloads visible child nodes", async () => {
    // "root2" claims 3 children but none are loaded => its page is requested.
    const visibleNodes = createVisibleNodes(2, [{ label: "root1", position: 0 }, { label: "root2", position: 1, childCount: 3 }]);
    dataProviderMock.setup(async (x) => x.getNodes(moq.It.is((item) => item !== undefined && item.id === "root2"), moq.It.isObjectWith({ start: 0, size: 3 })))
      .returns(async () => [])
      .verifiable(moq.Times.once());
    await reloadVisibleHierarchyParts(visibleNodes, { overscanStartIndex: 0, overscanStopIndex: 5, visibleStartIndex: 0, visibleStopIndex: 5 }, dataProviderMock.object);
    dataProviderMock.verifyAll();
  });

  it("reloads with correct page size if there are less visible nodes", async () => {
    // Viewport spans 5 slots but only 1 node exists => page size clamps to 1.
    const visibleNodes = createVisibleNodes(1, []);
    dataProviderMock.setup(async (x) => x.getNodes(undefined, moq.It.isObjectWith({ start: 0, size: 1 })))
      .returns(async () => [])
      .verifiable(moq.Times.once());
    await reloadVisibleHierarchyParts(visibleNodes, { overscanStartIndex: 0, overscanStopIndex: 4, visibleStartIndex: 0, visibleStopIndex: 4 }, dataProviderMock.object);
    dataProviderMock.verifyAll();
  });

  it("reloads with correct page start if there are less visible nodes", async () => {
    // Overscan starts at 1 but the request still begins at 0 covering all 3 slots.
    const visibleNodes = createVisibleNodes(3, []);
    dataProviderMock.setup(async (x) => x.getNodes(undefined, moq.It.isObjectWith({ start: 0, size: 3 })))
      .returns(async () => [])
      .verifiable(moq.Times.once());
    await reloadVisibleHierarchyParts(visibleNodes, { overscanStartIndex: 1, overscanStopIndex: 3, visibleStartIndex: 1, visibleStopIndex: 3 }, dataProviderMock.object);
    dataProviderMock.verifyAll();
  });
});
the_stack
// Public contracts for the Fluid Framework summarizer: the client responsible for
// generating container summaries, submitting them as ops, and tracking ack/nack results.
import {
    IEvent,
    IEventProvider,
    ITelemetryLogger,
} from "@fluidframework/common-definitions";
import {
    IFluidRouter,
    IFluidLoadable,
} from "@fluidframework/core-interfaces";
import { ContainerWarning, IDeltaManager } from "@fluidframework/container-definitions";
import {
    ISequencedDocumentMessage,
    ISummaryTree,
    IDocumentMessage,
} from "@fluidframework/protocol-definitions";
import { ISummaryStats } from "@fluidframework/runtime-definitions";
import { ISummaryAckMessage, ISummaryNackMessage, ISummaryOpMessage } from "./summaryCollection";

declare module "@fluidframework/core-interfaces" {
    // eslint-disable-next-line @typescript-eslint/no-empty-interface
    export interface IFluidObject extends Readonly<Partial<IProvideSummarizer>> { }
}

// Well-known provider key used for feature detection via the IProvideSummarizer pattern.
export const ISummarizer: keyof IProvideSummarizer = "ISummarizer";

export interface IProvideSummarizer {
    readonly ISummarizer: ISummarizer;
}

/**
 * Similar to AbortSignal, but using promise instead of events
 * @typeParam T - cancellation reason type
 */
export interface ICancellationToken<T> {
    /** Tells if this cancellable token is cancelled */
    readonly cancelled: boolean;
    /**
     * Promise that gets fulfilled when this cancellable token is cancelled
     * @returns reason of cancellation
     */
    readonly waitCancelled: Promise<T>;
}

/* Similar to AbortSignal, but using promise instead of events */
export type ISummaryCancellationToken = ICancellationToken<SummarizerStopReason>;

export interface ISummarizerInternalsProvider {
    /** Encapsulates the work to walk the internals of the running container to generate a summary */
    submitSummary(options: ISubmitSummaryOptions): Promise<SubmitSummaryResult>;

    /** Callback whenever a new SummaryAck is received, to update internal tracking state */
    refreshLatestSummaryAck(
        proposalHandle: string,
        ackHandle: string,
        summaryRefSeq: number,
        summaryLogger: ITelemetryLogger,
    ): Promise<void>;
}

/** Options that control the behavior of a running summarizer. */
export interface ISummarizerOptions {
    /**
     * Set to true to disable the default heuristics from running; false by default.
     * This affects only the heuristics around when a summarizer should
     * submit summaries. So when it is disabled, summarizer clients should
     * not be expected to summarize unless an on-demand summary is requested.
     */
    disableHeuristics: boolean;
}

export interface ISummarizingWarning extends ContainerWarning {
    readonly errorType: "summarizingError";
    readonly logged: boolean;
}

/** Minimal runtime surface the summarizer needs to observe connection lifecycle. */
export interface IConnectableRuntime {
    readonly disposed: boolean;
    readonly connected: boolean;
    readonly clientId: string | undefined;
    readonly deltaManager: IDeltaManager<ISequencedDocumentMessage, IDocumentMessage>;
    once(event: "connected" | "disconnected" | "dispose", listener: () => void): this;
}

export interface ISummarizerRuntime extends IConnectableRuntime {
    readonly logger: ITelemetryLogger;
    /** clientId of parent (non-summarizing) container that owns summarizer container */
    readonly summarizerClientId: string | undefined;
    closeFn(): void;
    on(event: "batchEnd", listener: (error: any, op: ISequencedDocumentMessage) => void): this;
    removeListener(event: "batchEnd", listener: (error: any, op: ISequencedDocumentMessage) => void): this;
}

/** Options affecting summarize behavior. */
export interface ISummarizeOptions {
    /** True to generate the full tree with no handle reuse optimizations; defaults to false */
    readonly fullTree?: boolean,
    /** True to ask the server what the latest summary is first; defaults to false */
    readonly refreshLatestAck?: boolean,
}

export interface ISubmitSummaryOptions extends ISummarizeOptions {
    /** Logger to use for correlated summary events */
    readonly summaryLogger: ITelemetryLogger,
    /** Tells when summary process should be cancelled */
    readonly cancellationToken: ISummaryCancellationToken,
}

export interface IOnDemandSummarizeOptions extends ISummarizeOptions {
    /** Reason for generating summary. */
    readonly reason: string;
}

/** Options to use when enqueueing a summarize attempt. */
export interface IEnqueueSummarizeOptions extends IOnDemandSummarizeOptions {
    /** If specified, the summarize attempt will not occur until after this sequence number. */
    readonly afterSequenceNumber?: number;
    /**
     * True to override the existing enqueued summarize attempt if there is one.
     * This will guarantee that this attempt gets enqueued. If override is false,
     * then an existing enqueued summarize attempt will block a new one from being
     * enqueued. There can only be one enqueued at a time. Defaults to false.
     */
    readonly override?: boolean;
}

/**
 * In addition to the normal summary tree + stats, this contains additional stats
 * only relevant at the root of the tree.
 */
export interface IGeneratedSummaryStats extends ISummaryStats {
    readonly dataStoreCount: number;
    readonly summarizedDataStoreCount: number;
}

/** Base results for all submitSummary attempts. */
export interface IBaseSummarizeResult {
    readonly stage: "base";
    /** Error object related to failed summarize attempt. */
    readonly error: any;
    /** Reference sequence number as of the generate summary attempt. */
    readonly referenceSequenceNumber: number;
}

/** Results of submitSummary after generating the summary tree. */
export interface IGenerateSummaryTreeResult extends Omit<IBaseSummarizeResult, "stage"> {
    readonly stage: "generate";
    /** Generated summary tree. */
    readonly summaryTree: ISummaryTree;
    /** Stats for generated summary tree. */
    readonly summaryStats: IGeneratedSummaryStats;
    /** Time it took to generate the summary tree and stats. */
    readonly generateDuration: number;
}

/** Results of submitSummary after uploading the tree to storage. */
export interface IUploadSummaryResult extends Omit<IGenerateSummaryTreeResult, "stage"> {
    readonly stage: "upload";
    /** The handle returned by storage pointing to the uploaded summary tree. */
    readonly handle: string;
    /** Time it took to upload the summary tree to storage. */
    readonly uploadDuration: number;
}

/** Results of submitSummary after submitting the summarize op. */
export interface ISubmitSummaryOpResult extends Omit<IUploadSummaryResult, "stage" | "error"> {
    readonly stage: "submit";
    /** The client sequence number of the summarize op submitted for the summary. */
    readonly clientSequenceNumber: number;
    /** Time it took to submit the summarize op to the broadcasting service. */
    readonly submitOpDuration: number;
}

/**
 * Strict type representing result of a submitSummary attempt.
 * The result consists of 4 possible stages, each with its own data.
 * The data is cumulative, so each stage will contain the data from the previous stages.
 * If the final "submitted" stage is not reached, the result may contain the error object.
 * Stages:
 * 1. "base" - stopped before the summary tree was even generated, and the result only contains the base data
 * 2. "generate" - the summary tree was generated, and the result will contain that tree + stats
 * 3. "upload" - the summary was uploaded to storage, and the result contains the server-provided handle
 * 4. "submit" - the summarize op was submitted, and the result contains the op client sequence number.
 */
export type SubmitSummaryResult =
    | IBaseSummarizeResult
    | IGenerateSummaryTreeResult
    | IUploadSummaryResult
    | ISubmitSummaryOpResult;

export interface IBroadcastSummaryResult {
    readonly summarizeOp: ISummaryOpMessage;
    readonly broadcastDuration: number;
}

export interface IAckSummaryResult {
    readonly summaryAckOp: ISummaryAckMessage;
    readonly ackNackDuration: number;
}

export interface INackSummaryResult {
    readonly summaryNackOp: ISummaryNackMessage;
    readonly ackNackDuration: number;
}

/** Discriminated (by `success`) result of one phase of a summarize attempt. */
export type SummarizeResultPart<TSuccess, TFailure = undefined> = {
    success: true;
    data: TSuccess;
} | {
    success: false;
    data: TFailure | undefined;
    message: string;
    error: any;
    retryAfterSeconds?: number;
};

export interface ISummarizeResults {
    /** Resolves when we generate, upload, and submit the summary. */
    readonly summarySubmitted: Promise<SummarizeResultPart<SubmitSummaryResult>>;
    /** Resolves when we observe our summarize op broadcast. */
    readonly summaryOpBroadcasted: Promise<SummarizeResultPart<IBroadcastSummaryResult>>;
    /** Resolves when we receive a summaryAck or summaryNack. */
    readonly receivedSummaryAckOrNack: Promise<SummarizeResultPart<IAckSummaryResult, INackSummaryResult>>;
}

export type OnDemandSummarizeResult = (ISummarizeResults & {
    /** Indicates that an already running summarize attempt does not exist. */
    readonly alreadyRunning?: undefined;
}) | {
    /** Resolves when an already running summarize attempt completes. */
    readonly alreadyRunning: Promise<void>;
};

export type EnqueueSummarizeResult = (ISummarizeResults & {
    /**
     * Indicates that another summarize attempt is not already enqueued,
     * and this attempt has been enqueued.
     */
    readonly alreadyEnqueued?: undefined;
}) | (ISummarizeResults & {
    /** Indicates that another summarize attempt was already enqueued. */
    readonly alreadyEnqueued: true;
    /**
     * Indicates that the other enqueued summarize attempt was abandoned,
     * and this attempt has been enqueued.
     */
    readonly overridden: true;
}) | {
    /** Indicates that another summarize attempt was already enqueued. */
    readonly alreadyEnqueued: true;
    /**
     * Indicates that the other enqueued summarize attempt remains enqueued,
     * and this attempt has not been enqueued.
     */
    readonly overridden?: undefined;
};

export type SummarizerStopReason =
    /** Summarizer client failed to summarize in all 3 consecutive attempts. */
    | "failToSummarize"
    /** Parent client reported that it is no longer connected. */
    | "parentNotConnected"
    /**
     * Parent client reported that it is no longer elected the summarizer.
     * This is the normal flow; a disconnect will always trigger the parent
     * client to no longer be elected as responsible for summaries. Then it
     * tries to stop its spawned summarizer client.
     */
    | "parentShouldNotSummarize"
    /** Summarizer client was disconnected */
    | "summarizerClientDisconnected"
    /* running summarizer threw an exception */
    | "summarizerException";

export interface ISummarizerEvents extends IEvent {
    /**
     * An event indicating that the Summarizer is having problems summarizing
     */
    (event: "summarizingError", listener: (error: ISummarizingWarning) => void);
}

export interface ISummarizer extends IEventProvider<ISummarizerEvents>, IFluidRouter, IFluidLoadable {
    stop(reason: SummarizerStopReason): void;
    run(onBehalfOf: string, options?: Readonly<Partial<ISummarizerOptions>>): Promise<SummarizerStopReason>;

    /**
     * Attempts to generate a summary on demand. If already running, takes no action.
     * @param options - options controlling the summarize attempt
     * @returns an alreadyRunning promise if a summarize attempt is already in progress,
     * which will resolve when the current attempt completes. At that point caller can
     * decide to try again or not. Otherwise, it will return an object containing promises
     * that resolve as the summarize attempt progresses. They will resolve with success
     * false if a failure is encountered.
     */
    summarizeOnDemand(options: IOnDemandSummarizeOptions): OnDemandSummarizeResult;

    /**
     * Enqueue an attempt to summarize after the specified sequence number.
     * If afterSequenceNumber is provided, the summarize attempt is "enqueued"
     * to run once an eligible op comes in with sequenceNumber \>= afterSequenceNumber.
     * @param options - options controlling the summarize attempt
     * @returns an object containing an alreadyEnqueued flag to indicate if another
     * summarize attempt has already been enqueued. It also may contain an overridden flag
     * when alreadyEnqueued is true, that indicates whether this attempt forced the
     * previous attempt to abort. If this attempt becomes enqueued, it returns an object
     * containing promises that resolve as the summarize attempt progresses. They will
     * resolve with success false if a failure is encountered.
     */
    enqueueSummarize(options: IEnqueueSummarizeOptions): EnqueueSummarizeResult;
}

/** Data about an attempt to summarize used for heuristics. */
export interface ISummarizeAttempt {
    /** Reference sequence number when summary was generated or attempted */
    readonly refSequenceNumber: number;
    /** Time of summary attempt after it was sent or attempted */
    readonly summaryTime: number;
    /** Sequence number of summary op */
    summarySequenceNumber?: number;
}

/** Data relevant for summary heuristics. */
export interface ISummarizeHeuristicData {
    /** Latest received op sequence number */
    lastOpSequenceNumber: number;
    /** Most recent summary attempt from this client */
    readonly lastAttempt: ISummarizeAttempt;
    /** Most recent summary that received an ack */
    readonly lastSuccessfulSummary: Readonly<ISummarizeAttempt>;

    /**
     * Initializes lastAttempt and lastSuccessfulAttempt based on the last summary.
     * @param lastSummary - last ack summary
     */
    initialize(lastSummary: ISummarizeAttempt): void;

    /**
     * Records a summary attempt. If the attempt was successfully sent,
     * provide the reference sequence number, otherwise it will be set
     * to the last seen op sequence number.
     * @param referenceSequenceNumber - reference sequence number of sent summary
     */
    recordAttempt(referenceSequenceNumber?: number): void;

    /** Mark that the last sent summary attempt has received an ack */
    markLastAttemptAsSuccessful(): void;
}

/** Responsible for running heuristics determining when to summarize. */
export interface ISummarizeHeuristicRunner {
    /** Runs the heuristic to determine if it should try to summarize */
    run(): void;

    /** Runs a different heuristic to check if it should summarize before closing */
    shouldRunLastSummary(): boolean;

    /** Disposes of resources */
    dispose(): void;
}
the_stack
// Tests for the core valita `Type` API: try/parse plus the assert/map/chain/optional/default
// combinators. Type-level expectations are checked with ts-expect; runtime behavior with chai.
import { expect } from "chai";
import { expectType as _expectType, TypeEqual, TypeOf } from "ts-expect";
import * as v from "../src";

// A helper for checking whether the given validator's
// inferred output type is _exactly_ the same as given one.
// For example the following are valid:
// expectType(v.number()).toImply<number>(true);
// expectType(v.number()).toImply<1>(false);
// expectType(v.number()).toImply<string>(false);
// expectType(v.number()).toImply<string | number>(false);
// expectType(v.number()).toImply<unknown>(false);
// expectType(v.number()).toImply<any>(false);
// expectType(v.number()).toImply<never>(false);
function expectType<T extends v.Type | v.Optional>(
  _type: T
): {
  toImply<M>(_truth: TypeEqual<v.Infer<T>, M>): void;
  toBeAssignableTo<M>(_truth: TypeOf<T, M>): void;
} {
  // The checks happen entirely at compile time; at runtime these are no-ops.
  return { toImply: () => void {}, toBeAssignableTo: () => void {} };
}

describe("Type", () => {
  describe("try", () => {
    it("returns ValitaResult<T> when called for v.Type<T>", () => {
      function _<T>(type: v.Type<T>, value: unknown): v.ValitaResult<T> {
        return type.try(value);
      }
    });
    it("returns type v.ValitaResult<v.Infer<...>>", () => {
      function _<T extends v.Type>(
        type: T,
        value: unknown
      ): v.ValitaResult<v.Infer<T>> {
        return type.try(value);
      }
    });
    it("returns type discriminated by .ok", () => {
      const result = v.number().try(1);
      if (result.ok) {
        _expectType<TypeOf<{ value: number }, typeof result>>(true);
        _expectType<TypeOf<{ message: string }, typeof result>>(false);
      } else {
        _expectType<TypeOf<{ value: number }, typeof result>>(false);
        _expectType<TypeOf<{ message: string }, typeof result>>(true);
      }
    });
    it("returns { ok: true, value: ... } on success", () => {
      const result = v.number().try(1);
      expect(result.ok).to.equal(true);
      expect(result.ok && result.value).to.equal(1);
    });
    it("keeps the original instance for .value when possible", () => {
      const o = {};
      const t = v.object({});
      const result = t.try(o);
      expect(result.ok && result.value).to.equal(o);
    });
    it("creates a new instance for .value when necessary", () => {
      const o = { a: 1 };
      const t = v.object({});
      const result = t.try(o, { mode: "strip" });
      expect(result.ok && result.value).to.not.equal(o);
    });
    it("returns { ok: false, ... } on failure", () => {
      const t = v.number();
      const result = t.try("test");
      expect(result.ok).to.equal(false);
    });
  });

  describe("parse", () => {
    it("returns T when called for v.Type<T>", () => {
      function _<T>(type: v.Type<T>, value: unknown): T {
        return type.parse(value);
      }
    });
    it("returns type v.Infer<...>", () => {
      function _<T extends v.Type>(type: T, value: unknown): v.Infer<T> {
        return type.parse(value);
      }
    });
  });

  describe("assert", () => {
    it("passes the type through by default", () => {
      const t = v.number().assert(() => true);
      expectType(t).toImply<number>(true);
    });
    it("turns optional input into non-optional output", () => {
      const t = v.object({
        a: v
          .number()
          .optional()
          .assert(() => true),
      });
      expect(t.parse({})).to.deep.equal({ a: undefined });
      expectType(t).toImply<{ a: number | undefined }>(true);
    });
    it("accepts type predicates", () => {
      type Branded = number & { readonly brand: unique symbol };
      const t = v.number().assert((n): n is Branded => true);
      expectType(t).toImply<Branded>(true);
      expectType(t).toImply<number>(false);
    });
    it("accepts type parameters", () => {
      const t = v.number().assert<1>((n) => n === 1);
      expectType(t).toImply<1>(true);
    });
    it("passes in the parsed value", () => {
      let value: unknown;
      const t = v.number().assert((v) => {
        value = v;
        return true;
      });
      t.parse(1000);
      expect(value).to.equal(1000);
    });
    it("passes the value through on success", () => {
      const t = v.number().assert(() => true);
      expect(t.parse(1000)).to.equal(1000);
    });
    it("creates a custom error on failure", () => {
      const t = v.number().assert(() => false);
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.includes({ code: "custom_error" });
    });
    it("allows passing in a custom error message", () => {
      const t = v.number().assert(() => false, "test");
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          error: "test",
        });
    });
    it("allows passing in a custom error message in an object", () => {
      const t = v.number().assert(() => false, { message: "test" });
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          error: { message: "test" },
        });
    });
    it("allows passing in a error path", () => {
      const t = v.number().assert(() => false, { path: ["test"] });
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          path: ["test"],
        });
    });
    it("runs multiple asserts in order", () => {
      const t = v
        .string()
        .assert((s) => s !== "a", "a")
        .assert(() => false, "b");
      expect(() => t.parse("a"))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          error: "a",
        });
      expect(() => t.parse("b"))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          error: "b",
        });
    });
    it("always gets the value transformed by previous maps and chains", () => {
      const x = {};
      const t = v
        .string()
        .assert((s) => s === "a")
        .map(() => x)
        .assert((s) => s === x);
      expect(t.parse("a")).to.equal(x);
    });
  });

  describe("map", () => {
    it("changes the output type to the function's return type", () => {
      const t = v.number().map(String);
      expectType(t).toImply<string>(true);
    });
    it("infers literals when possible", () => {
      const t = v.number().map(() => "test");
      expectType(t).toImply<"test">(true);
    });
    it("passes in the parsed value", () => {
      let value: unknown;
      const t = v.number().map((v) => (value = v));
      t.parse(1000);
      expect(value).to.equal(1000);
    });
    it("passes on the return value", () => {
      const t = v.number().map(() => "test");
      expect(t.parse(1000)).to.equal("test");
    });
    it("runs multiple maps in order", () => {
      const t = v
        .string()
        .map((s) => s + "b")
        .map((s) => s + "c");
      expect(t.parse("a")).to.equal("abc");
    });
  });

  describe("chain", () => {
    it("changes the output type to the function's return type", () => {
      const t = v.number().chain((n) => v.ok(String(n)));
      expectType(t).toImply<string>(true);
    });
    it("infers literals when possible", () => {
      const t = v.number().chain(() => ({ ok: true, value: "test" }));
      expectType(t).toImply<"test">(true);
    });
    it("passes in the parsed value", () => {
      let value: unknown;
      const t = v.number().chain((n) => {
        value = n;
        return v.ok("test");
      });
      t.parse(1000);
      expect(value).to.equal(1000);
    });
    it("passes on the success value", () => {
      const t = v.number().chain(() => v.ok("test"));
      expect(t.parse(1)).to.equal("test");
    });
    it("fails on error result", () => {
      const t = v.number().chain(() => v.err());
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
        });
    });
    it("allows passing in a custom error message", () => {
      const t = v.number().chain(() => v.err("test"));
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          error: "test",
        });
    });
    it("allows passing in a custom error message in an object", () => {
      const t = v.number().chain(() => v.err({ message: "test" }));
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          error: { message: "test" },
        });
    });
    it("allows passing in an error path", () => {
      const t = v.number().chain(() => v.err({ path: ["test"] }));
      expect(() => t.parse(1))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "custom_error",
          path: ["test"],
        });
    });
    it("runs multiple chains in order", () => {
      const t = v
        .string()
        .chain((s) => v.ok(s + "b"))
        .chain((s) => v.ok(s + "c"));
      expect(t.parse("a")).to.equal("abc");
    });
    it("works together with .try()", () => {
      const s = v.string();
      const t = v.unknown().chain((x) => s.try(x));
      expectType(t).toImply<string>(true);
      expect(t.parse("a")).to.equal("a");
      expect(() => t.parse(1)).to.throw(v.ValitaError);
    });
  });

  describe("optional()", () => {
    it("accepts missing values", () => {
      const t = v.object({
        a: v.string().optional(),
      });
      expect(t.parse({})).to.deep.equal({});
    });
    it("accepts undefined", () => {
      const t = v.object({
        a: v.string().optional(),
      });
      expect(t.parse({ a: undefined })).to.deep.equal({ a: undefined });
    });
    it("accepts the original type", () => {
      const t = v.object({
        a: v.string().optional(),
      });
      expect(t.parse({ a: "test" })).to.deep.equal({ a: "test" });
    });
    it("adds undefined to output", () => {
      const t = v.string().optional();
      expectType(t).toImply<string | undefined>(true);
    });
    it("makes the output type optional", () => {
      const t1 = v.object({ a: v.number().optional() });
      expectType(t1).toImply<{ a?: number | undefined }>(true);
    });
    it("short-circuits previous optionals", () => {
      const t = v.object({
        a: v
          .string()
          .optional()
          .map(() => 1)
          .optional(),
      });
      expect(t.parse({ a: undefined })).to.deep.equal({ a: undefined });
      expectType(t).toImply<{ a?: 1 | undefined }>(true);
    });
    it("short-circuits undefined()", () => {
      const t = v.object({
        a: v
          .undefined()
          .map(() => 1)
          .optional(),
      });
      expect(t.parse({ a: undefined })).to.deep.equal({ a: undefined });
      expectType(t).toImply<{ a?: 1 | undefined }>(true);
    });
    it("passes undefined to assert() for missing values", () => {
      let value: unknown = null;
      const t = v.object({
        missing: v
          .string()
          .optional()
          .assert((input) => {
            value = input;
            return true;
          }),
      });
      t.parse({});
      expect(value).to.be.undefined;
    });
    it("passes undefined to map() for missing values", () => {
      let value: unknown = null;
      const t = v.object({
        missing: v
          .string()
          .optional()
          .map((input) => {
            value = input;
          }),
      });
      t.parse({});
      expect(value).to.be.undefined;
    });
    it("passes undefined to chain() for missing values", () => {
      let value: unknown = null;
      const t = v.object({
        missing: v
          .string()
          .optional()
          .chain((input) => {
            value = input;
            return v.ok(true);
          }),
      });
      t.parse({});
      expect(value).to.be.undefined;
    });
  });

  describe("default", () => {
    it("accepts undefined", () => {
      const t = v.number().default(2);
      expect(t.parse(undefined)).to.deep.equal(2);
    });
    it("maps undefined output from any parser", () => {
      const t = v
        .string()
        .map(() => undefined)
        .default(2);
      expect(t.parse("test")).to.deep.equal(2);
    });
    it("makes input optional", () => {
      const t = v.object({
        a: v.number().default(2),
      });
      expect(t.parse({})).to.deep.equal({ a: 2 });
    });
    it("infers literals when possible", () => {
      const t = v.undefined().default(2);
      expectType(t).toImply<2>(true);
    });
    it("removes undefined from the return type", () => {
      const t = v.union(v.string(), v.undefined()).default(2);
      expectType(t).toImply<string | 2>(true);
    });
  });
});

// Tests for v.never(): rejects every input and suppresses downstream combinators.
describe("never()", () => {
  it("rejects everything", () => {
    const t = v.never();
    for (const val of ["1", 1, 1n, true, null, undefined, [], {}]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
  it("has output type 'never'", () => {
    const t = v.never();
    expectType(t).toImply<never>(true);
  });
  it("never propagates to assert()", () => {
    let called = false;
    const t = v.never().assert(() => {
      called = true;
      return true;
    });
    expect(() => t.parse(null)).to.throw(v.ValitaError);
    expect(called).to.be.false;
  });
  it("never propagates to map()", () => {
    let called = false;
    const t = v.never().map(() => {
      called = true;
    });
    expect(() => t.parse(null)).to.throw(v.ValitaError);
    expect(called).to.be.false;
  });
  it("never propagates to chain()", () =>
{
    let called = false;
    const t = v.never().chain(() => {
      called = true;
      return v.ok(true);
    });
    expect(() => t.parse(null)).to.throw(v.ValitaError);
    expect(called).to.be.false;
  });
});

// Tests for the primitive validators: each accepts exactly its own primitive type
// and rejects every other representative value.
describe("string()", () => {
  it("accepts strings", () => {
    const t = v.string();
    expect(t.parse("test")).to.equal("test");
  });
  it("rejects other types", () => {
    const t = v.string();
    for (const val of [1, 1n, true, null, undefined, [], {}]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
});

describe("unknown()", () => {
  it("accepts anything", () => {
    const t = v.unknown();
    for (const val of ["test", 1, 1n, true, null, undefined, [], {}]) {
      expect(t.parse(val)).to.equal(val);
    }
  });
});

describe("number()", () => {
  it("accepts numbers", () => {
    const t = v.number();
    expect(t.parse(1)).to.equal(1);
  });
  it("rejects other types", () => {
    const t = v.number();
    for (const val of ["1", 1n, true, null, undefined, [], {}]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
});

describe("bigint()", () => {
  it("accepts bigints", () => {
    const t = v.bigint();
    expect(t.parse(1n)).to.equal(1n);
  });
  it("rejects other types", () => {
    const t = v.bigint();
    for (const val of ["1", 1, true, null, undefined, [], {}]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
});

describe("boolean()", () => {
  it("accepts booleans", () => {
    const t = v.boolean();
    expect(t.parse(true)).to.equal(true);
  });
  it("rejects other types", () => {
    const t = v.boolean();
    for (const val of ["1", 1, 1n, null, undefined, [], {}]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
});

// Tests for v.object(): key inference, missing/optional keys, non-enumerable keys,
// and the strict/passthrough/strip parsing modes.
describe("object()", () => {
  // NOTE(review): "acceps" is a typo in the test title; titles are runtime strings so
  // they are left untouched here.
  it("acceps empty objects", () => {
    const t = v.object({});
    expect(t.parse({})).to.deep.equal({});
    // eslint-disable-next-line @typescript-eslint/ban-types
    expectType(t).toImply<{}>(true);
  });
  it("infers required keys object({})", () => {
    const t = v.object({
      a: v.object({}),
    });
    // eslint-disable-next-line @typescript-eslint/ban-types
    expectType(t).toImply<{ a: {} }>(true);
  });
  it("infers optional keys for optional()", () => {
    const t = v.object({
      a: v.undefined().optional(),
    });
    expectType(t).toImply<{ a?: undefined }>(true);
  });
  it("infers required keys for never()", () => {
    const t = v.object({
      a: v.never(),
    });
    expectType(t).toImply<{ a: never }>(true);
  });
  it("infers required keys for undefined()", () => {
    const t = v.object({
      a: v.undefined(),
    });
    expectType(t).toImply<{ a: undefined }>(true);
  });
  it("infers required keys for unknown()", () => {
    const t = v.object({
      a: v.unknown(),
    });
    expectType(t).toImply<{ a: unknown }>(true);
  });
  it("throws on missing required keys", () => {
    const t = v.object({ a: v.string() });
    expect(() => t.parse({}))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0].code", "missing_value");
  });
  it("reports multiple missing required keys", () => {
    const result = v.object({ a: v.string(), b: v.number() }).try({});
    expect(!result.ok && result.issues).to.have.deep.members([
      {
        path: ["a"],
        code: "missing_value",
      },
      {
        path: ["b"],
        code: "missing_value",
      },
    ]);
  });
  it("does not throw on missing optional keys", () => {
    const t = v.object({ a: v.string().optional() });
    expect(t.parse({})).to.deep.equal({});
  });
  it("returns the original object instance if possible", () => {
    const t = v.object({ a: v.number() });
    const o = { a: 1 };
    expect(t.parse(o)).to.equal(o);
  });
  it("returns a new object instance if the fields change", () => {
    const t = v.object({
      a: v.number().map(() => "test"),
    });
    const o = { a: 1 };
    expect(t.parse(o)).to.not.equal(o);
  });
  it("doesn't lose enumerable optional keys when there are transformed non-enumerable optional keys", () => {
    const o = { a: 1 };
    Object.defineProperty(o, "b", {
      value: 2,
      enumerable: false,
    });
    const t = v.object({
      a: v.number().optional(),
      b: v
        .number()
        .map((n) => n + 1)
        .optional(),
    });
    expect(t.parse(o)).to.deep.equal({ a: 1, b: 3 });
  });
  it("rejects other types", () => {
    const t = v.object({});
    for (const val of ["1", 1n, true, null, undefined, []]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
  it("checks non-enumerable required keys", () => {
    const t = v.object({ a: v.string() });
    const o = {};
    Object.defineProperty(o, "a", {
      value: 1,
      enumerable: false,
    });
    expect(() => t.parse(o))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        path: ["a"],
        expected: ["string"],
      });
  });
  it("checks non-enumerable optional keys", () => {
    const t = v.object({ a: v.string().optional() });
    const o = {};
    Object.defineProperty(o, "a", {
      value: 1,
      enumerable: false,
    });
    expect(() => t.parse(o))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        path: ["a"],
        expected: ["string"],
      });
  });
  it("fails on unrecognized keys by default", () => {
    const t = v.object({ a: v.number() });
    expect(() => t.parse({ a: 1, b: 2 }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "unrecognized_keys",
        "issues[0].keys": ["b"],
      });
  });
  it("fails on unrecognized keys when mode=strict", () => {
    const t = v.object({ a: v.number() });
    expect(() => t.parse({ a: 1, b: 2 }, { mode: "strict" }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "unrecognized_keys",
        "issues[0].keys": ["b"],
      });
  });
  it("reports multiple unrecognized keys when mode=strict", () => {
    const t = v.object({});
    expect(() => t.parse({ a: 1, b: 2 }, { mode: "strict" }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "unrecognized_keys",
        "issues[0].keys": ["a", "b"],
      });
  });
  it("passes through unrecognized keys when mode=passthrough", () => {
    const t = v.object({ a: v.number() });
    const o = t.parse({ a: 1, b: 2 }, { mode: "passthrough" });
    expect(o).to.deep.equal({ a: 1, b: 2 });
  });
  it("strips unrecognized keys when mode=strip", () => {
    const t = v.object({ a: v.number() });
    const o = t.parse({ a: 1, b: 2 }, { mode: "strip" });
    expect(o).to.deep.equal({ a: 1 });
  });
  it("strips unrecognized keys when mode=strip and there are transformed values", () => {
    const t = v.object({ a: v.number().map((x) => x + 1) });
    const o = t.parse({ a: 1, b: 2 }, { mode: "strip" });
    expect(o).to.deep.equal({ a: 2 });
  });
  // NOTE(review): despite the title, `b` and `c` below are assigned normally and are
  // therefore enumerable - confirm whether this fixture still covers the intended case.
  it("doesn't lose optional keys when mode=strip and there unrecognized non-enumerable keys", () => {
    const o = { a: 1 } as Record<string, unknown>;
    o.b = 2;
    o.c = 3;
    const t = v.object({
      a: v.number().optional(),
      c: v.number().optional(),
    });
    expect(t.parse(o, { mode: "strip" })).to.deep.equal({ a: 1, c: 3 });
  });
  it("doesn't fail on unrecognized non-enumerable keys when mode=strict", () => {
    const o = { a: 1 };
    Object.defineProperty(o, "b", {
      value: 2,
      enumerable: false,
    });
    const t = v.object({ a: v.number(), b: v.number() });
    expect(t.parse(o, { mode: "strict" })).to.equal(o);
  });
  it("doesn't get confused by recognized non-enumerable keys when mode=strict", () => {
    const o = { x: 1 };
    Object.defineProperties(o, {
      a: {
        value: 1,
        enumerable: false,
      },
      b: {
        value: 2,
        enumerable: false,
      },
    });
    const t = v.object({ a: v.number(), b: v.number() });
    expect(() => t.parse(o, { mode: "strict" }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "unrecognized_keys",
        "issues[0].keys": ["x"],
      });
  });
  it("keeps missing optionals missing when mode=strip", () => {
    const t = v.object({ a: v.number().optional() });
    const o = t.parse({ b: 2 }, { mode: "strip" });
    expect(o).to.deep.equal({});
  });
  it("doesn't consider undefined() optional when mode=strict", () => {
    const t = v.object({ a: v.undefined() });
    expect(() => t.parse({}, { mode: "strict" }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "missing_value",
        "issues[0].path": ["a"],
      });
  });
  it("doesn't consider undefined() optional when mode=passthrough", () => {
    const t = v.object({ a: v.undefined() });
    expect(() => t.parse({}, { mode: "passthrough" }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "missing_value",
        "issues[0].path": ["a"],
      });
  });
  it("doesn't consider undefined() optional when mode=strip", () => {
    const t = v.object({ a: v.undefined() });
    expect(() => t.parse({}, { mode: "strip" }))
      .to.throw(v.ValitaError)
      .with.deep.nested.include({
        "issues[0].code": "missing_value",
        "issues[0].path": ["a"],
      });
  });
  it("forwards parsing mode to nested types", () => {
    const t = v.object({ nested: v.object({ a: v.number() }) });
    const i = { nested: { a: 1, b: 2 } };
    expect(() => t.parse(i)).to.throw(v.ValitaError);
    expect(() => t.parse(i, { mode: "strict" })).to.throw(v.ValitaError);
    expect(t.parse(i, { mode: "passthrough" })).to.equal(i);
    expect(t.parse(i, { mode: "strip" })).to.deep.equal({ nested: { a: 1 } });
  });

  describe("omit", () => {
    it("omits given keys", () => {
      const t = v.object({ a: v.literal(1), b: v.literal(2) }).omit("b");
      expectType(t).toImply<{ a: 1 }>(true);
      expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 });
    });
    it("allows zero arguments", () => {
      const t = v.object({ a: v.literal(1), b: v.literal(2) }).omit();
      expectType(t).toImply<{ a: 1; b: 2 }>(true);
      expect(t.parse({ a: 1, b: 2 })).to.deep.equal({ a: 1, b: 2 });
    });
    it("allows multiple", () => {
      const t = v
        .object({ a: v.literal(1), b: v.literal(2), c: v.literal(3) })
        .omit("a", "b");
      expectType(t).toImply<{ c: 3 }>(true);
      expect(t.parse({ c: 3 })).to.deep.equal({ c: 3 });
    });
    it("keeps rest", () => {
      const t = v
        .object({ a: v.literal(1), b: v.literal(2) })
        .rest(v.number())
        .omit("b");
      expectType(t).toImply<{ a: 1; [K: string]: number }>(true);
      expect(t.parse({ a: 1, b: 1000 })).to.deep.equal({ a: 1, b: 1000 });
    });
    it("removes checks", () => {
      const t = v
        .object({ a: v.literal(1), b: v.literal(2) })
        .check(() => false)
        .omit("b");
      expectType(t).toImply<{ a: 1 }>(true);
      expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 });
    });
  });

  describe("pick", () => {
    it("omits given keys", () => {
      const t = v.object({ a: v.literal(1), b: v.literal(2) }).pick("a");
      expectType(t).toImply<{ a: 1 }>(true);
      expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 });
    });
it("allows zero arguments", () => { const t = v.object({ a: v.literal(1), b: v.literal(2) }).pick(); // eslint-disable-next-line @typescript-eslint/ban-types expectType(t).toImply<{}>(true); expect(t.parse({})).to.deep.equal({}); }); it("allows multiple", () => { const t = v .object({ a: v.literal(1), b: v.literal(2), c: v.literal(3) }) .pick("a", "b"); expectType(t).toImply<{ a: 1; b: 2 }>(true); expect(t.parse({ a: 1, b: 2 })).to.deep.equal({ a: 1, b: 2 }); }); it("removes rest", () => { const t = v .object({ a: v.literal(1), b: v.literal(2) }) .rest(v.string()) .pick("a"); expectType(t).toImply<{ a: 1 }>(true); expect(() => t.parse({ a: 1, b: "test" }, { mode: "strict" })).to.throw( v.ValitaError ); }); it("removes checks", () => { const t = v .object({ a: v.literal(1), b: v.literal(2) }) .check(() => false) .pick("a"); expectType(t).toImply<{ a: 1 }>(true); expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 }); }); }); describe("partial", () => { it("makes all keys optional", () => { const t = v.object({ a: v.literal(1), b: v.literal(2) }).partial(); expectType(t).toImply<Partial<{ a: 1; b: 2 }>>(true); expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 }); }); it("makes rest accept undefined as well as the original type", () => { it("makes all keys optional", () => { const t = v .object({ a: v.literal(1) }) .rest(v.number()) .partial(); expectType(t).toImply<Partial<{ a: 1; [K: string]: number }>>(true); expect(t.parse({ a: 1, x: undefined, y: 1000 })).to.deep.equal({ a: 1, x: undefined, y: 1000, }); }); const t = v .object({ a: v.literal(1), b: v.literal(2) }) .rest(v.number()) .omit("b"); expectType(t).toImply<{ a: 1; [K: string]: number }>(true); expect(t.parse({ a: 1, b: 1000 })).to.deep.equal({ a: 1, b: 1000 }); }); it("removes checks", () => { const t = v .object({ a: v.literal(1), b: v.literal(2) }) .check(() => false) .partial(); expectType(t).toImply<Partial<{ a: 1; b: 2 }>>(true); expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 }); }); }); describe("rest", () 
=> {
    // rest() adds an index-signature fallback applied to every key that is
    // not declared in the shape.
    it("adds an index signature to the inferred type", () => {
      const t = v.object({ a: v.literal(1) }).rest(v.number());
      expectType(t).toImply<{ a: 1; [K: string]: number }>(true);
      expectType(t).toImply<{ a: string }>(false);
    });
    it("accepts matching unexpected key values", () => {
      const t = v.object({ a: v.literal("test") }).rest(v.literal(1));
      expect(t.parse({ a: "test", b: 1 })).to.deep.equal({ a: "test", b: 1 });
    });
    it("returns the original object instance if possible", () => {
      const t = v.object({ a: v.number() }).rest(v.number());
      const o = { a: 1, b: 2 };
      expect(t.parse(o)).to.equal(o);
    });
    it("returns a new object instance if the fields change", () => {
      const t = v
        .object({
          a: v.number(),
        })
        .rest(v.number().map((x) => x));
      const o = { a: 1, b: 2 };
      expect(t.parse(o)).to.not.equal(o);
    });
    it("doesn't lose the extra fields if the object has to be copied", () => {
      // `o` inherits a/b/c from its prototype chain and owns only `d`.
      const t = v
        .object({
          a: v.number(),
          c: v.number().map((n) => -n),
        })
        .rest(v.number());
      const r = { a: 1, b: 2, c: 3 } as Record<string, unknown>;
      const o = Object.create(r);
      o.d = 4;
      expect(t.parse(o)).to.deep.equal({ a: 1, b: 2, c: -3, d: 4 });
    });
    it("ignores non-enumerable keys", () => {
      const t = v.object({ a: v.literal("test") }).rest(v.literal(1));
      const o = { a: "test" };
      Object.defineProperty(o, "b", {
        value: "string",
        enumerable: false,
      });
      expect(t.parse(o)).to.deep.equal({ a: "test" });
    });
    it("rejects non-matching unexpected key values", () => {
      const t = v.object({ a: v.literal("test") }).rest(v.literal(1));
      expect(() => t.parse({ a: "test", b: 2 }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues")
        .with.lengthOf(1)
        .that.deep.includes({
          code: "invalid_literal",
          path: ["b"],
          expected: [1],
        });
    });
    it("applies only to unexpected keys", () => {
      const t = v.object({ a: v.literal("test") }).rest(v.literal(1));
      expect(() => t.parse({ a: 1 }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues")
        .with.lengthOf(1)
        .that.deep.includes({
          code: "invalid_literal",
          path: ["a"],
          expected: ["test"],
        });
    });
    // Once rest() decides the fate of unexpected keys, the parsing mode no
    // longer applies to them.
    it("takes precedence over mode=strict", () => {
      const t = v.object({}).rest(v.literal(1));
      expect(t.parse({ a: 1 }, { mode: "strict" })).to.deep.equal({ a: 1 });
      expect(() => t.parse({ a: 2 }, { mode: "strict" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues")
        .with.lengthOf(1)
        .that.deep.includes({
          code: "invalid_literal",
          path: ["a"],
          expected: [1],
        });
    });
    it("takes precedence over mode=strip", () => {
      const t = v.object({}).rest(v.literal(1));
      expect(t.parse({ a: 1 }, { mode: "strip" })).to.deep.equal({ a: 1 });
      expect(() => t.parse({ a: 2 }, { mode: "strip" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues")
        .with.lengthOf(1)
        .that.deep.includes({
          code: "invalid_literal",
          path: ["a"],
          expected: [1],
        });
    });
    it("takes precedence over mode=passthrough", () => {
      const t = v.object({}).rest(v.literal(1));
      expect(t.parse({ a: 1 }, { mode: "passthrough" })).to.deep.equal({
        a: 1,
      });
      expect(() => t.parse({ a: 2 }, { mode: "passthrough" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues")
        .with.lengthOf(1)
        .that.deep.includes({
          code: "invalid_literal",
          path: ["a"],
          expected: [1],
        });
    });
  });
  it("attaches paths to issues", () => {
    const t = v.object({
      type: v.literal(2),
      other: v.literal("test"),
    });
    expect(() => t.parse({ type: 2, other: "not_test" }))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_literal",
        path: ["other"],
        expected: ["test"],
      });
  });
  it("attaches nested paths to issues", () => {
    const t = v.object({
      type: v.literal(2),
      other: v.object({
        key: v.literal("test"),
      }),
    });
    expect(() => t.parse({ type: 2, other: { key: "not_test" } }))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_literal",
        path: ["other", "key"],
        expected: ["test"],
      });
  });
  describe("extend()", () => {
    it("extends the base shape", () => {
      const t = v.object({ a: v.string() }).extend({ b: v.number() });
      expect(t.parse({ a: "test", b: 1
})).to.deep.equal({ a: "test", b: 1 }); expectType(t).toImply<{ a: string; b: number }>(true); }); it("overwrites already existing keys", () => { const t = v.object({ a: v.string() }).extend({ a: v.number() }); expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 }); expect(() => t.parse({ a: "test" })).to.throw(v.ValitaError); expectType(t).toImply<{ a: number }>(true); }); }); describe("check()", () => { it("accepts a function returning boolean", () => { const t = v.object({ a: v.string() }).check((_v) => true); expect(t.parse({ a: "test" })).to.deep.equal({ a: "test" }); }); it("doesn't affect the base shape", () => { const t = v.object({ a: v.string() }).check((v): boolean => Boolean(v)); expectType(t).toImply<{ a: string }>(true); }); it("skips all checks if any property fails to parse", () => { let didRun = false; const t = v.object({ a: v.string(), b: v.number() }).check(() => { didRun = true; return true; }); expect(() => t.parse({ a: "test" })).to.throw(v.ValitaError); expect(didRun).to.be.false; }); it("runs multiple checks in order", () => { const t = v .object({ a: v.string() }) .check((v) => v.a === "test", "first") .check(() => false, "second"); expect(() => t.parse({ a: "test" })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.deep.includes({ code: "custom_error", error: "second", }); expect(() => t.parse({ a: "other" })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.deep.includes({ code: "custom_error", error: "first", }); }); it("runs checks after the object has otherwise been parsed", () => { const t = v .object({ a: v.string() }) .check((v) => (v as Record<string, unknown>).b === 2) .extend({ b: v.undefined().map(() => 2) }) .check((v) => v.b === 2); expect(() => t.parse({ a: "test", b: null })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.deep.includes({ code: "invalid_type", path: ["b"], }); expect(t.parse({ a: "test", b: undefined })).to.deep.equal({ a: "test", b: 2, }); }); it("allows 
extending the base type after adding checks", () => { const t = v .object({ a: v.string() }) .check((v): boolean => Boolean(v)) .extend({ b: v.number() }); expect(t.parse({ a: "test", b: 1 })).to.deep.equal({ a: "test", b: 1 }); expectType(t).toImply<{ a: string; b: number }>(true); }); it("creates a custom error on failure", () => { const t = v.object({ a: v.string() }).check(() => false); expect(() => t.parse({ a: "test" })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.includes({ code: "custom_error" }); }); it("allows passing in a custom error message", () => { const t = v.object({ a: v.string() }).check(() => false, "test"); expect(() => t.parse({ a: "test" })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.deep.includes({ code: "custom_error", error: "test", }); }); it("allows passing in a custom error message in an object", () => { const t = v .object({ a: v.string() }) .check(() => false, { message: "test" }); expect(() => t.parse({ a: "test" })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.deep.includes({ code: "custom_error", error: { message: "test" }, }); }); it("allows passing in a error path", () => { const t = v .object({ a: v.string() }) .check(() => false, { path: ["test"] }); expect(() => t.parse({ a: "test" })) .to.throw(v.ValitaError) .with.nested.property("issues[0]") .that.deep.includes({ code: "custom_error", path: ["test"], }); }); }); }); describe("record()", () => { it("acceps empty objects", () => { const t = v.record(v.unknown()); expect(t.parse({})).to.deep.equal({}); expectType(t).toImply<{ [K: string]: unknown }>(true); }); it("does not accept arrays", () => { const t = v.record(v.unknown()); expect(() => t.parse([])).to.throw(v.ValitaError); }); it("acceps the defined types of values", () => { const t = v.record(v.number()); expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 }); expectType(t).toImply<{ [K: string]: number }>(true); }); it("defaults to Record<string, unknown>", 
() => {
    const t = v.record();
    expect(t.parse({ a: 1 })).to.deep.equal({ a: 1 });
    expectType(t).toImply<{ [K: string]: unknown }>(true);
  });
  it("rejects values other than the defined type", () => {
    const t = v.record(v.number());
    expect(() => t.parse({ a: "test" })).to.throw(v.ValitaError);
  });
  // Records validate every key's value regardless of parsing mode.
  it("does not react to parsing modes", () => {
    const t = v.record(v.number());
    expect(t.parse({ a: 1 }, { mode: "strict" })).to.deep.equal({ a: 1 });
    expect(() => t.parse({ a: 1, b: "test" }, { mode: "strict" })).to.throw(
      v.ValitaError
    );
    expect(t.parse({ a: 1 }, { mode: "strip" })).to.deep.equal({ a: 1 });
    expect(() => t.parse({ a: 1, b: "test" }, { mode: "strip" })).to.throw(
      v.ValitaError
    );
    expect(() =>
      t.parse({ a: 1, b: "test" }, { mode: "passthrough" })
    ).to.throw(v.ValitaError);
  });
});
describe("literal()", () => {
  it("accepts string literals", () => {
    const t = v.literal("test");
    expect(t.parse("test")).to.equal("test");
  });
  it("accepts number literals", () => {
    const t = v.literal(1);
    expect(t.parse(1)).to.equal(1);
  });
  it("accepts bigint literals", () => {
    const t = v.literal(1n);
    expect(t.parse(1n)).to.equal(1n);
  });
  it("accepts boolean literals", () => {
    const t = v.literal(true);
    expect(t.parse(true)).to.equal(true);
  });
  it("rejects other literals when expecting a string literal", () => {
    const t = v.literal("test");
    expect(() => t.parse("other")).to.throw(v.ValitaError);
    expect(() => t.parse(1)).to.throw(v.ValitaError);
    expect(() => t.parse(1n)).to.throw(v.ValitaError);
    expect(() => t.parse(true)).to.throw(v.ValitaError);
  });
  it("rejects other literals when expecting a numeric literal", () => {
    const t = v.literal(1);
    expect(() => t.parse("test")).to.throw(v.ValitaError);
    expect(() => t.parse(2)).to.throw(v.ValitaError);
    expect(() => t.parse(1n)).to.throw(v.ValitaError);
    expect(() => t.parse(true)).to.throw(v.ValitaError);
  });
  it("rejects other literals when expecting a bigint literal", () => {
    const t = v.literal(1n);
    expect(() => t.parse("test")).to.throw(v.ValitaError);
    expect(() => t.parse(1)).to.throw(v.ValitaError);
    expect(() => t.parse(2n)).to.throw(v.ValitaError);
    expect(() => t.parse(true)).to.throw(v.ValitaError);
  });
  it("rejects other literals when expecting a boolean literal", () => {
    const t = v.literal(true);
    expect(() => t.parse("test")).to.throw(v.ValitaError);
    expect(() => t.parse(1)).to.throw(v.ValitaError);
    expect(() => t.parse(1n)).to.throw(v.ValitaError);
    expect(() => t.parse(false)).to.throw(v.ValitaError);
  });
});
describe("array()", () => {
  it("accepts arrays", () => {
    const t = v.array(v.number());
    expect(t.parse([1])).to.deep.equal([1]);
  });
  it("rejects other types", () => {
    const t = v.array(v.number());
    for (const val of ["1", 1n, true, null, undefined, { 0: 1 }]) {
      expect(() => t.parse(val)).to.throw(v.ValitaError);
    }
  });
  it("throws on item mismatch", () => {
    const t = v.array(v.string());
    expect(() => t.parse([1])).to.throw(v.ValitaError);
  });
  it("returns the original array instance if possible", () => {
    const t = v.array(v.number());
    const a = [1];
    expect(t.parse(a)).to.equal(a);
  });
  it("returns a new array instance if the items change", () => {
    const t = v.array(v.number().map(() => "test"));
    const a = [1];
    expect(t.parse(a)).to.not.equal(a);
  });
  it("infers array", () => {
    const t = v.array(v.number());
    expectType(t).toImply<number[]>(true);
  });
});
describe("tuple()", () => {
  it("accepts arrays", () => {
    const t = v.tuple([v.number(), v.number()]);
    expect(t.parse([1, 1])).to.deep.equal([1, 1]);
  });
  it("rejects non-arrays", () => {
    const t = v.tuple([v.number(), v.number()]);
    expect(() => t.parse(1))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        path: [],
        expected: ["array"],
      });
  });
  it("accepts tuples of different types", () => {
    const t = v.tuple([v.number(), v.string()]);
    expect(t.parse([1, "string"])).to.deep.equal([1, "string"]);
  });
  it("throws on item mismatch", () => {
    const t = v.tuple([v.number(), v.string()]);
    expect(() => t.parse([1, 1]))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        path: [1],
        expected: ["string"],
      });
  });
  it("throws on length mismatch", () => {
    const t = v.tuple([v.number()]);
    expect(() => t.parse([1, 1]))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_length",
        path: [],
        minLength: 1,
        maxLength: 1,
      });
  });
  it("infers tuple", () => {
    const t = v.tuple([v.number(), v.string()]);
    expectType(t).toImply<[number, string]>(true);
  });
  it("returns the original array instance if possible", () => {
    const t = v.tuple([v.number(), v.number()]);
    const a = [1, 2];
    expect(t.parse(a)).to.equal(a);
  });
  it("returns a new array instance if the items change", () => {
    const t = v.tuple([v.number().map(() => "test"), v.number()]);
    const a = [1, 2];
    expect(t.parse(a)).to.not.equal(a);
  });
});
describe("union()", () => {
  it("accepts two subvalidators", () => {
    const t = v.union(v.string(), v.number());
    expect(t.parse("test")).to.equal("test");
    expect(t.parse(1)).to.equal(1);
    expect(() => t.parse({})).to.throw(v.ValitaError);
  });
  it("ignores never()", () => {
    const t = v.union(v.string(), v.never());
    expect(t.parse("test")).to.equal("test");
    expect(() => t.parse(1)).to.throw(v.ValitaError);
    expectType(t).toImply<string>(true);
  });
  it("picks the first successful parse", () => {
    const t = v.union(
      v
        .string()
        .map(() => 1)
        .assert(() => false),
      v.string().map(() => 2)
    );
    expect(t.parse("test")).to.equal(2);
  });
  it("respects the order of overlapping parsers", () => {
    const a = v.literal(1).map(() => "literal");
    const b = v.number().map(() => "number");
    const c = v.unknown().map(() => "unknown");
    const u = v.union;
    expect(u(a, b, c).parse(1)).to.equal("literal");
    expect(u(a, c, b).parse(1)).to.equal("literal");
    expect(u(b, a, c).parse(1)).to.equal("number");
    expect(u(b, c, a).parse(1)).to.equal("number");
    expect(u(c, b, a).parse(1)).to.equal("unknown");
    expect(u(c, a, b).parse(1)).to.equal("unknown");
  });
  it("deduplicates strictly equal parsers", () => {
    const a = v.unknown().assert(() => false, "test");
    expect(() => v.union(a, a).parse(1))
      .to.throw(v.ValitaError)
      .with.property("issues")
      .with.lengthOf(1);
  });
  it("keeps the matching order when deduplicating", () => {
    const a = v.unknown().map(() => "a");
    const b = v.unknown().map(() => "b");
    expect(v.union(a, b, a).parse(1)).to.equal("a");
  });
  it("accepts more than two subvalidators", () => {
    const t = v.union(
      v.string(),
      v.number(),
      v.null(),
      v.undefined(),
      v.boolean()
    );
    expect(t.parse("test")).to.equal("test");
    expect(t.parse(1)).to.equal(1);
    expect(t.parse(null)).to.equal(null);
    expect(t.parse(undefined)).to.equal(undefined);
    expect(t.parse(true)).to.equal(true);
    expect(() => t.parse({})).to.throw(v.ValitaError);
  });
  it("reports the expected type even for literals when the base type doesn't match", () => {
    const t = v.union(v.literal(1), v.literal("test"));
    expect(() => t.parse(true))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        expected: ["number", "string"],
      });
  });
  it("reports the expected literals when the base type matches", () => {
    const t = v.union(v.literal(1), v.literal("test"));
    expect(() => t.parse(2))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_literal",
        expected: [1, "test"],
      });
  });
  it("reports the errors from a branch that doesn't overlap with any other branch", () => {
    const t = v.union(v.literal(1), v.number(), v.object({ a: v.number() }));
    expect(() => t.parse({ a: "test" }))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        path: ["a"],
        expected: ["number"],
      });
  });
  it("matches unknowns if nothing else matches", () => {
    const t = v.union(
      v.literal(1),
      v.literal(2),
      v.unknown().assert(() => false, "test")
    );
    expect(() => t.parse({ a: 1 }))
.to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "custom_error",
        error: "test",
      });
  });
  it("considers never() to not overlap with anything", () => {
    const t = v.union(
      v.never(),
      v.unknown().assert(() => false, "unknown")
    );
    expect(() => t.parse(2))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "custom_error",
        error: "unknown",
      });
  });
  it("considers unknown() to overlap with everything except never()", () => {
    const t = v.union(
      v.literal(1),
      v.literal(2).assert(() => false),
      v.unknown().assert(() => false)
    );
    expect(() => t.parse(2))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_union",
      });
  });
  it("considers unknown() to overlap with objects", () => {
    const t = v.union(
      v.unknown(),
      v.object({ type: v.literal("a") }),
      v.object({ type: v.literal("b") })
    );
    expect(t.parse({ type: "c" })).to.deep.equal({ type: "c" });
  });
  it("considers array() and tuple() to overlap", () => {
    const t = v.union(v.array(v.number()), v.tuple([v.string()]));
    expect(() => t.parse(2))
      .to.throw(v.ValitaError)
      .with.nested.property("issues[0]")
      .that.deep.includes({
        code: "invalid_type",
        expected: ["array"],
      });
  });
  // Object unions: when branches don't overlap on a key they get
  // discriminated and a precise issue is reported; overlapping branches
  // fall back to a generic invalid_union issue.
  describe("of objects", () => {
    it("discriminates based on base types", () => {
      const t = v.union(
        v.object({ type: v.number() }),
        v.object({ type: v.string() })
      );
      expect(() => t.parse({ type: true }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_type",
          path: ["type"],
          expected: ["number", "string"],
        });
    });
    it("discriminates based on literal values", () => {
      const t = v.union(
        v.object({ type: v.literal(1) }),
        v.object({ type: v.literal(2) })
      );
      expect(() => t.parse({ type: 3 }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_literal",
          path: ["type"],
          expected: [1, 2],
        });
    });
    it("reports expected types in the order they were first listed", () => {
      const t1 = v.union(v.literal(2), v.string(), v.literal(2));
      expect(() => t1.parse(true))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_type",
          path: [],
          expected: ["number", "string"],
        });
      const t2 = v.union(v.string(), v.literal(2), v.string());
      expect(() => t2.parse(true))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_type",
          path: [],
          expected: ["string", "number"],
        });
    });
    it("reports expected literals in the order they were first listed", () => {
      const t1 = v.union(v.literal(2), v.literal(1), v.literal(2));
      expect(() => t1.parse(3))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_literal",
          path: [],
          expected: [2, 1],
        });
      const t2 = v.union(v.literal(1), v.literal(2), v.literal(1));
      expect(() => t2.parse(3))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_literal",
          path: [],
          expected: [1, 2],
        });
    });
    it("discriminates based on mixture of base types and literal values", () => {
      const t = v.union(
        v.object({ type: v.literal(1) }),
        v.object({ type: v.string() })
      );
      expect(() => t.parse({ type: true }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_type",
          path: ["type"],
          expected: ["number", "string"],
        });
    });
    it("considers unknown() to overlap with everything except never()", () => {
      const t = v.union(
        v.object({ type: v.literal(1) }),
        v.object({ type: v.unknown().assert(() => false) })
      );
      expect(() => t.parse({ type: "test" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({ code: "invalid_union" });
    });
    it("considers literals to overlap with their base types", () => {
      const t = v.union(
        v.object({ type: v.literal(1) }),
        v.object({ type: v.number() })
      );
      expect(() => t.parse({ type: "test" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({ code: "invalid_union" });
    });
    it("considers equal literals to overlap", () => {
      const t = v.union(
        v.object({ type: v.literal(1) }),
        v.object({ type: v.literal(1) })
      );
      expect(() => t.parse({ type: "test" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({ code: "invalid_union" });
    });
    it("folds multiple overlapping types together in same branch", () => {
      const t = v.union(
        v.object({
          type: v.union(v.string(), v.union(v.string(), v.literal("test"))),
        }),
        v.object({
          type: v.union(v.literal(2), v.undefined()),
          other: v.literal("test"),
        })
      );
      expect(() => t.parse({ type: 2, other: "not_test" }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0]")
        .that.deep.includes({
          code: "invalid_literal",
          path: ["other"],
          expected: ["test"],
        });
    });
    it("considers two optionals to overlap", () => {
      const t = v.union(
        v.object({ type: v.literal(1).optional() }),
        v.object({ type: v.literal(2).optional() })
      );
      expect(() => t.parse({ type: 3 }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0].code", "invalid_union");
    });
    it("considers two optionals and undefineds to overlap", () => {
      const t = v.union(
        v.object({ type: v.undefined() }),
        v.object({ type: v.literal(2).optional() })
      );
      expect(() => t.parse({ type: 3 }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0].code", "invalid_union");
    });
    it("considers two unions with partially same types to overlap", () => {
      const t = v.union(
        v.object({ type: v.union(v.literal(1), v.literal(2)) }),
        v.object({ type: v.union(v.literal(2), v.literal(3)) })
      );
      expect(() => t.parse({ type: 4 }))
        .to.throw(v.ValitaError)
        .with.nested.property("issues[0].code", "invalid_union");
    });
  });
});
describe("lazy()", () => {
  it("allows recursive type definitions", () => {
    type T =
      | undefined
      | {
          t: T;
        };
    const t: v.Type<T> = v.lazy(() => v.union(v.undefined(), v.object({ t })));
    expectType(t).toImply<T>(true);
  });
  it("allows mutually recursive type definitions", () => {
    type A =
      | undefined
      | {
          b: B;
        };
type B = undefined | A[];
    const a: v.Type<A> = v.lazy(() => v.union(v.undefined(), v.object({ b })));
    const b: v.Type<B> = v.lazy(() => v.union(v.undefined(), v.array(a)));
    expectType(a).toImply<A>(true);
    expectType(b).toImply<B>(true);
  });
  it("fail typecheck on conflicting return type", () => {
    type T =
      | undefined
      | {
          t: T;
        };
    expectType(
      v.lazy(() => v.union(v.undefined(), v.object({ t: v.number() })))
    ).toBeAssignableTo<v.Type<T>>(false);
  });
  it("parses recursively", () => {
    type T =
      | undefined
      | {
          t: T;
        };
    const t: v.Type<T> = v.lazy(() => v.union(v.undefined(), v.object({ t })));
    expect(t.parse({ t: { t: { t: undefined } } })).to.deep.equal({
      t: { t: { t: undefined } },
    });
    expect(() => t.parse({ t: { t: { t: 1 } } })).to.throw(
      v.ValitaError,
      "invalid_type at .t.t.t (expected undefined or object)"
    );
  });
  // NOTE(review): duplicate test title "parses recursively" — this variant
  // uses .optional() instead of a union with undefined().
  it("parses recursively", () => {
    type T = {
      t?: T;
    };
    const t: v.Type<T> = v.lazy(() => v.object({ t: t.optional() }));
    expect(t.parse({ t: { t: { t: undefined } } })).to.deep.equal({
      t: { t: { t: undefined } },
    });
    expect(() => t.parse({ t: { t: { t: 1 } } })).to.throw(
      v.ValitaError,
      "invalid_type at .t.t.t (expected object)"
    );
  });
});
describe("ok()", () => {
  it("infers literals when possible", () => {
    const t = v.number().chain(() => v.ok("test"));
    expectType(t).toImply<"test">(true);
  });
});
describe("ValitaResult", () => {
  describe("issues", () => {
    it("lists issues", () => {
      const result = v.bigint().try("test");
      expect(!result.ok && result.issues).to.deep.equal([
        {
          path: [],
          code: "invalid_type",
          expected: ["bigint"],
        },
      ]);
    });
    it("supports multiple issues", () => {
      const result = v
        .object({ a: v.bigint(), b: v.string() })
        .try({ a: "test", b: 1 });
      expect(!result.ok && result.issues).to.have.deep.members([
        {
          path: ["a"],
          code: "invalid_type",
          expected: ["bigint"],
        },
        {
          path: ["b"],
          code: "invalid_type",
          expected: ["string"],
        },
      ]);
    });
    it("caches the issues list", () => {
      const result = v.bigint().try("test");
      expect(!result.ok && result.issues).to.equal(!result.ok && result.issues);
    });
  });
  describe("message", () => {
    it("describes the issue when there's only one issue", () => {
      const result = v.bigint().try("test");
      expect(!result.ok && result.message).to.equal(
        "invalid_type at . (expected bigint)"
      );
    });
    it("describes the leftmost issue when there are two issues", () => {
      const result = v.tuple([v.bigint(), v.string()]).try(["test", 1]);
      expect(!result.ok && result.message).to.equal(
        "invalid_type at .0 (expected bigint) (+ 1 other issue)"
      );
    });
    it("describes the leftmost issue when there are more than two issues", () => {
      const result = v
        .tuple([v.bigint(), v.string(), v.number()])
        .try(["test", 1, "other"]);
      expect(!result.ok && result.message).to.equal(
        "invalid_type at .0 (expected bigint) (+ 2 other issues)"
      );
    });
  });
  describe("throw", () => {
    it("throws a corresponding ValitaError", () => {
      const result = v.bigint().try("test");
      expect(() => !result.ok && result.throw())
        .to.throw(v.ValitaError)
        .with.deep.property("issues", !result.ok && result.issues);
    });
  });
});
describe("ValitaError", () => {
  // Errors carry an internal issue tree ("join"/"prepend" nodes) that is
  // flattened into the public `issues` list.
  const error = new v.ValitaError({
    code: "invalid_type",
    expected: ["bigint"],
  });
  it("is derived from Error", () => {
    expect(error).to.be.instanceof(Error);
  });
  it("has a name", () => {
    expect(error.name).to.equal("ValitaError");
  });
  describe("issues", () => {
    it("lists issues", () => {
      expect(error.issues).to.deep.equal([
        {
          path: [],
          code: "invalid_type",
          expected: ["bigint"],
        },
      ]);
    });
    it("supports multiple issues", () => {
      const error = new v.ValitaError({
        code: "join",
        left: {
          code: "invalid_type",
          expected: ["bigint"],
        },
        right: {
          code: "prepend",
          key: "first",
          tree: {
            code: "invalid_type",
            expected: ["string"],
          },
        },
      });
      expect(error.issues).to.deep.equal([
        {
          path: [],
          code: "invalid_type",
          expected: ["bigint"],
        },
        {
          path: ["first"],
          code: "invalid_type",
          expected: ["string"],
        },
      ]);
    });
    it("caches the issues list", () => {
      expect(error.issues).to.equal(error.issues);
    });
  });
  describe("message", () => {
    it("describes the issue when there's only one issue", () => {
      expect(error.message).to.equal("invalid_type at . (expected bigint)");
    });
    it("describes the leftmost issue when there are two issues", () => {
      const error = new v.ValitaError({
        code: "join",
        left: {
          code: "invalid_type",
          expected: ["bigint"],
        },
        right: {
          code: "prepend",
          key: "first",
          tree: {
            code: "invalid_type",
            expected: ["string"],
          },
        },
      });
      expect(error.message).to.equal(
        "invalid_type at . (expected bigint) (+ 1 other issue)"
      );
    });
    it("describes the leftmost issue when there are more than two issues", () => {
      const error = new v.ValitaError({
        code: "join",
        left: {
          code: "invalid_type",
          expected: ["bigint"],
        },
        right: {
          code: "join",
          left: {
            code: "invalid_type",
            expected: ["bigint"],
          },
          right: {
            code: "prepend",
            key: "first",
            tree: {
              code: "invalid_type",
              expected: ["string"],
            },
          },
        },
      });
      expect(error.message).to.equal(
        "invalid_type at . (expected bigint) (+ 2 other issues)"
      );
    });
  });
});
the_stack
import { CodeStringParser } from "../../src/shared/schema"; import { EditorState, TextSelection } from "prosemirror-state"; import { commonmarkSchema } from "../../src/shared/schema"; import * as commands from "../../src/commonmark/commands"; import { MenuCommand } from "../../src/shared/menu"; import { getSelectedText } from "../test-helpers"; /** * Creates a state with the content optionally selected if selectFrom/To are passed * @param content the document content * @param selectFrom string index to select from * @param selectTo string index to select to */ function createState( content: string, selectFrom?: number, selectTo?: number ): EditorState { const doc = CodeStringParser.fromSchema(commonmarkSchema).parseCode( content ); let selection: TextSelection = undefined; if (typeof selectFrom !== "undefined") { // if selectTo not set, then this is not a selection, but a cursor position if (typeof selectTo === "undefined") { selectTo = selectFrom; } // document vs string offset is different, adjust selectFrom = selectFrom + 1; selectTo = selectTo + 1; selection = TextSelection.create(doc, selectFrom, selectTo); } return EditorState.create({ doc: doc, schema: commonmarkSchema, selection: selection, }); } /** * Creates a state with all the content selected */ function createSelectedState(content: string) { const selectFrom = 0; const selectTo = content.length; return createState(content, selectFrom, selectTo); } /** * Applies a command to the state and expects the entire doc to resemble * `expected` and the selected text to resemble `expectedSelected` */ function expectTransactionSuccess( state: EditorState, command: MenuCommand, expected: string, expectedSelected: string ) { let newState = state; const isValid = command(state, (t) => { newState = state.apply(t); }); expect(isValid).toBeTruthy(); expect(newState.doc.textContent).toEqual(expected); // if no text is passed to check for selection, // assume the test implies that the selection is empty if 
(!expectedSelected) { expect(newState.selection.empty).toBeTruthy(); } else { const selectedText = getSelectedText(newState); expect(selectedText).toEqual(expectedSelected); } } declare global { // Disable eslint warning, this is what the docs say to do // eslint-disable-next-line @typescript-eslint/no-namespace namespace jest { interface Matchers<R> { transactionSuccess( command: MenuCommand, expected: string, expectedSelected: string ): R; } } } expect.extend({ transactionSuccess( state: EditorState, command: MenuCommand, expected: string, expectedSelected: string ) { expectTransactionSuccess(state, command, expected, expectedSelected); return { message: () => "", pass: true, }; }, }); describe("commonmark editor commands", () => { // Gotta test the test it("should prep correctly", () => { const content = "this is a\ntest"; // check the "select all text" version let state = createSelectedState(content); let selectedText = getSelectedText(state); expect(selectedText).toEqual(content); // check the "select some text" version state = createState(content, 0, 4); selectedText = getSelectedText(state); expect(selectedText).toEqual(content.slice(0, 4)); // check the "select no text" version state = createState(content); expect(state.selection.empty).toBeTruthy(); }); describe("wrapInCommand", () => { it("should wrap in character", () => { const content = "some text"; const result = "**some text**"; const state = createSelectedState(content); expect(state).transactionSuccess( commands.wrapInCommand("**"), result, result ); }); it("should unwrap character", () => { const content = "**some text**"; const result = "some text"; const state = createSelectedState(content); expect(state).transactionSuccess( commands.wrapInCommand("**"), result, result ); }); it("should inject character", () => { const state = createState(""); expect(state).transactionSuccess( commands.wrapInCommand("**"), "**your text**", "your text" ); }); }); describe("blockWrapInCommand", () => { it("should 
wrap single line in block characters", () => { const content = "some text"; const expectedSelection = `\`\`\` some text \`\`\``; const expectedContent = "\n" + expectedSelection; const state = createSelectedState(content); expect(state).transactionSuccess( commands.blockWrapInCommand("```"), expectedContent, expectedSelection ); }); it("should unwrap block", () => { const content = `\`\`\` some text \`\`\``; const expectedContent = "some text"; const state = createSelectedState(content); expect(state).transactionSuccess( commands.blockWrapInCommand("```"), expectedContent, expectedContent ); }); it("should insert if selection is empty", () => { const content = "some text"; const expectedSelection = "type here"; const expectedContent = ` \`\`\` type here \`\`\` some text`; const state = createState(content, 0, 0); expect(state).transactionSuccess( commands.blockWrapInCommand("```"), expectedContent, expectedSelection ); }); }); describe("insertRawTextCommand", () => { it("should insert raw text and select", () => { const state = createState("test"); const command = commands.insertRawTextCommand( "this is some new text", 13, 16 ); expect(state).transactionSuccess( command, "this is some new texttest", "new" ); }); it("should insert raw text and not select", () => { const state = createState("test"); const command = commands.insertRawTextCommand( "this is some new text" ); expect(state).transactionSuccess( command, "this is some new texttest", null ); }); it("should replace existing text", () => { const state = createState("test REPLACE text", 5, 12); const command = commands.insertRawTextCommand("INSERTED"); expect(state).transactionSuccess( command, "test INSERTED text", null ); }); }); describe("setBlockTypeCommand", () => { /* SET */ describe("should set from no block type", () => { it("single line, cursor at start of line", () => { const state = createState("test"); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "> 
test", null); }); it("single line, cursor at arbitrary position", () => { const state = createState("test", 2); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "> test", null); }); it("multi line, cursor at start of line", () => { const state = createState( "test\nthis is a test\nuntouched line", 5 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\n> this is a test\nuntouched line", null ); }); it("multi line, cursor at arbitrary position", () => { const state = createState( "test\nthis is a test\nuntouched line", 10 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\n> this is a test\nuntouched line", null ); }); it("single line, arbitary selection", () => { const state = createState("test", 2, 4); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "> test", null); }); it("multi line, arbitary selection spanning multiple lines, insert newline", () => { const state = createState( "test\n\nthis is a test\nuntouched line", 2, 10 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "te\n> st\n> \n> this is a test\nuntouched line", "> st\n> \n> this" ); }); it("multi line, arbitary selection spanning multiple lines, don't insert newline", () => { const state = createState( "test\n\nthis is a test\nuntouched line", 0, 10 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "> test\n> \n> this is a test\nuntouched line", "> test\n> \n> this" ); }); it("multi line, arbitary selection spanning multiple lines, partial exists", () => { const state = createState( "test\n> \nthis is a test\nuntouched line", 2, 12 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "te\n> st\n> \n> this is a test\nuntouched line", "> st\n> \n> this" ); }); it("multi line, 
arbitary selection spanning multiple lines, partial exists + swap", () => { const state = createState( "test\n# \nthis is a test\nuntouched line", 2, 12 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "te\n> st\n> \n> this is a test\nuntouched line", "> st\n> \n> this" ); }); }); /* UNSET */ describe("should unset same block type", () => { it("single line, cursor at start of line", () => { const state = createState("> test"); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "test", null); }); it("single line, cursor at arbitrary position", () => { const state = createState("> test", 3); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "test", null); }); it("multi line, cursor at start of line", () => { const state = createState( "test\n> this is a test\nuntouched line", 5 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\nthis is a test\nuntouched line", null ); }); it("multi line, cursor at arbitrary position", () => { const state = createState( "test\n> this is a test\nuntouched line", 10 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\nthis is a test\nuntouched line", null ); }); it("single line, arbitary selection", () => { const state = createState("> test", 2, 4); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "test", "te"); }); it("multi line, arbitary selection spanning multiple lines", () => { const state = createState( "test\n> \n> this is a test\nuntouched line", 5, 12 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\n\nthis is a test\nuntouched line", "\nth" ); }); }); /* SWAP */ describe("should swap other block type", () => { it("single line, cursor at start of line", () => { const state = createState("# test"); 
const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "> test", null); }); it("single line, cursor at arbitrary position", () => { const state = createState("# test", 2); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "> test", null); }); it("multi line, cursor at start of line", () => { const state = createState( "test\n# this is a test\nuntouched line", 5 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\n> this is a test\nuntouched line", null ); }); it("multi line, cursor at arbitrary position", () => { const state = createState( "test\n# this is a test\nuntouched line", 10 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "test\n> this is a test\nuntouched line", null ); }); it("single line, arbitary selection", () => { const state = createState("# test", 2, 4); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess(command, "> test", null); }); it("multi line, arbitary selection spanning multiple lines", () => { const state = createState( "te\n# st\n# \n# this is a test\nuntouched line", 3, 17 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "te\n> st\n> \n> this is a test\nuntouched line", "> st\n> \n> this" ); }); it("multi line, arbitary selection spanning multiple lines, partial exists", () => { const state = createState( "te\n# st\n> \n# this is a test\nuntouched line", 3, 17 ); const command = commands.setBlockTypeCommand(">"); expect(state).transactionSuccess( command, "te\n> st\n> \n> this is a test\nuntouched line", "> st\n> \n> this" ); }); }); }); });
the_stack
// NOTE(review): this file matches the Pulumi AWS provider codegen pattern
// (`__pulumiType`, `/*out*/` markers) — presumably auto-generated; prefer
// regenerating from the provider schema over hand-editing. Verify before
// making manual changes.
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs, enums } from "../types";
import * as utilities from "../utilities";

/**
 * Manages an Amazon FSx for NetApp ONTAP file system.
 * See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/what-is-fsx-ontap.html) for more information.
 *
 * ## Example Usage
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as aws from "@pulumi/aws";
 *
 * const test = new aws.fsx.OntapFileSystem("test", {
 *     storageCapacity: 1024,
 *     subnetIds: [
 *         aws_subnet.test1.id,
 *         aws_subnet.test2.id,
 *     ],
 *     deploymentType: "MULTI_AZ_1",
 *     throughputCapacity: 512,
 *     preferredSubnetId: aws_subnet.test1.id,
 * });
 * ```
 *
 * ## Import
 *
 * FSx File Systems can be imported using the `id`, e.g.
 *
 * ```sh
 *  $ pulumi import aws:fsx/ontapFileSystem:OntapFileSystem example fs-543ab12b1ca672f33
 * ```
 *
 * Certain resource arguments, like `security_group_ids`, do not have a FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference, e.g.
 *
 * ```terraform
 * resource "aws_fsx_ontap_file_system" "example" {
 *   # ... other configuration ...
 *   security_group_ids = [aws_security_group.example.id]
 *
 *   # There is no FSx API for reading security_group_ids
 *   lifecycle {
 *     ignore_changes = [security_group_ids]
 *   }
 * }
 * ```
 */
export class OntapFileSystem extends pulumi.CustomResource {
    /**
     * Get an existing OntapFileSystem resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: OntapFileSystemState, opts?: pulumi.CustomResourceOptions): OntapFileSystem {
        return new OntapFileSystem(name, <any>state, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'aws:fsx/ontapFileSystem:OntapFileSystem';

    /**
     * Returns true if the given object is an instance of OntapFileSystem.  This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is OntapFileSystem {
        if (obj === undefined || obj === null) {
            return false;
        }
        return obj['__pulumiType'] === OntapFileSystem.__pulumiType;
    }

    /**
     * Amazon Resource Name of the file system.
     */
    public /*out*/ readonly arn!: pulumi.Output<string>;
    /**
     * The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
     */
    public readonly automaticBackupRetentionDays!: pulumi.Output<number | undefined>;
    /**
     * A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automaticBackupRetentionDays` to be set.
     */
    public readonly dailyAutomaticBackupStartTime!: pulumi.Output<string>;
    /**
     * - The filesystem deployment type. Only `MULTI_AZ_1` is supported.
     */
    public readonly deploymentType!: pulumi.Output<string>;
    /**
     * The SSD IOPS configuration for the Amazon FSx for NetApp ONTAP file system. See Disk Iops Configuration Below.
     */
    public readonly diskIopsConfiguration!: pulumi.Output<outputs.fsx.OntapFileSystemDiskIopsConfiguration>;
    /**
     * The Domain Name Service (DNS) name for the file system. You can mount your file system using its DNS name.
     */
    public /*out*/ readonly dnsName!: pulumi.Output<string>;
    /**
     * Specifies the IP address range in which the endpoints to access your file system will be created. By default, Amazon FSx selects an unused IP address range for you from the 198.19.* range.
     */
    public readonly endpointIpAddressRange!: pulumi.Output<string>;
    /**
     * The endpoints that are used to access data or to manage the file system using the NetApp ONTAP CLI, REST API, or NetApp SnapMirror. See Endpoints below.
     */
    public /*out*/ readonly endpoints!: pulumi.Output<outputs.fsx.OntapFileSystemEndpoint[]>;
    /**
     * The ONTAP administrative password for the fsxadmin user that you can use to administer your file system using the ONTAP CLI and REST API.
     */
    public readonly fsxAdminPassword!: pulumi.Output<string | undefined>;
    /**
     * ARN for the KMS Key to encrypt the file system at rest, Defaults to an AWS managed KMS Key.
     */
    public readonly kmsKeyId!: pulumi.Output<string>;
    /**
     * Set of Elastic Network Interface identifiers from which the file system is accessible The first network interface returned is the primary network interface.
     */
    public /*out*/ readonly networkInterfaceIds!: pulumi.Output<string[]>;
    /**
     * AWS account identifier that created the file system.
     */
    public /*out*/ readonly ownerId!: pulumi.Output<string>;
    /**
     * The ID for a subnet. A subnet is a range of IP addresses in your virtual private cloud (VPC).
     */
    public readonly preferredSubnetId!: pulumi.Output<string>;
    /**
     * Specifies the VPC route tables in which your file system's endpoints will be created. You should specify all VPC route tables associated with the subnets in which your clients are located. By default, Amazon FSx selects your VPC's default route table.
     */
    public readonly routeTableIds!: pulumi.Output<string[]>;
    /**
     * A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
     */
    public readonly securityGroupIds!: pulumi.Output<string[] | undefined>;
    /**
     * The storage capacity (GiB) of the file system. Valid values between `1024` and `196608`.
     */
    public readonly storageCapacity!: pulumi.Output<number | undefined>;
    /**
     * - The filesystem storage type. defaults to `SSD`.
     */
    public readonly storageType!: pulumi.Output<string | undefined>;
    /**
     * A list of IDs for the subnets that the file system will be accessible from. Exactly 2 subnets need to be provided.
     */
    public readonly subnetIds!: pulumi.Output<string[]>;
    /**
     * A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
     */
    public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
     */
    public readonly tagsAll!: pulumi.Output<{[key: string]: string}>;
    // NOTE(review): the upstream schema provides no description for this
    // required property — presumably the file system throughput capacity
    // (MB/s); confirm against the AWS FSx ONTAP documentation.
    public readonly throughputCapacity!: pulumi.Output<number>;
    /**
     * Identifier of the Virtual Private Cloud for the file system.
     */
    public /*out*/ readonly vpcId!: pulumi.Output<string>;
    /**
     * The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
     */
    public readonly weeklyMaintenanceStartTime!: pulumi.Output<string>;

    /**
     * Create a OntapFileSystem resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: OntapFileSystemArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: OntapFileSystemArgs | OntapFileSystemState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        // `opts.id` set => lookup/rehydration path: argsOrState carries the
        // previously recorded state.
        if (opts.id) {
            const state = argsOrState as OntapFileSystemState | undefined;
            inputs["arn"] = state ? state.arn : undefined;
            inputs["automaticBackupRetentionDays"] = state ? state.automaticBackupRetentionDays : undefined;
            inputs["dailyAutomaticBackupStartTime"] = state ? state.dailyAutomaticBackupStartTime : undefined;
            inputs["deploymentType"] = state ? state.deploymentType : undefined;
            inputs["diskIopsConfiguration"] = state ? state.diskIopsConfiguration : undefined;
            inputs["dnsName"] = state ? state.dnsName : undefined;
            inputs["endpointIpAddressRange"] = state ? state.endpointIpAddressRange : undefined;
            inputs["endpoints"] = state ? state.endpoints : undefined;
            inputs["fsxAdminPassword"] = state ? state.fsxAdminPassword : undefined;
            inputs["kmsKeyId"] = state ? state.kmsKeyId : undefined;
            inputs["networkInterfaceIds"] = state ? state.networkInterfaceIds : undefined;
            inputs["ownerId"] = state ? state.ownerId : undefined;
            inputs["preferredSubnetId"] = state ? state.preferredSubnetId : undefined;
            inputs["routeTableIds"] = state ? state.routeTableIds : undefined;
            inputs["securityGroupIds"] = state ? state.securityGroupIds : undefined;
            inputs["storageCapacity"] = state ? state.storageCapacity : undefined;
            inputs["storageType"] = state ? state.storageType : undefined;
            inputs["subnetIds"] = state ? state.subnetIds : undefined;
            inputs["tags"] = state ? state.tags : undefined;
            inputs["tagsAll"] = state ? state.tagsAll : undefined;
            inputs["throughputCapacity"] = state ? state.throughputCapacity : undefined;
            inputs["vpcId"] = state ? state.vpcId : undefined;
            inputs["weeklyMaintenanceStartTime"] = state ? state.weeklyMaintenanceStartTime : undefined;
        } else {
            // Creation path: validate required args (skipped when `opts.urn`
            // is set, i.e. the engine is reconstructing an existing resource).
            const args = argsOrState as OntapFileSystemArgs | undefined;
            if ((!args || args.deploymentType === undefined) && !opts.urn) {
                throw new Error("Missing required property 'deploymentType'");
            }
            if ((!args || args.preferredSubnetId === undefined) && !opts.urn) {
                throw new Error("Missing required property 'preferredSubnetId'");
            }
            if ((!args || args.subnetIds === undefined) && !opts.urn) {
                throw new Error("Missing required property 'subnetIds'");
            }
            if ((!args || args.throughputCapacity === undefined) && !opts.urn) {
                throw new Error("Missing required property 'throughputCapacity'");
            }
            inputs["automaticBackupRetentionDays"] = args ? args.automaticBackupRetentionDays : undefined;
            inputs["dailyAutomaticBackupStartTime"] = args ? args.dailyAutomaticBackupStartTime : undefined;
            inputs["deploymentType"] = args ? args.deploymentType : undefined;
            inputs["diskIopsConfiguration"] = args ? args.diskIopsConfiguration : undefined;
            inputs["endpointIpAddressRange"] = args ? args.endpointIpAddressRange : undefined;
            inputs["fsxAdminPassword"] = args ? args.fsxAdminPassword : undefined;
            inputs["kmsKeyId"] = args ? args.kmsKeyId : undefined;
            inputs["preferredSubnetId"] = args ? args.preferredSubnetId : undefined;
            inputs["routeTableIds"] = args ? args.routeTableIds : undefined;
            inputs["securityGroupIds"] = args ? args.securityGroupIds : undefined;
            inputs["storageCapacity"] = args ? args.storageCapacity : undefined;
            inputs["storageType"] = args ? args.storageType : undefined;
            inputs["subnetIds"] = args ? args.subnetIds : undefined;
            inputs["tags"] = args ? args.tags : undefined;
            inputs["tagsAll"] = args ? args.tagsAll : undefined;
            inputs["throughputCapacity"] = args ? args.throughputCapacity : undefined;
            inputs["weeklyMaintenanceStartTime"] = args ? args.weeklyMaintenanceStartTime : undefined;
            // Output-only properties are never set on creation.
            inputs["arn"] = undefined /*out*/;
            inputs["dnsName"] = undefined /*out*/;
            inputs["endpoints"] = undefined /*out*/;
            inputs["networkInterfaceIds"] = undefined /*out*/;
            inputs["ownerId"] = undefined /*out*/;
            inputs["vpcId"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(OntapFileSystem.__pulumiType, name, inputs, opts);
    }
}

/**
 * Input properties used for looking up and filtering OntapFileSystem resources.
 */
export interface OntapFileSystemState {
    /**
     * Amazon Resource Name of the file system.
     */
    arn?: pulumi.Input<string>;
    /**
     * The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
     */
    automaticBackupRetentionDays?: pulumi.Input<number>;
    /**
     * A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automaticBackupRetentionDays` to be set.
     */
    dailyAutomaticBackupStartTime?: pulumi.Input<string>;
    /**
     * - The filesystem deployment type. Only `MULTI_AZ_1` is supported.
     */
    deploymentType?: pulumi.Input<string>;
    /**
     * The SSD IOPS configuration for the Amazon FSx for NetApp ONTAP file system. See Disk Iops Configuration Below.
     */
    diskIopsConfiguration?: pulumi.Input<inputs.fsx.OntapFileSystemDiskIopsConfiguration>;
    /**
     * The Domain Name Service (DNS) name for the file system. You can mount your file system using its DNS name.
     */
    dnsName?: pulumi.Input<string>;
    /**
     * Specifies the IP address range in which the endpoints to access your file system will be created. By default, Amazon FSx selects an unused IP address range for you from the 198.19.* range.
     */
    endpointIpAddressRange?: pulumi.Input<string>;
    /**
     * The endpoints that are used to access data or to manage the file system using the NetApp ONTAP CLI, REST API, or NetApp SnapMirror. See Endpoints below.
     */
    endpoints?: pulumi.Input<pulumi.Input<inputs.fsx.OntapFileSystemEndpoint>[]>;
    /**
     * The ONTAP administrative password for the fsxadmin user that you can use to administer your file system using the ONTAP CLI and REST API.
     */
    fsxAdminPassword?: pulumi.Input<string>;
    /**
     * ARN for the KMS Key to encrypt the file system at rest, Defaults to an AWS managed KMS Key.
     */
    kmsKeyId?: pulumi.Input<string>;
    /**
     * Set of Elastic Network Interface identifiers from which the file system is accessible The first network interface returned is the primary network interface.
     */
    networkInterfaceIds?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * AWS account identifier that created the file system.
     */
    ownerId?: pulumi.Input<string>;
    /**
     * The ID for a subnet. A subnet is a range of IP addresses in your virtual private cloud (VPC).
     */
    preferredSubnetId?: pulumi.Input<string>;
    /**
     * Specifies the VPC route tables in which your file system's endpoints will be created. You should specify all VPC route tables associated with the subnets in which your clients are located. By default, Amazon FSx selects your VPC's default route table.
     */
    routeTableIds?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
     */
    securityGroupIds?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The storage capacity (GiB) of the file system. Valid values between `1024` and `196608`.
     */
    storageCapacity?: pulumi.Input<number>;
    /**
     * - The filesystem storage type. defaults to `SSD`.
     */
    storageType?: pulumi.Input<string>;
    /**
     * A list of IDs for the subnets that the file system will be accessible from. Exactly 2 subnets need to be provided.
     */
    subnetIds?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    // NOTE(review): no upstream description — see the matching note on the
    // class property above.
    throughputCapacity?: pulumi.Input<number>;
    /**
     * Identifier of the Virtual Private Cloud for the file system.
     */
    vpcId?: pulumi.Input<string>;
    /**
     * The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
     */
    weeklyMaintenanceStartTime?: pulumi.Input<string>;
}

/**
 * The set of arguments for constructing a OntapFileSystem resource.
 */
export interface OntapFileSystemArgs {
    /**
     * The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
     */
    automaticBackupRetentionDays?: pulumi.Input<number>;
    /**
     * A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automaticBackupRetentionDays` to be set.
     */
    dailyAutomaticBackupStartTime?: pulumi.Input<string>;
    /**
     * - The filesystem deployment type. Only `MULTI_AZ_1` is supported.
     */
    deploymentType: pulumi.Input<string>;
    /**
     * The SSD IOPS configuration for the Amazon FSx for NetApp ONTAP file system. See Disk Iops Configuration Below.
     */
    diskIopsConfiguration?: pulumi.Input<inputs.fsx.OntapFileSystemDiskIopsConfiguration>;
    /**
     * Specifies the IP address range in which the endpoints to access your file system will be created. By default, Amazon FSx selects an unused IP address range for you from the 198.19.* range.
     */
    endpointIpAddressRange?: pulumi.Input<string>;
    /**
     * The ONTAP administrative password for the fsxadmin user that you can use to administer your file system using the ONTAP CLI and REST API.
     */
    fsxAdminPassword?: pulumi.Input<string>;
    /**
     * ARN for the KMS Key to encrypt the file system at rest, Defaults to an AWS managed KMS Key.
     */
    kmsKeyId?: pulumi.Input<string>;
    /**
     * The ID for a subnet. A subnet is a range of IP addresses in your virtual private cloud (VPC).
     */
    preferredSubnetId: pulumi.Input<string>;
    /**
     * Specifies the VPC route tables in which your file system's endpoints will be created. You should specify all VPC route tables associated with the subnets in which your clients are located. By default, Amazon FSx selects your VPC's default route table.
     */
    routeTableIds?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
     */
    securityGroupIds?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The storage capacity (GiB) of the file system. Valid values between `1024` and `196608`.
     */
    storageCapacity?: pulumi.Input<number>;
    /**
     * - The filesystem storage type. defaults to `SSD`.
     */
    storageType?: pulumi.Input<string>;
    /**
     * A list of IDs for the subnets that the file system will be accessible from. Exactly 2 subnets need to be provided.
     */
    subnetIds: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    // NOTE(review): no upstream description — see the matching note on the
    // class property above.
    throughputCapacity: pulumi.Input<number>;
    /**
     * The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
     */
    weeklyMaintenanceStartTime?: pulumi.Input<string>;
}
the_stack
import Grid from "../Grid"; import { PROPERTY_TYPE, UPDATE_STATE } from "../consts"; import { GridOptions, Properties, GridOutlines, GridAlign } from "../types"; import { range, GetterSetter } from "../utils"; import { GridItem } from "../GridItem"; function getColumnPoint( outline: number[], columnIndex: number, columnCount: number, pointCaculationName: "max" | "min", ) { return Math[pointCaculationName](...outline.slice(columnIndex, columnIndex + columnCount)); } function getColumnIndex(outline: number[], columnCount: number, nearestCalculationName: "max" | "min") { const length = outline.length - columnCount + 1; const pointCaculationName = nearestCalculationName === "max" ? "min" : "max"; const indexCaculationName = nearestCalculationName === "max" ? "lastIndexOf" : "indexOf"; const points = range(length).map((index) => { return getColumnPoint(outline, index, columnCount, pointCaculationName); }); return points[indexCaculationName](Math[nearestCalculationName](...points)); } /** * @typedef * @memberof Grid.MasonryGrid * @extends Grid.GridOptions * @property - The number of columns. If the number of columns is 0, it is automatically calculated according to the size of the container. Can be used instead of outlineLength. (default: 0)<ko>열의 개수. 열의 개수가 0이라면, 컨테이너의 사이즈에 의해 계산이 된다. outlineLength 대신 사용할 수 있다.(default: 0) </ko> * @property - The size of the columns. If it is 0, it is calculated as the size of the first item in items. Can be used instead of outlineSize. (default: 0) <ko> 열의 사이즈. 만약 열의 사이즈가 0이면, 아이템들의 첫번째 아이템의 사이즈로 계산이 된다. outlineSize 대신 사용할 수 있다.(default: 0) </ko> * @property - The size ratio(inlineSize / contentSize) of the columns. 0 is not set. (default: 0) <ko>열의 사이즈 비율(inlineSize / contentSize). 0은 미설정이다. </ko> * @property - Align of the position of the items. If you want to use `stretch`, be sure to set `column` or `columnSize` option. ("start", "center", "end", "justify", "stretch") (default: "justify") <ko>아이템들의 위치의 정렬. 
`stretch`를 사용하고 싶다면 `column` 또는 `columnSize` 옵션을 설정해라. ("start", "center", "end", "justify", "stretch") (default: "justify")</ko> * @property - Difference Threshold for Counting Columns. Since offsetSize is calculated by rounding, the number of columns may not be accurate. (default: 1) <ko>칼럼 개수를 계산하기 위한 차이 임계값. offset 사이즈는 반올림으로 게산하기 때문에 정확하지 않을 수 있다. (default: 1)</ko> */ export interface MasonryGridOptions extends GridOptions { column?: number; columnSize?: number; columnSizeRatio?: number; align?: GridAlign; columnCalculationThreshold?: number; } /** * MasonryGrid is a grid that stacks items with the same width as a stack of bricks. Adjust the width of all images to the same size, find the lowest height column, and insert a new item. * @ko MasonryGrid는 벽돌을 쌓아 올린 모양처럼 동일한 너비를 가진 아이템를 쌓는 레이아웃이다. 모든 이미지의 너비를 동일한 크기로 조정하고, 가장 높이가 낮은 열을 찾아 새로운 이미지를 삽입한다. 따라서 배치된 아이템 사이에 빈 공간이 생기지는 않지만 배치된 레이아웃의 아래쪽은 울퉁불퉁해진다. * @memberof Grid * @param {HTMLElement | string} container - A base element for a module <ko>모듈을 적용할 기준 엘리먼트</ko> * @param {Grid.MasonryGrid.MasonryGridOptions} options - The option object of the MasonryGrid module <ko>MasonryGrid 모듈의 옵션 객체</ko> */ @GetterSetter export class MasonryGrid extends Grid<MasonryGridOptions> { public static propertyTypes = { ...Grid.propertyTypes, column: PROPERTY_TYPE.RENDER_PROPERTY, columnSize: PROPERTY_TYPE.RENDER_PROPERTY, columnSizeRatio: PROPERTY_TYPE.RENDER_PROPERTY, align: PROPERTY_TYPE.RENDER_PROPERTY, columnCalculationThreshold: PROPERTY_TYPE.RENDER_PROPERTY, }; public static defaultOptions: Required<MasonryGridOptions> = { ...Grid.defaultOptions, align: "justify", column: 0, columnSize: 0, columnSizeRatio: 0, columnCalculationThreshold: 0.5, }; public applyGrid(items: GridItem[], direction: "start" | "end", outline: number[]): GridOutlines { const columnSize = this.getComputedOutlineSize(items); const column = this.getComputedOutlineLength(items); const { gap, align, columnSizeRatio, } = this.options; const outlineLength = 
outline.length; const itemsLength = items.length; const alignPoses = this._getAlignPoses(column, columnSize); const isEndDirection = direction === "end"; const nearestCalculationName = isEndDirection ? "min" : "max"; const pointCalculationName = isEndDirection ? "max" : "min"; let startOutline = [0]; if (outlineLength === column) { startOutline = outline.slice(); } else { const point = outlineLength ? Math[pointCalculationName](...outline) : 0; startOutline = range(column).map(() => point); } const endOutline = startOutline.slice(); const columnDist = column > 1 ? alignPoses[1] - alignPoses[0] : 0; const isStretch = align === "stretch"; for (let i = 0; i < itemsLength; ++i) { const item = items[isEndDirection ? i : itemsLength - 1 - i]; const columnAttribute = parseInt(item.attributes.column || "1", 10); const maxColumnAttribute = parseInt(item.attributes.maxColumn || "1", 10); let contentSize = item.contentSize; let columnCount = Math.min( column, columnAttribute || Math.max(1, Math.ceil((item.inlineSize + gap) / columnDist)), ); const maxColumnCount = Math.min(column, Math.max(columnCount, maxColumnAttribute)); let columnIndex = getColumnIndex(endOutline, columnCount, nearestCalculationName); let contentPos = getColumnPoint(endOutline, columnIndex, columnCount, pointCalculationName); while (columnCount < maxColumnCount) { const nextEndColumnIndex = columnIndex + columnCount; const nextColumnIndex = columnIndex - 1; if (isEndDirection && (nextEndColumnIndex >= column || endOutline[nextEndColumnIndex] > contentPos)) { break; } if (!isEndDirection && (nextColumnIndex < 0 || endOutline[nextColumnIndex]) < contentPos) { break; } if (!isEndDirection) { --columnIndex; } ++columnCount; } columnIndex = Math.max(0, columnIndex); columnCount = Math.min(column - columnIndex, columnCount); // stretch mode or data-grid-column > "1" if ((columnAttribute > 0 && columnCount > 1) || isStretch) { item.cssInlineSize = (columnCount - 1) * columnDist + columnSize; } if 
(columnSizeRatio > 0) { contentSize = item.computedInlineSize / columnSizeRatio; item.cssContentSize = contentSize; } const inlinePos = alignPoses[columnIndex]; contentPos = isEndDirection ? contentPos : contentPos - gap - contentSize; item.cssInlinePos = inlinePos; item.cssContentPos = contentPos; const nextOutlinePoint = isEndDirection ? contentPos + contentSize + gap : contentPos; range(columnCount).forEach((indexOffset) => { endOutline[columnIndex + indexOffset] = nextOutlinePoint; }); } // if end items, startOutline is low, endOutline is high // if start items, startOutline is high, endOutline is low return { start: isEndDirection ? startOutline : endOutline, end: isEndDirection ? endOutline : startOutline, }; } public getComputedOutlineSize(items = this.items) { const { gap, align, } = this.options; const columnSizeOption = this.columnSize || this.outlineSize; const column = this.column || this.outlineLength || 1; let columnSize = 0; if (align === "stretch") { columnSize = (this.getContainerInlineSize() + gap) / (column || 1) - gap; } else if (columnSizeOption) { columnSize = columnSizeOption; } else if (items.length) { let checkedItem = items[0]; for (const item of items) { const attributes = item.attributes; if ( item.updateState !== UPDATE_STATE.UPDATED || !item.inlineSize || attributes.column || attributes.maxColumnCount ) { continue; } checkedItem = item; break; } const inlineSize = checkedItem.inlineSize || 0; columnSize = inlineSize; } else { columnSize = this.getContainerInlineSize(); } return columnSize || 0; } public getComputedOutlineLength(items = this.items) { const gap = this.gap; const columnOption = this.column || this.outlineLength; const columnCalculationThreshold = this.columnCalculationThreshold; let column = 1; if (columnOption) { column = columnOption; } else { const columnSize = this.getComputedOutlineSize(items); column = Math.min( items.length, Math.max(1, Math.floor((this.getContainerInlineSize() + gap) / (columnSize - 
columnCalculationThreshold + gap))), ); } return column; } private _getAlignPoses(column: number, columnSize: number) { const { align, gap, } = this.options; const containerSize = this.getContainerInlineSize(); const indexes = range(column); let offset = 0; let dist = 0; if (align === "justify" || align === "stretch") { const countDist = column - 1; dist = countDist ? Math.max((containerSize - columnSize) / countDist, columnSize + gap) : 0; offset = Math.min(0, containerSize / 2 - (countDist * dist + columnSize) / 2); } else { dist = columnSize + gap; const totalColumnSize = (column - 1) * dist + columnSize; if (align === "center") { offset = (containerSize - totalColumnSize) / 2; } else if (align === "end") { offset = containerSize - totalColumnSize; } } return indexes.map((i) => { return offset + i * dist; }); } } export interface MasonryGrid extends Properties<typeof MasonryGrid> { } /** * Align of the position of the items. If you want to use `stretch`, be sure to set `column` or `columnSize` option. ("start", "center", "end", "justify", "stretch") * @ko 아이템들의 위치의 정렬. `stretch`를 사용하고 싶다면 `column` 또는 `columnSize` 옵션을 설정해라. ("start", "center", "end", "justify", "stretch") * @name Grid.MasonryGrid#align * @type {$ts:Grid.MasonryGrid.MasonryGridOptions["align"]} * @default "justify" * @example * ```js * import { MasonryGrid } from "@egjs/grid"; * * const grid = new MasonryGrid(container, { * align: "start", * }); * * grid.align = "justify"; * ``` */ /** * The number of columns. If the number of columns is 0, it is automatically calculated according to the size of the container. Can be used instead of outlineLength. * @ko 열의 개수. 열의 개수가 0이라면, 컨테이너의 사이즈에 의해 계산이 된다. outlineLength 대신 사용할 수 있다. 
* @name Grid.MasonryGrid#column * @type {$ts:Grid.MasonryGrid.MasonryGridOptions["column"]} * @default 0 * @example * ```js * import { MasonryGrid } from "@egjs/grid"; * * const grid = new MasonryGrid(container, { * column: 0, * }); * * grid.column = 4; * ``` */ /** * The size of the columns. If it is 0, it is calculated as the size of the first item in items. Can be used instead of outlineSize. * @ko 열의 사이즈. 만약 열의 사이즈가 0이면, 아이템들의 첫번째 아이템의 사이즈로 계산이 된다. outlineSize 대신 사용할 수 있다. * @name Grid.MasonryGrid#columnSize * @type {$ts:Grid.MasonryGrid.MasonryGridOptions["columnSize"]} * @default 0 * @example * ```js * import { MasonryGrid } from "@egjs/grid"; * * const grid = new MasonryGrid(container, { * columnSize: 0, * }); * * grid.columnSize = 200; * ``` */ /** * The size ratio(inlineSize / contentSize) of the columns. 0 is not set. * @ko 열의 사이즈 비율(inlineSize / contentSize). 0은 미설정이다. * @name Grid.MasonryGrid#columnSizeRatio * @type {$ts:Grid.MasonryGrid.MasonryGridOptions["columnSizeRatio"]} * @default 0 * @example * ```js * import { MasonryGrid } from "@egjs/grid"; * * const grid = new MasonryGrid(container, { * columnSizeRatio: 0, * }); * * grid.columnSizeRatio = 0.5; * ``` */
the_stack
// Browser-only integration tests for the `{{on}}` modifier. The entire suite
// is gated on `hasDom` below because the tests click real DOM elements.
import { castToBrowser, expect, HAS_NATIVE_PROXY } from '@glimmer/util';
import { getInternalModifierManager } from '@glimmer/manager';
import { on } from '@glimmer/runtime';
import { jitSuite, RenderTest, test } from '../..';

// check if window exists and actually is the global
const hasDom =
  typeof self === 'object' &&
  self !== null &&
  (self as Window['self']).Object === Object &&
  typeof Window !== 'undefined' &&
  self.constructor === Window &&
  typeof document === 'object' &&
  document !== null &&
  self.document === document &&
  typeof location === 'object' &&
  location !== null &&
  self.location === location &&
  typeof history === 'object' &&
  history !== null &&
  self.history === history &&
  typeof navigator === 'object' &&
  navigator !== null &&
  self.navigator === navigator &&
  typeof navigator.userAgent === 'string';

// do this to get around type issues for these values
let global = window as any;

// Browser sniffs used to branch on event-options / IE11 behavior below.
const isChrome = hasDom
  ? typeof global.chrome === 'object' && !(typeof global.opera === 'object')
  : false;
const isFirefox = hasDom ? typeof global.InstallTrigger !== 'undefined' : false;
// NOTE(review): presumably relies on IE11 hiding `ActiveXObject` from direct
// property reads while still reporting it via `in` — confirm if IE11 support
// is still needed.
const isIE11 = !global.ActiveXObject && 'ActiveXObject' in window;

// Listener add/remove bookkeeping exposed by the on-modifier manager.
interface Counters {
  adds: number;
  removes: number;
}

interface OnManager {
  counters: Counters;
  SUPPORTS_EVENT_OPTIONS: boolean;
}

// Reaches into the internal modifier manager for `{{on}}` (private API).
function getOnManager() {
  return (getInternalModifierManager(on) as unknown) as OnManager;
}

if (hasDom) {
  class OnTest extends RenderTest {
    static suiteName = '{{on}} Modifier';

    // Snapshot of the manager's counters taken before each test so that
    // assertCounts can assert deltas rather than absolute values.
    startingCounters: Counters = { adds: 0, removes: 0 };

    // Finds a button inside the rendered root element, failing loudly if the
    // selector matches nothing.
    findButton(selector = 'button'): HTMLButtonElement {
      return expect(
        castToBrowser(this.element, 'div').querySelector(selector) as HTMLButtonElement,
        `BUG: expected to find ${selector}`
      );
    }

    beforeEach() {
      // might error if getOnManagerInstance fails
      this.startingCounters = getOnManager().counters;
    }

    // Asserts the add/remove counter deltas relative to the beforeEach snapshot.
    assertCounts(expected: Counters) {
      this.assert.deepEqual(
        getOnManager().counters,
        {
          adds: expected.adds + this.startingCounters.adds,
          removes: expected.removes + this.startingCounters.removes,
        },
        `counters have incremented by ${JSON.stringify(expected)}`
      );
    }

    @test
    'SUPPORTS_EVENT_OPTIONS is correct (private API usage)'(assert: Assert) {
      let { SUPPORTS_EVENT_OPTIONS } = getOnManager();

      // Only asserted on browsers with a known expectation; otherwise no-op.
      if (isChrome || isFirefox) {
        assert.strictEqual(SUPPORTS_EVENT_OPTIONS, true, 'is true in chrome and firefox');
      } else if (isIE11) {
        assert.strictEqual(SUPPORTS_EVENT_OPTIONS, false, 'is false in IE11');
      }
    }

    @test
    'it adds an event listener'(assert: Assert) {
      let count = 0;

      this.render('<button {{on "click" this.callback}}>Click Me</button>', {
        callback() {
          count++;
        },
      });

      assert.equal(count, 0, 'not called on initial render');

      this.assertStableRerender();
      this.assertCounts({ adds: 1, removes: 0 });
      assert.equal(count, 0, 'not called on a rerender');

      this.findButton().click();
      assert.equal(count, 1, 'has been called 1 time');

      this.findButton().click();
      assert.equal(count, 2, 'has been called 2 times');

      this.assertCounts({ adds: 1, removes: 0 });
    }

    @test
    'passes the event to the listener'(assert: Assert) {
      let event: UIEvent;

      this.render('<button {{on "click" this.callback}}>Click Me</button>', {
        callback(evt: UIEvent) {
          event = evt;
        },
      });

      let button = this.findButton();

      button.click();

      assert.strictEqual(event!.target, button, 'has a valid event with a target');

      this.assertCounts({ adds: 1, removes: 0 });
    }

    @test
    'the listener callback is bound'(assert: Assert) {
      let first = 0;
      let second = 0;

      let firstCallback = () => first++;
      let secondCallback = () => second++;

      this.render('<button {{on "click" this.callback}}>Click Me</button>', {
        callback: firstCallback,
      });

      let button = this.findButton();

      assert.equal(first, 0, 'precond - first not called on initial render');
      assert.equal(second, 0, 'precond - second not called on initial render');

      button.click();
      assert.equal(first, 1, 'first has been called 1 time');
      assert.equal(second, 0, 'second not called on initial render');

      // Swapping the callback must remove the old listener and add the new one.
      this.rerender({ callback: secondCallback });

      button.click();

      assert.equal(first, 1, 'first has been called 1 time');
      assert.equal(second, 1, 'second has been called 1 times');

      this.assertCounts({ adds: 2, removes: 1 });
    }

    @test
    'setting once named argument ensures the callback is only called once'(assert: Assert) {
      let count = 0;

      this.render('<button {{on "click" this.callback once=true}}>Click Me</button>', {
        callback() {
          count++;
        },
      });

      let button = this.findButton();

      assert.equal(count, 0, 'not called on initial render');

      this.assertStableRerender();
      assert.equal(count, 0, 'not called on a rerender');

      button.click();
      assert.equal(count, 1, 'has been called 1 time');

      button.click();
      assert.equal(count, 1, 'has been called 1 times');

      // IE11 has no native `once` support, so a manual removal is counted.
      if (isIE11) {
        this.assertCounts({ adds: 1, removes: 1 });
      } else {
        this.assertCounts({ adds: 1, removes: 0 });
      }
    }

    @test
    'changing from `once=false` to `once=true` ensures the callback can only be called once'(
      assert: Assert
    ) {
      let count = 0;

      this.render('<button {{on "click" this.callback once=this.once}}>Click Me</button>', {
        callback() {
          count++;
        },
        once: false,
      });

      let button = this.findButton();

      button.click();
      assert.equal(count, 1, 'has been called 1 time');

      button.click();
      assert.equal(count, 2, 'has been called 2 times');

      this.rerender({ once: true });

      button.click();
      assert.equal(count, 3, 'has been called 3 time');

      button.click();
      assert.equal(count, 3, 'is not called again');

      if (isIE11) {
        this.assertCounts({ adds: 2, removes: 2 });
      } else {
        this.assertCounts({ adds: 2, removes: 1 });
      }
    }

    @test
    'setting passive named argument prevents calling preventDefault'(assert: Assert) {
      let matcher = /You marked this listener as 'passive', meaning that you must not call 'event.preventDefault\(\)'/;

      this.render('<button {{on "click" this.callback passive=true}}>Click Me</button>', {
        callback(event: UIEvent) {
          assert.throws(() => {
            event.preventDefault();
          }, matcher);
        },
      });

      this.findButton().click();
    }

    @test
    'by default bubbling is used (capture: false)'(assert: Assert) {
      this.render(
        ` <button class="outer" {{on 'click' this.handleOuterClick}}> <button class="inner" {{on 'click' this.handleInnerClick}}></button> </button> `,
        {
          handleOuterClick() {
            assert.step('outer clicked');
          },
          handleInnerClick() {
            assert.step('inner clicked');
          },
        }
      );

      this.findButton('.inner').click();

      assert.verifySteps(['inner clicked', 'outer clicked'], 'uses capture: false by default');
    }

    @test
    'specifying capture named argument uses capture semantics'(assert: Assert) {
      this.render(
        ` <button class="outer" {{on 'click' this.handleOuterClick capture=true}}> <button class="inner" {{on 'click' this.handleInnerClick}}></button> </button> `,
        {
          handleOuterClick() {
            assert.step('outer clicked');
          },
          handleInnerClick() {
            assert.step('inner clicked');
          },
        }
      );

      this.findButton('.inner').click();

      assert.verifySteps(['outer clicked', 'inner clicked'], 'capture works');
    }

    @test
    'can use capture and once together'(assert: Assert) {
      this.render(
        ` <button class="outer" {{on 'click' this.handleOuterClick once=true capture=true}}> <button class="inner" {{on 'click' this.handleInnerClick}}></button> </button> `,
        {
          handleOuterClick() {
            assert.step('outer clicked');
          },
          handleInnerClick() {
            assert.step('inner clicked');
          },
        }
      );

      this.findButton('.inner').click();

      assert.verifySteps(['outer clicked', 'inner clicked'], 'capture works');

      this.findButton('.inner').click();

      assert.verifySteps(['inner clicked'], 'once works');
    }

    @test
    'unrelated updates to `this` context does not result in removing + re-adding'(assert: Assert) {
      let called = false;

      this.render('<button {{on "click" this.callback}}>Click Me</button>', {
        callback() {
          called = true;
        },
        otherThing: 0,
      });

      this.assertCounts({ adds: 1, removes: 0 });

      this.findButton().click();
      // NOTE(review): `called` is a boolean compared to 1 — passes via loose
      // equality (true == 1).
      assert.equal(called, 1, 'callback is being invoked');

      this.rerender({ otherThing: 1 });
      this.assertCounts({ adds: 1, removes: 0 });
    }

    @test
    'asserts when eventName is missing'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on undefined this.callback}}>Click Me</button>`, {
          callback() {},
        });
      }, /You must pass a valid DOM event name as the first argument to the `on` modifier/);
    }

    @test
    'asserts when eventName is a bound undefined value'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on this.someUndefinedThing this.callback}}>Click Me</button>`, {
          callback() {},
        });
      }, /You must pass a valid DOM event name as the first argument to the `on` modifier/);
    }

    @test
    'asserts when eventName is a function'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on this.callback}}>Click Me</button>`, {
          callback() {},
        });
      }, /You must pass a valid DOM event name as the first argument to the `on` modifier/);
    }

    @test
    'asserts when callback is missing'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on 'click'}}>Click Me</button>`);
      }, /You must pass a function as the second argument to the `on` modifier/);
    }

    @test
    'asserts when callback is undefined'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on 'click' this.foo}}>Click Me</button>`);
      }, /You must pass a function as the second argument to the `on` modifier, you passed undefined. While rendering:\n\nthis.foo/);
    }

    @test
    'asserts when callback is null'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on 'click' this.foo}}>Click Me</button>`, { foo: null });
      }, /You must pass a function as the second argument to the `on` modifier, you passed null. While rendering:\n\nthis.foo/);
    }

    @test
    'asserts if the provided callback accesses `this` without being bound prior to passing to on'(
      assert: Assert
    ) {
      this.render(`<button {{on 'click' this.myFunc}}>Click Me</button>`, {
        myFunc(this: any) {
          if (HAS_NATIVE_PROXY) {
            assert.throws(() => {
              // eslint-disable-next-line no-unused-expressions
              this.arg1;
            }, /You accessed `this.arg1` from a function passed to the `on` modifier, but the function itself was not bound to a valid `this` context. Consider updating to use a bound function/);
          } else {
            // IE11
            assert.strictEqual(this, null, 'this is null on browsers without native proxy support');
          }
        },
        arg1: 'foo',
      });

      this.findButton().click();
    }

    @test
    'asserts if more than 2 positional parameters are provided'(assert: Assert) {
      assert.throws(() => {
        this.render(`<button {{on 'click' this.callback this.someArg}}>Click Me</button>`, {
          callback() {},
          someArg: 'foo',
        });
      }, /You can only pass two positional arguments \(event name and callback\) to the `on` modifier, but you provided 3. Consider using the `fn` helper to provide additional arguments to the `on` callback./);
    }

    @test
    'it removes the modifier when the element is removed'(assert: Assert) {
      let count = 0;

      this.render(
        '{{#if this.showButton}}<button {{on "click" this.callback}}>Click Me</button>{{/if}}',
        {
          callback() {
            count++;
          },
          showButton: true,
        }
      );

      this.assertCounts({ adds: 1, removes: 0 });

      this.findButton().click();
      assert.equal(count, 1, 'has been called 1 time');

      // Destroying the element must tear down the listener.
      this.rerender({ showButton: false });

      this.assertCounts({ adds: 1, removes: 1 });
    }
  }

  jitSuite(OnTest);
}
the_stack
import { EmptyTree } from '@angular-devkit/schematics'; import { SchematicTestRunner, UnitTestTree } from '@angular-devkit/schematics/testing'; describe('Migration to remove polyfills specific to Internet Explorer', () => { const schematicRunner = new SchematicTestRunner( 'migrations', require.resolve('../migration-collection.json'), ); let tree: UnitTestTree; beforeEach(async () => { tree = await schematicRunner .runExternalSchematicAsync( require.resolve('../../collection.json'), 'ng-new', { name: 'migration-test', version: '1.2.3', directory: '.', }, new UnitTestTree(new EmptyTree()), ) .toPromise(); }); it('should remove used `classlist.js` polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** * IE11 requires the following for NgClass support on SVG elements */ import 'classlist.js'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('should remove used `web-animations-js` polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** * Web Animations \`@angular/platform-browser/animations\` * Only required if AnimationBuilder is used within the application and using IE/Edge or Safari. * Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0). */ import 'web-animations-js'; // Other stuff. 
/*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('should remove unused `classlist.js` polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** * IE11 requires the following for NgClass support on SVG elements */ // import 'classlist.js'; // Run \`npm install --save classlist.js\`. // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('should remove unused `web-animations-js` polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** * Web Animations \`@angular/platform-browser/animations\` * Only required if AnimationBuilder is used within the application and using IE/Edge or Safari. * Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0). */ // import 'web-animations-js'; // Run \`npm install --save web-animations-js\`. // Other stuff. 
/*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('warns on a polyfill path that does not reference a valid file', async () => { tree.delete('src/polyfills.ts'); const logs = [] as string[]; schematicRunner.logger.subscribe((log) => logs.push(log.message)); await schematicRunner.runSchematicAsync('drop-ie-polyfills', {}, tree).toPromise(); expect(logs).toEqual([ 'Polyfill path from workspace configuration could not be read, does the file exist?', ]); }); it('handles byte-order-marks (BOMs) in the polyfill file', async () => { tree.overwrite( 'src/polyfills.ts', // File with leading BOM (\uFEFF). ` \uFEFF/** * IE11 requires the following for NgClass support on SVG elements */ import 'classlist.js'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. 
`.trim(), ); }); it('handles carriage returns with newlines if present', async () => { tree.overwrite( 'src/polyfills.ts', // File each `\r\n` for newline separators. ` /**\r * IE11 requires the following for NgClass support on SVG elements\r */\r // import 'classlist.js'; // Run \`npm install --save classlist.js\`.\r \r // Other stuff.\r /***************************************************************************************************\r * Zone JS is required by default for Angular itself.\r */\r import 'zone.js'; // Included with Angular CLI.\r `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff.\r /***************************************************************************************************\r * Zone JS is required by default for Angular itself.\r */\r import 'zone.js'; // Included with Angular CLI.\r `.trim(), ); }); it('removes older-style `classlist.js` polyfill comment', async () => { // Previous Angular versions used a single-line comment, this should still be removed. tree.overwrite( 'src/polyfills.ts', ` /** IE11 requires the following for NgClass support on SVG elements */ import 'classlist.js'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. 
`.trim(), ); }); it('should remove an unused polyfill from the last item in the file', async () => { // TypeScript APIs require special-casing any trailing comments in the file, so we need to test // this explicitly. tree.overwrite( 'src/polyfills.ts', ` /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. /** * IE11 requires the following for NgClass support on SVG elements */ // import 'classlist.js'; // Run \`npm install --save classlist.js\`. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('keeps the file overview comment preceeding a used polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** * This file includes polyfills needed by Angular and is loaded before the app. * You can add your own extra polyfills to this file. * * This file is divided into 2 sections: * 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers. * 2. Application imports. Files imported after ZoneJS that should be loaded before your main * file. * * The current setup is for so-called "evergreen" browsers; the last versions of browsers that * automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera), * Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile. 
* * Learn more in https://angular.io/guide/browser-support */ /** * IE11 requires the following for NgClass support on SVG elements */ import 'classlist.js'; `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /** * This file includes polyfills needed by Angular and is loaded before the app. * You can add your own extra polyfills to this file. * * This file is divided into 2 sections: * 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers. * 2. Application imports. Files imported after ZoneJS that should be loaded before your main * file. * * The current setup is for so-called "evergreen" browsers; the last versions of browsers that * automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera), * Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile. * * Learn more in https://angular.io/guide/browser-support */ `.trim(), ); }); it('keeps the file overview comment preceeding an unused polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** * This file includes polyfills needed by Angular and is loaded before the app. * You can add your own extra polyfills to this file. * * This file is divided into 2 sections: * 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers. * 2. Application imports. Files imported after ZoneJS that should be loaded before your main * file. * * The current setup is for so-called "evergreen" browsers; the last versions of browsers that * automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera), * Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile. * * Learn more in https://angular.io/guide/browser-support */ /** * IE11 requires the following for NgClass support on SVG elements */ // import 'classlist.js'; // Run \`npm install --save classlist.js\`. 
`.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /** * This file includes polyfills needed by Angular and is loaded before the app. * You can add your own extra polyfills to this file. * * This file is divided into 2 sections: * 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers. * 2. Application imports. Files imported after ZoneJS that should be loaded before your main * file. * * The current setup is for so-called "evergreen" browsers; the last versions of browsers that * automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera), * Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile. * * Learn more in https://angular.io/guide/browser-support */ `.trim(), ); }); it('keeps the BROWSER POLYFILLS comment preceeding a used polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /*************************************************************************************************** * BROWSER POLYFILLS */ /** * IE11 requires the following for NgClass support on SVG elements */ import 'classlist.js'; `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /*************************************************************************************************** * BROWSER POLYFILLS */ `.trim(), ); }); it('keeps the BROWSER POLYFILLS comment preceeding an unused polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /*************************************************************************************************** * BROWSER POLYFILLS */ /** * IE11 requires the following for NgClass support on SVG elements */ // import 'classlist.js'; // Run \`npm install --save classlist.js\`. 
`.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /*************************************************************************************************** * BROWSER POLYFILLS */ `.trim(), ); }); it('ignores a `package.json` that does not include polyfill dependencies', async () => { const packageJson = ` { "name": "ng-new", "version": "0.0.0", "dependencies": { "@angular/core": "^13.0.0" }, "devDependencies": { "@angular/cli": "^13.0.0" } } `.trim(); tree.overwrite('package.json', packageJson); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('package.json')).toBe(packageJson); // No `npm install` should be scheduled, nothing to remove. expect(schematicRunner.tasks).toEqual([]); }); it('uninstalls `classlist.js` and `web-animations-js` packages', async () => { tree.overwrite( 'package.json', ` { "name": "ng-new", "version": "0.0.0", "dependencies": { "@angular/core": "^13.0.0", "classlist.js": "^1.0.0" }, "devDependencies": { "@angular/cli": "^13.0.0", "web-animations-js": "^1.0.0" } } `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); // Assert `package.json` no longer contains removed dependencies. expect(migrated.readContent('package.json')).toBe( ` { "name": "ng-new", "version": "0.0.0", "dependencies": { "@angular/core": "^13.0.0" }, "devDependencies": { "@angular/cli": "^13.0.0" } } `.trim(), ); // Assert that `npm install` is scheduled. const taskNames = schematicRunner.tasks.map((task) => task.name); expect(taskNames).toEqual(['node-package']); }); it('removes preceeding newline from used polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** Some other polyfill. 
*/ import 'some-other-polyfill'; /** * IE11 requires the following for NgClass support on SVG elements */ import 'classlist.js'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /** Some other polyfill. */ import 'some-other-polyfill'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('removes preceeding newline from unused polyfill', async () => { tree.overwrite( 'src/polyfills.ts', ` /** Some other polyfill. */ import 'some-other-polyfill'; /** * IE11 requires the following for NgClass support on SVG elements */ // import 'classlist.js'; // Run \`npm install --save classlist.js\`. // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /** Some other polyfill. */ import 'some-other-polyfill'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); it('removes newlines between multiple removed polyfills', async () => { tree.overwrite( 'src/polyfills.ts', ` /** Some other polyfill. 
*/ import 'some-other-polyfill'; /** * IE11 requires the following for NgClass support on SVG elements */ // import 'classlist.js'; // Run \`npm install --save classlist.js\`. /** * Web Animations \`@angular/platform-browser/animations\` * Only required if AnimationBuilder is used within the application and using IE/Edge or Safari. * Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0). */ import 'web-animations-js'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); const migrated = await schematicRunner .runSchematicAsync('drop-ie-polyfills', {}, tree) .toPromise(); expect(migrated.readContent('src/polyfills.ts').trim()).toBe( ` /** Some other polyfill. */ import 'some-other-polyfill'; // Other stuff. /*************************************************************************************************** * Zone JS is required by default for Angular itself. */ import 'zone.js'; // Included with Angular CLI. `.trim(), ); }); });
the_stack
import React, { useState } from "react";
import CustomScrollArea from "react-perfect-scrollbar";
import {
  AutoFocusZone,
  Box,
  Button,
  Flex,
  FocusZoneTabbableElements,
  GridRowBehaviorProps,
  Input,
  MenuButton,
  Ref,
  SiteVariablesPrepared,
  Text,
  gridRowNestedBehavior,
} from "@fluentui/react-northstar";
import {
  ArrowLeftIcon,
  ArrowRightIcon,
  MoreIcon,
  TrashCanIcon,
} from "@fluentui/react-icons-northstar";
import { ICSSInJSStyle } from "@fluentui/styles";
import { useAccessibility } from "@fluentui/react-bindings";
import { getCode, keyboardKey } from "@fluentui/keyboard-key";
import { Draggable, Droppable } from "react-beautiful-dnd";
import { getText, TTextObject, TTranslations } from "../../translations";
import {
  BoardItem,
  IPreparedBoardItem,
  IBoardItemCardLayout,
  IBoardItem,
} from "./BoardItem";
import { TUsers } from "../../types/types";
import setMultiple from "../../lib/setMultiple";

/**
 * Props for a single Board lane (column).
 */
export interface IBoardLaneProps {
  // Lane configuration; absent while the lane is `pending` (still being named).
  lane?: TBoardLane;
  // Key identifying this lane within the board.
  laneKey: string;
  // True for the last lane: disables "move lane further" and the right separator.
  last?: boolean;
  // True for the first lane: disables "move lane nearer".
  first?: boolean;
  // Dialog element rendered in the lane for adding items.
  addItemDialog?: JSX.Element;
  // Factory producing an edit dialog for a given board item.
  editItemDialog?: (boardItem: IBoardItem) => JSX.Element;
  // Items rendered in this lane; each carries an `order` used as its drag index.
  preparedItems: IPreparedBoardItem[];
  // Users referenced by the board items.
  users: TUsers;
  // Translation lookup for all user-visible strings.
  t: TTranslations;
  // True when rendering right-to-left; flips the move-lane arrow icons.
  rtl: boolean;
  boardItemCardLayout: IBoardItemCardLayout;
  // Geometry of the drag placeholder, or null when nothing is dragged over.
  placeholderPosition: TPlaceholderPosition;
  // Called with the entered name ("" cancels) when a pending lane is committed.
  exitPendingLane?: (value: string) => void;
  // Moves the lane one position nearer (-1) or further (1).
  moveLane?: (laneKey: string, delta: -1 | 1) => void;
  deleteLane?: (laneKey: string) => void;
  // True while the lane is newly added and waiting for its name.
  pending?: boolean;
}

/**
 * Board lanes currently only need a `title`.
 * @public
 */
export type TBoardLane = {
  title: TTextObject;
};

/**
 * Each Board lane has a unique key, which is associated with the lane’s configuration.
* @public
 */
export type TBoardLanes = { [laneKey: string]: TBoardLane; };

// [left, top, width, height] in px for the drag placeholder, or null when hidden.
export type TPlaceholderPosition = null | [number, number, number, number];

// Accessibility behavior for a lane: a nested grid row whose inner focus zone is
// entered with Enter and left with Escape (the `focus` key action), and whose
// arrow-key combinations are routed to the `ignore` action handler.
const boardLaneBehavior = (props: GridRowBehaviorProps) => {
  return setMultiple(gridRowNestedBehavior(props), {
    "focusZone.props": {
      handleTabKey: FocusZoneTabbableElements.all,
      isCircularNavigation: true,
      shouldEnterInnerZone: (event: React.KeyboardEvent<HTMLElement>) =>
        getCode(event) === keyboardKey.Enter,
    },
    "attributes.root": {
      role: "group",
      "data-is-focusable": true,
      tabIndex: -1,
    },
    "keyActions.root.focus.keyCombinations": [{ keyCode: keyboardKey.Escape }],
    "keyActions.root.ignore.keyCombinations": [
      { keyCode: keyboardKey.ArrowRight },
      { keyCode: keyboardKey.ArrowDown },
      { keyCode: keyboardKey.ArrowLeft },
      { keyCode: keyboardKey.ArrowUp },
    ],
  });
};

// Draws a 1px separator along the right edge of the element it is applied to.
const separatorStyles: ICSSInJSStyle = {
  position: "relative",
  "&::after": {
    content: '""',
    display: "block",
    position: "absolute",
    top: 0,
    bottom: 0,
    right: 0,
    width: "1px",
  },
};

// Drop-target placeholder rectangle positioned absolutely within the lane;
// renders nothing when `position` is null.
const Placeholder = ({ position }: { position: TPlaceholderPosition }) =>
  position && (
    <Box
      variables={({ colorScheme }: SiteVariablesPrepared) => ({
        backgroundColor: colorScheme.brand.background1,
        borderColor: colorScheme.brand.foreground3,
      })}
      styles={{
        left: position[0] + "px",
        top: position[1] + "px",
        width: position[2] + "px",
        height: position[3] + "px",
        position: "absolute",
        borderRadius: "4px",
        borderWidth: "1px",
        zIndex: 0,
      }}
    />
  );

// Pseudo-element styles shared by the lane's ::before/::after focus outline.
const laneFocusBorderStyles = {
  content: '""',
  display: "block",
  position: "absolute",
  borderStyle: "solid",
  borderWidth: 0,
  top: 0,
  bottom: 0,
  left: "1px",
  right: "2px",
  borderRadius: "4px",
  pointerEvents: "none",
};

/**
 * A single lane (column) of the Board: a header with a move/delete menu (or a
 * name input while `pending`), the add-item dialog slot, and a droppable,
 * scrollable list of board items.
 */
export const BoardLane = (props: IBoardLaneProps) => {
  const {
    users,
    lane,
    preparedItems,
    t,
    rtl,
    laneKey,
    last,
    first,
    addItemDialog,
    editItemDialog,
    boardItemCardLayout,
    placeholderPosition,
    exitPendingLane,
    deleteLane,
    moveLane,
  } = props;

  // Root DOM node of the lane, captured so keyboard handlers can refocus it.
  const [laneNode, setLaneNode] = useState<HTMLElement | null>(null);

  const getA11Props =
useAccessibility(boardLaneBehavior, {
    actionHandlers: {
      preventDefault: (event) => {
        // preventDefault only if event coming from inside the lane
        if (event.currentTarget !== event.target) {
          event.preventDefault();
        }
      },
      // Escape inside the lane moves focus back to the lane root.
      focus: (event) => {
        if (laneNode && event.target !== event.currentTarget) {
          laneNode.focus();
          event.stopPropagation();
        }
      },
      // Arrow keys are swallowed at the lane root (see boardLaneBehavior).
      ignore: (event) => {
        event.preventDefault();
        event.stopPropagation();
      },
    },
  });

  return (
    <Ref innerRef={setLaneNode}>
      {getA11Props.unstable_wrapWithFocusZone(
        <Box
          {...getA11Props("root", {
            className: "board__lane",
            styles: {
              display: "flex",
              flexFlow: "column nowrap",
              minWidth: "15rem",
              maxWidth: "22.5rem",
              borderRight: "1px solid transparent",
              flex: "1 0 0",
              position: "relative",
              ":focus": { outline: "none" },
              "&::before": laneFocusBorderStyles,
              "&::after": laneFocusBorderStyles,
            },
            variables: ({ colorScheme }: SiteVariablesPrepared) => ({
              borderFocus: colorScheme.default.borderFocus,
              borderFocusWithin: colorScheme.default.borderFocusWithin,
            }),
            "aria-label": `${ lane ? getText(t.locale, lane.title) : t["lane pending"] }, ${t["board lane instructions"]}`,
          })}
        >
          {props.pending ? (
            <AutoFocusZone>
              <Input
                placeholder={t["name lane"]}
                onBlur={(e) => {
                  // committing on blur; "" would cancel via exitPendingLane
                  exitPendingLane!(e.target.value);
                }}
                onKeyDown={(e) => {
                  switch (e.key) {
                    case "Escape":
                      return exitPendingLane!("");
                    case "Enter":
                      return exitPendingLane!(
                        (e.target as HTMLInputElement).value
                      );
                  }
                }}
                fluid
                styles={{ padding: ".05rem 1.25rem .25rem 1.25rem" }}
              />
            </AutoFocusZone>
          ) : (
            <Flex>
              <Text
                weight="bold"
                content={getText(t.locale, lane!.title)}
                style={{
                  flex: "1 0 auto",
                  padding: ".375rem 1.25rem .75rem 1.25rem",
                  fontSize: "inherit",
                  margin: "inherit",
                }}
                as="h1"
              />
              <MenuButton
                trigger={
                  <Button
                    text
                    iconOnly
                    icon={<MoreIcon outline />}
                    styles={{ marginRight: "1.25rem" }}
                    aria-label={t["lane options"]}
                  />
                }
                menu={[
                  {
                    content: t["move lane nearer"],
                    // arrows flip in right-to-left layouts
                    icon: rtl ? (
                      <ArrowRightIcon outline />
                    ) : (
                      <ArrowLeftIcon outline />
                    ),
                    disabled: first,
                    onClick: () => {
                      moveLane && moveLane(laneKey, -1);
                    },
                  },
                  {
                    content: t["move lane further"],
                    icon: rtl ? (
                      <ArrowLeftIcon outline />
                    ) : (
                      <ArrowRightIcon outline />
                    ),
                    disabled: last,
                    onClick: () => {
                      moveLane && moveLane(laneKey, 1);
                    },
                  },
                  {
                    kind: "divider",
                  },
                  {
                    content: t["delete"],
                    icon: <TrashCanIcon outline />,
                    // NOTE(review): `disabled` is given a number (item count),
                    // relying on truthy coercion — only empty lanes can be
                    // deleted; confirm a boolean isn't expected here.
                    disabled: preparedItems?.length,
                    onClick: () => {
                      deleteLane && deleteLane(laneKey);
                    },
                  },
                ]}
              />
            </Flex>
          )}
          <Box
            variables={({ colorScheme }: SiteVariablesPrepared) => ({
              backgroundColor: colorScheme.default.background2,
              separatorColor: colorScheme.default.border2,
            })}
            styles={{
              flex: "0 0 auto",
              padding: "0 1.25rem .75rem 1.25rem",
              ...(last ? {} : separatorStyles),
            }}
          >
            {addItemDialog}
          </Box>
          <Box
            variables={({ colorScheme }: SiteVariablesPrepared) => ({
              separatorColor: colorScheme.default.border2,
            })}
            styles={{
              flex: "1 0 0",
              overflow: "hidden",
              ...(last ? {} : separatorStyles),
            }}
          >
            <Droppable droppableId={laneKey}>
              {(provided, snapshot) => (
                <CustomScrollArea
                  style={{ position: "relative" }}
                  containerRef={(container: HTMLElement) => {
                    // hand the scroll container to react-beautiful-dnd
                    provided.innerRef(container);
                  }}
                  {...provided.droppableProps}
                >
                  {preparedItems?.length
                    ? preparedItems.map((item) => (
                        <Draggable
                          draggableId={item.itemKey}
                          key={`Board__DraggableItem__${item.itemKey}`}
                          index={item.order}
                        >
                          {(provided, snapshot) => (
                            <Ref innerRef={provided.innerRef}>
                              <BoardItem
                                isDragging={snapshot.isDragging}
                                draggableProps={provided.draggableProps}
                                dragHandleProps={provided.dragHandleProps!}
                                {...{
                                  item,
                                  users,
                                  t,
                                  boardItemCardLayout,
                                }}
                                {...(editItemDialog && {
                                  editItemDialog: editItemDialog(item),
                                })}
                              />
                            </Ref>
                          )}
                        </Draggable>
                      ))
                    : null}
                  {provided.placeholder}
                  {snapshot.isDraggingOver && (
                    <Placeholder position={placeholderPosition} />
                  )}
                </CustomScrollArea>
              )}
            </Droppable>
          </Box>
        </Box>
      )}
    </Ref>
  );
};
the_stack
import { Component, QueryList, Input, HostListener, ViewChild, ContentChildren, AfterContentInit, ViewChildren, ElementRef, TemplateRef, OnChanges, SimpleChanges, OnInit, ChangeDetectorRef } from "@angular/core"; import { EventService } from "carbon-components-angular/utils"; import { Tab } from "./tab.component"; /** * The `TabHeaders` component contains the `Tab` items and controls scroll functionality * if content has overflow. */ @Component({ selector: "ibm-tab-headers", template: ` <nav class="bx--tabs--scrollable" [ngClass]="{ 'bx--skeleton': skeleton, 'bx--tabs--container bx--tabs--scrollable--container': type === 'container' }" role="navigation" [attr.aria-label]="ariaLabel" [attr.aria-labelledby]="ariaLabelledby"> <button #leftOverflowNavButton type="button" [ngClass]="{ 'bx--tab--overflow-nav-button': hasHorizontalOverflow, 'bx--tab--overflow-nav-button--hidden': leftOverflowNavButtonHidden }" (click)="handleOverflowNavClick(-1)" (mousedown)="handleOverflowNavMouseDown(-1)" (mouseup)="handleOverflowNavMouseUp()"> <svg focusable="false" preserveAspectRatio="xMidYMid meet" xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16" height="16" viewBox="0 0 16 16" aria-hidden="true"> <path d="M5 8L10 3 10.7 3.7 6.4 8 10.7 12.3 10 13z"></path> </svg> </button> <div *ngIf="!leftOverflowNavButtonHidden" class="bx--tabs__overflow-indicator--left"></div> <ul #tabList class="bx--tabs--scrollable__nav" role="tablist" (scroll)="handleScroll()"> <li role="presentation"> <ng-container *ngIf="contentBefore" [ngTemplateOutlet]="contentBefore"></ng-container> </li> <li *ngFor="let tab of tabs; let i = index;" [ngClass]="{ 'bx--tabs__nav-item--selected bx--tabs--scrollable__nav-item--selected': tab.active, 'bx--tabs__nav-item--disabled bx--tabs--scrollable__nav-item--disabled': tab.disabled }" class="bx--tabs--scrollable__nav-item" role="presentation" (click)="selectTab(tabItem, tab, i)"> <button #tabItem [attr.aria-selected]="tab.active" 
[attr.tabindex]="(tab.active?0:-1)" [attr.aria-controls]="tab.id" [attr.aria-disabled]="tab.disabled" (focus)="onTabFocus(tabItem, i)" (click)="$event.preventDefault()" draggable="false" id="{{tab.id}}-header" class="bx--tabs--scrollable__nav-link" [title]="tab.title ? tab.title : tab.heading" href="#" role="tab"> <ng-container *ngIf="!tab.headingIsTemplate"> {{ tab.heading }} </ng-container> <ng-template *ngIf="tab.headingIsTemplate" [ngTemplateOutlet]="tab.heading" [ngTemplateOutletContext]="{$implicit: tab.context}"> </ng-template> </button> </li> <li role="presentation"> <ng-container *ngIf="contentAfter" [ngTemplateOutlet]="contentAfter"></ng-container> </li> </ul> <div *ngIf="!rightOverflowNavButtonHidden" class="bx--tabs__overflow-indicator--right"></div> <button #rightOverflowNavButton type="button" [ngClass]="{ 'bx--tab--overflow-nav-button': hasHorizontalOverflow, 'bx--tab--overflow-nav-button--hidden': rightOverflowNavButtonHidden }" (click)="handleOverflowNavClick(1)" (mousedown)="handleOverflowNavMouseDown(1)" (mouseup)="handleOverflowNavMouseUp()"> <svg focusable="false" preserveAspectRatio="xMidYMid meet" xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16" height="16" viewBox="0 0 16 16" aria-hidden="true"> <path d="M11 8L6 13 5.3 12.3 9.6 8 5.3 3.7 6 3z"></path> </svg> </button> </nav> ` }) export class TabHeaders implements AfterContentInit, OnChanges, OnInit { /** * List of `Tab` components. */ // disable the next line because we need to rename the input // tslint:disable-next-line @Input("tabs") tabInput: QueryList<Tab>; /** * Set to 'true' to have `Tab` items cached and not reloaded on tab switching. * Duplicate from `n-tabs` to support standalone headers */ @Input() cacheActive = false; /** * Set to 'true' to have tabs automatically activated and have their content displayed when they receive focus. */ @Input() followFocus: boolean; /** * Set to `true` to put tabs in a loading state. 
*/ @Input() skeleton = false; /** * Sets the aria label on the nav element. */ @Input() ariaLabel: string; /** * Sets the aria labelledby on the nav element. */ @Input() ariaLabelledby: string; @Input() contentBefore: TemplateRef<any>; @Input() contentAfter: TemplateRef<any>; @Input() type: "default" | "container" = "default"; /** * Gets the Unordered List element that holds the `Tab` headings from the view DOM. */ // @ts-ignore @ViewChild("tabList", { static: true }) headerContainer; // @ts-ignore @ViewChild("rightOverflowNavButton", { static: true }) rightOverflowNavButton; // @ts-ignore @ViewChild("leftOverflowNavButton", { static: true }) leftOverflowNavButton; /** * ContentChild of all the n-tabs */ @ContentChildren(Tab) tabQuery: QueryList<Tab>; /** * set to tabQuery if tabInput is empty */ public tabs: QueryList<Tab>; /** * The index of the first visible tab. */ public firstVisibleTab = 0; /** * The DOM element containing the `Tab` headings displayed. */ @ViewChildren("tabItem") allTabHeaders: QueryList<ElementRef>; /** * Controls the manual focusing done by tabbing through headings. */ public currentSelectedTab: number; public get hasHorizontalOverflow() { const tabList = this.headerContainer.nativeElement; return tabList.scrollWidth > tabList.clientWidth; } public get leftOverflowNavButtonHidden() { const tabList = this.headerContainer.nativeElement; return !this.hasHorizontalOverflow || !tabList.scrollLeft; } public get rightOverflowNavButtonHidden() { const tabList = this.headerContainer.nativeElement; return !this.hasHorizontalOverflow || (tabList.scrollLeft + tabList.clientWidth) === tabList.scrollWidth; } // width of the overflow buttons OVERFLOW_BUTTON_OFFSET = 40; private overflowNavInterval; constructor( protected elementRef: ElementRef, protected changeDetectorRef: ChangeDetectorRef, protected eventService: EventService ) { } // keyboard accessibility /** * Controls the keydown events used for tabbing through the headings. 
*/ @HostListener("keydown", ["$event"]) keyboardInput(event) { let tabsArray = Array.from<any>(this.tabs); // "Right" is an ie11 specific value if (event.key === "Right" || event.key === "ArrowRight") { if (this.currentSelectedTab < this.allTabHeaders.length - 1) { event.preventDefault(); if (this.followFocus) { this.selectTab(event.target, tabsArray[this.currentSelectedTab + 1], this.currentSelectedTab); } this.allTabHeaders.toArray()[this.currentSelectedTab + 1].nativeElement.focus(); } else { event.preventDefault(); if (this.followFocus) { this.selectTab(event.target, tabsArray[0], 0); } this.allTabHeaders.first.nativeElement.focus(); } } // "Left" is an ie11 specific value if (event.key === "Left" || event.key === "ArrowLeft") { if (this.currentSelectedTab > 0) { event.preventDefault(); if (this.followFocus) { this.selectTab(event.target, tabsArray[this.currentSelectedTab - 1], this.currentSelectedTab); } this.allTabHeaders.toArray()[this.currentSelectedTab - 1].nativeElement.focus(); } else { event.preventDefault(); if (this.followFocus) { this.selectTab(event.target, tabsArray[this.allTabHeaders.length - 1], this.allTabHeaders.length); } this.allTabHeaders.toArray()[this.allTabHeaders.length - 1].nativeElement.focus(); } } if (event.key === "Home") { event.preventDefault(); if (this.followFocus) { this.selectTab(event.target, tabsArray[0], 0); } this.allTabHeaders.toArray()[0].nativeElement.focus(); } if (event.key === "End") { event.preventDefault(); if (this.followFocus) { this.selectTab(event.target, tabsArray[this.allTabHeaders.length - 1], this.allTabHeaders.length); } this.allTabHeaders.toArray()[this.allTabHeaders.length - 1].nativeElement.focus(); } // `"Spacebar"` is IE11 specific value if ((event.key === " " || event.key === "Spacebar") && !this.followFocus) { this.selectTab(event.target, tabsArray[this.currentSelectedTab], this.currentSelectedTab); } } ngOnInit() { this.eventService.on(window as any, "resize", () => this.handleScroll()); } 
/**
 * Picks up the tabs either from the `tabs` input or from content projection,
 * propagates `cacheActive` to each, and selects the initial tab (re-running
 * selection whenever the set of tabs changes).
 */
ngAfterContentInit() {
	if (!this.tabInput) {
		this.tabs = this.tabQuery;
	} else {
		this.tabs = this.tabInput;
	}
	this.tabs.forEach(tab => tab.cacheActive = this.cacheActive);
	this.tabs.changes.subscribe(() => {
		this.setFirstTab();
	});
	this.setFirstTab();
}

/**
 * Keeps each `Tab`'s `cacheActive` in sync when the `cacheActive` input changes.
 */
ngOnChanges(changes: SimpleChanges) {
	if (this.tabs && changes.cacheActive) {
		this.tabs.forEach(tab => tab.cacheActive = this.cacheActive);
	}
}

/**
 * Controls manually focusing tabs.
 */
public onTabFocus(ref: HTMLElement, index: number) {
	this.currentSelectedTab = index;
	// reset scroll left because we're already handling it
	this.headerContainer.nativeElement.parentElement.scrollLeft = 0;
}

// Returns the active `Tab`, or a blank stand-in object when none is active.
public getSelectedTab(): any {
	const selected = this.tabs.find(tab => tab.active);
	if (selected) {
		return selected;
	}
	return { headingIsTemplate: false, heading: "" };
}

/**
 * Selects `Tab` 'tab' and moves it into view on the view DOM if it is not already.
 */
public selectTab(ref: HTMLElement, tab: Tab, tabIndex: number) {
	if (tab.disabled) {
		return;
	}
	this.currentSelectedTab = tabIndex;
	// deactivate every tab, then activate and notify the chosen one
	this.tabs.forEach(_tab => _tab.active = false);
	tab.active = true;
	tab.doSelect();
}

// Scroll position feeds the overflow-button getters, so schedule change detection.
public handleScroll() {
	this.changeDetectorRef.markForCheck();
}

/**
 * Scrolls the tab list by `multiplier` pixels in `direction` (-1 left, 1 right).
 * When an edge is reached, focus moves to the opposite overflow button
 * (the one under the pointer is about to be hidden).
 */
public handleOverflowNavClick(direction: number, multiplier = 15) {
	const tabList = this.headerContainer.nativeElement;
	const { clientWidth, scrollLeft, scrollWidth } = tabList;
	// jump past the left overflow button that appears once we scroll away from 0
	if (direction === 1 && !scrollLeft) {
		tabList.scrollLeft += this.OVERFLOW_BUTTON_OFFSET;
	}
	tabList.scrollLeft += direction * multiplier;
	const leftEdgeReached = direction === -1 && scrollLeft < this.OVERFLOW_BUTTON_OFFSET;
	const rightEdgeReached = direction === 1 && scrollLeft + clientWidth >= scrollWidth - this.OVERFLOW_BUTTON_OFFSET;
	if (leftEdgeReached) {
		this.rightOverflowNavButton.nativeElement.focus();
	}
	if (rightEdgeReached) {
		this.leftOverflowNavButton.nativeElement.focus();
	}
}

/**
 * Starts continuous scrolling while an overflow button is held down; stopped
 * by `handleOverflowNavMouseUp` or automatically at either edge.
 */
public handleOverflowNavMouseDown(direction: number) {
	const tabList = this.headerContainer.nativeElement;
	// NOTE(review): no delay is passed to setInterval, so the browser's minimum
	// clamped interval is used — confirm this scroll speed is intended.
	this.overflowNavInterval = setInterval(() => {
		const { clientWidth, scrollLeft, scrollWidth } = tabList;
		// clear interval if scroll reaches left or right edge
		const leftEdgeReached = direction === -1 && scrollLeft < this.OVERFLOW_BUTTON_OFFSET;
		const rightEdgeReached = direction === 1 && scrollLeft + clientWidth >= scrollWidth - this.OVERFLOW_BUTTON_OFFSET;
		if (leftEdgeReached || rightEdgeReached) {
			clearInterval(this.overflowNavInterval);
		}
		// account for overflow button appearing and causing tablist width change
		this.handleOverflowNavClick(direction);
	});
}

// Stops the continuous scrolling started by `handleOverflowNavMouseDown`.
public handleOverflowNavMouseUp() {
	clearInterval(this.overflowNavInterval);
}

/**
 * Determines which `Tab` is initially selected.
 */
protected setFirstTab() {
	// deferred so the tab list has settled before selection runs
	setTimeout(() => {
		let firstTab = this.tabs.find(tab => tab.active);
		if (!firstTab && this.tabs.first) {
			firstTab = this.tabs.first;
			firstTab.active = true;
		}
		if (firstTab) {
			firstTab.doSelect();
		}
	});
}
}
the_stack
import { Constants } from "./constants"; import * as Utils from "./utils"; import { InputControl } from "./inputcontrol"; import { v4 as uuidv4 } from "uuid"; export abstract class PopupControl { private _isOpen: boolean = false; private _overlayElement: HTMLElement; private _popupElement: HTMLElement; protected abstract renderContent(): HTMLElement; onClose: (popupControl: PopupControl, wasCancelled: boolean) => void; keyDown(e: KeyboardEvent) { switch (e.key) { case Constants.keys.escape: this.closePopup(true); break; } } render(rootElementBounds: ClientRect): HTMLElement { let element = document.createElement("div"); element.tabIndex = 0; element.className = "ms-ctrl ms-ctrl-popup-container"; element.setAttribute("role", "dialog"); element.setAttribute("aria-modal", "true"); element.onkeydown = (e) => { this.keyDown(e); return !e.cancelBubble; }; element.appendChild(this.renderContent()); return element; } focus() { if (this._popupElement) { (<HTMLElement>this._popupElement.firstElementChild).focus(); } } popup(rootElement: HTMLElement) { if (!this._isOpen) { this._overlayElement = document.createElement("div"); this._overlayElement.className = "ms-ctrl-overlay"; this._overlayElement.tabIndex = 0; this._overlayElement.style.width = document.documentElement.scrollWidth + "px"; this._overlayElement.style.height = document.documentElement.scrollHeight + "px"; this._overlayElement.onfocus = (e) => { this.closePopup(true); }; document.body.appendChild(this._overlayElement); var rootElementBounds = rootElement.getBoundingClientRect(); this._popupElement = this.render(rootElementBounds); this._popupElement.classList.remove( "ms-ctrl-slide", "ms-ctrl-slideLeftToRight", "ms-ctrl-slideRightToLeft", "ms-ctrl-slideTopToBottom", "ms-ctrl-slideRightToLeft"); window.addEventListener("resize", (e) => { this.closePopup(true); }); const rootElementLabel = rootElement.getAttribute("aria-label"); if (rootElementLabel) { this._popupElement.setAttribute("aria-label", 
rootElementLabel); } this._overlayElement.appendChild(this._popupElement); var popupElementBounds = this._popupElement.getBoundingClientRect(); var availableSpaceBelow = window.innerHeight - rootElementBounds.bottom; var availableSpaceAbove = rootElementBounds.top; var availableSpaceRight = window.innerWidth - rootElementBounds.left; var availableSpaceRight = window.innerWidth - rootElementBounds.right; var availableSpaceLeft = rootElementBounds.left; var left = rootElementBounds.left + Utils.getScrollX(); var top; if (availableSpaceAbove < popupElementBounds.height && availableSpaceBelow < popupElementBounds.height) { // Not enough space above or below root element var actualPopupHeight = Math.min(popupElementBounds.height, window.innerHeight); this._popupElement.style.maxHeight = actualPopupHeight + "px"; if (actualPopupHeight < popupElementBounds.height) { top = Utils.getScrollY(); } else { top = Utils.getScrollY() + rootElementBounds.top + (rootElementBounds.height - actualPopupHeight) /2; } if (availableSpaceLeft < popupElementBounds.width && availableSpaceRight < popupElementBounds.width) { // Not enough space left or right of root element var actualPopupWidth = Math.min(popupElementBounds.width, window.innerWidth); this._popupElement.style.maxWidth = actualPopupWidth + "px"; if (actualPopupWidth < popupElementBounds.width) { left = Utils.getScrollX(); } else { left = Utils.getScrollX() + rootElementBounds.left + (rootElementBounds.width - actualPopupWidth) /2; } } else { // Enough space on the left or right of the root element if (availableSpaceRight >= popupElementBounds.width) { left = Utils.getScrollX() + rootElementBounds.right; this._popupElement.classList.add("ms-ctrl-slide", "ms-ctrl-slideLeftToRight"); } else { left = Utils.getScrollX() + rootElementBounds.left - popupElementBounds.width; this._popupElement.classList.add("ms-ctrl-slide", "ms-ctrl-slideRightToLeft"); } } } else { // Enough space above or below root element if (availableSpaceBelow >= 
popupElementBounds.height) { top = Utils.getScrollY() + rootElementBounds.bottom; this._popupElement.classList.add("ms-ctrl-slide", "ms-ctrl-slideTopToBottom"); } else { top = Utils.getScrollY() + rootElementBounds.top - popupElementBounds.height this._popupElement.classList.add("ms-ctrl-slide", "ms-ctrl-slideBottomToTop"); } if (availableSpaceRight < popupElementBounds.width) { left = Utils.getScrollX() + rootElementBounds.right - popupElementBounds.width; } } this._popupElement.style.left = left + "px"; this._popupElement.style.top = top + "px"; this.focus(); this._isOpen = true; } } closePopup(wasCancelled: boolean) { if (this._isOpen) { document.body.removeChild(this._overlayElement); this._isOpen = false; if (this.onClose) { this.onClose(this, wasCancelled); } } } get isOpen(): boolean { return this._isOpen; } } export abstract class InputWithPopup<TPopupControl extends PopupControl, TValue> extends InputControl { private _labelElement: HTMLElement; private _dropDownButtonElement: HTMLElement; private _popupControl: TPopupControl; private _placeholderText: string; private _value: TValue; protected keyDown(e: KeyboardEvent) { switch (e.key) { case Constants.keys.enter: this.popup(); break; } } private updateLabel() { if (this._labelElement) { if (this._value) { this._labelElement.innerHTML = this.getValueAsString(); this._labelElement.classList.remove("placeholder"); } else { this._labelElement.innerText = this._placeholderText ? 
this._placeholderText : ""; this._labelElement.classList.add("placeholder"); } } } protected get popupControl(): TPopupControl { return this._popupControl; } protected abstract createPopupControl(): TPopupControl; protected abstract getCssClassName(): string; protected getButtonIconCssClassName(): string { return "ms-icon-chevronDown"; } protected getValueAsString(): string { return this._value.toString(); } protected valueChanged() { if (this.onValueChanged) { this.onValueChanged(this); } } onValueChanged: (sender: InputControl) => void; attach(rootElement: HTMLElement) { super.attach(rootElement); rootElement.tabIndex = 0; rootElement.className = this.getCssClassName(); window.addEventListener("resize", (e) => { this.closePopup(true); }); this.rootElement.onclick = (e) => { if (this.isOpen) { this.closePopup(true); } else { this.popup(); } }; let placeHolderDomItem = this.rootElement.attributes.getNamedItem("placeholder"); if (placeHolderDomItem) { this._placeholderText = placeHolderDomItem.value; } this._labelElement = document.createElement("span"); this._labelElement.className = "ms-ctrl ms-dropdown-label"; this._labelElement.id = uuidv4(); // generate unique id for our label element this._dropDownButtonElement = document.createElement("i"); this._dropDownButtonElement.className = "ms-icon ms-ctrl-dropdown-button " + this.getButtonIconCssClassName(); this.rootElement.appendChild(this._labelElement); this.rootElement.appendChild(this._dropDownButtonElement); this.updateLabel(); } popup() { this._popupControl = this.createPopupControl(); this._popupControl.onClose = (sender, wasCancelled) => { this.closePopup(wasCancelled); this.rootElement.focus(); }; this._popupControl.popup(this.rootElement); } closePopup(wasCancelled: boolean) { if (this.popupControl) { this.popupControl.closePopup(wasCancelled); } } get labelId(): string { if (this._labelElement) { return this._labelElement.id; } return undefined; } get isOpen(): boolean { return this._popupControl ? 
this._popupControl.isOpen : false; } get placeholderText(): string { return this._placeholderText; } set placeholderText(value: string) { this._placeholderText = value; } get value(): TValue { return this._value; } set value(newValue: TValue) { if (this._value != newValue) { this._value = newValue; this.updateLabel(); this.valueChanged(); } } }
the_stack
import YAML from 'yaml';
import { v4 as uuidv4 } from 'uuid';
import { constants } from '../constants';
import { ImageRegistryInfo } from '../models/redux/image_registry';

/**
 * Normalizes chaosEngine.metadata.namespace in place.
 * When the YAML parser turned the '{{workflow.parameters.adminModeNamespace}}'
 * placeholder into an object, this rebuilds it as a brace-wrapped string from
 * the object's first key (whitespace stripped).
 */
const validateNamespace = (chaosEngine: any) => {
  // Condition to check the namespace
  if (typeof chaosEngine.metadata.namespace === 'object') {
    // Removes any whitespace in '{{workflow.parameters.adminModeNamespace}}'
    const namespace = Object.keys(chaosEngine.metadata.namespace)[0].replace(
      /\s/g,
      ''
    );
    // NOTE(review): the parsed key already carries one level of braces, so
    // wrapping in `{...}` here presumably restores the double-brace
    // placeholder — confirm against a round-tripped manifest.
    chaosEngine.metadata.namespace = `{${namespace}}`;
  }
};

/**
 * Extracts spec.experiments[].name from an embedded workflow YAML string.
 * Returns [''] when the experiment list is empty (the seed element is only
 * removed once at least one real name was pushed).
 */
const nameextractor = (val: any) => {
  const embeddedworkflowyamlstring = val;
  const parsedEmbeddedYaml = YAML.parse(embeddedworkflowyamlstring as string);
  const experimentNames = [''];
  const experimentList = parsedEmbeddedYaml.spec.experiments;
  (experimentList as any).forEach((element: any) => {
    experimentNames.push(element.name);
  });
  // Drop the '' seed once at least one real experiment name was collected.
  if (experimentNames.length >= 2) {
    experimentNames.shift();
  }
  return experimentNames;
};

/**
 * Stamps every embedded ChaosEngine in the workflow with a fresh
 * `instance_id` label (moving metadata.name to generateName when needed) and
 * rewrites the revert-chaos template's kubectl args to delete engines by
 * those ids. Returns the updated manifest serialized back to YAML; on any
 * error the input is serialized unchanged.
 */
export const updateEngineName = (parsedYaml: any) => {
  let engineInstance: string = '';
  try {
    if (parsedYaml.spec !== undefined) {
      const yamlData = parsedYaml.spec;
      // NOTE(review): unlike updateWorkflowNameLabel below, this reads
      // parsedYaml.spec directly — CronWorkflow manifests (spec.workflowSpec)
      // are presumably handled elsewhere; confirm with callers.
      yamlData.templates.forEach((template: any) => {
        if (template.inputs && template.inputs.artifacts) {
          template.inputs.artifacts.forEach((artifact: any) => {
            const chaosEngine = YAML.parse(artifact.raw.data);
            validateNamespace(chaosEngine);
            // Condition to check for the kind as ChaosEngine
            if (chaosEngine.kind === 'ChaosEngine') {
              // Prefer generateName so repeated runs get unique engine names.
              if (chaosEngine.metadata.generateName === undefined) {
                chaosEngine.metadata['generateName'] = chaosEngine.metadata.name;
                delete chaosEngine.metadata.name;
              }
              // NOTE(review): this replaces (not merges) any existing labels.
              chaosEngine.metadata['labels'] = {
                instance_id: uuidv4(),
              };
              validateNamespace(chaosEngine);
              // Edge Case: Condition to check the appns
              // Required because while parsing the chaos engine
              // '{{workflow.parameters.adminModeNamespace}}' changes to a JSON object
              if (chaosEngine.spec.appinfo && chaosEngine.spec.appinfo.appns)
                if (typeof chaosEngine.spec.appinfo.appns === 'object') {
                  // Removes any whitespace in '{{workflow.parameters.adminModeNamespace}}'
                  const appns = Object.keys(
                    chaosEngine.spec.appinfo.appns
                  )[0].replace(/\s/g, '');
                  chaosEngine.spec.appinfo.appns = `{${appns}}`;
                }
              // Accumulates "id, id, ..." — note the trailing ", " ends up
              // inside the label selector built below (kept as-is).
              engineInstance += `${chaosEngine.metadata.labels['instance_id']}, `;
            }
            // Update the artifact in template
            const artifactData = artifact;
            artifactData.raw.data = YAML.stringify(chaosEngine);
          });
        }
        if (template.name.includes('revert-')) {
          // Update the args in revert chaos template
          const revertTemplate = template;
          revertTemplate.container.args[0] = `kubectl delete chaosengine -l 'instance_id in (${engineInstance})' -n {{workflow.parameters.adminModeNamespace}} `;
        }
      });
    }
    return YAML.stringify(parsedYaml);
  } catch (err) {
    console.error(err);
    return YAML.stringify(parsedYaml);
  }
};

/**
 * Adds/overwrites the `workflow_name` label on every embedded ChaosEngine.
 * Handles both Workflow (spec) and CronWorkflow (spec.workflowSpec) shapes.
 * Returns the mutated parsed object (not a YAML string); on error the input
 * is returned unchanged.
 */
export const updateWorkflowNameLabel = (
  parsedYaml: any,
  workflowName: string
) => {
  try {
    if (parsedYaml.spec !== undefined) {
      const yamlData =
        parsedYaml.kind === constants.workflow
          ? parsedYaml.spec
          : parsedYaml.spec.workflowSpec;
      yamlData.templates.forEach((template: any) => {
        if (template.inputs && template.inputs.artifacts) {
          template.inputs.artifacts.forEach((artifact: any) => {
            const chaosEngine = YAML.parse(artifact.raw.data);
            validateNamespace(chaosEngine);
            // Condition to check for the kind as ChaosEngine
            if (chaosEngine.kind === 'ChaosEngine') {
              // Merge into existing labels when present; otherwise create.
              if (chaosEngine.metadata.labels !== undefined) {
                chaosEngine.metadata.labels['workflow_name'] = workflowName;
              } else {
                chaosEngine.metadata['labels'] = {
                  workflow_name: workflowName,
                };
              }
              validateNamespace(chaosEngine);
              // Edge Case: Condition to check the appns
              // Required because while parsing the chaos engine
              // '{{workflow.parameters.adminModeNamespace}}' changes to a JSON object
              if (chaosEngine.spec.appinfo && chaosEngine.spec.appinfo.appns)
                if (typeof chaosEngine.spec.appinfo.appns === 'object') {
                  // Removes any whitespace in '{{workflow.parameters.adminModeNamespace}}'
                  const appns = Object.keys(
                    chaosEngine.spec.appinfo.appns
                  )[0].replace(/\s/g, '');
                  chaosEngine.spec.appinfo.appns = `{${appns}}`;
                }
            }
            // Update the artifact in template
            const artifactData = artifact;
            artifactData.raw.data = YAML.stringify(chaosEngine);
          });
        }
      });
    }
    return parsedYaml;
  } catch (err) {
    console.error(err);
    return parsedYaml;
  }
};

/**
 * Collects experiment names from every ChaosEngine embedded in a Workflow or
 * CronWorkflow manifest string. Returns ['none'] for the literal input
 * 'error' and [] when parsing fails. Template index 0 (the entry template)
 * is deliberately skipped.
 */
const parsed = (yaml: string) => {
  const file = yaml;
  if (file === 'error') {
    const testNames = ['none'];
    return testNames;
  }
  let testNames: string[] = [];
  try {
    const parsedYaml = YAML.parse(file as string);
    try {
      if (parsedYaml.kind === 'CronWorkflow') {
        const totalSteps = parsedYaml.spec.workflowSpec.templates.length - 1; // Total Steps in CronWorkflow
        for (let i = 0; i < totalSteps; i++) {
          const TemplateElement = YAML.stringify(
            parsedYaml.spec.workflowSpec.templates[1 + i]
          ); // Accessing Current Step
          if (TemplateElement.match(/kind: ChaosEngine/g)) {
            // Checking if current step contains "kind: ChaosEngine"
            const embeddedYaml =
              parsedYaml.spec.workflowSpec.templates[1 + i].inputs.artifacts[0]
                .raw.data;
            const testName = nameextractor(embeddedYaml);
            testNames.push(...testName);
          }
        }
      } else {
        const totalSteps = parsedYaml.spec.templates.length - 1; // Total Steps in Workflow
        for (let i = 0; i < totalSteps; i++) {
          const TemplateElement = YAML.stringify(
            parsedYaml.spec.templates[1 + i]
          ); // Accessing Current Step
          if (TemplateElement.match(/kind: ChaosEngine/g)) {
            // Checking if current step contains "kind: ChaosEngine"
            const embeddedYaml =
              parsedYaml.spec.templates[1 + i].inputs.artifacts[0].raw.data;
            const testName = nameextractor(embeddedYaml);
            testNames.push(...testName);
          }
        }
      }
    } catch (err) {
      testNames = [];
    } finally {
      // NOTE(review): return-in-finally swallows anything the inner blocks
      // might rethrow; kept as-is to preserve the existing best-effort
      // behavior.
      return testNames;
    }
  } catch (err) {
    testNames = [];
    return testNames;
  }
};

/** Returns metadata.name from a manifest YAML string. */
export const fetchWorkflowNameFromManifest = (manifest: string) => {
  return YAML.parse(manifest).metadata.name;
};

/**
 * Given a placeholder such as '{{workflow.parameters.foo}}', strips the
 * outer characters and surrounding whitespace and returns the third
 * dot-separated segment (e.g. 'foo').
 */
export const getWorkflowParameter = (parameterString: string) => {
  return parameterString
    .substring(1, parameterString.length - 1)
    .replace(/^\s+|\s+$/gm, '')
    .split('.')[2];
};

/**
 * Fills a chaos query template: every '#{}' becomes engineName and every
 * '*{}' becomes namespace. (String.prototype.replaceAll requires ES2021.)
 */
export const generateChaosQuery = (
  chaosQueryStringTemplate: string,
  engineName: string,
  namespace: string
) => {
  const queryStringWithEngineName: string = chaosQueryStringTemplate.replaceAll(
    '#{}',
    engineName
  );
  return queryStringWithEngineName.replaceAll('*{}', namespace);
};

/**
 * Rewrites metadata.namespace and the adminMode workflow parameter value
 * for both Workflow and CronWorkflow manifests. Returns the parsed object
 * (not a YAML string).
 */
export const updateNamespace = (manifest: string, namespace: string) => {
  const updatedManifest = YAML.parse(manifest);
  updatedManifest.metadata.namespace = namespace;
  if (updatedManifest.kind.toLowerCase() === 'workflow')
    updatedManifest.spec.arguments.parameters.forEach(
      (parameter: any, index: number) => {
        if (parameter.name === constants.adminMode) {
          updatedManifest.spec.arguments.parameters[index].value = namespace;
        }
      }
    );
  if (updatedManifest.kind.toLowerCase() === 'cronworkflow')
    updatedManifest.spec.workflowSpec.arguments.parameters.forEach(
      (parameter: any, index: number) => {
        if (parameter.name === constants.adminMode) {
          updatedManifest.spec.workflowSpec.arguments.parameters[index].value =
            namespace;
        }
      }
    );
  return updatedManifest;
};

// This is a utility function for extracting embedded
// yaml as a string for chaosengine with provided name
export const stepEmbeddedYAMLExtractor = (
  manifest: string,
  stepName: string
) => {
  const file = manifest;
  let embeddedYaml = '';
  try {
    const parsedYaml = YAML.parse(file as string);
    try {
      if (parsedYaml.kind === 'CronWorkflow') {
        const totalSteps = parsedYaml.spec.workflowSpec.templates.length - 1; // Total Steps in CronWorkflow
        for (let i = 0; i < totalSteps; i++) {
          if (parsedYaml.spec.workflowSpec.templates[1 + i].name === stepName) {
            embeddedYaml =
              parsedYaml.spec.workflowSpec.templates[1 + i].inputs.artifacts[0]
                .raw.data;
            break;
          }
        }
      } else {
        const totalSteps = parsedYaml.spec.templates.length - 1; // Total Steps in Workflow
        for (let i = 0; i < totalSteps; i++) {
          if (parsedYaml.spec.templates[1 + i].name === stepName) {
            embeddedYaml =
              parsedYaml.spec.templates[1 + i].inputs.artifacts[0].raw.data;
            break;
          }
        }
      }
    } catch (err) {
      embeddedYaml = '';
    } finally {
      // Returns '' when the step was not found or extraction failed.
      return embeddedYaml;
    }
  } catch (err) {
    embeddedYaml = '';
    return embeddedYaml;
  }
};

export default parsed;

/**
 * updateManifestImage updates the image registry of the workflow manifest
 * Rewrites each template container image to either the configured custom
 * registry/repo or the default litmus registry, and attaches imagePullSecrets
 * for private registries. Returns the manifest serialized to YAML.
 */
export const updateManifestImage = (
  parsedYaml: any,
  registryData: ImageRegistryInfo
) => {
  if (registryData.update_registry) {
    if (parsedYaml.spec !== undefined) {
      if (parsedYaml.kind.toLowerCase() === 'workflow') {
        if (
          registryData.image_registry_type.toLocaleLowerCase() === 'private'
        ) {
          parsedYaml.spec['imagePullSecrets'] = [
            {
              name: registryData.secret_name,
            },
          ];
        }
        parsedYaml.spec.templates.forEach((template: any) => {
          if (template.container) {
            if (!registryData.is_default) {
              // Keep only the image's basename, re-prefix with custom registry.
              const imageData = template.container.image.split('/');
              const imageName = imageData[imageData.length - 1];
              template.container.image = `${registryData.image_registry_name}/${registryData.image_repo_name}/${imageName}`;
            } else {
              const imageData = template.container.image.split('/');
              const imageName = imageData[imageData.length - 1];
              template.container.image = `${constants.litmus}/${imageName}`;
            }
          }
        });
      }
      if (parsedYaml.kind.toLowerCase() === 'cronworkflow') {
        if (
          registryData.image_registry_type.toLocaleLowerCase() === 'private'
        ) {
          parsedYaml.spec.workflowSpec['imagePullSecrets'] = [
            {
              name: registryData.secret_name,
            },
          ];
        }
        parsedYaml.spec.workflowSpec.templates.forEach((template: any) => {
          if (template.container) {
            if (!registryData.is_default) {
              const imageData = template.container.image.split('/');
              const imageName = imageData[imageData.length - 1];
              template.container.image = `${registryData.image_registry_name}/${registryData.image_repo_name}/${imageName}`;
            } else {
              const imageData = template.container.image.split('/');
              const imageName = imageData[imageData.length - 1];
              template.container.image = `${constants.litmus}/${imageName}`;
            }
          }
        });
      }
    }
  }
  return YAML.stringify(parsedYaml);
};
the_stack
 * Unit tests for training.ts, focusing on the tf.LayersModel.fitDataset() and
 * tf.LayersModel.evaluateDataset() methods.
 */

import * as tfc from '@tensorflow/tfjs-core';

import {CustomCallback, DEFAULT_YIELD_EVERY_MS} from '../base_callbacks';
import * as tfl from '../index';
import {Logs} from '../logs';
import {describeMathCPUAndGPU, expectTensorsClose} from '../utils/test_utils';
import {FakeNumericDataset} from './dataset_fakes';

// Builds a single-unit dense model whose kernel is zero-initialized, making
// the numeric expectations in the tests below deterministic.
function createDenseModel(): tfl.LayersModel {
  const model = tfl.sequential();
  model.add(tfl.layers.dense(
      {units: 1, inputShape: [1], kernelInitializer: 'zeros'}));
  return model;
}

describeMathCPUAndGPU('LayersModel.fitDataset', () => {
  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // xs = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size)
  //
  // model = tf.keras.Sequential()
  // model.add(tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros'))
  // model.compile(loss='mean_squared_error', optimizer='sgd')
  //
  // history = model.fit(dataset, steps_per_epoch=num_batches, epochs=epochs)
  // print(history.history)
  // print(model.get_weights()[0])
  // print(model.get_weights()[1])
  // ```
  it('1 input, 1 output, no metric, no validation, with batchesPerEpoch',
     async () => {
       const model = createDenseModel();
       model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});

       const batchSize = 8;
       const epochs = 2;
       const batchesPerEpoch = 3;
       // batchesPerEpoch * epochs (= 6) all-ones batches.
       const xTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batchesPerEpoch * epochs,
         xTensorsFunc,
         yTensorsFunc
       });

       // Do a burn-in call to account for initialization of cached tensors
       // (for the memory-leak check below).
       await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1});
       model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);

       const numTensors0 = tfc.memory().numTensors;
       const history =
           await model.fitDataset(dataset, {batchesPerEpoch, epochs});
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
       expect(Object.keys(history.history)).toEqual(['loss']);
       expect(history.history.loss.length).toEqual(2);
       // Expected values come from the reference Python run above.
       expect(history.history.loss[0]).toBeCloseTo(0.923649);
       expect(history.history.loss[1]).toBeCloseTo(0.722993);
       expectTensorsClose(model.getWeights()[0], [0.108621]);
       expectTensorsClose(model.getWeights()[1], [0.108621]);
     });

  // Same scenario, but the epoch size is inferred from dataset.size instead
  // of an explicit batchesPerEpoch.
  it('1 input, 1 output, no metric, no validation, no batchesPerEpoch',
     async () => {
       const model = createDenseModel();
       model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});

       const batchSize = 8;
       const epochs = 2;
       const batchesPerEpoch = 3;
       const xTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batchesPerEpoch,
         xTensorsFunc,
         yTensorsFunc
       });

       // Do a burn-in call to account for initialization of cached tensors
       // (for the memory-leak check below).
       await model.fitDataset(dataset, {epochs: 1});
       model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);

       const numTensors0 = tfc.memory().numTensors;
       const history = await model.fitDataset(dataset, {epochs});
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
       expect(Object.keys(history.history)).toEqual(['loss']);
       expect(history.history.loss.length).toEqual(2);
       expect(history.history.loss[0]).toBeCloseTo(0.923649);
       expect(history.history.loss[1]).toBeCloseTo(0.722993);
       expectTensorsClose(model.getWeights()[0], [0.108621]);
       expectTensorsClose(model.getWeights()[1], [0.108621]);
     });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution():
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // xs = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size)
  //
  // model = tf.keras.Sequential()
  // model.add(tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros'))
  // model.compile(loss='mean_squared_error',
  //               optimizer='sgd',
  //               metrics=['acc'])
  //
  // history = model.fit(dataset, steps_per_epoch=num_batches, epochs=epochs)
  // print(history.history)
  // print(model.get_weights()[0])
  // print(model.get_weights()[1])
  // ```
  it('1 input, 1 output, 1 metric, no validation, with batchesPerEpoch',
     async () => {
       const model = createDenseModel();
       model.compile(
           {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

       const batchSize = 8;
       const epochs = 2;
       const batchesPerEpoch = 3;
       const xTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batchesPerEpoch * epochs,
         xTensorsFunc,
         yTensorsFunc
       });

       // Do a burn-in call to account for initialization of cached tensors
       // (for the memory-leak check below).
       await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1});
       model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);

       const numTensors0 = tfc.memory().numTensors;
       const history =
           await model.fitDataset(dataset, {batchesPerEpoch, epochs});
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
       expect(Object.keys(history.history)).toEqual(['loss', 'acc']);
       expect(history.history.loss.length).toEqual(2);
       expect(history.history.loss[0]).toBeCloseTo(0.923649);
       expect(history.history.loss[1]).toBeCloseTo(0.722993);
       expect(history.history.acc.length).toEqual(2);
       expect(history.history.acc[0]).toBeCloseTo(0);
       expect(history.history.acc[1]).toBeCloseTo(0);
       expectTensorsClose(model.getWeights()[0], [0.108621]);
       expectTensorsClose(model.getWeights()[1], [0.108621]);
     });

  it('1 input, 1 output, 1 metric, no validation, no batchesPerEpoch',
     async () => {
       const model = createDenseModel();
       model.compile(
           {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

       const batchSize = 8;
       const epochs = 2;
       const batchesPerEpoch = 3;
       const xTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batchesPerEpoch,
         xTensorsFunc,
         yTensorsFunc
       });

       // Do a burn-in call to account for initialization of cached tensors
       // (for the memory-leak check below).
       await model.fitDataset(dataset, {epochs: 1});
       model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);

       const numTensors0 = tfc.memory().numTensors;
       const history = await model.fitDataset(dataset, {epochs});
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
       expect(Object.keys(history.history)).toEqual(['loss', 'acc']);
       expect(history.history.loss.length).toEqual(2);
       expect(history.history.loss[0]).toBeCloseTo(0.923649);
       expect(history.history.loss[1]).toBeCloseTo(0.722993);
       expect(history.history.acc.length).toEqual(2);
       expect(history.history.acc[0]).toBeCloseTo(0);
       expect(history.history.acc[1]).toBeCloseTo(0);
       expectTensorsClose(model.getWeights()[0], [0.108621]);
       expectTensorsClose(model.getWeights()[1], [0.108621]);
     });

  // Reference Python tf.keras code.
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution():
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // xs = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size)
  // val_xs = np.zeros([batch_size * 2, 1])
  // val_ys = np.zeros([batch_size * 2, 1])
  //
  // model = tf.keras.Sequential()
  // model.add(tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros'))
  // model.compile(loss='mean_squared_error', optimizer='sgd',
  //               metrics=['accuracy'])
  //
  // class CustomCallback(tf.keras.callbacks.Callback):
  //   def on_epoch_end(self, epoch, logs):
  //     print('epoch = %d; logs = %s' % (epoch, logs))
  //
  // history = model.fit(dataset,
  //                     steps_per_epoch=num_batches,
  //                     epochs=epochs,
  //                     validation_steps=2,
  //                     validation_data=(val_xs, val_ys),
  //                     callbacks=[CustomCallback()])
  // print(history.history)
  // print(model.get_weights()[0])
  // print(model.get_weights()[1])
  // ```
  it('1 input, 1 output, 1 metric, tensor validation, callback, ' +
         'with batchesPerEpoch',
     async () => {
       const model = createDenseModel();
       model.compile(
           {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

       const batchSize = 8;
       const epochs = 2;
       const batchesPerEpoch = 3;
       const xTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
            tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batchesPerEpoch * epochs,
         xTensorsFunc,
         yTensorsFunc
       });
       // Validation data is given as plain tensors (not a dataset).
       const valXs = tfc.zeros([batchSize * 2, 1]);
       const valYs = tfc.zeros([batchSize * 2, 1]);

       // Do a burn-in call to account for initialization of cached
       // tensors (for the memory-leak check below).
       await model.fitDataset(
           dataset,
           {batchesPerEpoch, epochs: 1, validationData: [valXs, valYs]});
       model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);

       const numTensors0 = tfc.memory().numTensors;
       const epochEndValLosses: number[] = [];
       const epochEndValAccs: number[] = [];
       const history = await model.fitDataset(dataset, {
         batchesPerEpoch,
         epochs,
         validationData: [valXs, valYs],
         callbacks: {
           onEpochEnd: async (epoch, logs) => {
             epochEndValLosses.push(logs.val_loss);
             epochEndValAccs.push(logs.val_acc);
           }
         }
       });
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
       expect(Object.keys(history.history).sort()).toEqual([
         'loss', 'acc', 'val_loss', 'val_acc'
       ].sort());
       expect(history.history.loss.length).toEqual(2);
       expect(history.history.loss[0]).toBeCloseTo(0.923649);
       expect(history.history.loss[1]).toBeCloseTo(0.722993);
       expect(history.history.acc.length).toEqual(2);
       expect(history.history.acc[0]).toBeCloseTo(0);
       expect(history.history.acc[1]).toBeCloseTo(0);
       expect(history.history.val_loss.length).toEqual(2);
       expect(history.history.val_loss[0]).toBeCloseTo(0.003321);
       expect(history.history.val_loss[1]).toBeCloseTo(0.011799);
       expect(history.history.val_acc.length).toEqual(2);
       expect(history.history.val_acc[0]).toBeCloseTo(1);
       expect(history.history.val_acc[1]).toBeCloseTo(1);
       expectTensorsClose(model.getWeights()[0], [0.108621]);
       expectTensorsClose(model.getWeights()[1], [0.108621]);

       // The callback must observe the same validation metrics as History.
       expect(epochEndValLosses.length).toEqual(2);
       expect(epochEndValLosses[0]).toBeCloseTo(0.003321);
       expect(epochEndValLosses[1]).toBeCloseTo(0.011799);
       expect(epochEndValAccs.length).toEqual(2);
       expect(epochEndValAccs[0]).toBeCloseTo(1);
       expect(epochEndValAccs[1]).toBeCloseTo(1);
     });

  it('1 input, 1 output, 1 metric, tensor validation, callback, ' +
         'no batchesPerEpoch',
     async () => {
       const model = createDenseModel();
       model.compile(
           {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

       const batchSize = 8;
       const epochs = 2;
       const batchesPerEpoch = 3;
       const xTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batchesPerEpoch,
         xTensorsFunc,
         yTensorsFunc
       });
       const valXs = tfc.zeros([batchSize * 2, 1]);
       const valYs = tfc.zeros([batchSize * 2, 1]);

       // Do a burn-in call to account for initialization of cached
       // tensors (for the memory-leak check below).
await model.fitDataset( dataset, {epochs: 1, validationData: [valXs, valYs]}); model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]); const numTensors0 = tfc.memory().numTensors; const epochEndValLosses: number[] = []; const epochEndValAccs: number[] = []; const history = await model.fitDataset(dataset, { epochs, validationData: [valXs, valYs], callbacks: { onEpochEnd: async (epoch, logs) => { epochEndValLosses.push(logs.val_loss); epochEndValAccs.push(logs.val_acc); } } }); const numTensors1 = tfc.memory().numTensors; expect(numTensors1).toEqual(numTensors0); expect(Object.keys(history.history).sort()).toEqual([ 'loss', 'acc', 'val_loss', 'val_acc' ].sort()); expect(history.history.loss.length).toEqual(2); expect(history.history.loss[0]).toBeCloseTo(0.923649); expect(history.history.loss[1]).toBeCloseTo(0.722993); expect(history.history.acc.length).toEqual(2); expect(history.history.acc[0]).toBeCloseTo(0); expect(history.history.acc[1]).toBeCloseTo(0); expect(history.history.val_loss.length).toEqual(2); expect(history.history.val_loss[0]).toBeCloseTo(0.003321); expect(history.history.val_loss[1]).toBeCloseTo(0.011799); expect(history.history.val_acc.length).toEqual(2); expect(history.history.val_acc[0]).toBeCloseTo(1); expect(history.history.val_acc[1]).toBeCloseTo(1); expectTensorsClose(model.getWeights()[0], [0.108621]); expectTensorsClose(model.getWeights()[1], [0.108621]); expect(epochEndValLosses.length).toEqual(2); expect(epochEndValLosses[0]).toBeCloseTo(0.003321); expect(epochEndValLosses[1]).toBeCloseTo(0.011799); expect(epochEndValAccs.length).toEqual(2); expect(epochEndValAccs[0]).toBeCloseTo(1); expect(epochEndValAccs[1]).toBeCloseTo(1); }); it('Earlier logs are not overwritten', async () => { const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const batchSize = 8; const epochs = 2; const batchesPerEpoch = 3; const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 
1]), tfc.ones([ batchSize, 1 ])]; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([ batchSize, 1 ])]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch, xTensorsFunc, yTensorsFunc }); const trainLogs: Logs[] = []; await model.fitDataset(dataset, { epochs, callbacks: { onEpochEnd: async (epoch, logs) => { trainLogs.push(logs); } } }); expect(trainLogs.length).toEqual(2); // Assert that the the first log and the second logs do not overwrite each // other. expect(trainLogs[0].loss).not.toEqual(trainLogs[1].loss); }); it('dataset.size != null feeds stepPerEpoch to callbacks', async () => { const batchSize = 8; const batchesPerEpoch = 3; const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([ batchSize, 1 ])]; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([ batchSize, 1 ])]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch, xTensorsFunc, yTensorsFunc }); let recordedSteps: number; class TestCallback extends CustomCallback { constructor() { super({ onTrainBegin: async (logs?: Logs) => { recordedSteps = this.params.steps as number; } }); } } const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const epochs = 1; await model.fitDataset(dataset, {epochs, callbacks: new TestCallback()}); expect(dataset.size).toEqual(batchesPerEpoch); expect(recordedSteps).toEqual(batchesPerEpoch); }); it('explicit stepPerEpoch overrides dataset.size for callbacks', async () => { const batchSize = 8; const numBatches = 3; const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([ batchSize, 1 ])]; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([ batchSize, 1 ])]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], 
batchSize, numBatches, xTensorsFunc, yTensorsFunc }); let recordedSteps: number; class TestCallback extends CustomCallback { constructor() { super({ onTrainBegin: async (logs?: Logs) => { recordedSteps = this.params.steps as number; } }); } } const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const epochs = 1; await model.fitDataset(dataset, { epochs, batchesPerEpoch: numBatches - 1, callbacks: new TestCallback() }); expect(dataset.size).toEqual(numBatches); expect(recordedSteps).toEqual(numBatches - 1); }); // Reference Python tf.keras code: // // ```py // import numpy as np // import tensorflow as tf // // tf.enable_eager_execution() // // batch_size = 8 // num_batches = 3 // epochs = 2 // // xs = np.ones([batch_size * num_batches * epochs, 1]) // ys = np.ones([batch_size * num_batches * epochs, 1]) // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size) // val_xs = np.zeros([batch_size * 2, 1]) // val_ys = np.zeros([batch_size * 2, 1]) // val_dataset = tf.data.Dataset.from_tensor_slices( // (val_xs, val_ys)).batch(batch_size) // // model = tf.keras.Sequential() // model.add(tf.keras.layers.Dense( // 1, // input_shape=[1], // kernel_initializer='zeros')) // model.compile(loss='mean_squared_error', // optimizer=tf.train.GradientDescentOptimizer(0.01), // metrics=['accuracy']) // // class CustomCallback(tf.keras.callbacks.Callback): // def on_batch_end(self, batch, logs): // print('batch = %d; logs = %s' % (batch, logs)) // // def on_epoch_end(self, epoch, logs): // print('epoch = %d; logs = %s' % (epoch, logs)) // // history = model.fit(dataset, // steps_per_epoch=num_batches, // epochs=epochs, // batch_size=4, // validation_steps=2, // validation_data=val_dataset, // callbacks=[CustomCallback()]) // print(history.history) // print(model.get_weights()[0]) // print(model.get_weights()[1]) // ``` it('1 input, 1 output, 1 metric, dataset validation, callback, ' + 'with 
batchesPerEpoch', async () => { const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const batchSize = 8; const epochs = 2; const batchesPerEpoch = 3; // Training dataset. const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])]; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch * epochs, xTensorsFunc, yTensorsFunc }); // Validation dataset. const valXTensorsFunc = () => [tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1])]; const valYTensorsFunc = () => [tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1])]; const valDataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch * epochs, xTensorsFunc: valXTensorsFunc, yTensorsFunc: valYTensorsFunc }); // Do a burn-in call to account for initialization of cached // tensors (for the memory-leak check below). 
    // Burn-in call: initializes cached tensors so the memory-leak check
    // below compares steady-state tensor counts.
    await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs,
      validationData: valDataset,
      validationBatches: batchesPerEpoch * epochs
    });
    // Reset the weights to zeros so training starts from a known state.
    model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    // Validation metrics captured by the onEpochEnd callback, checked
    // against the history object below.
    const epochEndValLosses: number[] = [];
    const epochEndValAccs: number[] = [];
    const history = await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs,
      validationData: valDataset,
      validationBatches: batchesPerEpoch * epochs,
      callbacks: {
        onEpochEnd: async (epoch, logs) => {
          epochEndValLosses.push(logs.val_loss);
          epochEndValAccs.push(logs.val_acc);
        }
      }
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history).sort()).toEqual([
      'loss', 'acc', 'val_loss', 'val_acc'
    ].sort());
    // Golden loss/metric values; see the reference Python tf.keras snippets
    // in the comments elsewhere in this file for how they were generated.
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.923649);
    expect(history.history.loss[1]).toBeCloseTo(0.722993);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expect(history.history.val_loss.length).toEqual(2);
    expect(history.history.val_loss[0]).toBeCloseTo(0.003321);
    expect(history.history.val_loss[1]).toBeCloseTo(0.011799);
    expect(history.history.val_acc.length).toEqual(2);
    expect(history.history.val_acc[0]).toBeCloseTo(1);
    expect(history.history.val_acc[1]).toBeCloseTo(1);
    expectTensorsClose(model.getWeights()[0], [0.108621]);
    expectTensorsClose(model.getWeights()[1], [0.108621]);
    // The callback must observe the same validation metrics as recorded in
    // the history object.
    expect(epochEndValLosses.length).toEqual(2);
    expect(epochEndValLosses[0]).toBeCloseTo(0.003321);
    expect(epochEndValLosses[1]).toBeCloseTo(0.011799);
    expect(epochEndValAccs.length).toEqual(2);
    expect(epochEndValAccs[0]).toBeCloseTo(1);
    expect(epochEndValAccs[1]).toBeCloseTo(1);
  });

  it('1 input, 1 output, 1 metric, dataset validation, callback, ' +
      'no batchesPerEpoch', async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;

    // Training dataset: without `batchesPerEpoch`, the dataset (3 batches)
    // is exhausted once per epoch.
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch,
      xTensorsFunc,
      yTensorsFunc
    });

    // Validation dataset: all zeros.
    const valXTensorsFunc = () =>
        [tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]),
         tfc.zeros([batchSize, 1])];
    const valYTensorsFunc = () =>
        [tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1]),
         tfc.zeros([batchSize, 1])];
    const valDataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch,
      xTensorsFunc: valXTensorsFunc,
      yTensorsFunc: valYTensorsFunc
    });

    // Do a burn-in call to account for initialization of cached
    // tensors (for the memory-leak check below).
    await model.fitDataset(dataset, {epochs, validationData: valDataset});
    model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;

    // Validation metrics captured by the callback, cross-checked below.
    const epochEndValLosses: number[] = [];
    const epochEndValAccs: number[] = [];
    const history = await model.fitDataset(dataset, {
      epochs,
      validationData: valDataset,
      callbacks: {
        onEpochEnd: async (epoch, logs) => {
          epochEndValLosses.push(logs.val_loss);
          epochEndValAccs.push(logs.val_acc);
        }
      }
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history).sort()).toEqual([
      'loss', 'acc', 'val_loss', 'val_acc'
    ].sort());
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.923649);
    expect(history.history.loss[1]).toBeCloseTo(0.722993);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expect(history.history.val_loss.length).toEqual(2);
    expect(history.history.val_loss[0]).toBeCloseTo(0.003321);
    expect(history.history.val_loss[1]).toBeCloseTo(0.011799);
    expect(history.history.val_acc.length).toEqual(2);
    expect(history.history.val_acc[0]).toBeCloseTo(1);
    expect(history.history.val_acc[1]).toBeCloseTo(1);
    expectTensorsClose(model.getWeights()[0], [0.108621]);
    expectTensorsClose(model.getWeights()[1], [0.108621]);
    // Callback-observed values must match the history object.
    expect(epochEndValLosses.length).toEqual(2);
    expect(epochEndValLosses[0]).toBeCloseTo(0.003321);
    expect(epochEndValLosses[1]).toBeCloseTo(0.011799);
    expect(epochEndValAccs.length).toEqual(2);
    expect(epochEndValAccs[0]).toBeCloseTo(1);
    expect(epochEndValAccs[1]).toBeCloseTo(1);
  });

  it('Memory leak check with metric and validation, with batchesPerEpoch',
      async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 3;
    const batchesPerEpoch = 3;
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch * epochs
    });
    // Tensor-valued (non-dataset) validation data.
    const valXs = tfc.zeros([batchSize * 2, 1]);
    const valYs = tfc.zeros([batchSize * 2, 1]);

    // Do a burn-in call to account for initialization of cached
    // tensors (for the memory-leak check below).
    await model.fitDataset(
        dataset,
        {batchesPerEpoch, epochs: 1, validationData: [valXs, valYs]});
    model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs,
      validationData: [valXs, valYs],
      callbacks: {
        onEpochEnd: async (epoch, logs) => {
          // Tensor count must hold steady at every epoch boundary.
          expect(tfc.memory().numTensors).toEqual(numTensors0);
        }
      }
    });
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  it('Memory leak check with metric and validation, no batchesPerEpoch',
      async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 3;
    const batchesPerEpoch = 3;
    const dataset = new FakeNumericDataset(
        {xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch});
    // Tensor-valued (non-dataset) validation data.
    const valXs = tfc.zeros([batchSize * 2, 1]);
    const valYs = tfc.zeros([batchSize * 2, 1]);

    // Do a burn-in call to account for initialization of cached
    // tensors (for the memory-leak check below).
    await model.fitDataset(
        dataset, {epochs: 1, validationData: [valXs, valYs]});
    model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    await model.fitDataset(dataset, {
      epochs,
      validationData: [valXs, valYs],
      callbacks: {
        onEpochEnd: async (epoch, logs) => {
          // Tensor count must hold steady at every epoch boundary.
          expect(tfc.memory().numTensors).toEqual(numTensors0);
        }
      }
    });
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution()
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // xs = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size)
  //
  // model = tf.keras.Sequential()
  // model.add(tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros'))
  // model.compile(loss='mean_squared_error', optimizer='sgd',
  //               metrics=['accuracy'])
  //
  // class CustomCallback(tf.keras.callbacks.Callback):
  //   def on_batch_end(self, batch, logs):
  //     print('batch = %d; logs = %s' % (batch, logs))
  //
  // history = model.fit(dataset,
  //                     steps_per_epoch=num_batches,
  //                     epochs=epochs,
  //                     callbacks=[CustomCallback()])
  // print(history.history)
  // ```
  it('1 input, 1 output, 1 metric, no validation, callback, ' +
      'with batchesPerEpoch', async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Six batches: `batchesPerEpoch` (3) batches per epoch x 2 epochs.
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch * epochs,
      xTensorsFunc,
      yTensorsFunc
    });

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1});
    model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;

    // Counters and logs collected by the callbacks, verified below.
    let onTrainBeginCalls = 0;
    let onTrainEndCalls = 0;
    const epochBeginEpochs: number[] = [];
    const epochEndEpochs: number[] = [];
    const batchBeginBatches: number[] = [];
    const batchEndBatches: number[] = [];
    const epochEndLosses: number[] = [];
    const epochEndAccs: number[] = [];
    const batchEndLosses: number[] = [];
    const batchEndAccs: number[] = [];
    const history = await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs,
      callbacks: {
        onTrainBegin: async () => {
          onTrainBeginCalls++;
        },
        onTrainEnd: async () => {
          onTrainEndCalls++;
        },
        onEpochBegin: async (epoch) => {
          epochBeginEpochs.push(epoch);
        },
        onEpochEnd: async (epoch, logs) => {
          epochEndEpochs.push(epoch);
          epochEndLosses.push(logs.loss);
          epochEndAccs.push(logs.acc);
        },
        onBatchBegin: async (batch, logs) => {
          batchBeginBatches.push(batch);
        },
        onBatchEnd: async (batch, logs) => {
          batchEndBatches.push(batch);
          batchEndLosses.push(logs.loss);
          batchEndAccs.push(logs.acc);
        },
      }
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history)).toEqual(['loss', 'acc']);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.923649);
    expect(history.history.loss[1]).toBeCloseTo(0.722993);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expectTensorsClose(model.getWeights()[0], [0.108621]);
    expectTensorsClose(model.getWeights()[1], [0.108621]);
    expect(onTrainBeginCalls).toEqual(1);
    expect(onTrainEndCalls).toEqual(1);
    // Batch indices restart from 0 at each epoch.
    expect(epochBeginEpochs).toEqual([0, 1]);
    expect(epochEndEpochs).toEqual([0, 1]);
    expect(batchBeginBatches).toEqual([0, 1, 2, 0, 1, 2]);
    expect(batchEndBatches).toEqual([0, 1, 2, 0, 1, 2]);
    expect(epochEndLosses.length).toEqual(2);
    expect(epochEndLosses[0]).toBeCloseTo(0.923649);
    expect(epochEndLosses[1]).toBeCloseTo(0.722993);
    expect(epochEndAccs.length).toEqual(2);
    expect(epochEndAccs[0]).toBeCloseTo(0);
    expect(epochEndAccs[1]).toBeCloseTo(0);
    expectTensorsClose(
        batchEndLosses, [1, 0.9216, 0.849347, 0.782758, 0.721390, 0.664832]);
  });

  it('1 input, 1 output, 1 metric, no validation, callback, no batchesPerEpoch',
      async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch,
      xTensorsFunc,
      yTensorsFunc
    });

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {epochs: 1});
    model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;

    // Counters and logs collected by the callbacks, verified below.
    let onTrainBeginCalls = 0;
    let onTrainEndCalls = 0;
    const epochBeginEpochs: number[] = [];
    const epochEndEpochs: number[] = [];
    const batchBeginBatches: number[] = [];
    const batchEndBatches: number[] = [];
    const epochEndLosses: number[] = [];
    const epochEndAccs: number[] = [];
    const batchEndLosses: number[] = [];
    const batchEndAccs: number[] = [];
    const history = await model.fitDataset(dataset, {
      epochs,
      callbacks: {
        onTrainBegin: async () => {
          onTrainBeginCalls++;
        },
        onTrainEnd: async () => {
          onTrainEndCalls++;
        },
        onEpochBegin: async (epoch) => {
          epochBeginEpochs.push(epoch);
        },
        onEpochEnd: async (epoch, logs) => {
          epochEndEpochs.push(epoch);
          epochEndLosses.push(logs.loss);
          epochEndAccs.push(logs.acc);
        },
        onBatchBegin: async (batch, logs) => {
          batchBeginBatches.push(batch);
        },
        onBatchEnd: async (batch, logs) => {
          batchEndBatches.push(batch);
          batchEndLosses.push(logs.loss);
          batchEndAccs.push(logs.acc);
        },
      }
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history)).toEqual(['loss', 'acc']);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.923649);
    expect(history.history.loss[1]).toBeCloseTo(0.722993);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expectTensorsClose(model.getWeights()[0], [0.108621]);
    expectTensorsClose(model.getWeights()[1], [0.108621]);
    expect(onTrainBeginCalls).toEqual(1);
    expect(onTrainEndCalls).toEqual(1);
    // Batch indices restart from 0 at each epoch.
    expect(epochBeginEpochs).toEqual([0, 1]);
    expect(epochEndEpochs).toEqual([0, 1]);
    expect(batchBeginBatches).toEqual([0, 1, 2, 0, 1, 2]);
    expect(batchEndBatches).toEqual([0, 1, 2, 0, 1, 2]);
    expect(epochEndLosses.length).toEqual(2);
    expect(epochEndLosses[0]).toBeCloseTo(0.923649);
    expect(epochEndLosses[1]).toBeCloseTo(0.722993);
    expect(epochEndAccs.length).toEqual(2);
    expect(epochEndAccs[0]).toBeCloseTo(0);
    expect(epochEndAccs[1]).toBeCloseTo(0);
    expectTensorsClose(
        batchEndLosses, [1, 0.9216, 0.849347, 0.782758, 0.721390, 0.664832]);
  });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // input1 = tf.keras.Input(shape = [1], name = 'x1')
  // input2 = tf.keras.Input(shape = [1], name = 'x2')
  // concat = tf.keras.layers.concatenate([input1, input2])
  // y = tf.keras.layers.Dense(
  //     1, kernel_initializer = 'zeros')(concat)
  // model = tf.keras.Model(inputs = [input1, input2], outputs = y)
  // model.compile(
  //     loss = 'mean_squared_error', optimizer = 'sgd', metrics =
  //     ['accuracy'])
  // model.summary()
  // print(input1.name)
  // print(input2.name)
  //
  // xs1 = np.ones([batch_size * num_batches * epochs, 1])
  // xs2 = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices(
  //     ({'x1': xs1, 'x2': xs2}, ys)).batch(batch_size)
  //
  // history = model.fit(dataset,
  //                     steps_per_epoch=num_batches,
  //                     epochs=epochs)
  // print(history.history)
  // print(model.get_weights()[0])
  // print(model.get_weights()[1])
  // ```
  it('2 inputs, 1 output, 1 metric, no validation, with batchesPerEpoch',
      async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Feature tensors are keyed by the model's input names.
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      output[input2.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch * epochs,
      xTensorsFunc,
      yTensorsFunc
    });

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1});
    model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    const history =
        await model.fitDataset(dataset, {batchesPerEpoch, epochs});
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history)).toEqual(['loss', 'acc']);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.888116);
    expect(history.history.loss[1]).toBeCloseTo(0.612685);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expectTensorsClose(model.getWeights()[0], [0.103377, 0.103377]);
    expectTensorsClose(model.getWeights()[1], [0.103377]);
  });

  it('2 inputs, 1 output, 1 metric, no validation, no batchesPerEpoch',
      async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Feature tensors are keyed by the model's input names.
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1])
      ];
      output[input2.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1])
      ];
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch,
      xTensorsFunc,
      yTensorsFunc
    });

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {epochs: 1});
    model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    const history = await model.fitDataset(dataset, {epochs});
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history)).toEqual(['loss', 'acc']);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.888116);
    expect(history.history.loss[1]).toBeCloseTo(0.612685);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expectTensorsClose(model.getWeights()[0], [0.103377, 0.103377]);
    expectTensorsClose(model.getWeights()[1], [0.103377]);
  });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution()
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // input1 = tf.keras.Input(shape = [1], name = 'x1')
  // input2 = tf.keras.Input(shape = [1], name = 'x2')
  // concat = tf.keras.layers.concatenate([input1, input2])
  // y = tf.keras.layers.Dense(
  //     1, kernel_initializer = 'zeros')(concat)
  // model = tf.keras.Model(inputs = [input1, input2], outputs = y)
  // model.compile(
  //     loss='mean_squared_error',
  //     optimizer=tf.train.GradientDescentOptimizer(0.01),
  //     metrics=['accuracy'])
  // model.summary()
  // print(input1.name)
  // print(input2.name)
  //
  // xs1 = np.ones([batch_size * num_batches * epochs, 1])
  // xs2 = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices(
  //     ({'x1': xs1, 'x2': xs2}, ys)).batch(batch_size)
  //
  // val_xs = [np.zeros([batch_size, 1]),
  //           np.zeros([batch_size, 1])]
  // val_ys = np.zeros([batch_size, 1])
  //
  // history = model.fit(dataset,
  //                     steps_per_epoch=num_batches,
  //                     epochs=epochs,
  //                     batch_size=batch_size,
  //                     validation_data=[val_xs, val_ys])
  // print(history.history)
  // print(model.get_weights()[0])
  // print(model.get_weights()[1])
  // ```
  it('2 inputs, 1 output, 1 metric, tensor array validation, ' +
      'with batchesPerEpoch', async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Training data.
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      output[input2.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch * epochs,
      xTensorsFunc,
      yTensorsFunc
    });

    // Validation data, as an array of tensors: [xs, ys].
    const valXs: tfc.Tensor[] =
        [tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1])];
    const valYs = tfc.zeros([batchSize, 1]);

    // Do a burn-in call to account for initialization of cached tensors
    // (for the memory-leak check below).
    await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs: 1,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    const history = await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history).sort()).toEqual([
      'acc', 'loss', 'val_acc', 'val_loss'
    ]);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.888116);
    expect(history.history.loss[1]).toBeCloseTo(0.612685);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expect(history.history.val_loss.length).toEqual(2);
    expect(history.history.val_loss[0]).toBeCloseTo(0.003189);
    expect(history.history.val_loss[1]).toBeCloseTo(0.010687);
    expect(history.history.val_acc.length).toEqual(2);
    expect(history.history.val_acc[0]).toBeCloseTo(1.0);
    expect(history.history.val_acc[1]).toBeCloseTo(1.0);
    expectTensorsClose(model.getWeights()[0], [0.103377, 0.103377]);
    expectTensorsClose(model.getWeights()[1], [0.103377]);
  });

  it('2 inputs, 1 output, 1 metric, tensor array validation, ' +
      'no batchesPerEpoch', async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Training data.
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1])
      ];
      output[input2.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1])
      ];
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch,
      xTensorsFunc,
      yTensorsFunc
    });

    // Validation data, as an array of tensors: [xs, ys].
    const valXs: tfc.Tensor[] =
        [tfc.zeros([batchSize, 1]), tfc.zeros([batchSize, 1])];
    const valYs = tfc.zeros([batchSize, 1]);

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {
      epochs: 1,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    const history = await model.fitDataset(dataset, {
      epochs,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history).sort()).toEqual([
      'acc', 'loss', 'val_acc', 'val_loss'
    ]);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.888116);
    expect(history.history.loss[1]).toBeCloseTo(0.612685);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expect(history.history.val_loss.length).toEqual(2);
    expect(history.history.val_loss[0]).toBeCloseTo(0.003189);
    expect(history.history.val_loss[1]).toBeCloseTo(0.010687);
    expect(history.history.val_acc.length).toEqual(2);
    expect(history.history.val_acc[0]).toBeCloseTo(1.0);
    expect(history.history.val_acc[1]).toBeCloseTo(1.0);
    expectTensorsClose(model.getWeights()[0], [0.103377, 0.103377]);
    expectTensorsClose(model.getWeights()[1], [0.103377]);
  });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution()
  //
  // batch_size = 8
  // num_batches = 3
  // epochs = 2
  //
  // input1 = tf.keras.Input(shape = [1], name = 'x1')
  // input2 = tf.keras.Input(shape = [1], name = 'x2')
  // concat = tf.keras.layers.concatenate([input1, input2])
  // y = tf.keras.layers.Dense(
  //     1, kernel_initializer = 'zeros')(concat)
  // model = tf.keras.Model(inputs = [input1, input2], outputs = y)
  // model.compile(
  //     loss='mean_squared_error',
  //     optimizer=tf.train.GradientDescentOptimizer(0.01),
  //     metrics=['accuracy'])
  // model.summary()
  // print(input1.name)
  // print(input2.name)
  //
  // xs1 = np.ones([batch_size * num_batches * epochs, 1])
  // xs2 = np.ones([batch_size * num_batches * epochs, 1])
  // ys = np.ones([batch_size * num_batches * epochs, 1])
  // dataset = tf.data.Dataset.from_tensor_slices(
  //     ({'x1': xs1, 'x2': xs2}, ys)).batch(batch_size)
  //
  // val_xs = {
  //     'x1': np.zeros([batch_size, 1]),
  //     'x2': np.zeros([batch_size, 1])
  // }
  // val_ys = np.zeros([batch_size, 1])
  //
  // history = model.fit(dataset,
  //                     steps_per_epoch=num_batches,
  //                     epochs=epochs,
  //                     batch_size=batch_size,
  //                     validation_data=[val_xs, val_ys])
  // print(history.history)
  // print(model.get_weights()[0])
  // print(model.get_weights()[1])
  // ```
  // NOTE(review): despite the 'tensor array validation' wording in the next
  // two test titles, the validation xs below are a NamedTensorMap keyed by
  // input name (mirroring the val_xs dict in the Python reference above).
  it('2 input, 1 output, 1 metric, tensor array validation, ' +
      'with batchesPerEpoch', async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Training data.
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      output[input2.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch * epochs,
      xTensorsFunc,
      yTensorsFunc
    });

    // Validation data: a NamedTensorMap of features keyed by input name,
    // plus a target tensor.
    const valXs: tfc.NamedTensorMap = {};
    valXs[input1.name] = tfc.zeros([batchSize, 1]);
    valXs[input2.name] = tfc.zeros([batchSize, 1]);
    const valYs = tfc.zeros([batchSize, 1]);

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs: 1,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    const history = await model.fitDataset(dataset, {
      batchesPerEpoch,
      epochs,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history).sort()).toEqual([
      'acc', 'loss', 'val_acc', 'val_loss'
    ]);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.888116);
    expect(history.history.loss[1]).toBeCloseTo(0.612685);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expect(history.history.val_loss.length).toEqual(2);
    expect(history.history.val_loss[0]).toBeCloseTo(0.003189);
    expect(history.history.val_loss[1]).toBeCloseTo(0.010687);
    expect(history.history.val_acc.length).toEqual(2);
    expect(history.history.val_acc[0]).toBeCloseTo(1.0);
    expect(history.history.val_acc[1]).toBeCloseTo(1.0);
    expectTensorsClose(model.getWeights()[0], [0.103377, 0.103377]);
    expectTensorsClose(model.getWeights()[1], [0.103377]);
  });

  it('2 input, 1 output, 1 metric, tensor array validation, ' +
      'no batchesPerEpoch', async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    // Training data.
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1])
      ];
      output[input2.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1])
      ];
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch,
      xTensorsFunc,
      yTensorsFunc
    });

    // Validation data: a NamedTensorMap of features keyed by input name,
    // plus a target tensor.
    const valXs: tfc.NamedTensorMap = {};
    valXs[input1.name] = tfc.zeros([batchSize, 1]);
    valXs[input2.name] = tfc.zeros([batchSize, 1]);
    const valYs = tfc.zeros([batchSize, 1]);

    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    await model.fitDataset(dataset, {
      epochs: 1,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
    const numTensors0 = tfc.memory().numTensors;
    const history = await model.fitDataset(dataset, {
      epochs,
      validationData: [valXs, valYs],
      validationBatchSize: batchSize
    });
    // No tensors should leak across the fitDataset call.
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(Object.keys(history.history).sort()).toEqual([
      'acc', 'loss', 'val_acc', 'val_loss'
    ]);
    expect(history.history.loss.length).toEqual(2);
    expect(history.history.loss[0]).toBeCloseTo(0.888116);
    expect(history.history.loss[1]).toBeCloseTo(0.612685);
    expect(history.history.acc.length).toEqual(2);
    expect(history.history.acc[0]).toBeCloseTo(0);
    expect(history.history.acc[1]).toBeCloseTo(0);
    expect(history.history.val_loss.length).toEqual(2);
    expect(history.history.val_loss[0]).toBeCloseTo(0.003189);
    expect(history.history.val_loss[1]).toBeCloseTo(0.010687);
    expect(history.history.val_acc.length).toEqual(2);
    expect(history.history.val_acc[0]).toBeCloseTo(1.0);
    expect(history.history.val_acc[1]).toBeCloseTo(1.0);
    expectTensorsClose(model.getWeights()[0], [0.103377, 0.103377]);
    expectTensorsClose(model.getWeights()[1], [0.103377]);
  });

  it('2 input, 1 missing input in dataset, with batchesPerEpoch', async () => {
    // Create a functional model with 2 inputs.
    const input1 = tfl.layers.input({shape: [1]});
    const input2 = tfl.layers.input({shape: [1]});
    const concat = tfl.layers.concatenate().apply([input1, input2]);
    const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                  .apply(concat) as tfl.SymbolicTensor;
    const model = tfl.model({inputs: [input1, input2], outputs: y});
    model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});
    const batchSize = 8;
    const epochs = 2;
    const batchesPerEpoch = 3;
    const xTensorsFunc = () => {
      const output: {[name: string]: tfc.Tensor[]} = {};
      output[input1.name] = [
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
        tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
      ];
      // Note: input2 is missing from the data, by intention.
      return output;
    };
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batchesPerEpoch * epochs,
      xTensorsFunc,
      yTensorsFunc
    });

    // fitDataset should reject the dataset with an informative error naming
    // the missing input key.
    let errorCaught: Error;
    try {
      await model.fitDataset(dataset, {batchesPerEpoch, epochs});
    } catch (err) {
      errorCaught = err;
    }
    expect(errorCaught.message)
        .toEqual(
            'The feature data generated by the dataset lacks the required ' +
            `input key '${input2.name}'.`);
  });

  it('2 input, 1 missing input in dataset, no batchesPerEpoch', async () => {
    // Create a functional model with 2 inputs.
const input1 = tfl.layers.input({shape: [1]}); const input2 = tfl.layers.input({shape: [1]}); const concat = tfl.layers.concatenate().apply([input1, input2]); const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}) .apply(concat) as tfl.SymbolicTensor; const model = tfl.model({inputs: [input1, input2], outputs: y}); model.compile({loss: 'meanSquaredError', optimizer: 'sgd'}); const batchSize = 8; const epochs = 2; const batchesPerEpoch = 3; const xTensorsFunc = () => { const output: {[name: string]: tfc.Tensor[]} = {}; output[input1.name] = [ tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]) ]; // Note: input2 is missing from the data, by intention. return output; }; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([ batchSize, 1 ])]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch, xTensorsFunc, yTensorsFunc }); let errorCaught: Error; try { await model.fitDataset(dataset, {epochs}); } catch (err) { errorCaught = err; } expect(errorCaught.message) .toEqual( 'The feature data generated by the dataset lacks the required ' + `input key '${input2.name}'.`); }); // Reference Python tf.keras code: // // ```py // import numpy as np // import tensorflow as tf // // batch_size = 8 // num_batches = 3 // epochs = 2 // // x = tf.keras.Input(shape=[1], name='x') // // output1 = tf.keras.layers.Dense( // 1, kernel_initializer='zeros')(x) // output2 = tf.keras.layers.Dense( // 1, kernel_initializer='zeros')(x) // // model = tf.keras.Model(inputs=x, outputs=[output1, output2]) // model.compile( // loss = 'mean_squared_error', optimizer = 'sgd', metrics = // ['accuracy']) // model.summary() // output1_name = model.output_names[0] // output2_name = model.output_names[1] // print(output1_name) // print(output2_name) // // xs = np.ones([batch_size * num_batches * epochs, 1]) // ys1 = np.ones([batch_size * num_batches * epochs, 1]) // ys2 = 
// np.ones([batch_size * num_batches * epochs, 1])
// dataset = tf.data.Dataset.from_tensor_slices(
//     (xs, {output1_name: ys1, output2_name: ys2})).batch(batch_size)
//
// history = model.fit(dataset,
//                     steps_per_epoch=num_batches,
//                     epochs=epochs)
// print(history.history)
// print(model.get_weights()[0])
// print(model.get_weights()[1])
// print(model.get_weights()[2])
// print(model.get_weights()[3])
// ```
// One shared input feeding two dense heads; loss and metric should be
// reported per-output plus a combined 'loss'.
it('1 input, 2 outputs, 1 metric, no validation, with batchesPerEpoch',
   async () => {
     // Create a functional model with 2 outputs.
     const x = tfl.layers.input({shape: [1]});
     const output1 =
         tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
         tfl.SymbolicTensor;
     const output2 =
         tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
         tfl.SymbolicTensor;
     const model = tfl.model({inputs: x, outputs: [output1, output2]});
     model.compile(
         {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

     const batchSize = 8;
     const epochs = 2;
     const batchesPerEpoch = 3;
     const xTensorsFunc = () =>
         [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
          tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
          tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
     // Targets are keyed by the model's generated output names.
     const yTensorsFunc = () => {
       const output: {[name: string]: tfc.Tensor[]} = {};
       output[model.outputNames[0]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       output[model.outputNames[1]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       return output;
     };
     const yShape: {[name: string]: number[]} = {};
     yShape[model.outputNames[0]] = [1];
     yShape[model.outputNames[1]] = [1];
     const dataset = new FakeNumericDataset({
       xShape: [1],
       yShape,
       batchSize,
       numBatches: batchesPerEpoch * epochs,
       xTensorsFunc,
       yTensorsFunc
     });

     // Do a burn-in call to account for initialization of cached tensors (for
     // the memory-leak check below).
     await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1});
     model.setWeights([
       tfc.zeros([1, 1]), tfc.zeros([1]), tfc.zeros([1, 1]), tfc.zeros([1])
     ]);

     const numTensors0 = tfc.memory().numTensors;
     const history =
         await model.fitDataset(dataset, {batchesPerEpoch, epochs});
     const numTensors1 = tfc.memory().numTensors;
     // No tensors may leak across the fitDataset() call.
     expect(numTensors1).toEqual(numTensors0);

     const output1AccName = model.outputNames[0] + '_acc';
     const output1LossName = model.outputNames[0] + '_loss';
     const output2AccName = model.outputNames[1] + '_acc';
     const output2LossName = model.outputNames[1] + '_loss';
     expect(Object.keys(history.history)).toEqual([
       'loss', output1LossName, output2LossName, output1AccName,
       output2AccName
     ]);
     // Expected values come from the reference Python run above.
     expect(history.history.loss.length).toEqual(2);
     expect(history.history.loss[0]).toBeCloseTo(1.847297);
     expect(history.history.loss[1]).toBeCloseTo(1.445986);
     expect(history.history[output1LossName].length).toEqual(2);
     expect(history.history[output1LossName][0]).toBeCloseTo(0.923648);
     expect(history.history[output1LossName][1]).toBeCloseTo(0.722993);
     expect(history.history[output2LossName].length).toEqual(2);
     expect(history.history[output2LossName][0]).toBeCloseTo(0.923648);
     expect(history.history[output2LossName][1]).toBeCloseTo(0.722993);
     expect(history.history[output1AccName].length).toEqual(2);
     expect(history.history[output1AccName][0]).toBeCloseTo(0);
     expect(history.history[output1AccName][1]).toBeCloseTo(0);
     expect(history.history[output2AccName].length).toEqual(2);
     expect(history.history[output2AccName][0]).toBeCloseTo(0);
     expect(history.history[output2AccName][1]).toBeCloseTo(0);
     expectTensorsClose(model.getWeights()[0], [0.108621]);
     expectTensorsClose(model.getWeights()[1], [0.108621]);
     expectTensorsClose(model.getWeights()[2], [0.108621]);
     expectTensorsClose(model.getWeights()[3], [0.108621]);
   });

// Same two-output scenario, epoch boundary from dataset exhaustion.
it('1 input, 2 outputs, 1 metric, no validation, no batchesPerEpoch',
   async () => {
     // Create a functional model
  // with 2 outputs.
  const x = tfl.layers.input({shape: [1]});
  const output1 =
      tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
      tfl.SymbolicTensor;
  const output2 =
      tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
      tfl.SymbolicTensor;
  const model = tfl.model({inputs: x, outputs: [output1, output2]});
  model.compile(
      {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

  const batchSize = 8;
  const epochs = 2;
  const batchesPerEpoch = 3;
  const xTensorsFunc = () =>
      [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
        batchSize, 1
      ])];
  // Targets keyed by the model's generated output names.
  const yTensorsFunc = () => {
    const output: {[name: string]: tfc.Tensor[]} = {};
    output[model.outputNames[0]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    output[model.outputNames[1]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    return output;
  };
  const yShape: {[name: string]: number[]} = {};
  yShape[model.outputNames[0]] = [1];
  yShape[model.outputNames[1]] = [1];
  const dataset = new FakeNumericDataset({
    xShape: [1],
    yShape,
    batchSize,
    numBatches: batchesPerEpoch,
    xTensorsFunc,
    yTensorsFunc
  });

  // Do a burn-in call to account for initialization of cached tensors (for
  // the memory-leak check below).
  await model.fitDataset(dataset, {epochs: 1});
  model.setWeights([
    tfc.zeros([1, 1]), tfc.zeros([1]), tfc.zeros([1, 1]), tfc.zeros([1])
  ]);

  const numTensors0 = tfc.memory().numTensors;
  const history = await model.fitDataset(dataset, {epochs});
  const numTensors1 = tfc.memory().numTensors;
  // No tensors may leak across the fitDataset() call.
  expect(numTensors1).toEqual(numTensors0);

  const output1AccName = model.outputNames[0] + '_acc';
  const output1LossName = model.outputNames[0] + '_loss';
  const output2AccName = model.outputNames[1] + '_acc';
  const output2LossName = model.outputNames[1] + '_loss';
  expect(Object.keys(history.history)).toEqual([
    'loss', output1LossName, output2LossName, output1AccName, output2AccName
  ]);
  expect(history.history.loss.length).toEqual(2);
  expect(history.history.loss[0]).toBeCloseTo(1.847297);
  expect(history.history.loss[1]).toBeCloseTo(1.445986);
  expect(history.history[output1LossName].length).toEqual(2);
  expect(history.history[output1LossName][0]).toBeCloseTo(0.923648);
  expect(history.history[output1LossName][1]).toBeCloseTo(0.722993);
  expect(history.history[output2LossName].length).toEqual(2);
  expect(history.history[output2LossName][0]).toBeCloseTo(0.923648);
  expect(history.history[output2LossName][1]).toBeCloseTo(0.722993);
  expect(history.history[output1AccName].length).toEqual(2);
  expect(history.history[output1AccName][0]).toBeCloseTo(0);
  expect(history.history[output1AccName][1]).toBeCloseTo(0);
  expect(history.history[output2AccName].length).toEqual(2);
  expect(history.history[output2AccName][0]).toBeCloseTo(0);
  expect(history.history[output2AccName][1]).toBeCloseTo(0);
  expectTensorsClose(model.getWeights()[0], [0.108621]);
  expectTensorsClose(model.getWeights()[1], [0.108621]);
  expectTensorsClose(model.getWeights()[2], [0.108621]);
  expectTensorsClose(model.getWeights()[3], [0.108621]);
});

// Reference Python tf.keras code:
//
// ```py
// import numpy as np
// import tensorflow as tf
//
// batch_size = 8
// num_batches = 3
// epochs = 2
//
// x = tf.keras.Input(shape=[1], name='x')
//
// output1 = tf.keras.layers.Dense(
//     1, kernel_initializer='zeros')(x)
// output2 = tf.keras.layers.Dense(
//     1, kernel_initializer='zeros')(x)
//
// model = tf.keras.Model(inputs=x, outputs=[output1, output2])
// model.compile(
//     loss = 'mean_squared_error', optimizer = 'sgd', metrics =
//     ['accuracy'])
// model.summary()
// output1_name = model.output_names[0]
// output2_name = model.output_names[1]
// print(output1_name)
// print(output2_name)
//
// xs = np.ones([batch_size * num_batches * epochs, 1])
// ys1 = np.ones([batch_size * num_batches * epochs, 1])
// ys2 = np.ones([batch_size * num_batches * epochs, 1])
// dataset = tf.data.Dataset.from_tensor_slices(
//     (xs, {output1_name: ys1, output2_name: ys2})).batch(batch_size)
//
// val_xs = np.zeros([batch_size, 1])
// val_ys = {output1_name: np.zeros([batch_size, 1]),
//           output1_name: np.zeros([batch_size, 1])}
//
// history = model.fit(dataset,
//                     steps_per_epoch=num_batches,
//                     epochs=epochs,
//                     validation_data=[val_xs, val_ys])
// print(history.history)
// print(model.get_weights()[0])
// print(model.get_weights()[1])
// print(model.get_weights()[2])
// print(model.get_weights()[3])
// ```
// Two-output model validated with an [xs, ys] tensor pair; validation
// metrics are reported under 'val_'-prefixed keys, ahead of the training
// metrics, in the history.
it('1 input, 2 outputs, 1 metric, tensor array validation, ' +
       'with batchesPerEpoch',
   async () => {
     // Create a functional model with 2 outputs.
     const x = tfl.layers.input({shape: [1]});
     const output1 =
         tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
         tfl.SymbolicTensor;
     const output2 =
         tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
         tfl.SymbolicTensor;
     const model = tfl.model({inputs: x, outputs: [output1, output2]});
     model.compile(
         {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

     const batchSize = 8;
     const epochs = 2;
     const batchesPerEpoch = 3;
     const xTensorsFunc = () =>
         [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
          tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
          tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
     const yTensorsFunc = () => {
       const output: {[name: string]: tfc.Tensor[]} = {};
       output[model.outputNames[0]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       output[model.outputNames[1]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       return output;
     };
     const yShape: {[name: string]: number[]} = {};
     yShape[model.outputNames[0]] = [1];
     yShape[model.outputNames[1]] = [1];
     const dataset = new FakeNumericDataset({
       xShape: [1],
       yShape,
       batchSize,
       numBatches: batchesPerEpoch * epochs,
       xTensorsFunc,
       yTensorsFunc
     });

     // Validation data.
     const valXs = tfc.zeros([batchSize, 1]);
     const valYs: tfc.NamedTensorMap = {};
     valYs[model.outputNames[0]] = tfc.zeros([batchSize, 1]);
     valYs[model.outputNames[1]] = tfc.zeros([batchSize, 1]);

     // Do a burn-in call to account for initialization of cached tensors (for
     // the memory-leak check below).
     await model.fitDataset(dataset, {
       batchesPerEpoch,
       epochs: 1,
       validationData: [valXs, valYs],
       validationBatchSize: batchSize
     });
     model.setWeights([
       tfc.zeros([1, 1]), tfc.zeros([1]), tfc.zeros([1, 1]), tfc.zeros([1])
     ]);

     const history = await model.fitDataset(dataset, {
       batchesPerEpoch,
       epochs,
       validationData: [valXs, valYs],
       validationBatchSize: batchSize
     });

     const output1AccName = model.outputNames[0] + '_acc';
     const output1LossName = model.outputNames[0] + '_loss';
     const output2AccName = model.outputNames[1] + '_acc';
     const output2LossName = model.outputNames[1] + '_loss';
     const valOutput1AccName = 'val_' + output1AccName;
     const valOutput1LossName = 'val_' + output1LossName;
     const valOutput2AccName = 'val_' + output2AccName;
     const valOutput2LossName = 'val_' + output2LossName;
     expect(Object.keys(history.history)).toEqual([
       'val_loss', valOutput1LossName, valOutput2LossName, valOutput1AccName,
       valOutput2AccName, 'loss', output1LossName, output2LossName,
       output1AccName, output2AccName
     ]);
     expect(history.history.loss.length).toEqual(2);
     expect(history.history.loss[0]).toBeCloseTo(1.847297);
     expect(history.history.loss[1]).toBeCloseTo(1.445986);
     expect(history.history[output1LossName].length).toEqual(2);
     expect(history.history[output1LossName][0]).toBeCloseTo(0.923648);
     expect(history.history[output1LossName][1]).toBeCloseTo(0.722993);
     expect(history.history[output2LossName].length).toEqual(2);
     expect(history.history[output2LossName][0]).toBeCloseTo(0.923648);
     expect(history.history[output2LossName][1]).toBeCloseTo(0.722993);
     expect(history.history[output1AccName].length).toEqual(2);
     expect(history.history[output1AccName][0]).toBeCloseTo(0);
     expect(history.history[output1AccName][1]).toBeCloseTo(0);
     expect(history.history[output2AccName].length).toEqual(2);
     expect(history.history[output2AccName][0]).toBeCloseTo(0);
     expect(history.history[output2AccName][1]).toBeCloseTo(0);
     expect(history.history.val_loss.length).toEqual(2);
     expect(history.history.val_loss[0]).toBeCloseTo(0.003321);
     expect(history.history.val_loss[1]).toBeCloseTo(0.011798);
     expect(history.history[valOutput1LossName].length).toEqual(2);
     expect(history.history[valOutput1LossName][0]).toBeCloseTo(0.006642);
     expect(history.history[valOutput1LossName][1]).toBeCloseTo(0.023597);
     expect(history.history[valOutput2LossName].length).toEqual(2);
     expect(history.history[valOutput2LossName][0]).toBeCloseTo(0.003321);
     expect(history.history[valOutput2LossName][1]).toBeCloseTo(0.011798);
     // NOTE(review): the output1 val-acc expectations duplicate the val-loss
     // values above (while output2's val-acc is 1.0) — verify against the
     // reference run that this is intended.
     expect(history.history[valOutput1AccName].length).toEqual(2);
     expect(history.history[valOutput1AccName][0]).toBeCloseTo(0.003321);
     expect(history.history[valOutput1AccName][1]).toBeCloseTo(0.011798);
     expect(history.history[valOutput2AccName].length).toEqual(2);
     expect(history.history[valOutput2AccName][0]).toBeCloseTo(1);
     expect(history.history[valOutput2AccName][1]).toBeCloseTo(1);
     expectTensorsClose(model.getWeights()[0], [0.108621]);
     expectTensorsClose(model.getWeights()[1], [0.108621]);
     expectTensorsClose(model.getWeights()[2], [0.108621]);
     expectTensorsClose(model.getWeights()[3], [0.108621]);
   });

// Same tensor-pair validation scenario, epochs from dataset exhaustion.
it('1 input, 2 outputs, 1 metric, tensor array validation, ' +
       'no batchesPerEpoch',
   async () => {
     // Create a functional model with 2 outputs.
  const x = tfl.layers.input({shape: [1]});
  const output1 =
      tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
      tfl.SymbolicTensor;
  const output2 =
      tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
      tfl.SymbolicTensor;
  const model = tfl.model({inputs: x, outputs: [output1, output2]});
  model.compile(
      {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

  const batchSize = 8;
  const epochs = 2;
  const batchesPerEpoch = 3;
  const xTensorsFunc = () =>
      [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
        batchSize, 1
      ])];
  // Targets keyed by the model's generated output names.
  const yTensorsFunc = () => {
    const output: {[name: string]: tfc.Tensor[]} = {};
    output[model.outputNames[0]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    output[model.outputNames[1]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    return output;
  };
  const yShape: {[name: string]: number[]} = {};
  yShape[model.outputNames[0]] = [1];
  yShape[model.outputNames[1]] = [1];
  const dataset = new FakeNumericDataset({
    xShape: [1],
    yShape,
    batchSize,
    numBatches: batchesPerEpoch,
    xTensorsFunc,
    yTensorsFunc
  });

  // Validation data.
  const valXs = tfc.zeros([batchSize, 1]);
  const valYs: tfc.NamedTensorMap = {};
  valYs[model.outputNames[0]] = tfc.zeros([batchSize, 1]);
  valYs[model.outputNames[1]] = tfc.zeros([batchSize, 1]);

  // Do a burn-in call to account for initialization of cached tensors (for
  // the memory-leak check below).
  await model.fitDataset(dataset, {
    epochs: 1,
    validationData: [valXs, valYs],
    validationBatchSize: batchSize
  });
  model.setWeights([
    tfc.zeros([1, 1]), tfc.zeros([1]), tfc.zeros([1, 1]), tfc.zeros([1])
  ]);

  const history = await model.fitDataset(dataset, {
    epochs,
    validationData: [valXs, valYs],
    validationBatchSize: batchSize
  });

  const output1AccName = model.outputNames[0] + '_acc';
  const output1LossName = model.outputNames[0] + '_loss';
  const output2AccName = model.outputNames[1] + '_acc';
  const output2LossName = model.outputNames[1] + '_loss';
  const valOutput1AccName = 'val_' + output1AccName;
  const valOutput1LossName = 'val_' + output1LossName;
  const valOutput2AccName = 'val_' + output2AccName;
  const valOutput2LossName = 'val_' + output2LossName;
  // Validation entries precede training entries in the history keys.
  expect(Object.keys(history.history)).toEqual([
    'val_loss', valOutput1LossName, valOutput2LossName, valOutput1AccName,
    valOutput2AccName, 'loss', output1LossName, output2LossName,
    output1AccName, output2AccName
  ]);
  expect(history.history.loss.length).toEqual(2);
  expect(history.history.loss[0]).toBeCloseTo(1.847297);
  expect(history.history.loss[1]).toBeCloseTo(1.445986);
  expect(history.history[output1LossName].length).toEqual(2);
  expect(history.history[output1LossName][0]).toBeCloseTo(0.923648);
  expect(history.history[output1LossName][1]).toBeCloseTo(0.722993);
  expect(history.history[output2LossName].length).toEqual(2);
  expect(history.history[output2LossName][0]).toBeCloseTo(0.923648);
  expect(history.history[output2LossName][1]).toBeCloseTo(0.722993);
  expect(history.history[output1AccName].length).toEqual(2);
  expect(history.history[output1AccName][0]).toBeCloseTo(0);
  expect(history.history[output1AccName][1]).toBeCloseTo(0);
  expect(history.history[output2AccName].length).toEqual(2);
  expect(history.history[output2AccName][0]).toBeCloseTo(0);
  expect(history.history[output2AccName][1]).toBeCloseTo(0);
  expect(history.history.val_loss.length).toEqual(2);
  expect(history.history.val_loss[0]).toBeCloseTo(0.003321);
  expect(history.history.val_loss[1]).toBeCloseTo(0.011798);
  expect(history.history[valOutput1LossName].length).toEqual(2);
  expect(history.history[valOutput1LossName][0]).toBeCloseTo(0.006642);
  expect(history.history[valOutput1LossName][1]).toBeCloseTo(0.023597);
  expect(history.history[valOutput2LossName].length).toEqual(2);
  expect(history.history[valOutput2LossName][0]).toBeCloseTo(0.003321);
  expect(history.history[valOutput2LossName][1]).toBeCloseTo(0.011798);
  // NOTE(review): output1's val-acc expectations duplicate the val-loss
  // values — confirm against the reference run.
  expect(history.history[valOutput1AccName].length).toEqual(2);
  expect(history.history[valOutput1AccName][0]).toBeCloseTo(0.003321);
  expect(history.history[valOutput1AccName][1]).toBeCloseTo(0.011798);
  expect(history.history[valOutput2AccName].length).toEqual(2);
  expect(history.history[valOutput2AccName][0]).toBeCloseTo(1);
  expect(history.history[valOutput2AccName][1]).toBeCloseTo(1);
  expectTensorsClose(model.getWeights()[0], [0.108621]);
  expectTensorsClose(model.getWeights()[1], [0.108621]);
  expectTensorsClose(model.getWeights()[2], [0.108621]);
  expectTensorsClose(model.getWeights()[3], [0.108621]);
});

// fitDataset() must fail fast when the dataset's target map omits one of
// the model's outputs (steps-based epochs variant).
it('2 outputs, 1 missing output in dataset, with batchesPerEpoch',
   async () => {
     // Create a functional model with 2 outputs.
  const x = tfl.layers.input({shape: [1]});
  const output1 =
      tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
      tfl.SymbolicTensor;
  const output2 =
      tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
      tfl.SymbolicTensor;
  const model = tfl.model({inputs: x, outputs: [output1, output2]});
  model.compile(
      {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

  const batchSize = 8;
  const epochs = 2;
  const batchesPerEpoch = 3;
  const xTensorsFunc = () =>
      [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
       tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
       tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
  // Only the first output's targets are provided, by intention.
  const yTensorsFunc = () => {
    const output: {[name: string]: tfc.Tensor[]} = {};
    output[model.outputNames[0]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
    ];
    return output;
  };
  const yShape: {[name: string]: number[]} = {};
  yShape[model.outputNames[0]] = [1];
  yShape[model.outputNames[1]] = [1];
  const dataset = new FakeNumericDataset({
    xShape: [1],
    yShape,
    batchSize,
    numBatches: batchesPerEpoch * epochs,
    xTensorsFunc,
    yTensorsFunc
  });

  let errorCaught: Error;
  try {
    await model.fitDataset(dataset, {batchesPerEpoch, epochs});
  } catch (err) {
    errorCaught = err;
  }
  expect(errorCaught.message)
      .toEqual(
          'The feature data generated by the dataset lacks the required ' +
          `output key '${model.outputNames[1]}'.`);
});

// Same missing-output check, epochs from dataset exhaustion.
it('2 outputs, 1 missing output in dataset, no batchesPerEpoch',
   async () => {
     // Create a functional model with 2 outputs.
     const x = tfl.layers.input({shape: [1]});
     const output1 =
         tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
         tfl.SymbolicTensor;
     const output2 =
         tfl.layers.dense({units: 1, kernelInitializer: 'zeros'}).apply(x) as
         tfl.SymbolicTensor;
     const model = tfl.model({inputs: x, outputs: [output1, output2]});
     model.compile(
         {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

     const batchSize = 8;
     const epochs = 2;
     const batchesPerEpoch = 3;
     const xTensorsFunc = () =>
         [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
          tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
          tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])];
     // Only the first output's targets are provided, by intention.
     const yTensorsFunc = () => {
       const output: {[name: string]: tfc.Tensor[]} = {};
       output[model.outputNames[0]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       return output;
     };
     const yShape: {[name: string]: number[]} = {};
     yShape[model.outputNames[0]] = [1];
     yShape[model.outputNames[1]] = [1];
     const dataset = new FakeNumericDataset({
       xShape: [1],
       yShape,
       batchSize,
       numBatches: batchesPerEpoch,
       xTensorsFunc,
       yTensorsFunc
     });

     let errorCaught: Error;
     try {
       await model.fitDataset(dataset, {epochs});
     } catch (err) {
       errorCaught = err;
     }
     expect(errorCaught.message)
         .toEqual(
             'The feature data generated by the dataset lacks the required ' +
             `output key '${model.outputNames[1]}'.`);
   });

// Reference Python tf.keras code:
//
// ```py
// import numpy as np
// import tensorflow as tf
//
// batch_size = 8
// num_batches = 3
// epochs = 2
//
// input1 = tf.keras.Input(shape=[1], name='x1')
// input2 = tf.keras.Input(shape=[1], name='x2')
// concat = tf.keras.layers.concatenate([input1, input2])
//
// output1 = tf.keras.layers.Dense(
//     1, kernel_initializer='zeros')(concat)
// output2 = tf.keras.layers.Dense(
//     1, kernel_initializer='zeros')(concat)
//
// model = tf.keras.Model(
//     inputs=[input1, input2],
//
// outputs=[output1, output2])
// model.compile(
//     loss = 'mean_squared_error', optimizer = 'sgd', metrics =
//     ['accuracy'])
// model.summary()
// output1_name = model.output_names[0]
// output2_name = model.output_names[1]
// print(x1)
// print(x2)
// print(output1_name)
// print(output2_name)
//
// xs1 = np.ones([batch_size * num_batches * epochs, 1])
// xs2 = np.ones([batch_size * num_batches * epochs, 1])
// ys1 = np.ones([batch_size * num_batches * epochs, 1])
// ys2 = np.ones([batch_size * num_batches * epochs, 1])
// dataset = tf.data.Dataset.from_tensor_slices(
//     ({
//       x1: xs1,
//       x2: xs2
//     },{
//       output1_name: ys1,
//       output2_name: ys2
//     })).batch(batch_size)
//
// history = model.fit(dataset,
//                     steps_per_epoch=num_batches,
//                     epochs=epochs)
// print(history.history)
// print(model.get_weights()[0])
// print(model.get_weights()[1])
// print(model.get_weights()[2])
// print(model.get_weights()[3])
// ```
// Two named inputs and two dense heads: both the feature map and the
// target map are keyed dictionaries.
it('2 inputs, 2 outputs, 1 metric, no validation, with batchesPerEpoch',
   async () => {
     // Create a functional model with 2 inputs and 2 outputs.
     const input1 = tfl.layers.input({shape: [1]});
     const input2 = tfl.layers.input({shape: [1]});
     const concat = tfl.layers.concatenate().apply([input1, input2]);
     const output1 = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                         .apply(concat) as tfl.SymbolicTensor;
     const output2 = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                         .apply(concat) as tfl.SymbolicTensor;
     const model =
         tfl.model({inputs: [input1, input2], outputs: [output1, output2]});
     model.compile(
         {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

     const batchSize = 8;
     const epochs = 2;
     const batchesPerEpoch = 3;

     // Training data.
     const xTensorsFunc = () => {
       const output: {[name: string]: tfc.Tensor[]} = {};
       output[input1.name] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       output[input2.name] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       return output;
     };
     const yTensorsFunc = () => {
       const output: {[name: string]: tfc.Tensor[]} = {};
       output[model.outputNames[0]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       output[model.outputNames[1]] = [
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
         tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])
       ];
       return output;
     };
     const yShape: {[name: string]: number[]} = {};
     yShape[model.outputNames[0]] = [1];
     yShape[model.outputNames[1]] = [1];
     const dataset = new FakeNumericDataset({
       xShape: [1],
       yShape,
       batchSize,
       numBatches: batchesPerEpoch * epochs,
       xTensorsFunc,
       yTensorsFunc
     });

     // Do a burn-in call to account for initialization of cached tensors (for
     // the memory-leak check below).
     await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1});
     model.setWeights([
       tfc.zeros([2, 1]), tfc.zeros([1]), tfc.zeros([2, 1]), tfc.zeros([1])
     ]);

     const numTensors0 = tfc.memory().numTensors;
     const history =
         await model.fitDataset(dataset, {batchesPerEpoch, epochs});
     const numTensors1 = tfc.memory().numTensors;
     // No tensors may leak across the fitDataset() call.
     expect(numTensors1).toEqual(numTensors0);

     const output1AccName = model.outputNames[0] + '_acc';
     const output1LossName = model.outputNames[0] + '_loss';
     const output2AccName = model.outputNames[1] + '_acc';
     const output2LossName = model.outputNames[1] + '_loss';
     expect(Object.keys(history.history)).toEqual([
       'loss', output1LossName, output2LossName, output1AccName,
       output2AccName
     ]);
     expect(history.history.loss.length).toEqual(2);
     expect(history.history.loss[0]).toBeCloseTo(1.776232);
     expect(history.history.loss[1]).toBeCloseTo(1.225369);
     expect(history.history[output1LossName].length).toEqual(2);
     expect(history.history[output1LossName][0]).toBeCloseTo(0.888116);
     expect(history.history[output1LossName][1]).toBeCloseTo(0.612684);
     expect(history.history[output2LossName].length).toEqual(2);
     expect(history.history[output2LossName][0]).toBeCloseTo(0.888116);
     expect(history.history[output2LossName][1]).toBeCloseTo(0.612684);
     expect(history.history[output1AccName].length).toEqual(2);
     expect(history.history[output1AccName][0]).toBeCloseTo(0);
     expect(history.history[output1AccName][1]).toBeCloseTo(0);
     expect(history.history[output2AccName].length).toEqual(2);
     expect(history.history[output2AccName][0]).toBeCloseTo(0);
     expect(history.history[output2AccName][1]).toBeCloseTo(0);
     expectTensorsClose(model.getWeights()[0], [0.103376, 0.103376]);
     expectTensorsClose(model.getWeights()[1], [0.103376]);
     expectTensorsClose(model.getWeights()[2], [0.103376, 0.103376]);
     expectTensorsClose(model.getWeights()[3], [0.103376]);
   });

// Same two-in/two-out scenario, epochs from dataset exhaustion.
it('2 inputs, 2 outputs, 1 metric, no validation, no batchesPerEpoch',
   async () => {
     // Create a functional model with 2 inputs and 2 outputs.
  const input1 = tfl.layers.input({shape: [1]});
  const input2 = tfl.layers.input({shape: [1]});
  const concat = tfl.layers.concatenate().apply([input1, input2]);
  const output1 = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                      .apply(concat) as tfl.SymbolicTensor;
  const output2 = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                      .apply(concat) as tfl.SymbolicTensor;
  const model =
      tfl.model({inputs: [input1, input2], outputs: [output1, output2]});
  model.compile(
      {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});

  const batchSize = 8;
  const epochs = 2;
  const batchesPerEpoch = 3;

  // Training data.
  const xTensorsFunc = () => {
    const output: {[name: string]: tfc.Tensor[]} = {};
    output[input1.name] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    output[input2.name] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    return output;
  };
  const yTensorsFunc = () => {
    const output: {[name: string]: tfc.Tensor[]} = {};
    output[model.outputNames[0]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    output[model.outputNames[1]] = [
      tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
      tfc.ones([batchSize, 1])
    ];
    return output;
  };
  const yShape: {[name: string]: number[]} = {};
  yShape[model.outputNames[0]] = [1];
  yShape[model.outputNames[1]] = [1];
  const dataset = new FakeNumericDataset({
    xShape: [1],
    yShape,
    batchSize,
    numBatches: batchesPerEpoch,
    xTensorsFunc,
    yTensorsFunc
  });

  // Do a burn-in call to account for initialization of cached tensors (for
  // the memory-leak check below).
  await model.fitDataset(dataset, {epochs: 1});
  model.setWeights([
    tfc.zeros([2, 1]), tfc.zeros([1]), tfc.zeros([2, 1]), tfc.zeros([1])
  ]);

  const numTensors0 = tfc.memory().numTensors;
  const history = await model.fitDataset(dataset, {epochs});
  const numTensors1 = tfc.memory().numTensors;
  // No tensors may leak across the fitDataset() call.
  expect(numTensors1).toEqual(numTensors0);

  const output1AccName = model.outputNames[0] + '_acc';
  const output1LossName = model.outputNames[0] + '_loss';
  const output2AccName = model.outputNames[1] + '_acc';
  const output2LossName = model.outputNames[1] + '_loss';
  expect(Object.keys(history.history)).toEqual([
    'loss', output1LossName, output2LossName, output1AccName, output2AccName
  ]);
  expect(history.history.loss.length).toEqual(2);
  expect(history.history.loss[0]).toBeCloseTo(1.776232);
  expect(history.history.loss[1]).toBeCloseTo(1.225369);
  expect(history.history[output1LossName].length).toEqual(2);
  expect(history.history[output1LossName][0]).toBeCloseTo(0.888116);
  expect(history.history[output1LossName][1]).toBeCloseTo(0.612684);
  expect(history.history[output2LossName].length).toEqual(2);
  expect(history.history[output2LossName][0]).toBeCloseTo(0.888116);
  expect(history.history[output2LossName][1]).toBeCloseTo(0.612684);
  expect(history.history[output1AccName].length).toEqual(2);
  expect(history.history[output1AccName][0]).toBeCloseTo(0);
  expect(history.history[output1AccName][1]).toBeCloseTo(0);
  expect(history.history[output2AccName].length).toEqual(2);
  expect(history.history[output2AccName][0]).toBeCloseTo(0);
  expect(history.history[output2AccName][1]).toBeCloseTo(0);
  expectTensorsClose(model.getWeights()[0], [0.103376, 0.103376]);
  expectTensorsClose(model.getWeights()[1], [0.103376]);
  expectTensorsClose(model.getWeights()[2], [0.103376, 0.103376]);
  expectTensorsClose(model.getWeights()[3], [0.103376]);
});

// When batchesPerEpoch * epochs exceeds the total number of batches the
// iterator can yield, fitDataset() should warn rather than fail.
it('Exhausting iterator with batchesPerEpoch throws warning', async () => {
  const model = createDenseModel();
  model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});
  const
batchSize = 8; const batchesPerEpoch = 3; const dataset = new FakeNumericDataset( {xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch}); // Do a burn-in call to account for initialization of cached tensors (for // the memory-leak check below). await model.fitDataset(dataset, {batchesPerEpoch, epochs: 1}); model.setWeights([tfc.zeros([1, 1]), tfc.zeros([1])]); const warningMessages: string[] = []; spyOn(console, 'warn') .and.callFake((msg: string) => warningMessages.push(msg)); const numTensors0 = tfc.memory().numTensors; const epochs = 3; const history = await model.fitDataset(dataset, {batchesPerEpoch, epochs}); const numTensors1 = tfc.memory().numTensors; expect(numTensors1).toEqual(numTensors0); expect(Object.keys(history.history)).toEqual(['loss']); // Only the loss value from the first epoch should be logged. // The 2nd and 3rd epochs are cut short because of dataset iterator // exhaustion. expect(history.history.loss.length).toEqual(1); expect(warningMessages.length).toEqual(2); expect(warningMessages[0]) .toMatch(/You provided `batchesPerEpoch` as .* 9 batches/); expect(warningMessages[1]) .toMatch(/You provided `batchesPerEpoch` as .* 9 batches/); }); it('Calling fitDataset() without calling compile() errors', async () => { const model = createDenseModel(); const batchSize = 8; const numBatches = 3; const epochs = 2; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches, }); let errorCaught: Error; try { await model.fitDataset(dataset, {epochs}); } catch (err) { errorCaught = err; } expect(errorCaught.message) .toEqual('The model needs to be compiled before being used.'); }); it('Wrong validationBatches leads to Error', async () => { const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const batchSize = 8; const epochs = 2; const batchesPerEpoch = 3; // Training dataset. 
const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch * epochs }); // Validation dataset. const valDataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch * epochs }); // Do a burn-in call to account for initialization of cached // tensors (for the memory-leak check below). let errorCaught: Error; try { await model.fitDataset(dataset, { batchesPerEpoch, epochs, validationData: valDataset, validationBatches: 0 }); } catch (err) { errorCaught = err; } expect(errorCaught.message) .toMatch(/fitDataset.*dataset-based validation.*not to be provided.*0/); }); it('Calling fitDataset with validationSplit leads to Error', async () => { const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const batchSize = 8; const epochs = 2; const batchesPerEpoch = 3; // Training dataset. const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch * epochs }); let errorCaught: Error; try { await model.fitDataset( dataset, // tslint:disable-next-line:no-any {epochs: 1, batchesPerEpoch: 2, validationSplit: 0.25} as any); } catch (err) { errorCaught = err; } expect(errorCaught.message) .toMatch(/.*validationSplit.*not supported.*validationData/); }); class StopAfterNBatches extends tfl.Callback { private readonly batchesToTrain: number; constructor(epochsToTrain: number) { super(); this.batchesToTrain = epochsToTrain; } async onBatchEnd(batch: number, logs?: Logs) { if (batch === this.batchesToTrain - 1) { this.model.stopTraining = true; } } } it('Stop training resets at start of LayersModel.fitDataset()', async () => { const model = createDenseModel(); model.compile({loss: 'meanSquaredError', optimizer: 'sgd'}); const batchSize = 8; const epochs = 2; const batchesPerEpoch = 1; const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])]; const yTensorsFunc = () => 
[tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1])]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: batchesPerEpoch * epochs, xTensorsFunc, yTensorsFunc }); // Order 2 epochs of training, but the training should stop after only one // epochs due to the callback that orders the training to stop after one // batches. let history = await model.fitDataset( dataset, {batchesPerEpoch, epochs, callbacks: [new StopAfterNBatches(1)]}); expect(history.history.loss.length).toEqual(1); // Running fitDataset again should now run to completion history = await model.fitDataset(dataset, {batchesPerEpoch, epochs}); expect(history.history.loss.length).toEqual(2); }); it('onYield with yieldEvery: auto', async () => { const wait = DEFAULT_YIELD_EVERY_MS; const timeBetweenCalls = [ 0, 1, 1, wait + 1, // Should call. wait + 1, // Should call. 1, 1, ]; let counter = 0; let prevTime = 0; const nowFunc = jasmine.createSpy('now').and.callFake(() => { prevTime += timeBetweenCalls[counter++]; return prevTime; }); let nextFrameCallCount = 0; const nextFrameFunc = jasmine.createSpy('nextFrame').and.callFake(async () => { nextFrameCallCount++; }); const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const epochs = 2; const batchSize = 8; const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), ]; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), ]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: 3, xTensorsFunc, yTensorsFunc, }); const onYieldEpochIds: number[] = []; const onYieldBatchesIds: number[] = []; const history = await model.fitDataset(dataset, { epochs, callbacks: { nowFunc, nextFrameFunc, onYield: async (epoch, batch, _logs) => { onYieldBatchesIds.push(batch); onYieldEpochIds.push(epoch); } } }); 
expect(history.history.loss.length).toEqual(epochs); // There are 5 batches in total (1 batch per epoch). We expect next frame // to be called twice, after epoch 1 and after epoch 2. expect(nextFrameCallCount).toBe(2); expect(onYieldEpochIds).toEqual([0, 1]); expect(onYieldBatchesIds).toEqual([2, 0]); }); it('fails when onYield is provided, but yieldEvery is never', async done => { const model = createDenseModel(); model.compile( {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']}); const epochs = 2; const batchSize = 8; const xTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), ]; const yTensorsFunc = () => [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), ]; const dataset = new FakeNumericDataset({ xShape: [1], yShape: [1], batchSize, numBatches: 3, xTensorsFunc, yTensorsFunc, }); try { await model.fitDataset(dataset, { epochs, yieldEvery: 'never', callbacks: {onYield: async (_epoch, _batch, _logs) => {}}, }); done.fail('Model.fit should fail'); } catch { done(); } }); }); // TODO(cais): The corresponding test for predict() and evaluate(). 
// Test suite for LayersModel.evaluateDataset(): loss/metric values, the
// 0-arg/1-arg/config-object call forms, iterator exhaustion, and tensor
// memory-leak checks via tfc.memory().numTensors.
describeMathCPUAndGPU('LayersModel.evaluateDataset', () => {
  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution()
  //
  // batch_size = 8
  // num_batches = 3
  //
  // xs = np.ones([batch_size * num_batches, 1])
  // ys = np.ones([batch_size * num_batches, 1])
  // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size)
  //
  // model = tf.keras.Sequential()
  // model.add(tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros'))
  // model.compile(loss='mean_squared_error',
  //               optimizer=tf.train.GradientDescentOptimizer(0.01))
  //
  // out = model.evaluate(dataset, steps=3, verbose=0)
  // print(out)
  // ```
  it('1 input, 1 output, no metric, with batches specified', async () => {
    const model = createDenseModel();
    model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});
    const batchSize = 8;
    const batches = 3;
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batches,
      xTensorsFunc,
      yTensorsFunc
    });
    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    tfc.dispose(
        await model.evaluateDataset(dataset, {batches}) as tfc.Scalar[]);
    const numTensors0 = tfc.memory().numTensors;
    // Single loss, no metrics: result is a single Scalar.
    const evalOut =
        await model.evaluateDataset(dataset, {batches}) as tfc.Scalar;
    const expectedLoss = tfc.scalar(1.0);
    expectTensorsClose(evalOut, expectedLoss);
    tfc.dispose(evalOut);
    tfc.dispose(expectedLoss);
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  it('1 input, 1 output, no metric, no batches specified', async () => {
    const model = createDenseModel();
    model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});
    const batchSize = 8;
    const batches = 3;
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batches,
      xTensorsFunc,
      yTensorsFunc
    });
    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    tfc.dispose(await model.evaluateDataset(dataset, {}) as tfc.Scalar[]);
    const numTensors0 = tfc.memory().numTensors;
    // Empty config object: the dataset is consumed until exhaustion.
    const evalOut = await model.evaluateDataset(dataset, {}) as tfc.Scalar;
    const expectedLoss = tfc.scalar(1.0);
    expectTensorsClose(evalOut, expectedLoss);
    tfc.dispose(evalOut);
    tfc.dispose(expectedLoss);
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution()
  //
  // batch_size = 8
  // num_batches = 3
  //
  // xs = np.ones([batch_size * num_batches, 1])
  // ys = np.ones([batch_size * num_batches, 1])
  // dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).batch(batch_size)
  //
  // model = tf.keras.Sequential()
  // model.add(tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros'))
  // model.compile(loss='mean_squared_error',
  //               optimizer=tf.train.GradientDescentOptimizer(0.01),
  //               metrics=['accuracy'])
  //
  // out = model.evaluate(dataset, steps=3, verbose=0)
  // print(out)
  // ```
  it('1 input, 1 output, 1 metric, with batches specified', async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['acc']});
    const batchSize = 8;
    const batches = 3;
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batches,
      xTensorsFunc,
      yTensorsFunc
    });
    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    tfc.dispose(
        await model.evaluateDataset(dataset, {batches}) as tfc.Scalar[]);
    const numTensors0 = tfc.memory().numTensors;
    // With a metric configured, the result is [loss, metric] Scalars.
    const evalOut =
        await model.evaluateDataset(dataset, {batches}) as tfc.Scalar[];
    expect(evalOut.length).toEqual(2);
    const expectedLoss = tfc.scalar(1.0);
    const expectedAcc = tfc.scalar(0.0);
    expectTensorsClose(evalOut[0], expectedLoss);
    expectTensorsClose(evalOut[1], expectedAcc);
    tfc.dispose(evalOut);
    tfc.dispose([expectedLoss, expectedAcc]);
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  it('1 input, 1 output, 1 metric, no batches specified', async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['acc']});
    const batchSize = 8;
    const batches = 3;
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batches,
      xTensorsFunc,
      yTensorsFunc
    });
    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    tfc.dispose(await model.evaluateDataset(dataset, {}) as tfc.Scalar[]);
    const numTensors0 = tfc.memory().numTensors;
    const evalOut = await model.evaluateDataset(dataset, {}) as tfc.Scalar[];
    expect(evalOut.length).toEqual(2);
    const expectedLoss = tfc.scalar(1.0);
    const expectedAcc = tfc.scalar(0.0);
    expectTensorsClose(evalOut[0], expectedLoss);
    expectTensorsClose(evalOut[1], expectedAcc);
    tfc.dispose(evalOut);
    tfc.dispose([expectedLoss, expectedAcc]);
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  it('1 input, 1 output, 1 metric, no batches, only 1 arg', async () => {
    const model = createDenseModel();
    model.compile(
        {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['acc']});
    const batchSize = 8;
    const batches = 3;
    const xTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const yTensorsFunc = () =>
        [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
          batchSize, 1
        ])];
    const dataset = new FakeNumericDataset({
      xShape: [1],
      yShape: [1],
      batchSize,
      numBatches: batches,
      xTensorsFunc,
      yTensorsFunc
    });
    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below). Use 1-arg call.
    tfc.dispose(await model.evaluateDataset(dataset) as tfc.Scalar[]);
    const numTensors0 = tfc.memory().numTensors;
    // Use 1-arg call, omitting the config object.
    const evalOut = await model.evaluateDataset(dataset) as tfc.Scalar[];
    expect(evalOut.length).toEqual(2);
    const expectedLoss = tfc.scalar(1.0);
    const expectedAcc = tfc.scalar(0.0);
    expectTensorsClose(evalOut[0], expectedLoss);
    expectTensorsClose(evalOut[1], expectedAcc);
    tfc.dispose(evalOut);
    tfc.dispose([expectedLoss, expectedAcc]);
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
  });

  // Requesting more batches than the dataset holds must warn, not throw.
  it('1 input, 1 output, iterator exhaustion with batches', async () => {
    const model = createDenseModel();
    model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});
    const batchSize = 8;
    const batches = 3;
    const dataset = new FakeNumericDataset(
        {xShape: [1], yShape: [1], batchSize, numBatches: batches});
    // Do a burn-in call to account for initialization of cached tensors (for
    // the memory-leak check below).
    tfc.dispose(
        await model.evaluateDataset(dataset, {batches}) as tfc.Scalar[]);
    const warningMessages: string[] = [];
    spyOn(console, 'warn')
        .and.callFake((msg: string) => warningMessages.push(msg));
    const numTensors0 = tfc.memory().numTensors;
    tfc.dispose(await model.evaluateDataset(dataset, {batches: batches + 2}));
    const numTensors1 = tfc.memory().numTensors;
    expect(numTensors1).toEqual(numTensors0);
    expect(warningMessages.length).toEqual(1);
    expect(warningMessages[0])
        .toMatch(
            /dataset iterator ran out of data during evaluate.* 5 batches/);
  });

  // Reference Python tf.keras code:
  //
  // ```py
  // import numpy as np
  // import tensorflow as tf
  //
  // tf.enable_eager_execution()
  //
  // batch_size = 8
  // num_batches = 3
  //
  // xs1 = np.ones([batch_size * num_batches, 1])
  // xs2 = np.ones([batch_size * num_batches, 1])
  // ys = np.ones([batch_size * num_batches, 1])
  // dataset = tf.data.Dataset.from_tensor_slices(
  //     ({'input1': xs1, 'input2': xs2}, ys)).batch(batch_size)
  //
  // input1 = tf.keras.Input(shape=[1], name='input1')
  // input2 = tf.keras.Input(shape=[1], name='input2')
  // concat = tf.keras.layers.concatenate([input1, input2])
  // output = tf.keras.layers.Dense(
  //     1,
  //     input_shape=[1],
  //     kernel_initializer='zeros').apply(concat)
  // model = tf.keras.Model(inputs=[input1, input2], outputs=output)
  // model.compile(loss='mean_squared_error',
  //               optimizer=tf.train.GradientDescentOptimizer(0.01),
  //               metrics=['accuracy'])
  //
  // out = model.evaluate(dataset, steps=3, verbose=0)
  // print(out)
  // ```
  it('2 inputs, 1 output, 1 metric, no validation, with batches specified',
     async () => {
       // Create a functional model with 2 inputs.
       const input1 = tfl.layers.input({shape: [1]});
       const input2 = tfl.layers.input({shape: [1]});
       const concat = tfl.layers.concatenate().apply([input1, input2]);
       const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                     .apply(concat) as tfl.SymbolicTensor;
       const model = tfl.model({inputs: [input1, input2], outputs: y});
       model.compile(
           {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
       const batchSize = 8;
       const batches = 3;
       // Multi-input data is keyed by the symbolic input tensor names.
       const xTensorsFunc = () => {
         const output: {[name: string]: tfc.Tensor[]} = {};
         output[input1.name] = [
           tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
           tfc.ones([batchSize, 1])
         ];
         output[input2.name] = [
           tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
           tfc.ones([batchSize, 1])
         ];
         return output;
       };
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batches,
         xTensorsFunc,
         yTensorsFunc
       });
       // Do a burn-in call to account for initialization of cached tensors
       // (for the memory-leak check below).
       await model.evaluateDataset(dataset, {batches});
       model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
       const numTensors0 = tfc.memory().numTensors;
       const evalOut =
           await model.evaluateDataset(dataset, {batches}) as tfc.Scalar[];
       const expectedLoss = tfc.scalar(1.0);
       const expectedAcc = tfc.scalar(0.0);
       expectTensorsClose(evalOut[0], expectedLoss);
       expectTensorsClose(evalOut[1], expectedAcc);
       tfc.dispose(evalOut);
       tfc.dispose([expectedLoss, expectedAcc]);
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
     });

  it('2 inputs, 1 output, 1 metric, no validation, no batches specified',
     async () => {
       // Create a functional model with 2 inputs.
       const input1 = tfl.layers.input({shape: [1]});
       const input2 = tfl.layers.input({shape: [1]});
       const concat = tfl.layers.concatenate().apply([input1, input2]);
       const y = tfl.layers.dense({units: 1, kernelInitializer: 'zeros'})
                     .apply(concat) as tfl.SymbolicTensor;
       const model = tfl.model({inputs: [input1, input2], outputs: y});
       model.compile(
           {loss: 'meanSquaredError', optimizer: 'sgd', metrics: ['accuracy']});
       const batchSize = 8;
       const batches = 3;
       const xTensorsFunc = () => {
         const output: {[name: string]: tfc.Tensor[]} = {};
         output[input1.name] = [
           tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
           tfc.ones([batchSize, 1])
         ];
         output[input2.name] = [
           tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]),
           tfc.ones([batchSize, 1])
         ];
         return output;
       };
       const yTensorsFunc = () =>
           [tfc.ones([batchSize, 1]), tfc.ones([batchSize, 1]), tfc.ones([
             batchSize, 1
           ])];
       const dataset = new FakeNumericDataset({
         xShape: [1],
         yShape: [1],
         batchSize,
         numBatches: batches,
         xTensorsFunc,
         yTensorsFunc
       });
       // Do a burn-in call to account for initialization of cached tensors
       // (for the memory-leak check below).
       await model.evaluateDataset(dataset, {});
       model.setWeights([tfc.zeros([2, 1]), tfc.zeros([1])]);
       const numTensors0 = tfc.memory().numTensors;
       const evalOut = await model.evaluateDataset(dataset, {}) as tfc.Scalar[];
       const expectedLoss = tfc.scalar(1.0);
       const expectedAcc = tfc.scalar(0.0);
       expectTensorsClose(evalOut[0], expectedLoss);
       expectTensorsClose(evalOut[1], expectedAcc);
       tfc.dispose(evalOut);
       tfc.dispose([expectedLoss, expectedAcc]);
       const numTensors1 = tfc.memory().numTensors;
       expect(numTensors1).toEqual(numTensors0);
     });
});
// ==== `the_stack` (dataset concatenation marker): the content below is an unrelated source file ====
// NOTE(review): this is the start of a separate module (Bitauth/Libauth
// compiler type declarations), unrelated to the test code preceding it.
import { Ripemd160, Secp256k1, Sha256, Sha512 } from '../crypto/crypto';
import { TransactionContextCommon } from '../transaction/transaction-types';
import { AuthenticationVirtualMachine } from '../vm/virtual-machine';
import { AuthenticationProgramTransactionContextCommon } from '../vm/vm-types';
import {
  CompilationResult,
  CompilationResultError,
} from './language/language-types';
import {
  AuthenticationTemplateScenario,
  AuthenticationTemplateVariable,
} from './template-types';

export interface CompilerOperationDebug {
  /**
   * An additional, complex property which may be returned by custom compiler
   * operations. For use in extending the compiler to support additional return
   * information like `CompilerOperationSuccessSignature`.
   */
  debug?: unknown;
}

/**
 * A non-recoverable error in a compiler operation. This is any error which
 * cannot be resolved by simply providing a missing variable.
 */
export interface CompilerOperationErrorFatal extends CompilerOperationDebug {
  status: 'error';
  error: string;
}

/**
 * A recoverable error in a compiler operation. This occurs when a required
 * variable was not provided.
 */
export interface CompilerOperationErrorRecoverable
  extends CompilerOperationErrorFatal {
  /**
   * The full identifier (including any compilation operations) of the variable
   * missing from compilation, e.g. `my_key.signature.all_outputs` or
   * `my_key.public_key`.
   */
  recoverable: true;
}

/**
 * An unsuccessful compiler operation result.
 */
export type CompilerOperationError =
  | CompilerOperationErrorFatal
  | CompilerOperationErrorRecoverable;

/**
 * A successful compiler operation result.
 */
export type CompilerOperationSuccess =
  | CompilerOperationSuccessGeneric
  | CompilerOperationSuccessSignatureType;

/**
 * A successful compiler operation result carrying only the compiled bytecode.
 */
export interface CompilerOperationSuccessGeneric
  extends CompilerOperationDebug {
  status: 'success';
  bytecode: Uint8Array;
}

/**
 * A successful signature-generation compiler operation. This provides slightly
 * more debugging information than `CompilerOperationSuccessGeneric`. The
 * signing serialization or data message which was hashed to produce the
 * to-be-signed message is also provided in the result.
 */
export type CompilerOperationSuccessSignatureType =
  | CompilerOperationSuccessSignature
  | CompilerOperationSuccessDataSignature;

/**
 * The result of a successful `signature` compiler operation.
 */
export interface CompilerOperationSuccessSignature
  extends CompilerOperationSuccessGeneric {
  signature: {
    /**
     * The transaction signing serialization signed by a signature. This signing
     * serialization is hashed twice with `sha256`, and the digest is signed.
     */
    serialization: Uint8Array;
  };
}

/**
 * The result of a successful `data_signature` compiler operation.
 */
export interface CompilerOperationSuccessDataSignature
  extends CompilerOperationSuccessGeneric {
  signature: {
    /**
     * The raw message signed by a data signature. This message is hashed once
     * with `sha256`, and the digest is signed.
     */
    message: Uint8Array;
  };
}

/**
 * An unsuccessful compiler operation result which should be skipped by the
 * compiler. See `attemptCompilerOperations` for details.
 */
export interface CompilerOperationSkip {
  status: 'skip';
}

/**
 * The union of possible compiler operation results; `CompilerOperationSkip`
 * is a member only when `CanBeSkipped` is `true`.
 */
export type CompilerOperationResult<
  CanBeSkipped extends boolean = false
> = CanBeSkipped extends true
  ? CompilerOperationError | CompilerOperationSuccess | CompilerOperationSkip
  : CompilerOperationError | CompilerOperationSuccess;

/**
 * A compiler operation method which accepts the identifier being evaluated, the
 * compilation data, and the compilation environment, and returns a
 * `CompilerOperationResult`.
 *
 * @typeParam TransactionContext - the type of the `TransactionContext` in
 * `CompilationData<TransactionContext>` expected by this operation
 * @typeParam CanBeSkipped - if true, this operation may return
 * `CompilerOperationSkip` to indicate that it cannot be applied and should be
 * skipped
 * @typeParam Data - the type of the `CompilationData` expected by this
 * operation
 * @typeParam Environment - the type of the `CompilationEnvironment` expected by
 * this operation
 * @param identifier - The full identifier used to describe this operation, e.g.
 * `owner.signature.all_outputs`.
 * @param data - The `CompilationData` provided to the compiler
 * @param environment - The `CompilationEnvironment` provided to the compiler
 */
export type CompilerOperation<
  TransactionContext = unknown,
  CanBeSkipped extends boolean = false,
  Data extends CompilationData<TransactionContext> = CompilationData<
    TransactionContext
  >,
  Environment extends AnyCompilationEnvironment<
    TransactionContext
  > = CompilationEnvironment<TransactionContext>
> = (
  identifier: string,
  data: Data,
  environment: Environment
) => CompilerOperationResult<CanBeSkipped>;

/**
 * The compiler operations available on `Key`/`HdKey` variables by default.
 */
export type CompilerOperationsKeysCommon = 'public_key' | 'signature';

/**
 * Valid identifiers for full transaction signing serialization algorithms. Each
 * full serialization is double-sha256 hashed to produce the digest which is
 * signed.
 */
export type CompilerOperationsSigningSerializationFull =
  | 'full_all_outputs'
  | 'full_all_outputs_single_input'
  | 'full_corresponding_output'
  | 'full_corresponding_output_single_input'
  | 'full_no_outputs'
  | 'full_no_outputs_single_input';

/**
 * Valid identifiers for components of transaction signing serializations.
 * Components are combined in various orders to produce each of the valid
 * "full" signing serializations.
 */
export type CompilerOperationsSigningSerializationComponent =
  | 'version'
  | 'transaction_outpoints'
  | 'transaction_outpoints_hash'
  | 'transaction_sequence_numbers'
  | 'transaction_sequence_numbers_hash'
  | 'outpoint_transaction_hash'
  | 'outpoint_index'
  | 'covered_bytecode_length'
  | 'covered_bytecode'
  | 'output_value'
  | 'sequence_number'
  | 'corresponding_output'
  | 'corresponding_output_hash'
  | 'transaction_outputs'
  | 'transaction_outputs_hash'
  | 'locktime';

/**
 * Valid identifiers describing the various full and partial signing
 * serializations available to the compiler.
 */
export type CompilerOperationsSigningSerializationCommon =
  | CompilerOperationsSigningSerializationComponent
  | CompilerOperationsSigningSerializationFull;

/**
 * The full context required to compile a given Bitauth Template script –
 * everything required for the compiler to understand the CompilationData and
 * generate the compiled bytecode (targeting a specific
 * `AuthenticationVirtualMachine`).
 *
 * @remarks
 * A `CompilationEnvironment` must include a subset of the script's
 * `AuthenticationTemplate` – all the variables and scripts referenced
 * (including children of children) by the script in question.
 *
 * The context must also include an object mapping of opcode identifiers to the
 * bytecode they generate.
 *
 * If keys are used, an implementation of `sha256` and `secp256k1` is
 * required. If the script requires evaluations during compilation, the
 * evaluating `AuthenticationVirtualMachine` must also be included.
 *
 * @typeParam TransactionContext - additional data available to compiler
 * operations, e.g. transaction signing serialization components
 * @typeParam CompilerKeyOperations - a list of valid compiler operations for
 * `Key` and `HdKey` variables, e.g. `'public_key' | 'signature'`, or `false` if
 * only a single compiler operation is used for all instances
 * @typeParam CompilerSigningSerializationOperations - a list of valid compiler
 * operations for `Key` and `HdKey` variables, e.g.
 * `"version" | "transaction_outpoints" | ...`, or `false` if only a single
 * compiler operation is used for all `signing_serialization` instances
 * @typeParam CompilerAddressDataOperations - a list of valid compiler
 * operations for `AddressData` variables or `false` if only a single compiler
 * operation is used for all `AddressData` instances (default: `false`)
 * @typeParam CompilerWalletDataOperations - a list of valid compiler
 * operations for `WalletData` variables or `false` if only a single compiler
 * operation is used for all `WalletData` instances (default: `false`)
 * @typeParam CompilerCurrentBlockHeightOperations - a list of valid compiler
 * operations for `current_block_height` variables or `false` if only a single
 * compiler operation is used for all instances (default: `false`)
 * @typeParam CompilerCurrentBlockTimeOperations - a list of valid compiler
 * operations for `current_block_time` variables or `false` if only a single
 * compiler operation is used for all instances (default: `false`)
 */
export interface CompilationEnvironment<
  TransactionContext = unknown,
  CompilerKeyOperations extends string | false = CompilerOperationsKeysCommon,
  CompilerSigningSerializationOperations extends
    | string
    | false = CompilerOperationsSigningSerializationCommon,
  CompilerAddressDataOperations extends string | false = false,
  CompilerWalletDataOperations extends string | false = false,
  CompilerCurrentBlockHeightOperations extends string | false = false,
  CompilerCurrentBlockTimeOperations extends string | false = false
> {
  /**
   * A method which accepts the compiled bytecode contents of a BTL evaluation
   * and produces the equivalent `AuthenticationProgram` to be evaluated by the
   * VM. This method is used internally to compute BTL evaluations. See
   * `createAuthenticationProgramEvaluationCommon` for details.
   */
  createAuthenticationProgram?: (
    evaluationBytecode: Uint8Array
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
  ) => any;
  /**
   * An object mapping template variable identifiers to the entity identifiers
   * responsible for them. This is required for `HdKey` support, as each entity
   * uses a single HD private key (provided in `hdKeys.hdPrivateKeys`) or HD
   * public key (provided in `hdKeys.hdPublicKeys`) per compilation, and each
   * `HdKey` variable is derived from this key.
   *
   * To avoid compilation errors, this object must contain all `HdKey` variables
   * referenced by the script being compiled (including in child scripts). To
   * enable support for error handling like `extractMissingVariables`, it's
   * recommended that all variables be provided here.
   */
  // eslint-disable-next-line functional/no-mixed-type
  entityOwnership?: {
    [variableId: string]: string;
  };
  /**
   * An object mapping the script identifiers of locking scripts to their
   * locking script type, either `standard` or `p2sh`.
   *
   * This is used to transform compilation results into the proper structure for
   * P2SH locking and unlocking scripts.
   *
   * When compiling locking scripts of type `p2sh`, the result will be placed in
   * a P2SH "redeemScript" format:
   * `OP_HASH160 <$(<result> OP_HASH160)> OP_EQUAL`
   *
   * When compiling unlocking scripts which unlock locking scripts of type
   * `p2sh`, the result will be transformed into the P2SH unlocking format:
   * `result <locking_script>` (where `locking_script` is the compiled bytecode
   * of the locking script, without the "redeemScript" transformation.)
   *
   * By default, all scripts are assumed to have the type `standard`.
   */
  lockingScriptTypes?: {
    [lockingScriptId: string]: 'p2sh' | 'standard';
  };
  /**
   * An object mapping opcode identifiers to the bytecode they generate.
   */
  opcodes?: {
    [opcodeIdentifier: string]: Uint8Array;
  };
  /**
   * An object specifying the operations made available by this compilation
   * environment for each variable type. For example, keys typically support
   * public key derivation (`.public_key`) and several signature types.
   *
   * Compiler operations can be specified as a single operation for all
   * instances of a variable type (as is the default for `AddressData` or
   * `WalletData`), or they can be specified as an object, where each key is a
   * valid operation name (as is the default for `Key` and `HdKey`).
   */
  operations?: {
    hdKey?: CompilerKeyOperations extends string
      ? {
          [operationId in CompilerKeyOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
    key?: CompilerKeyOperations extends string
      ? {
          [operationId in CompilerKeyOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
    addressData?: CompilerAddressDataOperations extends string
      ? {
          [operationId in CompilerAddressDataOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
    walletData?: CompilerWalletDataOperations extends string
      ? {
          [operationId in CompilerWalletDataOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
    currentBlockHeight?: CompilerCurrentBlockHeightOperations extends string
      ? {
          [operationId in CompilerCurrentBlockHeightOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
    currentBlockTime?: CompilerCurrentBlockTimeOperations extends string
      ? {
          [operationId in CompilerCurrentBlockTimeOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
    signingSerialization?: CompilerSigningSerializationOperations extends string
      ? {
          [operationId in CompilerSigningSerializationOperations]?: CompilerOperation<
            TransactionContext
          >;
        }
      : CompilerOperation<TransactionContext>;
  };
  /**
   * An implementation of ripemd160 is required for any scripts which include
   * `HdKey`s. This can be instantiated with `instantiateRipemd160`.
   */
  ripemd160?: { hash: Ripemd160['hash'] };
  /**
   * An object mapping scenario identifiers to the
   * `AuthenticationTemplateScenario`s they represent.
   */
  scenarios?: {
    [scriptId: string]: AuthenticationTemplateScenario;
  };
  /**
   * An object mapping script identifiers to the text of script in Bitauth
   * Templating Language.
   *
   * To avoid compilation errors, this object must contain all scripts
   * referenced by the script being compiled (including children of children).
   */
  scripts: {
    [scriptId: string]: string;
  };
  /**
   * An implementation of secp256k1 is required for any scripts which include
   * signatures. This can be instantiated with `instantiateSecp256k1`.
   */
  secp256k1?: {
    addTweakPrivateKey: (
      privateKey: Uint8Array,
      tweakValue: Uint8Array
    ) => Uint8Array;
    addTweakPublicKeyCompressed: (
      publicKey: Uint8Array,
      tweakValue: Uint8Array
    ) => Uint8Array;
    derivePublicKeyCompressed: Secp256k1['derivePublicKeyCompressed'];
    signMessageHashSchnorr: Secp256k1['signMessageHashSchnorr'];
    signMessageHashDER: Secp256k1['signMessageHashDER'];
  };
  /**
   * An implementation of sha256 is required for any scripts which include
   * signatures. This can be instantiated with `instantiateSha256`.
   */
  sha256?: { hash: Sha256['hash'] };
  /**
   * An implementation of sha512 is required for any scripts which include
   * `HdKey`s. This can be instantiated with `instantiateSha512`.
   */
  sha512?: { hash: Sha512['hash'] };
  /**
   * Only for use when recursively calling `compileScript` (e.g. in compiler
   * operations).
   *
   * The "breadcrumb" path of script IDs currently being compiled, including the
   * current script. (E.g. `["grandparentId", "parentId", "scriptId"]`)
   *
   * BTL identifier resolution must be acyclic. To prevent an infinite loop,
   * `IdentifierResolutionFunction`s must abort resolution if they encounter
   * their own `id` while resolving another identifier. Likewise, child scripts
   * being resolved by a parent script may not reference any script which is
   * already in the process of being resolved.
   */
  sourceScriptIds?: string[];
  /**
   * An object mapping the identifiers of unlocking scripts to the identifiers
   * of the locking scripts they unlock. This is used to identify the
   * `coveredBytecode` used in signing serializations, and it is required for
   * all signature operations and many signing serialization operations.
   */
  unlockingScripts?: {
    [unlockingScriptId: string]: string;
  };
  /**
   * An object mapping the identifiers of unlocking scripts to their
   * `timeLockType`.
   *
   * The `timestamp` type indicates that the transaction's locktime is provided
   * as a UNIX timestamp (the `locktime` value is greater than or equal to
   * `500000000`).
   *
   * The `height` type indicates that the transaction's locktime is provided as
   * a block height (the `locktime` value is less than `500000000`).
   *
   * See `AuthenticationTemplateScript.timeLockType` for details.
   */
  unlockingScriptTimeLockTypes?: {
    [unlockingScriptId: string]: 'timestamp' | 'height';
  };
  /**
   * An object mapping template variable identifiers to the
   * `AuthenticationTemplateVariable` describing them.
   *
   * To avoid compilation errors, this object must contain all variables
   * referenced by the script being compiled (including in child scripts).
   */
  variables?: {
    [variableId: string]: AuthenticationTemplateVariable;
  };
  /**
   * The AuthenticationVirtualMachine on which BTL `evaluation` results will be
   * computed.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  vm?: AuthenticationVirtualMachine<any, any>;
}

/**
 * Data required at compilation time to generate the bytecode for a particular
 * Bitauth Template script.
 */
export interface CompilationData<
  TransactionContext = TransactionContextCommon
> {
  /**
   * A map of full identifiers to pre-computed bytecode for this compilation.
   *
   * This is always used to provide bytecode for `AddressData` and `WalletData`,
   * and it can also be used to provide public keys and signatures which have
   * been pre-computed by other entities (e.g. when computing these would
   * require access to private keys held by another entities).
   *
   * The provided `fullIdentifier` should match the complete identifier for
   * each item, e.g. `some_wallet_data`, `variable_id.public_key`, or
   * `variable_id.signature.all_outputs`.
   *
   * To provide `AddressData` or `WalletData` from advanced user interfaces,
   * consider parsing input with `compileBtl`.
   *
   * @remarks
   * It is security-critical that only identifiers provided by the entities
   * expected to provide them are included here. For example:
   *
   * 1. When generating a `lockingBytecode` for a 2-of-2 wallet, a
   * malicious entity could provide a pre-computed value for `us.public_key`
   * which is equal to `them.public_key` such that the resulting
   * `lockingBytecode` is entirely controlled by that entity.
   *
   * 2. When generating an `unlockingBytecode` which includes a data signature,
   * if a malicious entity can provide a pre-computed value for identifiers
   * present in the message, the malicious entity can trick the compiling entity
   * into signing an unintended message, e.g. creating a false attestation or
   * releasing funds from an unrelated wallet. (This can be partially mitigated
   * by avoiding key reuse.)
   *
   * To safely include identifiers from external entities, the compilation must
   * first be evaluated only with trusted information (variables owned by or
   * previously validated by the compiling entity). On unsuccessful
   * compilations, missing variables can be extracted with
   * `extractMissingVariables`, and each missing variable should be filled only
   * by bytecode values provided by entities from which they were expected.
   */
  bytecode?: {
    [fullIdentifier: string]: Uint8Array;
  };
  /**
   * The current block height at address creation time.
   */
  currentBlockHeight?: number;
  /**
   * The current MTP block time as a UNIX timestamp at address creation time.
   *
   * Note, this is never a current timestamp, but rather the median timestamp of
   * the last 11 blocks.
It is therefore approximately one hour in the past. * * Every block has a precise MTP block time, much like a block height. See * BIP113 for details. */ currentBlockTime?: number; /** * An object describing the settings used for `HdKey` variables in this * compilation. */ hdKeys?: { /** * The current address index to be used for this compilation. The * `addressIndex` gets added to each `HdKey`s `addressOffset` to calculate * the dynamic index (`i`) used in each `privateDerivationPath` or * `publicDerivationPath`. * * This is required for any compiler operation which requires derivation. * Typically, the value is incremented by one for each address in a wallet. */ addressIndex?: number; /** * A map of entity IDs to HD public keys. These HD public keys are used to * derive public keys for each `HdKey` variable assigned to that entity (as * specified in `CompilationEnvironment.entityOwnership`) according to its * `publicDerivationPath`. * * HD public keys may be encoded for either mainnet or testnet (the network * information is ignored). * * If both an HD private key (in `hdPrivateKeys`) and HD public key (in * `hdPublicKeys`) are provided for the same entity in the same compilation * (not recommended), the HD private key is used. */ hdPublicKeys?: { [entityId: string]: string; }; /** * A map of entity IDs to master HD private keys. These master HD private * keys are used to derive each `HdKey` variable assigned to that entity (as * specified in `CompilationEnvironment.entityOwnership`) according to its * `privateDerivationPath`. * * HD private keys may be encoded for either mainnet or testnet (the network * information is ignored). * * If both an HD private key (in `hdPrivateKeys`) and HD public key (in * `hdPublicKeys`) are provided for the same entity in the same compilation * (not recommended), only the HD private key is used. 
*/ hdPrivateKeys?: { [entityId: string]: string; }; }; /** * An object describing the settings used for `Key` variables in this * compilation. */ keys?: { /** * A map of `Key` variable IDs to their private keys for this compilation. */ privateKeys?: { [variableId: string]: Uint8Array; }; }; /** * The `TransactionContext` expected by this particular compiler for any * operations used in the compilation. */ transactionContext?: TransactionContext; } /** * Any compilation environment, where each data type may use either a single or * multiple operations. */ export type AnyCompilationEnvironment< TransactionContext > = CompilationEnvironment< TransactionContext, string | false, string | false, string | false, string | false, string | false, string | false >; /** * Any compilation environment where the type of the `operations` value is * irrelevant. */ export type AnyCompilationEnvironmentIgnoreOperations< TransactionContext = TransactionContextCommon > = Omit<AnyCompilationEnvironment<TransactionContext>, 'operations'>; export type BytecodeGenerationResult<ProgramState> = | { bytecode: Uint8Array; success: true; } | CompilationResultError<ProgramState>; /** * A fully-generated authentication template scenario. Useful for estimating * transactions and testing of authentication templates. See * `AuthenticationTemplateScenario` for details. */ export interface Scenario { data: CompilationData; program: AuthenticationProgramTransactionContextCommon; } /** * A `Compiler` is a wrapper around a specific `CompilationEnvironment` which * exposes a purely-functional interface and allows for stronger type checking. */ export interface Compiler< TransactionContext, CompilationEnvironment, ProgramState > { environment: CompilationEnvironment; /** * Generate the bytecode for the given script and compilation data. 
* * @param script - the identifer of the script to compile * @param data - the compilation data required to compile this script * @param debug - enable compilation debugging information (default: `false`) */ // eslint-disable-next-line functional/no-mixed-type generateBytecode: <Debug extends boolean>( scriptId: string, data: CompilationData<TransactionContext>, debug?: Debug ) => Debug extends true ? CompilationResult<ProgramState> : BytecodeGenerationResult<ProgramState>; /** * Generate the compilation data for a scenario specified in this compilation * environment. Returns either the full `CompilationData` for the selected * scenario or an error message (as a `string`). * * Note, generated compilation data always uses a `transactionContext` of type * `TransactionContextCommon`. * * @param scenario - the identifer of the scenario to generate */ generateScenario: ({ scenarioId, unlockingScriptId, }: { scenarioId?: string; unlockingScriptId?: string; }) => Scenario | string; }
the_stack