text
stringlengths
2.5k
6.39M
kind
stringclasses
3 values
import { join, sep } from 'path'; import { workspace, ExtensionContext, commands, window, Uri, RelativePattern } from 'vscode'; import { promises } from 'fs'; import { reduce } from 'rxjs/operators'; import { findFiles } from './lib/FileHelper'; import { getOrderedCartridges } from './extensionScriptServer'; import { parse } from './server/scriptServer/propertiesParser'; import { getLineOffsets, positionAt } from './server/getLineOffsets'; import { ICartridgeProperties, IProperty } from './server/scriptServer/serverUtils'; function notEmpty<TValue>(value: TValue | null | undefined): value is TValue { return value !== null && value !== undefined; } export function registerImportExportCSV(context: ExtensionContext) { context.subscriptions.push(commands.registerCommand('extension.prophet.command.export.properties.to.csv', async () => { const outputPath = await window.showSaveDialog({ filters: { 'CSV': ['csv'] }, saveLabel: 'Export properties to cvs file...' }); if (outputPath && workspace.workspaceFolders && workspace.workspaceFolders.length) { const preferences = (await Promise.all(workspace.workspaceFolders.filter(wrk => wrk.uri.scheme === 'file').map(wrk => findFiles({ base: wrk.uri.fsPath, pattern: '**/preferences.xml' }).pipe(reduce((acc, val) => { return acc.concat(val); }, [] as Uri[])).toPromise()))).filter(Boolean); const preferencesFlat = ([] as Uri[]).concat(...preferences); const sitesLocales = (await Promise.all(preferencesFlat.map(async (preferenceUri) => { const fileContent = (await workspace.fs.readFile(preferenceUri)).toString(); const matches = (/= ?"SiteLocales" ?>(.+?)<\/ ?preference>/g).exec(fileContent); if (matches && matches[1]) { return matches[1].split(':'); } }))); const sitesLocalesUniq = new Set<string>(); sitesLocales.forEach(locales => { if (locales) { locales.forEach(locale => { sitesLocalesUniq.add(locale); sitesLocalesUniq.add(locale.split('_')[0]); }); } }); const sitesLocalesUniqSorted = Array.from(sitesLocalesUniq).sort((a, b) => 
a.length - b.length); const cartridges = await getOrderedCartridges(workspace.workspaceFolders); if (cartridges && cartridges.length) { const orderedCartridgesWithProperties = (await Promise.all(cartridges.map(async (cartridge) => { if (cartridge.fsPath) { const files = await findFiles(new RelativePattern(cartridge.fsPath, 'cartridge/templates/resources/*.properties')) .pipe(reduce((acc, val) => { return acc.concat(val); }, [] as Uri[])).toPromise(); if (files.length) { return { name: cartridge.name, fsPath: Uri.file(cartridge.fsPath).toString(), files: files.map(file => ({ name: file.fsPath.split(sep).join('/').split('/cartridge/templates/resources/').pop()?.replace('.properties', ''), fsPath: Uri.file(file.fsPath).toString() })) }; } } }))).filter(Boolean); const orderedCartridgesWithPropertiesFiltered = orderedCartridgesWithProperties.filter(notEmpty); const uniqProperties = new Set<string>(); orderedCartridgesWithPropertiesFiltered.forEach(cartridge => { if (cartridge) { cartridge.files.forEach(file => { if (file.name) { const sections = file.name.split('_'); if (sections.length && sections[0]) { uniqProperties.add(sections[0]); } } }); } }); const cartridgesProperties = await Promise.all(orderedCartridgesWithPropertiesFiltered.map(async (cartridge) => { const cartridgeControllers: ICartridgeProperties = { name: cartridge.name, fsPath: cartridge.fsPath, properties: new Map() }; for (const file of cartridge.files) { if (file.name) { // ignore locale specific translations, yet try { const fileName = Uri.parse(file.fsPath).fsPath; const fileContent = await promises.readFile(fileName, 'utf8'); if (fileContent) { const records = parse(fileContent); const property: IProperty = { fsPath: file.fsPath, name: file.name, linesCount: getLineOffsets(fileContent).length, records: new Map() }; records.forEach(record => { property.records.set(record.recordName, { value: record.value, startPosition: positionAt(record.startPos, fileContent), endPosition: 
positionAt(record.endPos, fileContent) }); }); cartridgeControllers.properties.set(file.name, property); } } catch (e) { console.error('Error parse properties file: \n' + JSON.stringify(e, null, ' ')); } } } return cartridgeControllers; })); const csvToWrite = [ ['PropertyFile', 'Key', 'default', ...sitesLocalesUniqSorted] ]; function getAllKeys(file: string) { const keys = new Set<string>(); cartridgesProperties.forEach(cartridgesProperty => { cartridgesProperty.properties.forEach((v, key) => { if (['', ...sitesLocalesUniqSorted].some(k => v.name === (k ? file + '_' + k : file))) { v.records.forEach((_, key) => { keys.add(key); }); }; }); }); return Array.from(keys); } function getValueForKeyInLocale(file: string, key: string, locale: string) { const loc = locale ? file + '_' + locale : file; for (const cartridgesProperty of cartridgesProperties) { for (const [name, property] of cartridgesProperty.properties) { if (name === loc) { for (const [recordName, record] of property.records) { if (recordName === key) { return record.value; } } } } } return ''; } for (const uniqProperty of uniqProperties) { const keysOf = getAllKeys(uniqProperty); for (const keyOf of keysOf) { const recordToSave = [ uniqProperty, keyOf, getValueForKeyInLocale(uniqProperty, keyOf, ''), ...(sitesLocalesUniqSorted.map(locale => getValueForKeyInLocale(uniqProperty, keyOf, locale))) ]; csvToWrite.push(recordToSave); } } function needsQuote(str: string): boolean { return str.includes(',') || str.includes('\n') || str.includes('"'); } function quoteField(field: string): string { return `"${field.replace(/"/g, '""')}"`; } var enc = new TextEncoder(); await workspace.fs.writeFile(outputPath, enc.encode(csvToWrite.map(r => { return r.map(col => { return needsQuote(col) ? 
quoteField(col) : col; }).join(','); }).join('\n'))); window.showInformationMessage('Data successfully exported to csv'); } } })); context.subscriptions.push(commands.registerCommand('extension.prophet.command.import.csv.to.properties', async () => { const inputPath = await window.showOpenDialog({ canSelectFiles: true, canSelectFolders: false, canSelectMany: false, filters: { 'CSV': ['csv'] }, openLabel: 'Import cvs file...' }); // ref: http://stackoverflow.com/a/1293163/2343 // This will parse a delimited string into an array of // arrays. The default delimiter is the comma, but this // can be overriden in the second argument. function CSVToArray(strData, strDelimiter) { // Check to see if the delimiter is defined. If not, // then default to comma. strDelimiter = (strDelimiter || ","); // Create a regular expression to parse the CSV values. var objPattern = new RegExp( ( // Delimiters. "(\\" + strDelimiter + "|\\r?\\n|\\r|^)" + // Quoted fields. "(?:\"([^\"]*(?:\"\"[^\"]*)*)\"|" + // Standard fields. "([^\"\\" + strDelimiter + "\\r\\n]*))" ), "gi" ); // Create an array to hold our data. Give the array // a default empty first row. var arrData: string[][] = [[]]; // Create an array to hold our individual pattern // matching groups. var arrMatches: RegExpExecArray | null = null; // Keep looping over the regular expression matches // until we can no longer find a match. while (arrMatches = objPattern.exec(strData)) { // Get the delimiter that was found. var strMatchedDelimiter = arrMatches[1]; // Check to see if the given delimiter has a length // (is not the start of string) and if it matches // field delimiter. If id does not, then we know // that this delimiter is a row delimiter. if (strMatchedDelimiter.length && strMatchedDelimiter !== strDelimiter) { // Since we have reached a new row of data, // add an empty row to our data array. 
arrData.push([]); } var strMatchedValue: string | undefined; // Now that we have our delimiter out of the way, // let's check to see which kind of value we // captured (quoted or unquoted). if (arrMatches[2]) { // We found a quoted value. When we capture // this value, unescape any double quotes. strMatchedValue = arrMatches[2].replace( new RegExp("\"\"", "g"), "\"" ); } else { // We found a non-quoted value. strMatchedValue = arrMatches[3]; } // Now that we have our value string, let's add // it to the data array. arrData[arrData.length - 1].push(strMatchedValue); } // Return the parsed data. return (arrData); } if (inputPath && inputPath.length && workspace.workspaceFolders && workspace.workspaceFolders.length) { const cartridges = await getOrderedCartridges(workspace.workspaceFolders); if (cartridges && cartridges.length) { const selected = await window.showQuickPick(cartridges.map(cartridge => cartridge.name), { placeHolder: 'Select cartridge to unpack csv' }); if (selected) { const selectedCartridge = cartridges.find(cartridge => cartridge.name === selected); const selectedCartridgePath = selectedCartridge?.fsPath; if (!selectedCartridgePath) { window.showInformationMessage('Not found cartridge path '); return; } const fileContent = (await workspace.fs.readFile(inputPath[0])).toString(); const parsedContent = CSVToArray(fileContent, ','); const [csvTitle, ...csvBody] = parsedContent; const uniqProperties = new Set<string>(); csvBody.forEach(record => { uniqProperties.add(record[0]); }); const [, , , ...sitesLocalesUniq] = csvTitle; function getRows(uniqProperty: string, localeIdx: number) { return csvBody.filter(row => row[0] === uniqProperty).map(row => { return [row[1], row[localeIdx]]; }); } for (const uniqProperty of uniqProperties) { ['', ...sitesLocalesUniq].forEach(async (uniqLocale, idx) => { const filename = uniqLocale ? 
uniqProperty + '_' + uniqLocale : uniqProperty; const localeRows = getRows(uniqProperty, idx + 2); const localeRowsWithValue = localeRows.filter(row => row[1]); if (localeRowsWithValue && localeRowsWithValue.length) { const fullFilePath = join(selectedCartridgePath, 'cartridge', 'templates', 'resources', filename + '.properties'); const contentToWrite = localeRowsWithValue.map(r => r.join('=')).join('\n'); var enc = new TextEncoder(); await workspace.fs.createDirectory(Uri.parse(join(selectedCartridgePath, 'cartridge', 'templates', 'resources'))); await workspace.fs.writeFile(Uri.parse(fullFilePath), enc.encode(contentToWrite)); } }); } } else { window.showInformationMessage('No cartridge selected'); } } } })); }
the_stack
import { Lexer } from "@siteimprove/alfa-css"; import { Device } from "@siteimprove/alfa-device"; import { Declaration, Element, ImportRule, MediaRule, Rule, Sheet, StyleRule, } from "@siteimprove/alfa-dom"; import { Iterable } from "@siteimprove/alfa-iterable"; import { Serializable } from "@siteimprove/alfa-json"; import { Media } from "@siteimprove/alfa-media"; import { Option } from "@siteimprove/alfa-option"; import { Predicate } from "@siteimprove/alfa-predicate"; import { Refinement } from "@siteimprove/alfa-refinement"; import { Context, Selector } from "@siteimprove/alfa-selector"; import * as json from "@siteimprove/alfa-json"; import { UserAgent } from "./user-agent"; import { AncestorFilter } from "./ancestor-filter"; const { equals, property } = Predicate; const { and } = Refinement; const { isAttribute, isClass, isComplex, isCompound, isId, isType, isPseudoClass, isPseudoElement, } = Selector; const isDescendantSelector = and( isComplex, property( "combinator", equals(Selector.Combinator.Descendant, Selector.Combinator.DirectDescendant) ) ); /** * Cascading origins defined in ascending order; origins defined first have * lower precedence than origins defined later. * * {@link https://www.w3.org/TR/css-cascade/#cascading-origins} * * @internal */ export enum Origin { /** * {@link https://www.w3.org/TR/css-cascade/#cascade-origin-ua} */ UserAgent = 1, /** * {@link https://www.w3.org/TR/css-cascade/#cascade-origin-author} */ Author = 2, } /** * The selector map is a data structure used for providing indexed access to the * rules that are likely to match a given element. Rules are indexed according * to their key selector, which is the selector that a given element MUST match * in order for the rest of the selector to also match. A key selector can be * either an ID selector, a class selector, or a type selector. In a relative * selector, the key selector will be the right-most selector, e.g. given * `main .foo + div` the key selector would be `div`. 
In a compound selector, * the key selector will be left-most selector, e.g. given `div.foo` the key * selector would also be `div`. * * Internally, the selector map has three maps and a list in one of which it * will store a given selector. The three maps are used for selectors for which * a key selector exist; one for ID selectors, one for class selectors, and one * for type selectors. The list is used for any remaining selectors. When * looking up the rules that match an element, the ID, class names, and type of * the element are used for looking up potentially matching selectors in the * three maps. Selector matching is then performed against this list of * potentially matching selectors, plus the list of remaining selectors, in * order to determine the final set of matches. * * {@link http://doc.servo.org/style/selector_map/struct.SelectorMap.html} * * @internal */ export class SelectorMap implements Serializable { public static of( ids: SelectorMap.Bucket, classes: SelectorMap.Bucket, types: SelectorMap.Bucket, other: Array<SelectorMap.Node> ): SelectorMap { return new SelectorMap(ids, classes, types, other); } private readonly _ids: SelectorMap.Bucket; private readonly _classes: SelectorMap.Bucket; private readonly _types: SelectorMap.Bucket; private readonly _other: Array<SelectorMap.Node>; private constructor( ids: SelectorMap.Bucket, classes: SelectorMap.Bucket, types: SelectorMap.Bucket, other: Array<SelectorMap.Node> ) { this._ids = ids; this._classes = classes; this._types = types; this._other = other; } public get( element: Element, context: Context, filter: Option<AncestorFilter> ): Array<SelectorMap.Node> { const nodes: Array<SelectorMap.Node> = []; const collect = (candidates: Iterable<SelectorMap.Node>) => { for (const node of candidates) { if ( filter.none((filter) => Iterable.every( node.selector, and(isDescendantSelector, (selector) => canReject(selector.left, filter) ) ) ) && node.selector.matches(element, context) ) { nodes.push(node); } } }; 
for (const id of element.id) { collect(this._ids.get(id)); } collect(this._types.get(element.name)); for (const className of element.classes) { collect(this._classes.get(className)); } collect(this._other); return nodes; } public toJSON(): SelectorMap.JSON { return { ids: this._ids.toJSON(), classes: this._classes.toJSON(), types: this._types.toJSON(), other: this._other.map((node) => node.toJSON()), }; } } /** * @internal */ export namespace SelectorMap { export interface JSON { [key: string]: json.JSON; ids: Bucket.JSON; classes: Bucket.JSON; types: Bucket.JSON; other: Array<Node.JSON>; } export function from(sheets: Iterable<Sheet>, device: Device): SelectorMap { // Every rule encountered in style sheets is assigned an increasing number // that denotes declaration order. While rules are stored in buckets in the // order in which they were declared, information related to ordering will // otherwise no longer be available once rules from different buckets are // combined. let order = 0; const ids = Bucket.empty(); const classes = Bucket.empty(); const types = Bucket.empty(); const other: Array<Node> = []; const add = ( rule: Rule, selector: Selector, declarations: Iterable<Declaration>, origin: Origin, order: number ): void => { const node = Node.of(rule, selector, declarations, origin, order); const keySelector = getKeySelector(selector); if (keySelector === null) { other.push(node); } else if (isId(keySelector)) { ids.add(keySelector.name, node); } else if (isClass(keySelector)) { classes.add(keySelector.name, node); } else { types.add(keySelector.name, node); } }; const visit = (rule: Rule) => { if (StyleRule.isStyleRule(rule)) { // Style rules with empty style blocks aren't relevant and so can be // skipped entirely. if (rule.style.isEmpty()) { return; } for (const [, selector] of Selector.parse(Lexer.lex(rule.selector))) { const origin = rule.owner.includes(UserAgent) ? 
Origin.UserAgent : Origin.Author; order++; for (const part of selector) { add(rule, part, rule.style, origin, order); } } } // For media rules, we recurse into the child rules if and only if the // media condition matches the device. else if (MediaRule.isMediaRule(rule)) { const query = Media.parse(Lexer.lex(rule.condition)); if (query.none(([, query]) => query.matches(device))) { return; } for (const child of rule.children()) { visit(child); } } // For import rules, we recurse into the imported style sheet if and only // if the import condition matches the device. else if (ImportRule.isImportRule(rule)) { const query = Media.parse(Lexer.lex(rule.condition)); if (query.none(([, query]) => query.matches(device))) { return; } for (const child of rule.sheet.children()) { visit(child); } } // Otherwise, we recurse into whichever child rules are declared by the // current rule. else { for (const child of rule.children()) { visit(child); } } }; for (const sheet of sheets) { if (sheet.disabled) { continue; } if (sheet.condition.isSome()) { const query = Media.parse(Lexer.lex(sheet.condition.get())); if (query.every(([, query]) => !query.matches(device))) { continue; } } for (const rule of sheet.children()) { visit(rule); } } return SelectorMap.of(ids, classes, types, other); } export class Node implements Serializable { public static of( rule: Rule, selector: Selector, declarations: Iterable<Declaration>, origin: Origin, order: number ): Node { return new Node(rule, selector, declarations, origin, order); } private readonly _rule: Rule; private readonly _selector: Selector; private readonly _declarations: Iterable<Declaration>; private readonly _origin: Origin; private readonly _order: number; private readonly _specificity: number; private constructor( rule: Rule, selector: Selector, declarations: Iterable<Declaration>, origin: Origin, order: number ) { this._rule = rule; this._selector = selector; this._declarations = declarations; this._origin = origin; this._order = 
order; // For style rules that are presentational hints, the specificity will // always be 0 regardless of the selector. if (StyleRule.isStyleRule(rule) && rule.hint) { this._specificity = 0; } // Otherwise, determine the specificity of the selector. else { this._specificity = getSpecificity(selector); } } public get rule(): Rule { return this._rule; } public get selector(): Selector { return this._selector; } public get declarations(): Iterable<Declaration> { return this._declarations; } public get origin(): Origin { return this._origin; } public get order(): number { return this._order; } public get specificity(): number { return this._specificity; } public toJSON(): Node.JSON { return { rule: this._rule.toJSON(), selector: this._selector.toJSON(), declarations: [...this._declarations].map((declaration) => declaration.toJSON() ), origin: this._origin, order: this._order, specificity: this._specificity, }; } } export namespace Node { export interface JSON { [key: string]: json.JSON; rule: Rule.JSON; selector: Selector.JSON; declarations: Array<Declaration.JSON>; origin: Origin; order: number; specificity: number; } } export class Bucket implements Serializable { public static empty(): Bucket { return new Bucket(new Map()); } private readonly _nodes: Map<string, Array<SelectorMap.Node>>; private constructor(nodes: Map<string, Array<SelectorMap.Node>>) { this._nodes = nodes; } public add(key: string, node: SelectorMap.Node): void { const nodes = this._nodes.get(key); if (nodes === undefined) { this._nodes.set(key, [node]); } else { nodes.push(node); } } public get(key: string): Array<SelectorMap.Node> { const nodes = this._nodes.get(key); if (nodes === undefined) { return []; } return nodes; } public toJSON(): Bucket.JSON { return [...this._nodes].map(([key, nodes]) => [ key, nodes.map((node) => node.toJSON()), ]); } } export namespace Bucket { export type JSON = Array<[string, Array<SelectorMap.Node.JSON>]>; } } /** * Given a selector, get the right-most ID, class, 
or type selector, i.e. the * key selector. If the right-most selector is a compound selector, then the * left-most ID, class, or type selector of the compound selector is returned. */ function getKeySelector( selector: Selector ): Selector.Id | Selector.Class | Selector.Type | null { if (isId(selector) || isClass(selector) || isType(selector)) { return selector; } if (isCompound(selector)) { return getKeySelector(selector.left) ?? getKeySelector(selector.right); } if (isComplex(selector)) { return getKeySelector(selector.right); } return null; } type Specificity = number; // The number of bits to use for every component of the specificity computation. // As bitwise operations in JavaScript are limited to 32 bits, we can only use // at most 10 bits per component as 3 components are used. const componentBits = 10; // The maximum value that any given component can have. Since we can only use 10 // bits for every component, this in effect means that any given component count // must be strictly less than 1024. const componentMax = (1 << componentBits) - 1; /** * {@link https://www.w3.org/TR/selectors/#specificity} */ function getSpecificity(selector: Selector): Specificity { let a = 0; let b = 0; let c = 0; const queue: Array<Selector> = [selector]; while (queue.length > 0) { const selector = queue.pop(); if (selector === undefined) { break; } if (isId(selector)) { a++; } else if ( isClass(selector) || isAttribute(selector) || isPseudoClass(selector) ) { b++; } else if (isType(selector) || isPseudoElement(selector)) { c++; } else if (isCompound(selector) || isComplex(selector)) { queue.push(selector.left, selector.right); } } // Concatenate the components to a single number indicating the specificity of // the selector. This allows us to treat specificities as simple numbers and // hence use normal comparison operators when comparing specificities. 
return ( (Math.min(a, componentMax) << (componentBits * 2)) | (Math.min(b, componentMax) << (componentBits * 1)) | Math.min(c, componentMax) ); } /** * Check if a selector can be rejected based on an ancestor filter. */ function canReject(selector: Selector, filter: AncestorFilter): boolean { if (isId(selector) || isClass(selector) || isType(selector)) { return !filter.matches(selector); } if (isCompound(selector)) { // Compound selectors are right-leaning, so recurse to the left first as it // is likely the shortest branch. return ( canReject(selector.left, filter) || canReject(selector.right, filter) ); } if (isComplex(selector)) { const { combinator } = selector; if ( combinator === Selector.Combinator.Descendant || combinator === Selector.Combinator.DirectDescendant ) { // Complex selectors are left-leaning, so recurse to the right first as it // is likely the shortest branch. return ( canReject(selector.right, filter) || canReject(selector.left, filter) ); } } return false; }
the_stack
import {BiMap} from 'bim' import {times} from 'lodash' import { Aperture, ConfigName, ExposureMode, ISO, ManualFocusOption, WhiteBalance, } from '../configs' import {ObjectFormatCode, ResCode} from '../PTPDatacode' import {PTPDataView} from '../PTPDataView' import {PTPDevice, PTPEvent} from '../PTPDevice' import { ConfigDesc, createReadonlyConfigDesc, OperationResult, TakePhotoOption, } from '../Tethr' import {TethrObject, TethrObjectInfo} from '../TethrObject' import {isntNil} from '../util' import {TethrPTPUSB} from './TethrPTPUSB' enum OpCodePanasonic { OpenSession = 0x9102, CloseSession = 0x9103, GetDevicePropDesc = 0x9108, GetDevicePropValue = 0x9402, SetDevicePropValue = 0x9403, InitiateCapture = 0x9404, CtrlLiveview = 0x9405, Liveview = 0x9412, GetLiveviewSettings = 0x9414, SetLiveviewSettings = 0x9415, ManualFocusDrive = 0x9416, LiveviewImage = 0x9706, } enum EventCodePanasonic { DevicePropChanged = 0xc102, ObjectAdded = 0xc108, } // Panasonic does not have regular device properties, they use some 32bit values enum DevicePropCodePanasonic { PhotoStyle = 0x02000010, PhotoStyle_Param = 0x02000011, ISO = 0x02000020, ISO_Param = 0x02000021, ISO_UpperLimit = 0x02000022, ShutterSpeed = 0x02000030, ShutterSpeed_Param = 0x02000031, ShutterSpeed_RangeLimit = 0x02000032, Aperture = 0x02000040, Aperture_Param = 0x02000041, Aperture_RangeLimit = 0x02000042, WhiteBalance = 0x02000050, WhiteBalance_Param = 0x02000051, WhiteBalance_KSet = 0x02000052, WhiteBalance_ADJ_AB = 0x02000053, WhiteBalance_ADJ_GM = 0x02000054, WhiteBalance_ADJ_AB_Sep = 0x02000055, Exposure = 0x02000060, Exposure_Param = 0x02000061, Exposure_RangeLimit = 0x02000062, AFArea = 0x02000070, AFArea_AFModeParam = 0x02000071, AFArea_AFAreaParam = 0x02000072, AFArea_SetQuickAFParam = 0x02000073, CameraMode = 0x02000080, CameraMode_DriveMode = 0x02000081, CameraMode_ModePos = 0x02000082, CameraMode_CreativeMode = 0x02000083, CameraMode_iAMode = 0x02000084, ImageFormat = 0x020000a2, MeteringInfo = 0x020000b0, 
IntervalInfo = 0x020000c0, RecDispConfig = 0x020000e0, RecInfoFlash = 0x02000110, BurstBracket = 0x02000140, RecPreviewConfig = 0x02000170, RecInfoSelfTimer = 0x020001a0, RecInfoFlash2 = 0x020001b0, RecCtrlRelease = 0x03000010, ImageMode = 0x20000a0, ImageMode_Param = 0x20000a1, ImageMode_Quality = 0x20000a2, ImageMode_ImageAspect = 0x20000a3, Liveview_TransImg = 0xd800011, Liveview_RecomImg = 0xd800012, } enum ObjectFormatCodePanasonic { Raw = 0x3800, } interface DevicePropSchemePanasonic<T> { devicePropCode: number decode: (value: number) => T | null encode: (value: T) => number | null valueSize: 1 | 2 | 4 } interface LiveviewSetting { width: number height: number frameSize: number fps: number } export class TethrPanasonic extends TethrPTPUSB { private liveviewEnabled = false public constructor(device: PTPDevice) { super(device) } public async open(): Promise<void> { await super.open() await this.device.sendCommand({ label: 'Panasonic OpenSession', opcode: OpCodePanasonic.OpenSession, parameters: [0x00010001], }) this.device.onEventCode( EventCodePanasonic.DevicePropChanged, this.onDevicePropChanged ) } public async close(): Promise<void> { await this.device.sendCommand({ label: 'Panasonic CloseSession', opcode: OpCodePanasonic.CloseSession, parameters: [0x00010001], }) await super.open() } // Config public setAperture(value: Aperture) { return this.setDevicePropValuePanasonic({ devicePropCode: DevicePropCodePanasonic.Aperture_Param, encode: (value: Aperture) => { return value === 'auto' ? 0 : Math.round(value * 10) }, valueSize: 2, value, }) } public getApertureDesc() { return this.getDevicePropDescPanasonic({ devicePropCode: DevicePropCodePanasonic.Aperture, decode: (value: number) => { return value / 10 }, valueSize: 2, }) } public setColorModeDesc(value: string) { return this.setDevicePropValuePanasonic({ devicePropCode: DevicePropCodePanasonic.PhotoStyle_Param, encode: (value: string) => { return this.colorModeTable.getKey(value) ?? 
null }, valueSize: 2, value, }) } public getColorModeDesc() { return this.getDevicePropDescPanasonic({ devicePropCode: DevicePropCodePanasonic.PhotoStyle, decode: (value: number) => { return this.colorModeTable.get(value) ?? null }, valueSize: 2, }) } public getExposureModeDesc() { return this.getDevicePropDescPanasonic({ devicePropCode: DevicePropCodePanasonic.CameraMode_ModePos, decode: (value: number) => { return this.exposureModeTable.get(value) ?? null }, valueSize: 2, }) } public setExposureComp(value: string) { return this.setDevicePropValuePanasonic({ devicePropCode: DevicePropCodePanasonic.Exposure_Param, encode: v => { if (v === '0') return 0x0 let negative = false, digits = 0, thirds = 0 const match1 = v.match(/^([+-]?)([0-9]+)( 1\/3| 2\/3)?$/) if (match1) { negative = match1[1] === '-' digits = parseInt(match1[2]) thirds = !match1[3] ? 0 : match1[3] === ' 1/3' ? 1 : 2 } const match2 = match1 && v.match(/^([+-]?)(1\/3|2\/3)$/) if (match2) { negative = match2[1] === '-' thirds = match2[2] === '1/3' ? 1 : 2 } if (!match1 && !match2) return null const steps = digits * 3 + thirds return (negative ? 0x8000 : 0x0000) | steps }, valueSize: 2, value, }) } public getExposureCompDesc() { return this.getDevicePropDescPanasonic({ devicePropCode: DevicePropCodePanasonic.Exposure, decode: v => { if (v === 0x0) return '0' const steps = v & 0xf const digits = Math.floor(steps / 3) const thirds = steps % 3 const negative = v & 0x8000 const sign = negative ? '-' : '+' const thirdsSymbol = thirds === 1 ? '1/3' : thirds === 2 ? 
'2/3' : ''
				if (digits === 0) return sign + thirdsSymbol
				if (thirds === 0) return sign + digits
				return sign + digits + ' ' + thirdsSymbol
			},
			valueSize: 2,
		})
	}

	/** Fixed set of manual-focus steps: direction (near/far) + speed (1 = slow, 2 = fast). */
	public async getManualFocusOptionsDesc() {
		return createReadonlyConfigDesc([
			'near:2',
			'near:1',
			'far:1',
			'far:2',
		] as ManualFocusOption[])
	}

	// Capability descriptors: this vendor class always supports the operations below.
	public async getCanTakePhotoDesc() {
		return createReadonlyConfigDesc(true)
	}

	public async getCanRunAutoFocusDesc() {
		return createReadonlyConfigDesc(true)
	}

	public async getCanRunManualFocusDesc() {
		return createReadonlyConfigDesc(true)
	}

	public async getCanStartLiveviewDesc() {
		return createReadonlyConfigDesc(true)
	}

	/** Sets the white-balance color temperature via the vendor K-set property. */
	public async setColorTemperature(value: number) {
		return this.setDevicePropValuePanasonic({
			devicePropCode: DevicePropCodePanasonic.WhiteBalance_KSet,
			encode: value => value, // sent as-is, no table lookup
			valueSize: 2,
			value,
		})
	}

	public async getColorTemperatureDesc() {
		return this.getDevicePropDescPanasonic({
			devicePropCode: DevicePropCodePanasonic.WhiteBalance_KSet,
			decode: data => data,
			valueSize: 2,
		})
	}

	/** Maps an aspect string (e.g. '16:9') to its device code via imageAspectTable. */
	public setImageAspect(value: string) {
		return this.setDevicePropValuePanasonic({
			devicePropCode: DevicePropCodePanasonic.ImageMode_ImageAspect,
			encode: (value: string) => {
				return this.imageAspectTable.getKey(value) ?? null
			},
			valueSize: 2,
			value,
		})
	}

	public getImageAspectDesc() {
		return this.getDevicePropDescPanasonic({
			devicePropCode: DevicePropCodePanasonic.ImageMode_ImageAspect,
			decode: (value: number) => {
				return this.imageAspectTable.get(value) ?? null
			},
			valueSize: 2,
		})
	}

	/** Maps a quality string (e.g. 'raw,fine') to its device code via imageQualityTable. */
	public setImageQuality(value: string) {
		return this.setDevicePropValuePanasonic({
			devicePropCode: DevicePropCodePanasonic.ImageMode_Quality,
			encode: (value: string) => {
				return this.imageQualityTable.getKey(value) ?? null
			},
			valueSize: 2,
			value,
		})
	}

	public getImageQualityDesc() {
		return this.getDevicePropDescPanasonic({
			devicePropCode: DevicePropCodePanasonic.ImageMode_Quality,
			decode: (value: number) => {
				return this.imageQualityTable.get(value) ?? null
			},
			valueSize: 2,
		})
	}

	/** 0xffffffff is the vendor sentinel for auto ISO. */
	public setIso(value: ISO) {
		return this.setDevicePropValuePanasonic({
			devicePropCode: DevicePropCodePanasonic.ISO_Param,
			encode: value => {
				return value === 'auto' ? 0xffffffff : value
			},
			valueSize: 4,
			value,
		})
	}

	public getIsoDesc() {
		return this.getDevicePropDescPanasonic({
			devicePropCode: DevicePropCodePanasonic.ISO,
			decode: (value: number) => {
				if (value === 0xffffffff) return 'auto'
				if (value === 0xfffffffe) return 'auto' // i-ISO
				return value
			},
			valueSize: 4,
		})
	}

	public setWhiteBalance(value: WhiteBalance) {
		return this.setDevicePropValuePanasonic({
			devicePropCode: DevicePropCodePanasonic.WhiteBalance_Param,
			encode: (value: WhiteBalance) => {
				return this.whiteBalanceTable.getKey(value) ?? null
			},
			valueSize: 2,
			value,
		})
	}

	public getWhiteBalanceDesc() {
		return this.getDevicePropDescPanasonic({
			devicePropCode: DevicePropCodePanasonic.WhiteBalance,
			decode: (value: number) => {
				return this.whiteBalanceTable.get(value) ?? null
			},
			valueSize: 2,
		})
	}

	/**
	 * Writes one device property via the Panasonic vendor opcode.
	 * Payload layout: uint32 propCode, uint32 valueSize, then the value.
	 * Returns 'invalid parameter' when encode() rejects the input (null).
	 */
	private async setDevicePropValuePanasonic<T>({
		value,
		valueSize,
		encode,
		devicePropCode,
	}: Omit<DevicePropSchemePanasonic<T>, 'decode'> & {value: T}): Promise<
		OperationResult<void>
	> {
		const dataView = new PTPDataView()
		const encodedValue = encode(value)

		if (encodedValue === null) {
			return {
				status: 'invalid parameter',
			}
		}

		dataView.writeUint32(devicePropCode)
		dataView.writeUint32(valueSize)

		// Only 1/2/4-byte properties are supported; any other size writes no value.
		if (valueSize === 1) dataView.writeUint8(encodedValue)
		if (valueSize === 2) dataView.writeUint16(encodedValue)
		if (valueSize === 4) dataView.writeUint32(encodedValue)

		const succeed = await this.device.sendData({
			label: 'Panasonic SetDevicePropValue',
			opcode: OpCodePanasonic.SetDevicePropValue,
			parameters: [devicePropCode],
			data: dataView.toBuffer(),
		})

		return {
			status: succeed ? 'ok' : 'invalid parameter',
		}
	}

	/**
	 * Reads a property's current value plus its enumerated options and decodes
	 * both into a ConfigDesc. The property is writable iff options were reported.
	 * NOTE(review): readUint8/readUint16/readUint32 (and the *Array variants) are
	 * used detached from dataView below — this only works if PTPDataView declares
	 * them as bound/arrow-function properties; confirm against PTPDataView.
	 */
	private async getDevicePropDescPanasonic<T>({
		devicePropCode,
		decode,
		valueSize,
	}: Omit<DevicePropSchemePanasonic<T>, 'encode'>): Promise<ConfigDesc<T>> {
		const {data} = await this.device.receiveData({
			label: 'Panasonic GetDevicePropDesc',
			opcode: OpCodePanasonic.GetDevicePropDesc,
			parameters: [devicePropCode],
		})

		const dataView = new PTPDataView(data)

		// Pick readers matching the property's byte width (valueSize is 1 | 2 | 4).
		let getValue: () => number
		let getArray: () => number[]
		switch (valueSize) {
			case 1:
				getValue = dataView.readUint8
				getArray = dataView.readUint8Array
				break
			case 2:
				getValue = dataView.readUint16
				getArray = dataView.readUint16Array
				break
			case 4:
				getValue = dataView.readUint32
				getArray = dataView.readUint32Array
				break
		}

		dataView.skip(4) // devicePropCode
		const headerLength = dataView.readUint32()

		// Jump past the variable-length header to the value section.
		dataView.goto(headerLength * 4 + 2 * 4)

		const value = decode(getValue())

		const values = [...getArray()].map(decode).filter(isntNil)

		return {
			writable: values.length > 0,
			value,
			option: {
				type: 'enum',
				values,
			},
		}
	}

	public setShutterSpeed(value: string) {
		return this.setDevicePropValuePanasonic({
			devicePropCode: DevicePropCodePanasonic.ShutterSpeed_Param,
			encode: (value: string) => {
				if (value === 'bulb') return 0xffffffff
				// NOTE(review): 'auto' is encoded as 0x0ffffffe here, but
				// getShutterSpeedDesc decodes 0x0fffffff as 'auto' and 0x0ffffffe as
				// null — one of the two sentinels looks wrong; confirm with the camera.
				if (value === 'auto') return 0x0ffffffe

				if (value.startsWith('1/')) {
					// Fractional speeds are sent as denominator * 1000 (MSB clear).
					const denominator = parseInt(value.replace(/^1\//, ''))
					return denominator * 1000
				}

				// Seconds
				const seconds = parseFloat(value)
				if (!isNaN(seconds)) {
					// Second-based exposures set the MSB as a flag.
					return Math.round(seconds * 1000) | 0x80000000
				}

				return null
			},
			valueSize: 4,
			value,
		})
	}

	public getShutterSpeedDesc() {
		return this.getDevicePropDescPanasonic({
			devicePropCode: DevicePropCodePanasonic.ShutterSpeed,
			decode: (value: number) => {
				switch (value) {
					case 0xffffffff:
						return 'bulb'
					case 0x0fffffff:
						return 'auto'
					case 0x0ffffffe:
						return null
				}
				if ((value & 0x80000000) === 0x00000000) {
					// MSB clear: value is a denominator scaled by 1000 (e.g. 1/250).
					return '1/' + value / 1000
				} else {
					// MSB set: remaining 31 bits are seconds scaled by 1000.
					return ((value & 0x7fffffff) / 1000).toString()
				}
			},
			valueSize: 4,
		})
	}

	// Actions

	/**
	 * Takes a photo. A RAW+JPEG quality setting (contains '+') produces two
	 * ObjectAdded events, so the promise resolves after that many objects.
	 * @param download when false, the captured objects are not fetched back.
	 */
	public async takePhoto({download = true}: TakePhotoOption = {}): Promise<
		OperationResult<TethrObject[]>
	> {
		const quality = await this.get('imageQuality')
		let restNumPhotos = quality?.includes('+') ? 2 : 1

		await this.device.sendCommand({
			label: 'Panasonic InitiateCapture',
			opcode: OpCodePanasonic.InitiateCapture,
			parameters: [0x3000011],
		})

		// Collect object infos from ObjectAdded events until all shots arrived.
		const infos = await new Promise<TethrObjectInfo[]>(resolve => {
			const infos: TethrObjectInfo[] = []

			const onObjectAdded = async (ev: PTPEvent) => {
				const objectID = ev.parameters[0]
				const info = await this.getObjectInfo(objectID)

				switch (info.format) {
					case 'jpeg':
					case 'raw':
						infos.push(info)
						break
					case 'association':
						// Ignore folder
						return
					default:
						// NOTE(review): message is missing a separator space before the format.
						throw new Error('Received unexpected objectFormat' + info.format)
				}

				if (--restNumPhotos === 0) {
					this.device.offEventCode(
						EventCodePanasonic.ObjectAdded,
						onObjectAdded
					)
					resolve(infos)
				}
			}

			this.device.onEventCode(EventCodePanasonic.ObjectAdded, onObjectAdded)
		})

		if (!download) {
			return {status: 'ok', value: []}
		}

		const objects: TethrObject[] = []

		for (const info of infos) {
			const data = await this.getObject(info.id)
			const isRaw = info.format === 'raw'
			const type = isRaw ?
'image/x-panasonic-rw2' : 'image/jpeg'
			const blob = new Blob([data], {type})
			objects.push({...info, blob})
		}

		return {status: 'ok', value: objects}
	}

	/**
	 * Starts liveview. Frames are pulled with getLiveview(), drawn onto an
	 * offscreen canvas, and exposed to the caller as a 60 fps MediaStream.
	 */
	public async startLiveview(): Promise<OperationResult<MediaStream>> {
		const canvas = document.createElement('canvas')
		const ctx = canvas.getContext('2d')
		if (!ctx) return {status: 'general error'}

		await this.device.sendCommand({
			label: 'Panasonic Liveview',
			opcode: OpCodePanasonic.Liveview,
			parameters: [0x0d000010],
		})

		this.liveviewEnabled = true
		this.emit('liveviewEnabledChanged', await this.getDesc('liveviewEnabled'))

		const updateFrame = async () => {
			// Stop rescheduling once stopLiveview() clears the flag.
			if (!this.liveviewEnabled) return
			try {
				const image = await this.getLiveview()
				if (!image) return

				const imageBitmap = await createImageBitmap(image)

				// Resize the canvas lazily to match the incoming frame size.
				const sizeChanged = canvas.width !== imageBitmap.width || canvas.height !== imageBitmap.height
				if (sizeChanged) {
					canvas.width = imageBitmap.width
					canvas.height = imageBitmap.height
				}

				ctx.drawImage(imageBitmap, 0, 0)
			} finally {
				// Always schedule the next frame, even when this one was skipped.
				requestAnimationFrame(updateFrame)
			}
		}
		updateFrame()

		const stream = canvas.captureStream(60)
		return {status: 'ok', value: stream}
	}

	/** Stops liveview streaming and notifies listeners of the state change. */
	public async stopLiveview(): Promise<OperationResult<void>> {
		await this.device.sendCommand({
			label: 'Panasonic Liveview',
			opcode: OpCodePanasonic.Liveview,
			parameters: [0x0d000011],
		})

		this.liveviewEnabled = false
		this.emit('liveviewEnabledChanged', await this.getDesc('liveviewEnabled'))

		return {status: 'ok'}
	}

	/** Describes the current and recommended liveview sizes as 'WxH' strings. */
	public async getLiveviewSizeDesc(): Promise<ConfigDesc<string>> {
		const setting = await this.getLiveviewSetting()
		const settingOptions = await this.getLiveviewRecommendedSettings()

		const value = getSizeStringFromSetting(setting)
		const values = settingOptions.map(getSizeStringFromSetting)

		return {
			writable: values.length > 0,
			value,
			option: {
				type: 'enum',
				values,
			},
		}

		function getSizeStringFromSetting(setting: LiveviewSetting) {
			return `${setting.width}x${setting.height}`
		}
	}

	/**
	 * Selects the liveview transfer size from the camera's recommended list.
	 * @param value size string formatted as '<width>x<height>' (e.g. '1024x768').
	 * @returns 'invalid parameter' when the size is not among the recommended settings.
	 *
	 * FIX: previously used value.split('x').map(parseInt). Array#map passes the
	 * element index as parseInt's second argument (the radix), so the height was
	 * parsed with radix 1 and always became NaN — no setting ever matched and the
	 * method unconditionally returned 'invalid parameter'.
	 */
	public async setLiveviewSize(value: string): Promise<OperationResult<void>> {
		const [width, height] = value.split('x').map(s => parseInt(s, 10))

		const settings = await this.getLiveviewRecommendedSettings()

		const setting = settings.find(s => s.width === width && s.height === height)

		if (!setting) {
			return {status: 'invalid parameter'}
		}

		await this.setLiveviewSetting(setting)

		return {status: 'ok'}
	}

	/** Reads the list of liveview settings the camera recommends. */
	private async getLiveviewRecommendedSettings(): Promise<LiveviewSetting[]> {
		const {data} = await this.device.receiveData({
			opcode: OpCodePanasonic.GetLiveviewSettings,
			parameters: [DevicePropCodePanasonic.Liveview_RecomImg],
		})

		const dataView = new PTPDataView(data)
		/*const receivedPropCode =*/ dataView.readUint32()
		/*const dataSize =*/ dataView.readUint32()
		const settingsNum = dataView.readUint16()
		/*const structSize =*/ dataView.readUint16()

		// Struct order is height, width, frameSize, fps (matches getLiveviewSetting).
		const settings = times(settingsNum, () => {
			return {
				height: dataView.readUint16(),
				width: dataView.readUint16(),
				frameSize: dataView.readUint16(),
				fps: dataView.readUint16(),
			}
		})

		return settings
	}

	/** Reads the liveview transfer setting currently in effect. */
	private async getLiveviewSetting(): Promise<LiveviewSetting> {
		const {data} = await this.device.receiveData({
			opcode: OpCodePanasonic.GetLiveviewSettings,
			parameters: [DevicePropCodePanasonic.Liveview_TransImg],
		})

		const dataView = new PTPDataView(data)
		/*const receivedPropCode =*/ dataView.readUint32()
		/*const dataSize =*/ dataView.readUint32()

		return {
			height: dataView.readUint16(),
			width: dataView.readUint16(),
			frameSize: dataView.readUint16(),
			fps: dataView.readUint16(),
		}
	}

	/** Writes a liveview transfer setting (8-byte struct: h, w, frameSize, fps). */
	private async setLiveviewSetting(setting: LiveviewSetting): Promise<void> {
		const dataView = new PTPDataView()

		dataView.writeUint32(DevicePropCodePanasonic.Liveview_TransImg)
		dataView.writeUint32(8)
		dataView.writeUint16(setting.height)
		dataView.writeUint16(setting.width)
		dataView.writeUint16(setting.frameSize)
		dataView.writeUint16(setting.fps)

		await this.device.sendData({
			opcode: OpCodePanasonic.SetLiveviewSettings,
			parameters: [DevicePropCodePanasonic.Liveview_TransImg],
			data: dataView.toBuffer(),
		})
	}

	private async getLiveview(): Promise<null | Blob> {
const {resCode, data} = await this.device.receiveData({
			label: 'Panasonic LiveviewImage',
			opcode: OpCodePanasonic.LiveviewImage,
			expectedResCodes: [ResCode.OK, ResCode.DeviceBusy],
			maxByteLength: 1_000_000, // = 1MB
		})

		// DeviceBusy means no frame is ready yet; the caller just skips this tick.
		if (resCode !== ResCode.OK) return null

		// let histogram!: Uint8Array | undefined

		const dataView = new DataView(data)

		// The frame starts with a 180-byte TLV header; walk it to find the JPEG offset.
		let jpegOffset = 180
		for (let offset = 0; offset < 180; ) {
			const id = dataView.getUint32(offset, true)
			offset += 4
			const dataSize = dataView.getUint32(offset, true)
			offset += 4
			// const sessionID = dataView.getUint32(offset, true)

			switch (id) {
				case 0x17000001: {
					// Jpeg Offset
					jpegOffset = dataView.getUint32(offset + 4, true)
					break
				}
				/*
				case 0x17000002: {
					// Jpeg Length?
					jpegLength = dataView.getUint32(offset + 4, true)
					break
				}*/
				case 0x17000003: {
					// Histogram
					// const valid = dataView.getUint32(offset + 4, true)
					// const samples = dataView.getUint32(offset + 8, true)
					// const elems = dataView.getUint32(offset + 12, true)
					// histogram = new Uint8Array(
					// 	data.slice(offset + 16, offset + 16 + samples)
					// )
					break
				}
				case 0x17000004: {
					// Posture?
					// const posture = dataView.getUint16(offset + 4, true)
					break
				}
				case 0x17000005: {
					// Level gauge
					// const roll = dataView.getInt16(offset + 4, true) / 10
					// const pitch = dataView.getInt16(offset + 6, true) / 10
					break
				}
			}

			offset += dataSize
		}

		if (!jpegOffset) return null

		const jpegData = data.slice(jpegOffset)

		const image = new Blob([jpegData], {type: 'image/jpg'})

		return image
	}

	/**
	 * Drives manual focus one step.
	 * @param option 'near'|'far' plus speed '1'|'2', e.g. 'far:2'.
	 * Mode codes sent to the camera: far:2→1, far:1→2, near:1→3, near:2→4.
	 */
	public async runManualFocus(
		option: ManualFocusOption
	): Promise<OperationResult<void>> {
		const [direction, speed] = option.split(':')

		let mode = 0
		if (direction === 'far') {
			if (speed === '1') mode = 2
			else if (speed === '2') mode = 1
		} else if (direction === 'near') {
			if (speed === '1') mode = 3
			else if (speed === '2') mode = 4
		}

		if (!mode) {
			return {status: 'invalid parameter'}
		}

		const devicePropCode = 0x03010011

		const dataView = new PTPDataView()
		dataView.writeUint32(devicePropCode)
		dataView.writeUint32(2)
		dataView.writeUint16(mode)

		await this.device.sendData({
			label: 'Panasonic ManualFocusDrive',
			opcode: OpCodePanasonic.ManualFocusDrive,
			parameters: [devicePropCode],
			data: dataView.toBuffer(),
		})

		return {status: 'ok'}
	}

	/** Triggers one-shot autofocus via the liveview-control opcode. */
	public async runAutoFocus(): Promise<OperationResult<void>> {
		await this.device.sendCommand({
			label: 'Panasonic Ctrl Liveview',
			opcode: OpCodePanasonic.CtrlLiveview,
			parameters: [0x03000024],
		})

		return {status: 'ok'}
	}

	/**
	 * Maps a device-property-changed event to the affected config names and
	 * re-emits their fresh descriptors. Unknown property codes are ignored.
	 */
	protected onDevicePropChanged = async (ev: PTPEvent) => {
		// NOTE(review): 'devicdPropCode' is a typo for devicePropCode (local only).
		const devicdPropCode = ev.parameters[0]

		let configs: ConfigName[]
		switch (devicdPropCode) {
			case DevicePropCodePanasonic.CameraMode:
				configs = ['exposureMode', 'aperture', 'shutterSpeed', 'exposureComp']
				break
			case DevicePropCodePanasonic.Aperture:
				configs = ['aperture']
				break
			case DevicePropCodePanasonic.ShutterSpeed:
				configs = ['shutterSpeed']
				break
			case DevicePropCodePanasonic.ISO:
				configs = ['iso']
				break
			case DevicePropCodePanasonic.Exposure:
				configs = ['exposureComp']
				break
			case DevicePropCodePanasonic.WhiteBalance:
				configs = ['whiteBalance', 'colorTemperature']
				break
			case DevicePropCodePanasonic.PhotoStyle:
				configs = ['colorMode']
				break
			case DevicePropCodePanasonic.ImageMode:
				configs = ['imageSize', 'imageAspect', 'imageQuality']
				break
			default:
				return
		}

		for (const config of configs) {
			const desc = await this.getDesc(config)
			this.emit(`${config}Changed`, desc)
		}
	}

	/**
	 * Resolves an object-format code to its lowercase name, checking the
	 * standard table first, then the Panasonic vendor table.
	 * NOTE(review): throws (TypeError on .toLowerCase of undefined) when the
	 * code is in neither table — confirm whether that is intended.
	 */
	protected getObjectFormat(code: number) {
		return (
			ObjectFormatCode[code] ?? ObjectFormatCodePanasonic[code]
		).toLowerCase()
	}

	// Vendor code <-> config value lookup tables.

	protected exposureModeTable = new BiMap<number, ExposureMode>([
		[0, 'P'],
		[1, 'A'],
		[2, 'S'],
		[3, 'M'],
		[5, 'video'],
		[7, 'vendor iA'],
		[8, 'C1'],
		[9, 'C2'],
		[10, 'C3'],
		[12, 'vendor S&Q'],
	])

	protected whiteBalanceTable = new BiMap<number, WhiteBalance>([
		[0x0002, 'auto'],
		[0x0004, 'daylight'],
		[0x8008, 'cloud'],
		[0x0006, 'incandescent'],
		// [0x8009, 'White Set'],
		[0x0007, 'flash'],
		[0x0005, 'fluorescent'],
		// [0x800a, 'Black and White'],
		// [0x800b, 'WB Setting 1'],
		// [0x800c, 'WB Setting 2'],
		// [0x800d, 'WB Setting 3'],
		// [0x800e, 'WB Setting 4'],
		[0x800f, 'shade'],
		[0x8010, 'manual'],
		[0x8011, 'manual2'],
		[0x8012, 'manual3'],
		[0x8013, 'manual4'],
		[0x8014, 'auto cool'],
		[0x8015, 'auto warm'],
	])

	protected colorModeTable = new BiMap<number, string>([
		[0, 'Standard'],
		[1, 'Vivid'],
		[2, 'Natural'],
		[18, 'Flat'],
		[4, 'Landscape'],
		[5, 'Portrait'],
		// NOTE(review): 'Monochorme' is misspelled; changing it would alter the
		// exposed config value, so it is left as-is here.
		[3, 'Monochorme'],
		[15, 'L.Monochrome'],
		[17, 'L.Monochrome D'],
		[41, 'Cinelike D2'],
		[42, 'Cinelike V2'],
		[14, 'Like709'],
		[40, 'V-Log'],
		[19, 'MY PHOTOSTYLE 1'],
		[20, 'MY PHOTOSTYLE 2'],
		[21, 'MY PHOTOSTYLE 3'],
		[22, 'MY PHOTOSTYLE 4'],
	])

	protected imageAspectTable = new BiMap<number, string>([
		[1, '4:3'],
		[2, '3:2'],
		[3, '16:9'],
		[4, '1:1'],
		[10, '65:24'],
		[11, '2:1'],
	])

	protected imageQualityTable = new BiMap<number, string>([
		[0, 'fine'],
		[1, 'standard'],
		[2, 'raw'],
		[3, 'raw,fine'],
		[4, 'raw,standard'],
	])
}
the_stack
import { PayloadMismatchError, EventBus, SchemaMismatchError } from './event-bus';
import AdvancedSchema from './fixtures/advanced-schema.json';

//------------------------------------------------------------------------------------
// PayloadMismatchError
//------------------------------------------------------------------------------------
describe('[PayloadMismatchError]', () => {
  // Error.captureStackTrace is stubbed per test; keep the real one for restore.
  const captureStackTrace = Error.captureStackTrace;

  afterEach(() => {
    Error.captureStackTrace = captureStackTrace;
  });

  it('should create detailed errors', () => {
    const schema = { type: 'boolean' }
    const payload = true;

    Error.captureStackTrace = jest.fn();
    const error = new PayloadMismatchError('channel', schema, payload);

    // The constructor should trim its own frame from the stack trace.
    expect(Error.captureStackTrace).toHaveBeenCalledWith(error, PayloadMismatchError);
    expect(error).toBeInstanceOf(Error);
    expect(error.channel).toBe('channel');
    expect(error.schema).toStrictEqual(schema);
    expect(error.payload).toStrictEqual(payload);
  });

  it('should not call captureStackTrace if it is not defined', () => {
    // captureStackTrace is a V8 extension; the constructor must tolerate its absence.
    (Error as any).captureStackTrace = undefined;
    new PayloadMismatchError('channel', null, null);
    expect(Error.captureStackTrace).toBeFalsy();
  });
});

//------------------------------------------------------------------------------------
// SchemaMismatchError
//------------------------------------------------------------------------------------
describe('[SchemaMismatchError]', () => {
  const captureStackTrace = Error.captureStackTrace;

  afterEach(() => {
    Error.captureStackTrace = captureStackTrace;
  });

  it('should create detailed errors', () => {
    const schema = { type: 'boolean' }
    const newSchema = { type: 'string' };

    Error.captureStackTrace = jest.fn();
    const error = new SchemaMismatchError('channel', schema, newSchema);

    expect(Error.captureStackTrace).toHaveBeenCalledWith(error, SchemaMismatchError);
    expect(error).toBeInstanceOf(Error);
    expect(error.channel).toBe('channel');
    expect(error.schema).toStrictEqual(schema);
    expect(error.newSchema).toStrictEqual(newSchema);
  });

  it('should not call captureStackTrace if it is not defined', () => {
    (Error as any).captureStackTrace = undefined;
    new SchemaMismatchError('channel', null, null);
    expect(Error.captureStackTrace).toBeFalsy();
  });
});

//------------------------------------------------------------------------------------
// register
//------------------------------------------------------------------------------------
describe('[EventBus]: register', () => {
  it('should register schemas on new channels', () => {
    const eventBus = new EventBus();
    // register() returns whether the channel already existed.
    let exists = eventBus.register('test1', { type: 'boolean' });
    expect(exists).toBeFalsy();
    exists = eventBus.register('test2', { type: 'string' });
    expect(exists).toBeFalsy();
  });

  it('should re-register an equal schema on an existing channel', () => {
    const eventBus = new EventBus();
    let exists = eventBus.register('test1', { type: 'object', properties: { test: { type: 'boolean' } } });
    expect(exists).toBeFalsy();
    // Same schema with keys in a different order must count as equal.
    exists = eventBus.register('test1', { properties: { test: { type: 'boolean' } }, type: 'object' });
    expect(exists).toBeTruthy();
  });

  it('should be fail to register a differentiating schema on an existing channel', () => {
    const eventBus = new EventBus();
    const register = () => {
      eventBus.register('test1', { type: 'boolean' });
      eventBus.register('test1', { type: 'string' });
    };
    expect(register).toThrowError();
  });
});

//------------------------------------------------------------------------------------
// unregister
//------------------------------------------------------------------------------------
describe('[EventBus]: unregister', () => {
  it('should unregister the schema from channel', () => {
    const eventBus = new EventBus();
    // Unregistering a channel that never existed reports false.
    let exists = eventBus.unregister('test1');
    expect(exists).toBeFalsy();
    eventBus.register('test1', { type: 'boolean' });
    exists = eventBus.unregister('test1');
    expect(exists).toBeTruthy();
  });
});
//------------------------------------------------------------------------------------
// subscribe
//------------------------------------------------------------------------------------
describe('[EventBus]: subscribe', () => {
  it('should subscribe to a new channel', () => {
    const eventBus = new EventBus();
    eventBus.subscribe('test1', jest.fn());
  });

  it('should throw an error if subscribe is called without a callback', () => {
    const eventBus = new EventBus();
    const subscribe = () => {
      eventBus.subscribe('test1', undefined as any);
    };
    expect(subscribe).toThrowError();
  });

  it('should not trigger a callback with replay if no previous event was published', () => {
    const eventBus = new EventBus();
    const callback = jest.fn();
    // Second argument is the replay flag.
    eventBus.subscribe('test1', true, callback);
    expect(callback).not.toHaveBeenCalled();
  });

  it('should not trigger a callback if no replay is requested', () => {
    const eventBus = new EventBus();
    const callback = jest.fn();
    eventBus.subscribe('test1', false, callback);
    expect(callback).not.toHaveBeenCalled();
  });

  it('should unsubscribe to an existing channel', () => {
    const eventBus = new EventBus();
    const subscription = eventBus.subscribe('test1', jest.fn());
    subscription.unsubscribe();
  });
});

//------------------------------------------------------------------------------------
// publish
//------------------------------------------------------------------------------------
describe('[EventBus]: publish', () => {
  it('should publish event to a new channel', () => {
    const eventBus = new EventBus();
    // Publishing to an unregistered channel is allowed (no schema validation).
    eventBus.publish('test1', true);
  });

  it('should publish event to a registered channel matching schema', () => {
    const eventBus = new EventBus();
    eventBus.register('test1', { type: 'boolean' });
    eventBus.publish('test1', true);
  });

  it('should fail to publish incorrect event to a registered channel with schema', () => {
    const eventBus = new EventBus();
    const publish = () => {
      eventBus.register('test1', { type: 'boolean' });
      eventBus.publish('test1', 'hello');
    };
    expect(publish).toThrowError();
  });
});

//------------------------------------------------------------------------------------
// getLatest
//------------------------------------------------------------------------------------
describe('[EventBus]: getLatest', () => {
  it('should return the latest published event on channel', () => {
    const eventBus = new EventBus();
    eventBus.publish('test1', true);
    expect(eventBus.getLatest('test1')).toBe(true)
    expect(eventBus.getLatest('test2')).toBeUndefined()
  });
});

//------------------------------------------------------------------------------------
// getSchema
//------------------------------------------------------------------------------------
describe('[EventBus]: getSchema', () => {
  it('should return the schema registered on channel', () => {
    const eventBus = new EventBus();
    eventBus.register('test1', { type: 'boolean' });
    expect(eventBus.getSchema('test1')).toStrictEqual({ type: 'boolean' })
    expect(eventBus.getSchema('test2')).toBeUndefined()
  });
});

//------------------------------------------------------------------------------------
// subscribe and publish
//------------------------------------------------------------------------------------
describe('[EventBus]: subscribe and publish', () => {
  it('should receive same data as published', () => {
    const eventBus = new EventBus();
    const payload = { test1: true };
    const callback = jest.fn();
    eventBus.subscribe('test1', callback);
    expect(callback).not.toHaveBeenCalled();
    eventBus.publish('test1', payload);
    // Subscribers receive an envelope of { channel, payload }.
    expect(callback).toBeCalledWith({ channel: 'test1', payload });
  });

  it('should receive events on wildcard channel * regardless of channel it was published on', () => {
    const eventBus = new EventBus();
    const payload = { test1: true };
    const callback = jest.fn();
    eventBus.subscribe('*', callback);
    eventBus.publish('test1', payload);
    // The envelope carries the originating channel, not '*'.
    expect(callback).toBeCalledWith({ channel: 'test1', payload: payload });
  });

  it('should handle more advanced schemas', () => {
    const eventBus = new EventBus();
    const payload = {
      name: 'Milk',
      amount: '1000 ml',
      price: 0.99,
      organic: true,
      stores: [
        {
          name: 'ACME Food AB',
          url: 'acme-food.com'
        }
      ]
    };
    const callback = jest.fn();
    eventBus.register('test1', AdvancedSchema);
    eventBus.subscribe('test1', callback);
    eventBus.publish('test1', payload);
    expect(callback).toBeCalledWith({ channel: 'test1', payload });
  });

  it('should subscribe to an existing channel with replay of last event', () => {
    const eventBus = new EventBus();
    const payload = { test1: true };
    const callback = jest.fn();
    eventBus.publish('test1', payload);
    // replay=true delivers the retained last event immediately on subscribe.
    eventBus.subscribe('test1', true, callback);
    expect(callback).toBeCalledWith({ channel: 'test1', payload });
  });

  it('should handle multiple subscriptions with correct channels', () => {
    const eventBus = new EventBus();
    const callback1 = jest.fn();
    const callback2 = jest.fn();
    const callback3 = jest.fn();
    eventBus.subscribe('test1', callback1);
    eventBus.subscribe('test1', callback2);
    eventBus.subscribe('test2', callback3);
    eventBus.publish('test1', { test1: true });
    expect(callback1).toHaveBeenCalled();
    expect(callback2).toHaveBeenCalled();
    expect(callback3).not.toHaveBeenCalled();
  });

  it('should no longer receive data after unsubscribe', () => {
    const eventBus = new EventBus();
    const callback = jest.fn();
    const subscription = eventBus.subscribe('test1', callback);
    eventBus.publish('test1', { test1: true });
    expect(callback).toHaveBeenCalled();
    subscription.unsubscribe();
    eventBus.publish('test1', { test1: true });
    expect(callback).toHaveBeenCalledTimes(1);
  });
});
the_stack
import { isColor } from '../../../src/utils/isColor' describe('unit: isColor()', function () { it('should return true for valid color names', function () { // generated from: https://www.w3schools.com/colors/colors_names.asp using this script in console: // document.body.innerHTML = '<pre>' + Array.from(document.getElementsByClassName('colornamespan')).map((item) => "expect(isColor('" + item.innerText.toLowerCase() + "')).toBe(true)").join('\n') + '</pre>' // lowercase test expect(isColor('aliceblue')).toBe(true) expect(isColor('antiquewhite')).toBe(true) expect(isColor('aqua')).toBe(true) expect(isColor('aquamarine')).toBe(true) expect(isColor('azure')).toBe(true) expect(isColor('beige')).toBe(true) expect(isColor('bisque')).toBe(true) expect(isColor('black')).toBe(true) expect(isColor('blanchedalmond')).toBe(true) expect(isColor('blue')).toBe(true) expect(isColor('blueviolet')).toBe(true) expect(isColor('brown')).toBe(true) expect(isColor('burlywood')).toBe(true) expect(isColor('cadetblue')).toBe(true) expect(isColor('chartreuse')).toBe(true) expect(isColor('chocolate')).toBe(true) expect(isColor('coral')).toBe(true) expect(isColor('cornflowerblue')).toBe(true) expect(isColor('cornsilk')).toBe(true) expect(isColor('crimson')).toBe(true) expect(isColor('cyan')).toBe(true) expect(isColor('darkblue')).toBe(true) expect(isColor('darkcyan')).toBe(true) expect(isColor('darkgoldenrod')).toBe(true) expect(isColor('darkgray')).toBe(true) expect(isColor('darkgrey')).toBe(true) expect(isColor('darkgreen')).toBe(true) expect(isColor('darkkhaki')).toBe(true) expect(isColor('darkmagenta')).toBe(true) expect(isColor('darkolivegreen')).toBe(true) expect(isColor('darkorange')).toBe(true) expect(isColor('darkorchid')).toBe(true) expect(isColor('darkred')).toBe(true) expect(isColor('darksalmon')).toBe(true) expect(isColor('darkseagreen')).toBe(true) expect(isColor('darkslateblue')).toBe(true) expect(isColor('darkslategray')).toBe(true) expect(isColor('darkslategrey')).toBe(true) 
expect(isColor('darkturquoise')).toBe(true) expect(isColor('darkviolet')).toBe(true) expect(isColor('deeppink')).toBe(true) expect(isColor('deepskyblue')).toBe(true) expect(isColor('dimgray')).toBe(true) expect(isColor('dimgrey')).toBe(true) expect(isColor('dodgerblue')).toBe(true) expect(isColor('firebrick')).toBe(true) expect(isColor('floralwhite')).toBe(true) expect(isColor('forestgreen')).toBe(true) expect(isColor('fuchsia')).toBe(true) expect(isColor('gainsboro')).toBe(true) expect(isColor('ghostwhite')).toBe(true) expect(isColor('gold')).toBe(true) expect(isColor('goldenrod')).toBe(true) expect(isColor('gray')).toBe(true) expect(isColor('grey')).toBe(true) expect(isColor('green')).toBe(true) expect(isColor('greenyellow')).toBe(true) expect(isColor('honeydew')).toBe(true) expect(isColor('hotpink')).toBe(true) expect(isColor('indianred')).toBe(true) expect(isColor('indigo')).toBe(true) expect(isColor('ivory')).toBe(true) expect(isColor('khaki')).toBe(true) expect(isColor('lavender')).toBe(true) expect(isColor('lavenderblush')).toBe(true) expect(isColor('lawngreen')).toBe(true) expect(isColor('lemonchiffon')).toBe(true) expect(isColor('lightblue')).toBe(true) expect(isColor('lightcoral')).toBe(true) expect(isColor('lightcyan')).toBe(true) expect(isColor('lightgoldenrodyellow')).toBe(true) expect(isColor('lightgray')).toBe(true) expect(isColor('lightgrey')).toBe(true) expect(isColor('lightgreen')).toBe(true) expect(isColor('lightpink')).toBe(true) expect(isColor('lightsalmon')).toBe(true) expect(isColor('lightseagreen')).toBe(true) expect(isColor('lightskyblue')).toBe(true) expect(isColor('lightslategray')).toBe(true) expect(isColor('lightslategrey')).toBe(true) expect(isColor('lightsteelblue')).toBe(true) expect(isColor('lightyellow')).toBe(true) expect(isColor('lime')).toBe(true) expect(isColor('limegreen')).toBe(true) expect(isColor('linen')).toBe(true) expect(isColor('magenta')).toBe(true) expect(isColor('maroon')).toBe(true) 
expect(isColor('mediumaquamarine')).toBe(true) expect(isColor('mediumblue')).toBe(true) expect(isColor('mediumorchid')).toBe(true) expect(isColor('mediumpurple')).toBe(true) expect(isColor('mediumseagreen')).toBe(true) expect(isColor('mediumslateblue')).toBe(true) expect(isColor('mediumspringgreen')).toBe(true) expect(isColor('mediumturquoise')).toBe(true) expect(isColor('mediumvioletred')).toBe(true) expect(isColor('midnightblue')).toBe(true) expect(isColor('mintcream')).toBe(true) expect(isColor('mistyrose')).toBe(true) expect(isColor('moccasin')).toBe(true) expect(isColor('navajowhite')).toBe(true) expect(isColor('navy')).toBe(true) expect(isColor('oldlace')).toBe(true) expect(isColor('olive')).toBe(true) expect(isColor('olivedrab')).toBe(true) expect(isColor('orange')).toBe(true) expect(isColor('orangered')).toBe(true) expect(isColor('orchid')).toBe(true) expect(isColor('palegoldenrod')).toBe(true) expect(isColor('palegreen')).toBe(true) expect(isColor('paleturquoise')).toBe(true) expect(isColor('palevioletred')).toBe(true) expect(isColor('papayawhip')).toBe(true) expect(isColor('peachpuff')).toBe(true) expect(isColor('peru')).toBe(true) expect(isColor('pink')).toBe(true) expect(isColor('plum')).toBe(true) expect(isColor('powderblue')).toBe(true) expect(isColor('purple')).toBe(true) expect(isColor('rebeccapurple')).toBe(true) expect(isColor('red')).toBe(true) expect(isColor('rosybrown')).toBe(true) expect(isColor('royalblue')).toBe(true) expect(isColor('saddlebrown')).toBe(true) expect(isColor('salmon')).toBe(true) expect(isColor('sandybrown')).toBe(true) expect(isColor('seagreen')).toBe(true) expect(isColor('seashell')).toBe(true) expect(isColor('sienna')).toBe(true) expect(isColor('silver')).toBe(true) expect(isColor('skyblue')).toBe(true) expect(isColor('slateblue')).toBe(true) expect(isColor('slategray')).toBe(true) expect(isColor('slategrey')).toBe(true) expect(isColor('snow')).toBe(true) expect(isColor('springgreen')).toBe(true) 
expect(isColor('steelblue')).toBe(true) expect(isColor('tan')).toBe(true) expect(isColor('teal')).toBe(true) expect(isColor('thistle')).toBe(true) expect(isColor('tomato')).toBe(true) expect(isColor('turquoise')).toBe(true) expect(isColor('violet')).toBe(true) expect(isColor('wheat')).toBe(true) expect(isColor('white')).toBe(true) expect(isColor('whitesmoke')).toBe(true) expect(isColor('yellow')).toBe(true) expect(isColor('yellowgreen')).toBe(true) // uppercase test expect(isColor('AliceBlue')).toBe(true) expect(isColor('AntiqueWhite')).toBe(true) expect(isColor('Aqua')).toBe(true) expect(isColor('Aquamarine')).toBe(true) expect(isColor('Azure')).toBe(true) expect(isColor('Beige')).toBe(true) expect(isColor('Bisque')).toBe(true) expect(isColor('Black')).toBe(true) expect(isColor('BlanchedAlmond')).toBe(true) expect(isColor('Blue')).toBe(true) expect(isColor('BlueViolet')).toBe(true) expect(isColor('Brown')).toBe(true) expect(isColor('BurlyWood')).toBe(true) expect(isColor('CadetBlue')).toBe(true) expect(isColor('Chartreuse')).toBe(true) expect(isColor('Chocolate')).toBe(true) expect(isColor('Coral')).toBe(true) expect(isColor('CornflowerBlue')).toBe(true) expect(isColor('Cornsilk')).toBe(true) expect(isColor('Crimson')).toBe(true) expect(isColor('Cyan')).toBe(true) expect(isColor('DarkBlue')).toBe(true) expect(isColor('DarkCyan')).toBe(true) expect(isColor('DarkGoldenRod')).toBe(true) expect(isColor('DarkGray')).toBe(true) expect(isColor('DarkGrey')).toBe(true) expect(isColor('DarkGreen')).toBe(true) expect(isColor('DarkKhaki')).toBe(true) expect(isColor('DarkMagenta')).toBe(true) expect(isColor('DarkOliveGreen')).toBe(true) expect(isColor('DarkOrange')).toBe(true) expect(isColor('DarkOrchid')).toBe(true) expect(isColor('DarkRed')).toBe(true) expect(isColor('DarkSalmon')).toBe(true) expect(isColor('DarkSeaGreen')).toBe(true) expect(isColor('DarkSlateBlue')).toBe(true) expect(isColor('DarkSlateGray')).toBe(true) expect(isColor('DarkSlateGrey')).toBe(true) 
expect(isColor('DarkTurquoise')).toBe(true) expect(isColor('DarkViolet')).toBe(true) expect(isColor('DeepPink')).toBe(true) expect(isColor('DeepSkyBlue')).toBe(true) expect(isColor('DimGray')).toBe(true) expect(isColor('DimGrey')).toBe(true) expect(isColor('DodgerBlue')).toBe(true) expect(isColor('FireBrick')).toBe(true) expect(isColor('FloralWhite')).toBe(true) expect(isColor('ForestGreen')).toBe(true) expect(isColor('Fuchsia')).toBe(true) expect(isColor('Gainsboro')).toBe(true) expect(isColor('GhostWhite')).toBe(true) expect(isColor('Gold')).toBe(true) expect(isColor('GoldenRod')).toBe(true) expect(isColor('Gray')).toBe(true) expect(isColor('Grey')).toBe(true) expect(isColor('Green')).toBe(true) expect(isColor('GreenYellow')).toBe(true) expect(isColor('HoneyDew')).toBe(true) expect(isColor('HotPink')).toBe(true) expect(isColor('IndianRed')).toBe(true) expect(isColor('Indigo')).toBe(true) expect(isColor('Ivory')).toBe(true) expect(isColor('Khaki')).toBe(true) expect(isColor('Lavender')).toBe(true) expect(isColor('LavenderBlush')).toBe(true) expect(isColor('LawnGreen')).toBe(true) expect(isColor('LemonChiffon')).toBe(true) expect(isColor('LightBlue')).toBe(true) expect(isColor('LightCoral')).toBe(true) expect(isColor('LightCyan')).toBe(true) expect(isColor('LightGoldenRodYellow')).toBe(true) expect(isColor('LightGray')).toBe(true) expect(isColor('LightGrey')).toBe(true) expect(isColor('LightGreen')).toBe(true) expect(isColor('LightPink')).toBe(true) expect(isColor('LightSalmon')).toBe(true) expect(isColor('LightSeaGreen')).toBe(true) expect(isColor('LightSkyBlue')).toBe(true) expect(isColor('LightSlateGray')).toBe(true) expect(isColor('LightSlateGrey')).toBe(true) expect(isColor('LightSteelBlue')).toBe(true) expect(isColor('LightYellow')).toBe(true) expect(isColor('Lime')).toBe(true) expect(isColor('LimeGreen')).toBe(true) expect(isColor('Linen')).toBe(true) expect(isColor('Magenta')).toBe(true) expect(isColor('Maroon')).toBe(true) 
expect(isColor('MediumAquaMarine')).toBe(true) expect(isColor('MediumBlue')).toBe(true) expect(isColor('MediumOrchid')).toBe(true) expect(isColor('MediumPurple')).toBe(true) expect(isColor('MediumSeaGreen')).toBe(true) expect(isColor('MediumSlateBlue')).toBe(true) expect(isColor('MediumSpringGreen')).toBe(true) expect(isColor('MediumTurquoise')).toBe(true) expect(isColor('MediumVioletRed')).toBe(true) expect(isColor('MidnightBlue')).toBe(true) expect(isColor('MintCream')).toBe(true) expect(isColor('MistyRose')).toBe(true) expect(isColor('Moccasin')).toBe(true) expect(isColor('NavajoWhite')).toBe(true) expect(isColor('Navy')).toBe(true) expect(isColor('OldLace')).toBe(true) expect(isColor('Olive')).toBe(true) expect(isColor('OliveDrab')).toBe(true) expect(isColor('Orange')).toBe(true) expect(isColor('OrangeRed')).toBe(true) expect(isColor('Orchid')).toBe(true) expect(isColor('PaleGoldenRod')).toBe(true) expect(isColor('PaleGreen')).toBe(true) expect(isColor('PaleTurquoise')).toBe(true) expect(isColor('PaleVioletRed')).toBe(true) expect(isColor('PapayaWhip')).toBe(true) expect(isColor('PeachPuff')).toBe(true) expect(isColor('Peru')).toBe(true) expect(isColor('Pink')).toBe(true) expect(isColor('Plum')).toBe(true) expect(isColor('PowderBlue')).toBe(true) expect(isColor('Purple')).toBe(true) expect(isColor('RebeccaPurple')).toBe(true) expect(isColor('Red')).toBe(true) expect(isColor('RosyBrown')).toBe(true) expect(isColor('RoyalBlue')).toBe(true) expect(isColor('SaddleBrown')).toBe(true) expect(isColor('Salmon')).toBe(true) expect(isColor('SandyBrown')).toBe(true) expect(isColor('SeaGreen')).toBe(true) expect(isColor('SeaShell')).toBe(true) expect(isColor('Sienna')).toBe(true) expect(isColor('Silver')).toBe(true) expect(isColor('SkyBlue')).toBe(true) expect(isColor('SlateBlue')).toBe(true) expect(isColor('SlateGray')).toBe(true) expect(isColor('SlateGrey')).toBe(true) expect(isColor('Snow')).toBe(true) expect(isColor('SpringGreen')).toBe(true) 
expect(isColor('SteelBlue')).toBe(true) expect(isColor('Tan')).toBe(true) expect(isColor('Teal')).toBe(true) expect(isColor('Thistle')).toBe(true) expect(isColor('Tomato')).toBe(true) expect(isColor('Turquoise')).toBe(true) expect(isColor('Violet')).toBe(true) expect(isColor('Wheat')).toBe(true) expect(isColor('White')).toBe(true) expect(isColor('WhiteSmoke')).toBe(true) expect(isColor('Yellow')).toBe(true) expect(isColor('YellowGreen')).toBe(true) }) it('should return true for valid hex color values', function () { expect(isColor('#f09')).toBe(true) expect(isColor('#F09')).toBe(true) expect(isColor('#ff0099')).toBe(true) expect(isColor('#FF0099')).toBe(true) // with transparency expect(isColor('#3a30')).toBe(true) expect(isColor('#3A3F')).toBe(true) expect(isColor('#33aa3300')).toBe(true) expect(isColor('#33AA3380')).toBe(true) expect(isColor('#f09f')).toBe(true) expect(isColor('#F09F')).toBe(true) expect(isColor('#ff0099ff')).toBe(true) expect(isColor('#FF0099FF')).toBe(true) }) it('should return true for valid rgb() function colors', function () { expect(isColor('rgb(255,0,153)')).toBe(true) expect(isColor('rgb(255, 0, 153)')).toBe(true) // expect(isColor('rgb(255, 0, 153.0)')).toBe(true) <-- TODO: fix me expect(isColor('rgb(100%,0%,60%)')).toBe(true) expect(isColor('rgb(100%, 0%, 60%)')).toBe(true) // whitespace syntax // TODO: fix whitespace syntax // expect(isColor('rgb(51 170 51')).toBe(true) // expect(isColor('rgb(51 170 51')).toBe(true) // with float values // TODO: fix float values // expect(isColor('rgb(255, 0, 153.6)')).toBe(true) // expect(isColor('rgb(2.55e2, 0e0, 1.53e2)')).toBe(true) }) it('should return true for valid rgba() function colors', function () { expect(isColor('rgba(51, 170, 51, .1)')).toBe(true) expect(isColor('rgba(51, 170, 51, .4)')).toBe(true) expect(isColor('rgba(51, 170, 51, .7)')).toBe(true) expect(isColor('rgba(51, 170, 51, 1)')).toBe(true) // whitespace syntax // TODO: fix whitespace syntax // expect(isColor('rgba(51 170 51 / 
0.4')).toBe(true) // expect(isColor('rgba(51 170 51 / 40%')).toBe(true) // with float values // TODO: fix float values // expect(isColor('rgba(255, 0, 153.6, 1)')).toBe(true) // expect(isColor('rgba(2.55e2, 0e0, 1.53e2, 1e2%)')).toBe(true) }) it('should return true for valid hsl() function colors', function () { expect(isColor('hsl(270,60%,70%)')).toBe(true) expect(isColor('hsl(270, 60%, 70%)')).toBe(true) // TODO: fix hsl syntax values // expect(isColor('hsl(270 60% 70%)')).toBe(true) // expect(isColor('hsl(270deg, 60%, 70%)')).toBe(true) // expect(isColor('hsl(4.71239rad, 60%, 70%)')).toBe(true) // expect(isColor('hsl(.75turn, 60%, 70%)')).toBe(true) }) it('should return true for valid hsla() function colors', function () { expect(isColor('hsla(270, 60%, 50%, .15)')).toBe(true) expect(isColor('hsla(240, 100%, 50%, .05)')).toBe(true) expect(isColor('hsla(240, 100%, 50%, .4)')).toBe(true) expect(isColor('hsla(240, 100%, 50%, .7)')).toBe(true) expect(isColor('hsla(240, 100%, 50%, 1)')).toBe(true) // TODO: fix hsla syntax values // expect(isColor('hsla(270, 60%, 50%, 15%)')).toBe(true) // expect(isColor('hsla(270 60% 50% / .15)')).toBe(true) // expect(isColor('hsla(270 60% 50% / 15%)')).toBe(true) }) it('should return false for invalid color values', function () { expect(isColor('invalid-color')).toBe(false) expect(isColor('reds')).toBe(false) expect(isColor('#YYYYY')).toBe(false) expect(isColor('rgb(test, 2, testing)')).toBe(false) expect(isColor('rgba(test, 2, testing, 0.1)')).toBe(false) expect(isColor('hsl(test, 2, testing)')).toBe(false) expect(isColor('hsla(test, 2, testing, 1)')).toBe(false) }) })
the_stack
import * as React from 'react';
import { PolygonLayer, PointCloudLayer } from '@deck.gl/layers';
import { HexagonLayer } from '@deck.gl/aggregation-layers';
import { Marker, Popup, LinearInterpolator } from 'react-map-gl';
import { Container, MovesLayer, DepotsLayer, LineMapLayer, HarmoVisLayers,
  MovedData, connectToHarmowareVis, LoadingIcon, BasedProps, EventInfo, FpsDisplay, Viewport } from 'harmoware-vis';
import Controller from '../components/controller';
import SvgIcon from '../icondata/SvgIcon';

// Registration required when using iconCubeType=1 (ScenegraphLayer) in MovesLayer.
// Currently unused: the `scenegraph` option below is commented out in MovesLayer.
const scenegraph = '../sampledata/car.glb';

// Fallback viewport-transition duration (ms) used by viewportPlayback.
const defaultInterval = 1000;

// Selectable Mapbox base-map styles; index matches the style selector value.
const mapStyle:string[] = [
  'mapbox://styles/mapbox/dark-v8', //0
  'mapbox://styles/mapbox/light-v8', //1
  'mapbox://styles/mapbox/streets-v8', //2
  'mapbox://styles/mapbox/satellite-v8', //3
  'mapbox://styles/mapbox/outdoors-v10', //4
];

// Read at build time; empty/undefined disables Mapbox tiles in HarmoVisLayers.
const MAPBOX_TOKEN: string = process.env.MAPBOX_ACCESS_TOKEN;

// Local UI state for the visualization: layer visibility toggles, the
// hover-popup payload ([x, y, text]), the queued viewport playback list,
// and the icon-follow timer bookkeeping.
interface State {
  mapboxVisible: boolean,
  mapStyleNo: number,
  moveDataVisible: boolean,
  moveOptionVisible: boolean,
  moveOptionArcVisible: boolean,
  moveOptionLineVisible: boolean,
  moveSvgVisible: boolean,
  depotOptionVisible: boolean,
  heatmapVisible: boolean,
  optionChange: boolean,
  iconChange: boolean,
  iconCubeType: number,
  popup: any[],                 // [x, y, multi-line text] for the SVG hover overlay
  popupInfo: MovedData,         // datum shown in the react-map-gl <Popup>, or null
  viewportArray: Viewport[],    // pending viewports consumed by viewportPlayback()
  followingiconId: number,      // movesbaseidx being followed; -1 = none
  follwTimerId: NodeJS.Timeout, // timer driving iconFollwNext(); null when idle
  terrain: boolean
}

/**
 * Main harmoware-vis container component: renders the deck.gl layer stack
 * (moves, depots, line map, polygons, point clouds, heatmap) over a Mapbox
 * base map, plus a controller panel and hover/popup overlays.
 */
class App extends Container<BasedProps, State> {

  constructor(props: BasedProps) {
    super(props);
    this.state = {
      mapboxVisible: true,
      mapStyleNo: 0,
      moveDataVisible: true,
      moveOptionVisible: false,
      moveOptionArcVisible: false,
      moveOptionLineVisible: false,
      moveSvgVisible: false,
      depotOptionVisible: false,
      heatmapVisible: false,
      optionChange: false,
      iconChange: true,
      iconCubeType: 0,
      popup: [0, 0, ''],
      popupInfo: null,
      viewportArray: [],
      followingiconId: -1,
      follwTimerId: null,
      terrain: false
    };
    // Bound once here because both methods are re-invoked via setTimeout.
    this.viewportPlayback = this.viewportPlayback.bind(this);
    this.iconFollwNext = this.iconFollwNext.bind(this);
  }

  // Plays back state.viewportArray one entry at a time, scheduling itself
  // with each entry's transitionDuration ('auto' falls back to defaultInterval).
  viewportPlayback(){
    const {viewportArray} = this.state;
    if(viewportArray && viewportArray.length > 0){
      const viewport = viewportArray.shift();
      // Supply a default interpolator when a duration was given without one.
      if(viewport.transitionDuration && !viewport.transitionInterpolator){
        viewport.transitionInterpolator = new LinearInterpolator();
      }
      this.props.actions.setViewport(viewport);
      // Copy so React sees a new array reference after the shift().
      this.setState({viewportArray:[...viewportArray]});
      const {transitionDuration = defaultInterval} = viewport;
      const timeoutValue = (transitionDuration === 'auto' ? defaultInterval : transitionDuration);
      setTimeout(this.viewportPlayback,timeoutValue);
    }
  }

  // Keeps the camera following the icon at `movesbaseidx`, re-arming a timer
  // for each upcoming operation waypoint; resets followingiconId to -1 when
  // following cannot continue (paused, out of time range, or no next point).
  iconFollwNext(movesbaseidx:number){
    const {animateReverse,animatePause,loopEndPause,movesbase,movedData,settime,secperhour,actions} = this.props;
    const data = movedData.find(x=>x.movesbaseidx === movesbaseidx);
    if(data && data.position){
      // Snap the camera to the icon's current interpolated position.
      actions.setViewport({ longitude:data.position[0], latitude:data.position[1],bearing:data.direction });
    }
    const base = movesbase[movesbaseidx];
    if(base && base.operation && base.departuretime <= settime && settime < base.arrivaltime){
      if (!animatePause && !loopEndPause) {
        let next = undefined;
        let direction = 0;
        const nextIdx = base.operation.findIndex(x=>x.elapsedtime > settime);
        if (!animateReverse) {
          next = base.operation[nextIdx];
          // At the final waypoint use the previous segment's heading.
          if(nextIdx === base.operation.length - 1){
            direction = base.operation[nextIdx-1].direction;
          }else{
            direction = base.operation[nextIdx].direction;
          }
        }else{
          // Reverse playback: aim at the preceding waypoint instead.
          next = base.operation[nextIdx-1];
          direction = base.operation[nextIdx-1].direction;
        }
        if(next && next.position){
          // Transition time scaled by simulation speed (secperhour);
          // the 3.6 factor presumably converts sim-seconds — TODO confirm.
          const timeoutValue = (Math.abs(next.elapsedtime - settime)/3.6) * secperhour;
          actions.setViewport({
            longitude:next.position[0], latitude:next.position[1], bearing:direction,
            transitionDuration:timeoutValue, transitionInterpolator:new LinearInterpolator()
          });
          // Only re-arm while this icon is still the one being followed.
          if(this.state.followingiconId === movesbaseidx){
            const follwTimerId = setTimeout(this.iconFollwNext,timeoutValue,movesbaseidx);
            this.setState({ follwTimerId });
            return;
          }
        }
      }
    }
    // Could not continue following: clear the selection.
    this.setState({ followingiconId: -1 });
  }

  // --- Controller checkbox/select handlers: each mirrors one State flag. ---

  getMapboxChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ mapboxVisible: e.target.checked });
  }

  getMapStyleSelected(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ mapStyleNo: +e.target.value });
  }

  getTerrainChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ terrain: e.target.checked });
  }

  getMoveDataChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ moveDataVisible: e.target.checked });
  }

  getMoveOptionChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ moveOptionVisible: e.target.checked });
  }

  getMoveOptionArcChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ moveOptionArcVisible: e.target.checked });
  }

  getMoveOptionLineChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ moveOptionLineVisible: e.target.checked });
  }

  getMoveSvgChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ moveSvgVisible: e.target.checked });
  }

  getDepotOptionChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ depotOptionVisible: e.target.checked });
  }

  getOptionChangeChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ optionChange: e.target.checked });
  }

  getIconChangeChecked(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ iconChange: e.target.checked });
  }

  getIconCubeTypeSelected(e: React.ChangeEvent<HTMLSelectElement>) {
    this.setState({ iconCubeType: +e.target.value });
  }

  // Starts (or stops, for value < 0) camera-following of the selected icon.
  // Cancels any previous follow timer before switching targets.
  getFollowingiconIdSelected(e: React.ChangeEvent<HTMLSelectElement>) {
    clearTimeout(this.state.follwTimerId);
    this.setState({ follwTimerId:null });
    const movesbaseidx:number = +e.target.value;
    this.setState({ followingiconId: movesbaseidx });
    if(movesbaseidx < 0) return;
    const data = this.props.movedData.find(x=>x.movesbaseidx === movesbaseidx);
    if(data && data.position){
      // Kick off the follow loop asynchronously.
      setTimeout(this.iconFollwNext,0,movesbaseidx);
      return;
    }
    // Selected icon has no current position; nothing to follow.
    this.setState({ followingiconId: -1 });
  }

  getHeatmapVisible(e: React.ChangeEvent<HTMLInputElement>) {
    this.setState({ heatmapVisible: e.target.checked });
  }

  // Accepts either a single viewport (applied immediately) or an array
  // (queued and played back via viewportPlayback).
  getViewport(viewport: Viewport|Viewport[]){
    if(Array.isArray(viewport)){
      if(viewport.length > 0){
        this.setState({viewportArray:viewport});
        this.viewportPlayback();
      }
    }else{
      this.props.actions.setViewport(viewport);
    }
  }

  // Renders one SVG marker per moving datum; hovering toggles popupInfo.
  getMarker(data: MovedData, index: number) {
    const { viewport } = this.props;
    const { direction, position } = data;
    if(position){
      return (<Marker key={`marker-${index}`} longitude={data.position[0]} latitude={data.position[1]} >
        <SvgIcon viewport={viewport} direction={direction}
          onMouseOver={() => this.setState({popupInfo: data})}
          onMouseOut={() => this.setState({popupInfo: null})} />
      </Marker>);
    }
    return null;
  }

  // Popup listing every key/value of the hovered datum, anchored at its position.
  getPopup() {
    const {popupInfo} = this.state;
    if(popupInfo && popupInfo.position){
      const objctlist = Object.entries(popupInfo);
      return (
        <Popup tipSize={5} anchor="bottom"
          longitude={popupInfo.position[0]} latitude={popupInfo.position[1]} closeButton={false} >
          <div>{objctlist.map((item)=><p>{`${item[0]}: ${item[1].toString()}`}</p>)}</div>
        </Popup>
      );
    }
    return null;
  }

  // SVG-marker mode: markers plus popup rendered as map-gl children.
  getMapGlComponents(movedData: MovedData[]){
    const { moveSvgVisible } = this.state;
    if(moveSvgVisible){
      return (
        <div>
          {movedData.map( this.getMarker.bind(this) )}
          {this.getPopup()}
        </div>
      );
    }
    return null;
  }

  // One PointCloudLayer per datum carrying a pointCloud array.
  getPointCloudLayer(PointCloudData: any[]){
    return PointCloudData.map((pointCloudElements:{pointCloud:any[]}, idx:number)=>{
      const {pointCloud} = pointCloudElements;
      const data = pointCloud.filter(x=>x.position);
      return new PointCloudLayer({
        id: 'PointCloudLayer-' + String(idx),
        data,
        getColor: (x: any) => x.color || [255,255,255,255],
        sizeUnits: 'meters',
        pointSize: 1,
      });
    });
  }

  // deck.gl hover callback: builds the multi-line text for the SVG overlay
  // from the hovered object's key/value pairs; clears it on hover-out.
  onHover(el: EventInfo){
    if (el && el.object) {
      let disptext = '';
      const objctlist = Object.entries(el.object);
      for (let i = 0, lengthi = objctlist.length; i < lengthi; i=(i+1)|0) {
        const strvalue = objctlist[i][1].toString();
        disptext = disptext + (i > 0 ? '\n' : '');
        disptext = disptext + (`${objctlist[i][0]}: ${strvalue}`);
      }
      this.setState({ popup: [el.x, el.y, disptext] });
    } else {
      this.setState({ popup: [0, 0, ''] });
    }
  }

  render() {
    const state = this.state;
    const props = this.props;
    const { actions, routePaths, viewport, loading, clickedObject, movedData,
      movesbase, depotsData, linemapData } = props;
    // Partition movedData by what each datum carries.
    const polygonData = movedData.filter((x:any)=>(x.coordinates || x.polygon));
    const hexagonData = state.heatmapVisible ? movedData.filter(x=>x.position):[];
    const PointCloudData = movedData.filter((x:any)=>x.pointCloud);
    // Icon size shrinks as zoom increases (min factor at zoom >= 15).
    const sizeScale = (Math.max(17 - viewport.zoom,2)**2)*2;
    const onHover = this.onHover.bind(this);
    return (
      <div>
        <Controller {...props}
          mapStyleNo={state.mapStyleNo}
          iconCubeType={state.iconCubeType}
          followingiconId={state.followingiconId}
          getMapboxChecked={this.getMapboxChecked.bind(this)}
          getMapStyleSelected={this.getMapStyleSelected.bind(this)}
          getTerrainChecked={this.getTerrainChecked.bind(this)}
          getMoveDataChecked={this.getMoveDataChecked.bind(this)}
          getMoveOptionChecked={this.getMoveOptionChecked.bind(this)}
          getMoveOptionArcChecked={this.getMoveOptionArcChecked.bind(this)}
          getMoveOptionLineChecked={this.getMoveOptionLineChecked.bind(this)}
          getMoveSvgChecked={this.getMoveSvgChecked.bind(this)}
          getDepotOptionChecked={this.getDepotOptionChecked.bind(this)}
          getHeatmapVisible={this.getHeatmapVisible.bind(this)}
          getOptionChangeChecked={this.getOptionChangeChecked.bind(this)}
          getIconChangeChecked={this.getIconChangeChecked.bind(this)}
          getIconCubeTypeSelected={this.getIconCubeTypeSelected.bind(this)}
          getFollowingiconIdSelected={this.getFollowingiconIdSelected.bind(this)}
          getViewport={this.getViewport.bind(this)} />
        <div className="harmovis_footer">
          longitude:{viewport.longitude}&nbsp;
          latitude:{viewport.latitude}&nbsp;
          altitude:{viewport.altitude}&nbsp;
          zoom:{viewport.zoom}&nbsp;
          bearing:{viewport.bearing}&nbsp;
          pitch:{viewport.pitch}
        </div>
        <div className="harmovis_area">
          <HarmoVisLayers viewport={viewport} actions={actions}
            mapboxApiAccessToken={state.mapboxVisible ? MAPBOX_TOKEN : ''}
            mapStyle={state.mapboxVisible ? mapStyle[state.mapStyleNo] : ''}
            mapboxAddSourceValue={undefined}
            visible={state.mapboxVisible}
            terrain={state.terrain}
            layers={[].concat(
              depotsData.length > 0 ?
                new DepotsLayer({
                  depotsData,
                  /* iconDesignations Setting Example
                  iconDesignations: [{type:'stop', layer:'Scatterplot'},{type:'station', layer:'SimpleMesh'}],
                  /**/
                  optionVisible: state.depotOptionVisible,
                  optionChange: state.optionChange,
                  iconChange: state.iconChange,
                  onHover
                }):null,
              state.moveDataVisible && movedData.length > 0 ?
                new MovesLayer({
                  // scenegraph,
                  routePaths, movedData, movesbase,
                  /* iconDesignations Setting Example
                  iconDesignations:[{type:'car', layer:'SimpleMesh', getColor:()=>[255,0,0,255]},
                  {type:'bus', layer:'Scenegraph', getScale:()=>[0.2,0.2,0.2], getOrientation:()=>[0,0,90]},
                  {type:'walker', layer:'Scatterplot'},],
                  /**/
                  clickedObject, actions,
                  visible: state.moveDataVisible,
                  optionVisible: state.moveOptionVisible,
                  optionArcVisible: state.moveOptionArcVisible,
                  optionLineVisible: state.moveOptionLineVisible,
                  optionChange: state.optionChange,
                  iconChange: state.iconChange, // Invalid if there is iconDesignations definition
                  iconCubeType: state.iconCubeType, // Invalid if there is iconDesignations definition
                  sizeScale: state.iconCubeType === 0 ? sizeScale : (sizeScale/10),
                  onHover
                }):null,
              linemapData.length > 0 ?
                new LineMapLayer({
                  id: 'line-map',
                  data: linemapData,
                  onHover
                }):null,
              polygonData.length > 0 ?
                new PolygonLayer({
                  id: 'PolygonLayer',
                  data: polygonData,
                  visible: true,
                  opacity: 0.5,
                  pickable: true,
                  extruded: true,
                  wireframe: true,
                  getPolygon: (x: any) => x.coordinates || x.polygon,
                  getFillColor: (x: any) => x.color || [255,255,255,255],
                  getLineColor: null,
                  getElevation: (x: any) => x.elevation || 3,
                  onHover: onHover
                }):null,
              PointCloudData.length > 0 ? this.getPointCloudLayer(PointCloudData):null,
              state.heatmapVisible && hexagonData.length > 0 ?
                new HexagonLayer({
                  id: '3d-heatmap',
                  data: hexagonData,
                  getPosition: (x: any) => x.position,
                  radius: 100,
                  opacity: 0.5,
                  extruded: true,
                  visible: state.heatmapVisible
                }):null
            )}
            mapGlComponents={ this.getMapGlComponents(movedData) }
          />
        </div>
        {/* Hover tooltip drawn over the map as raw SVG text lines. */}
        <svg width={viewport.width} height={viewport.height} className="harmovis_overlay">
          <g fill="white" fontSize="12">
            {state.popup[2].length > 0 ?
              state.popup[2].split('\n').map((value:any, index:number) =>
                <text x={state.popup[0] + 10} y={state.popup[1] + (index * 12)}
                  key={index.toString()} >{value}</text>) : null
            }
          </g>
        </svg>
        <LoadingIcon loading={loading} />
        <FpsDisplay />
      </div>
    );
  }
}
export default connectToHarmowareVis(App);
the_stack
import { Translation } from '../i18'; // french // // Translated by: Neiobaf (https://github.com/neiobaf) // Mathieu Lesniak (https://github.com/mathieulesniak) export const translation: Translation = { __plugins: { reload: { failed: 'Impossible de charger les plugins: {0}', loaded: { more: '{0:trim} plugins chargés.', none: 'Aucun plugin chargé.', one: '1 plugin chargé.', } } }, canceled: '[Annulé]', commands: { executionFailed: "L'exécution de la commande {0:trim,surround} a échoué: {1}", }, compare: { noPlugins: 'Aucun plugin trouvé!', noPluginsForType: 'Aucun plugin correspondant pour {0:trim,surround}!', failed : 'Impossible de récupérer le fichier {0:trim,surround}: {1}', selectSource: 'Choisissez la source de récupération...', }, deploy: { after: { button: { text: "{2}Déploiement: [{1}] {0}{3}", tooltip: "Cliquez ici pour voir la sortie...", }, failed: "Les opérations APRES le déploiement n'ont pas pu être exécutées: {0}", }, before: { failed: "Les opérations AVANT le déploiement n'ont pas pu être exécutées: {0}", }, button: { cancelling: 'Annulation...', prepareText: 'Préparation au déploiement...', text: 'Déploiement...', tooltip: 'Cliquez ici pour annuler le déploiement...', }, canceled: 'Annulé.', canceledWithErrors: 'Annulé avec erreurs!', cancelling: 'Annulation du déploiement...', file: { deploying: 'Déploiement du fichier {0:trim,surround}{1:trim,leading_space}... ', deployingWithDestination: 'Déploiement du fichier {0:trim,surround} vers {1:trim,surround}{2:trim,leading_space}... 
', failed: 'Impossible de déployer le fichier {0:trim,surround}: {1}', isIgnored:'Le fichier {0:trim,surround} a été ignoré!', succeeded: 'Fichier {0:trim,surround} déployé avec succès.', succeededWithTarget: 'Fichier {0:trim,surround} déployé avec succès vers {1:trim,surround}.', }, fileOrFolder: { failed: 'Impossible de déployer le fichie / dossier {0:trim,surround}: {1}', }, finished: 'Terminé.', finished2: 'Terminé', finishedWithErrors: 'Terminé avec erreurs!', folder: { failed: 'Impossible de déployer le dossier {0:trim,surround}: {1}', selectTarget: 'Sélectionnez la cible de déploiement du dossier...', }, newerFiles: { deploy: 'Déployer', localFile: 'Fichiers locaux', message: "{0} nouveaux fichiers(s) trouvés!", modifyTime: 'Dernière modification', pull: 'Récupérer', remoteFile: 'Fichier distant', show: 'Afficher fichiers', size: 'Taille', title: 'Nouveaux fichiers dans {0:trim,surround}', titleNoTarget: 'Nouveaux fichiers', }, noFiles: 'Aucun fichier à déployer!', noPlugins: 'Aucun plugin trouvé!', noPluginsForType: 'Aucun plugin correspondant pour {0:trim,surround}!', onSave: { couldNotFindTarget: 'La cible de déploiement {0:trim,surround} définie pour le paquetage {1:trim,surround,leading_space} n\'existe pas!', failed: 'Impossible de déployer {0:trim,surround} à la sauvegarde ({1:trim}): {2}', failedTarget: 'Impossible de déployer {0:trim,surround} vers {1:trim} à la sauvegarde: {2}', }, operations: { failed: "[ERREUR: {0:trim,surround}]", finished: "[Terminé]", noFileCompiled: "Aucun fichier parmi {0:trim} n'a pu être compilé!", noFunctionInScript: "La fonction {0:trim,surround} n'a pas été trouvée dans {1:trim,surround}!", open: 'Ouverture {0:trim,surround}... 
', someFilesNotCompiled: "{0:trim} parmi {1:trim} le(s) fichier(s) n'a pu être compilé!", unknownCompiler: 'Compilateur {0:trim,surround} inconnu!', unknownSqlEngine: 'SGBD (Moteur SQL) inconnu {0:trim,surround}!', unknownType: 'TYPE INCONNU: {0:trim,surround}', }, startQuestion: 'Lancer le déploiement?', workspace: { allFailed: 'Aucun fichier ne peut être déployé: {0}', allFailedWithTarget: 'Aucun fichier ne peut être déployé vers {0:trim,surround}: {1}', allSucceeded: 'Tous les {0:trim} fichiers déployés avec succès.', allSucceededWithTarget: 'Tous les {0:trim} fichiers déployés avec succès vers {1:trim,surround}.', alreadyStarted: 'Un déploiement est déjà en cours vers {0:trim,surround}! Voulez-vous vraiment lancer cette opération?', clickToCancel: 'cliquez ici pour annuler', deploying: 'Déploiement du paquetage {0:trim,surround,leading_space}...', deployingWithTarget: 'Déploiement du paquetage {0:trim,surround,leading_space} vers {1:trim,surround}...', failed: 'Impossible de déployer les fichiers: {0}', failedWithCategory: 'Impossible de déployer les fichiers ({0:trim}): {1}', failedWithTarget: 'Impossible de déployer les fichiers vers {0:trim,surround}: {1}', nothingDeployed: 'Aucun fichier déployé!', nothingDeployedWithTarget: 'Aucun fichier déployé vers {0:trim,surround}!', selectPackage: 'Sélectionnez un paquetage...', selectTarget : 'Sélectionnez une destination...', someFailed: '{0:trim} de {1:trim} fichiers n\'a pas pu être déployé!', someFailedWithTarget: '{0:trim} de {1:trim} fichiers n\'a pas pu être déployé vers {2:trim,surround}!', status: 'Déploiement {0:trim,surround}... ', statusWithDestination: 'Déploiement {0:trim,surround} vers {1:trim,surround}... 
', virtualTargetName: 'Cible du lot virtuel pour le paquetage courant', virtualTargetNameWithPackage: 'Cible du lot virtuel pour le paquetage {0:trim,surround}', } }, errors: { countable: 'ERREUR #{0:trim}: {1}', withCategory: '[ERREUR] {0:trim}: {1}', }, extension: { update: "Mettre à jour...", updateRequired: "L'extension nécessite d'être mise à jour!", }, extensions: { notInstalled: 'L\'extension {0:trim,surround} n\'est PAS installée.', }, failed: '[ÉCHEC: {0}]', format: { dateTime: 'YYYY-MM-DD HH:mm:ss', }, host: { button: { text: 'En attente de fichiers...', tooltip: 'Cliquez ici pour fermer l\'hôte de déploiement' , }, errors: { cannotListen: 'Impossible de démarrer l\'attente de fichier...: {0}', couldNotStop: 'Impossible d\'arrêter l\'hôte de déploiement: {0}', fileRejected: 'Le fichier a été rejeté!', noData: 'Pas de données!', noFilename: 'Pas de nom de fichier {0:trim}!', }, receiveFile: { failed: '[ÉCHEC:{0:trim,leading_space}]', ok: '[OK{0:trim}]', receiving: "Réception du fichier {2:trim,leading_space} depuis '{0:trim}:{1:trim}'... 
", }, started: 'Hôte de déploiement démarré sur le port {0:trim} dans le dossier {1:trim,surround}.', stopped: 'L\'hôte de déploiement arrêté.', }, install: 'Installer', isNo: { directory: "{0:trim,surround} n'est pas un répertoire!", file: "{0:trim,surround} n'est pas un fichier!", validItem: '{0:trim,surround} n\'est pas un élément valide pouvant être déployé!', }, load: { from: { failed: "Échec du chargement des données depuis {0:trim,surround} : {1}", } }, network: { hostname: 'Votre nom d\'hôte: {0:trim,surround}', interfaces: { failed: 'Impossible d\'accéder aux interfaces réseau: {0}', list: 'Vos interfaces réseau:', } }, ok: '[OK]', packages: { couldNotFindTarget: 'Impossible de trouver la cible {0:trim,surround} dans le paquetage {1:trim,surround}!', defaultName: '(Paquetage #{0:trim})', noneDefined: "Merci de définir au moins un paquetage dans votre 'settings.json'!", notFound: 'Paquetage {0:trim,surround} introuvable!', nothingToDeploy: 'Aucun paquetage à déployer!', }, plugins: { api: { clientErrors: { noPermissions: "Permissions en écriture non définies!", notFound: 'Fichier non trouvé!', unauthorized: "Utilisateur non autorisé!", unknown: "Erreur client inconnue: {0:trim} {2:trim,surround}", }, description: "Déploiement via une API REST, comme 'vs-rest-api'", serverErrors: { unknown: "Erreur, serveur inconnu: {0:trim} {2:trim,surround}", }, }, app: { description: 'Déploie vers une application, comme un script ou un exécutable, sur la machine locale', }, azureblob: { description: 'Déploie vers un blob de stockage Microsoft Azure', }, batch: { description: 'Déploie vers d\'autres cibles', }, dropbox: { description: 'Déploie dans dossier Dropbox.', notFound: 'Fichier non trouvé!', unknownResponse: 'Réponse inattendue {0:trim} ({1:trim}): {2:trim,surround}', }, each: { description: 'Déploie les fichiers en utilisant une liste de valeurs', }, ftp: { description: 'Déploie vers un serveur FTP', }, http: { description: 'Déploie vers un serveur/service HTTP', 
protocolNotSupported: 'Le protocole {0:trim,surround} n\'est pas supporté!', }, local: { description: 'Déploie dans un dossier local ou un dossier partagé (comme SMB) sur le réseau', emptyTargetDirectory: 'Dossier local vide {0:trim,surround}... ', }, list: { description: 'Permet à l\'utilisateur de sélectionner une entrée avec des paramètres pour une ou plusieurs cibles', selectEntry: 'Sélectionnez une entrée...', }, mail: { addressSelector: { placeholder: 'Adresses email de destination', prompt: 'Une ou plusieurs adresse(s) email (séparées par une virgule) pour déployer vers...', }, description: 'Déploie les fichiers dans ZIP et l\'ajoute en tant que pièce jointe dans un email envoyé par SMTP', }, map: { description: 'Déploie les fichiers en utilisant une liste de valeurs', }, pipeline: { description: 'Place une liste de fichiers sources dans un \'pipe\' vers une nouvelle destination, en utilisant un script et envoie la liste des nouveaux fichiers vers la cible', noPipeFunction: "{0:trim,surround} n'implémente pas de fonction 'pipe()'!", }, prompt: { description: "Demande à l'utilisateur une liste de paramètres à appliquer à une ou plusieurs cibles", invalidInput: "Entrée invalide!", }, remote: { description: 'Déploie vers une machine distante via une connexion TCP', }, s3bucket: { credentialTypeNotSupported: 'Type d\'identification {0:trim,surround} non supporté!', description: 'Déploie dans un bucket Amazon S3', }, script: { deployFileFailed: 'Impossible de déployer le fichier {0:trim,surround} par script {1:trim,surround}!', deployWorkspaceFailed: 'Impossible de déployer l\'espace de travail par script {0:trim,surround}!', description: 'Déploie via un script JS', noDeployFileFunction: "{0:trim,surround} n'implémente pas de fonction 'deployFile()'!", }, sftp: { description: 'Déploie vers un serveur SFTP', }, sql: { description: 'Exécute des scripts SQL', invalidFile: 'Fichier invalide!', unknownEngine: 'Type de SGBD inconnu {0:trim,surround}!', }, test: { 
description: 'Un déployeur de test qui ne fait qu\'afficher les fichiers qui seront deployés', }, zip: { description: 'Déploie dans un fichier ZIP', fileAlreadyExists: 'Le fichier {0:trim,surround} existe déjà! Veuillez rééssayer...', fileNotFound: 'Fichier introuvable!', noFileFound: "Aucun fichier ZIP trouvé!", } }, popups: { newVersion: { message: "Vous utilisez la version de 'vs-deploy' ({0:trim})!", showChangeLog: 'Afficher le journal de modifications...', }, }, prompts: { inputAccessKey: 'Entrez la clé d\'accès (Access Key)...', inputAccessToken: 'Entrez le jeton d\'accès (Access Token)...', inputPassword: 'Entrez le mot de passe...', }, pull: { button: { cancelling: 'Annulation...', prepareText: 'Préparation de la récupération...', text: 'Récupération...', tooltip: 'Cliquez ici pour annuler la récupération...', }, canceled: 'Annulé.', canceledWithErrors: 'Annulé avec erreurs!', file: { failed: 'Impossible de récupérer le fichier {0:trim,surround}: {1}', pulling: 'Récupération du fichier {0:trim,surround}{1:trim,leading_space}... ', pullingWithDestination: 'Récupération du fichier {0:trim,surround} depuis {1:trim,surround}{2:trim,leading_space}... ', succeeded: 'Fichier {0:trim,surround} récupéré avec succès.', succeededWithTarget: 'Fichier {0:trim,surround} récupéré avec succès depuis {1:trim,surround}.', }, fileOrFolder: { failed: "Impossible de récupérer le fichier / dossier {0:trim,surround}: {1}", }, finished2: 'Terminé', finishedWithErrors: 'Terminé avec erreurs!', noPlugins: 'Aucun plugin trouvé!', noPluginsForType: 'Aucun plugin correspondant pour {0:trim,surround}!', workspace: { allFailed: 'Aucun fichier récupéré: {0}', allFailedWithTarget: 'Aucun fichier récupéré depuis {0:trim,surround}: {1}', allSucceeded: 'Tous les {0:trim} fichier(s) récupéré(s) avec succès.', allSucceededWithTarget: 'Tous les {0:trim} fichier(s) récupéré(s) avec succès depuis {1:trim,surround}.', alreadyStarted: 'Vous avez déjà une opération en cours pour {0:trim,surround}! 
Voulez-vous vraiment lancer cette opération?', clickToCancel: 'cliquez ici pour annuler', failed: 'Impossible de récupérer les fichiers: {0}', failedWithCategory: 'Impossible de récupérer les fichiers ({0:trim}): {1}', failedWithTarget: 'Impossible de récupérer les fichiers depuis {0:trim,surround}: {1}', nothingPulled: 'Aucun fichier récupéré!', nothingPulledWithTarget: 'Acuun fichier récupéré depuis {0:trim,surround}!', pulling: 'Récupération du paquetage {0:trim,surround,leading_space}...', pullingWithTarget: 'Récupération du paquetage {0:trim,surround,leading_space} depuis {1:trim,surround}...', selectPackage: 'Sélectionnez un paquetage...', selectSource: 'Sélectionnez une source...', someFailed: '{0:trim} des {1:trim} fichiers(s) ne peuvent pas être récupérés!', someFailedWithTarget: '{0:trim} de {1:trim} fichiers(s) ne peuvent pas être récupérés depuis {2:trim,surround}!', status: 'Récupération de {0:trim,surround}... ', statusWithDestination: 'Récupération de {0:trim,surround} depuis {1:trim,surround}... ', virtualTargetName: 'Cible du lot virtuel pour le paquetage courant', virtualTargetNameWithPackage: 'Cible du lot virtuel pour le paquetage {0:trim,surround}', } }, quickDeploy: { caption: 'Déploiement rapide!', failed: 'Échec du déploiement rapide: {0}', start: 'Démarre un déploiement rapide...', }, relativePaths: { couldNotResolve: "Impossible d'obtenir le chemin relatif pour le fichier {0:trim,surround}!", isEmpty: 'Le chemin relatif du fichier {0:trim,surround} est vide!', }, sync: { file: { doesNotExistOnRemote: '[Objet distant inexistant]', localChangedWithinSession: '[Modification locale durant la session]', localIsNewer: '[Fichier local plus récent]', synchronize: 'Synchronisation du fichier {0:trim,surround}{1:trim,leading_space}... 
', } }, targets: { cannotUseRecurrence: 'Impossible d\'utiliser la cible {0:trim,surround} (recurrence)!', defaultName: '(Cible #{0:trim})', noneDefined: "Merci de définir au moins une CIBLE dans votre 'settings.json'!", notFound: 'Impossible de trouver la cible {0:trim,surround}!', select: 'Sélectionnez la cible de déploiement...', selectSource: 'Select the source from where to pull from...', }, templates: { browserTitle: "Template{0:trim,surround,leading_space}", currentPath: 'Chemin actuel:{0:trim,leading_space}', noneDefined: "Merci de définir au moins une SOURCE de TEMPLATE dans votre 'settings.json'!", officialRepositories: { newAvailable: "Les SOURCES de TEMPLATE ont été mises à jour.", openTemplates: "Ouvrir les templates...", }, placeholder: 'Merci de sélectionner un élément...', publishOrRequest: { label: 'Publier ou demander un exemple...', } }, warnings: { withCategory: '[ATTENTION] {0:trim}: {1}', }, yes: 'Oui', };
the_stack
import stableStringify from 'fast-json-stable-stringify';

import { cached, CacheOption } from '../libs/cached_fn';
import { PrpcClientExt } from '../libs/prpc_client_ext';
import { sha256 } from '../libs/utils';
import { BuilderID } from './buildbucket';

/* eslint-disable max-len */
/**
 * Manually coded type definition and classes for resultdb service.
 * TODO(weiweilin): To be replaced by code generated version once we have one.
 * source: https://chromium.googlesource.com/infra/luci/luci-go/+/4525018bc0953bfa8597bd056f814dcf5e765142/resultdb/proto/rpc/v1/resultdb.proto
 */
/* eslint-enable max-len */

/** Status of a single test result (mirrors TestStatus in the proto above). */
export enum TestStatus {
  Unspecified = 'STATUS_UNSPECIFIED',
  Pass = 'PASS',
  Fail = 'FAIL',
  Crash = 'CRASH',
  Abort = 'ABORT',
  Skip = 'SKIP',
}

/** Lifecycle state of an invocation. */
export enum InvocationState {
  Unspecified = 'STATE_UNSPECIFIED',
  Active = 'ACTIVE',
  Finalizing = 'FINALIZING',
  Finalized = 'FINALIZED',
}

/**
 * An invocation resource.
 * Timestamps (createTime/finalizeTime/deadline) are kept as strings exactly
 * as returned by the service.
 */
export interface Invocation {
  readonly interrupted: boolean;
  readonly name: string;
  readonly state: InvocationState;
  readonly createTime: string;
  readonly finalizeTime: string;
  readonly deadline: string;
  readonly includedInvocations?: string[];
  readonly tags?: Tag[];
}

/** A single test result within an invocation. */
export interface TestResult {
  readonly name: string;
  readonly testId: string;
  readonly resultId: string;
  readonly variant?: Variant;
  readonly variantHash?: string;
  readonly expected?: boolean;
  readonly status: TestStatus;
  readonly summaryHtml: string;
  readonly startTime: string;
  readonly duration?: string;
  readonly tags?: Tag[];
  readonly failureReason?: FailureReason;
}

/** Source location of a test. */
export interface TestLocation {
  readonly repo: string;
  readonly fileName: string;
  readonly line?: number;
}

/** An exoneration record for a test (see the proto linked above for semantics). */
export interface TestExoneration {
  readonly name: string;
  readonly testId: string;
  readonly variant?: Variant;
  readonly variantHash?: string;
  readonly exonerationId: string;
  readonly explanationHtml?: string;
}

/** A file-like blob attached to an invocation or a test result. */
export interface Artifact {
  readonly name: string;
  readonly artifactId: string;
  readonly fetchUrl: string;
  // Expiration time of fetchUrl, as a string timestamp.
  readonly fetchUrlExpiration: string;
  readonly contentType: string;
  readonly sizeBytes: number;
}

/** Key-value definition pairs identifying a test variant. */
export interface Variant {
  readonly def: { [key: string]: string };
}

/** A string key-value pair. */
export interface Tag {
  readonly key: string;
  readonly value: string;
}

/** Reason a test result failed. */
export interface FailureReason {
  readonly primaryErrorMessage: string;
}

/** Request message for ResultDb.getInvocation. */
export interface GetInvocationRequest {
  readonly name: string;
}

/** Request message for ResultDb.queryTestResults. */
export interface QueryTestResultsRequest {
  readonly invocations: string[];
  readonly readMask?: string;
  readonly predicate?: TestResultPredicate;
  readonly pageSize?: number;
  readonly pageToken?: string;
}

/** Request message for ResultDb.queryTestExonerations. */
export interface QueryTestExonerationsRequest {
  readonly invocations: string[];
  readonly predicate?: TestExonerationPredicate;
  readonly pageSize?: number;
  readonly pageToken?: string;
}

/** Request message for ResultDb.listArtifacts. */
export interface ListArtifactsRequest {
  readonly parent: string;
  readonly pageSize?: number;
  readonly pageToken?: string;
}

/** Flags selecting which invocation-graph edges a query should follow. */
export interface EdgeTypeSet {
  readonly includedInvocations: boolean;
  readonly testResults: boolean;
}

/** Request message for ResultDb.queryArtifacts. */
export interface QueryArtifactsRequest {
  readonly invocations: string[];
  readonly followEdges?: EdgeTypeSet;
  readonly testResultPredicate?: TestResultPredicate;
  readonly maxStaleness?: string;
  readonly pageSize?: number;
  readonly pageToken?: string;
}

/** Request message for ResultDb.getArtifact. */
export interface GetArtifactRequest {
  readonly name: string;
}

/** Filter for test results. */
export interface TestResultPredicate {
  readonly testIdRegexp?: string;
  readonly variant?: VariantPredicate;
  readonly expectancy?: Expectancy;
}

/** Filter for test exonerations. */
export interface TestExonerationPredicate {
  readonly testIdRegexp?: string;
  readonly variant?: VariantPredicate;
}

/** Matches variants either exactly or by containment. */
export type VariantPredicate = { readonly equals: Variant } | { readonly contains: Variant };

/** Which test results a query should include. */
export const enum Expectancy {
  All = 'ALL',
  VariantsWithUnexpectedResults = 'VARIANTS_WITH_UNEXPECTED_RESULTS',
}

/** Response message for ResultDb.queryTestResults. */
export interface QueryTestResultsResponse {
  readonly testResults?: TestResult[];
  readonly nextPageToken?: string;
}

/** Response message for ResultDb.queryTestExonerations. */
export interface QueryTestExonerationsResponse {
  readonly testExonerations?: TestExoneration[];
  readonly nextPageToken?: string;
}

/** Response message for ResultDb.listArtifacts. */
export interface ListArtifactsResponse {
  readonly artifacts?: Artifact[];
  readonly nextPageToken?: string;
}

/** Response message for ResultDb.queryArtifacts. */
export interface QueryArtifactsResponse {
  readonly artifacts?: Artifact[];
  readonly nextPageToken?: string;
}

/** Request message for ResultDb.queryTestVariants. */
export interface QueryTestVariantsRequest {
  readonly invocations: readonly string[];
  readonly pageSize?: number;
  readonly pageToken?: string;
}

/** Response message for ResultDb.queryTestVariants. */
export interface QueryTestVariantsResponse {
  readonly testVariants: readonly TestVariant[];
  readonly nextPageToken?: string;
}

/** A test ID + variant pair, with its results and exonerations. */
export interface TestVariant {
  readonly testId: string;
  readonly variant?: Variant;
  readonly variantHash: string;
  readonly status: TestVariantStatus;
  readonly results?: readonly TestResultBundle[];
  readonly exonerations?: readonly TestExoneration[];
  readonly testMetadata?: TestMetadata;
}

/** Aggregate status of a test variant. */
export const enum TestVariantStatus {
  TEST_VARIANT_STATUS_UNSPECIFIED = 'TEST_VARIANT_STATUS_UNSPECIFIED',
  UNEXPECTED = 'UNEXPECTED',
  UNEXPECTEDLY_SKIPPED = 'UNEXPECTEDLY_SKIPPED',
  FLAKY = 'FLAKY',
  EXONERATED = 'EXONERATED',
  EXPECTED = 'EXPECTED',
}

// Note: once we have more than 9 statuses, we need to add '0' prefix so '10'
// won't appear before '2' after sorting.
// Maps each test variant status to a single sort character so statuses can be
// ordered by significance via plain string comparison.
export const TEST_VARIANT_STATUS_CMP_STRING = {
  [TestVariantStatus.TEST_VARIANT_STATUS_UNSPECIFIED]: '0',
  [TestVariantStatus.UNEXPECTED]: '1',
  [TestVariantStatus.UNEXPECTEDLY_SKIPPED]: '2',
  [TestVariantStatus.FLAKY]: '3',
  [TestVariantStatus.EXONERATED]: '4',
  [TestVariantStatus.EXPECTED]: '5',
};

/** Metadata attached to a test. */
export interface TestMetadata {
  readonly name?: string;
  readonly location?: TestLocation;
}

/** Wrapper around a single test result. */
export interface TestResultBundle {
  readonly result: TestResult;
}

/** A commit position on a ref in a gitiles repository. */
export interface CommitPosition {
  readonly host: string;
  readonly project: string;
  readonly ref: string;
  readonly position: number;
}

/** Inclusive range of commit positions. */
export interface CommitPositionRange {
  readonly earliest: CommitPosition;
  readonly latest: CommitPosition;
}

/** Inclusive range of string timestamps. */
export interface TimeRange {
  readonly earliest: string;
  readonly latest: string;
}

// A history request must specify exactly one of the two range kinds.
type GetTestResultHistoryRequestRange =
  | { commitPositionRange: CommitPositionRange }
  | { timeRange: TimeRange };

interface GetTestResultHistoryRequestMain {
  readonly realm: string;
  readonly testIdRegexp: string;
  readonly variantPredicate?: VariantPredicate;
  readonly pageSize?: number;
  readonly pageToken?: string;
}

/** Request message for ResultDb.getTestResultHistory. */
export type GetTestResultHistoryRequest = GetTestResultHistoryRequestMain & GetTestResultHistoryRequestRange;

/** A single entry in a test result history response. */
export interface GetTestResultHistoryResponseEntry {
  readonly commitPosition: CommitPosition;
  readonly invocationTimestamp: string;
  readonly result: TestResult;
}

/** Response message for ResultDb.getTestResultHistory. */
export interface GetTestResultHistoryResponse {
  readonly entries: readonly GetTestResultHistoryResponseEntry[];
  readonly nextPageToken: string;
}

/**
 * Client for the luci.resultdb.v1.ResultDB pRPC service.
 *
 * Every call goes through a cache keyed by method name plus the
 * stable-stringified request message, so identical requests are deduplicated
 * according to the per-call CacheOption.
 */
export class ResultDb {
  private static SERVICE = 'luci.resultdb.v1.ResultDB';

  private readonly cachedCallFn: (opt: CacheOption, method: string, message: object) => Promise<unknown>;

  constructor(client: PrpcClientExt) {
    this.cachedCallFn = cached((method: string, message: object) => client.call(ResultDb.SERVICE, method, message), {
      key: (method, message) => `${method}-${stableStringify(message)}`,
    });
  }

  /** Fetches a single invocation by resource name. */
  async getInvocation(req: GetInvocationRequest, cacheOpt: CacheOption = {}): Promise<Invocation> {
    return (await this.cachedCallFn(cacheOpt, 'GetInvocation', req)) as Invocation;
  }

  /** Queries test results across the given invocations. */
  async queryTestResults(req: QueryTestResultsRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'QueryTestResults', req)) as QueryTestResultsResponse;
  }

  /** Queries test exonerations across the given invocations. */
  async queryTestExonerations(req: QueryTestExonerationsRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'QueryTestExonerations', req)) as QueryTestExonerationsResponse;
  }

  /** Lists artifacts under the given parent resource. */
  async listArtifacts(req: ListArtifactsRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'ListArtifacts', req)) as ListArtifactsResponse;
  }

  /** Queries artifacts across the given invocations. */
  async queryArtifacts(req: QueryArtifactsRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'QueryArtifacts', req)) as QueryArtifactsResponse;
  }

  /** Fetches a single artifact by resource name. */
  async getArtifact(req: GetArtifactRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'GetArtifact', req)) as Artifact;
  }

  /** Queries test variants across the given invocations. */
  async queryTestVariants(req: QueryTestVariantsRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'QueryTestVariants', req)) as QueryTestVariantsResponse;
  }

  /** Fetches the result history for tests matching the request. */
  async getTestResultHistory(req: GetTestResultHistoryRequest, cacheOpt: CacheOption = {}) {
    return (await this.cachedCallFn(cacheOpt, 'GetTestResultHistory', req)) as GetTestResultHistoryResponse;
  }
}

/**
 * Parses the artifact name and get the individual components.
 * @throws Error when the string does not look like an artifact name.
 */
export function parseArtifactName(artifactName: string): ArtifactIdentifier {
  const match = artifactName.match(/invocations\/(.*?)\/(?:tests\/(.*?)\/results\/(.*?)\/)?artifacts\/(.*)/);
  if (!match) {
    // Fail with a descriptive error instead of the opaque TypeError the
    // previous non-null assertion produced on malformed names.
    throw new Error(`invalid artifact name: ${artifactName}`);
  }
  const [, invocationId, testId, resultId, artifactId] = match as string[];
  return {
    invocationId,
    // The test ID segment is URL-encoded in the artifact name.
    testId: testId ? decodeURIComponent(testId) : undefined,
    resultId: resultId ? resultId : undefined,
    artifactId,
  };
}

export type ArtifactIdentifier = InvocationArtifactIdentifier | TestResultArtifactIdentifier;

/** Identifies an artifact attached directly to an invocation. */
export interface InvocationArtifactIdentifier {
  readonly invocationId: string;
  readonly testId?: string;
  readonly resultId?: string;
  readonly artifactId: string;
}

/** Identifies an artifact attached to a specific test result. */
export interface TestResultArtifactIdentifier {
  readonly invocationId: string;
  readonly testId: string;
  readonly resultId: string;
  readonly artifactId: string;
}

/**
 * Constructs the name of the artifact.
 */
export function constructArtifactName(identifier: ArtifactIdentifier) {
  if (identifier.testId && identifier.resultId) {
    // Only the test ID needs URL-encoding; this mirrors parseArtifactName.
    return `invocations/${identifier.invocationId}/tests/${encodeURIComponent(identifier.testId)}/results/${
      identifier.resultId
    }/artifacts/${identifier.artifactId}`;
  } else {
    return `invocations/${identifier.invocationId}/artifacts/${identifier.artifactId}`;
  }
}

/**
 * Computes invocation ID for the build from the given build ID.
 */
export function getInvIdFromBuildId(buildId: string): string {
  return `build-${buildId}`;
}

/**
 * Computes invocation ID for the build from the given builder ID and build number.
 */
export async function getInvIdFromBuildNum(builder: BuilderID, buildNum: number): Promise<string> {
  const builderId = `${builder.project}/${builder.bucket}/${builder.builder}`;
  return `build-${await sha256(builderId)}-${buildNum}`;
}

/**
 * Create a test variant property getter for the given property key.
 *
 * A property key must be one of the following:
 * 1. 'status': status of the test variant.
 * 2. 'name': test_metadata.name of the test variant.
 * 3. 'v.{variant_key}': variant.def[variant_key] of the test variant (e.g.
 * v.gpu).
*/ export function createTVPropGetter(propKey: string): (v: TestVariant) => { toString(): string } { if (propKey.match(/^v[.]/i)) { const variantKey = propKey.slice(2); return (v) => v.variant?.def[variantKey] || ''; } propKey = propKey.toLowerCase(); switch (propKey) { case 'name': return (v) => v.testMetadata?.name || v.testId; case 'status': return (v) => v.status; default: console.warn('invalid property key', propKey); return () => ''; } } /** * Create a test variant compare function for the given sorting key list. * * A sorting key must be one of the following: * 1. '{property_key}': sort by property_key in ascending order. * 2. '-{property_key}': sort by property_key in descending order. */ export function createTVCmpFn(sortingKeys: string[]): (v1: TestVariant, v2: TestVariant) => number { const sorters: Array<[number, (v: TestVariant) => { toString(): string }]> = sortingKeys.map((key) => { const [mul, propKey] = key.startsWith('-') ? [-1, key.slice(1)] : [1, key]; const propGetter = createTVPropGetter(propKey); // Status should be be sorted by their significance not by their string // representation. if (propKey.toLowerCase() === 'status') { return [mul, (v) => TEST_VARIANT_STATUS_CMP_STRING[propGetter(v) as TestVariantStatus]]; } return [mul, propGetter]; }); return (v1, v2) => { for (const [mul, propGetter] of sorters) { const cmp = propGetter(v1).toString().localeCompare(propGetter(v2).toString()) * mul; if (cmp !== 0) { return cmp; } } return 0; }; } /** * Computes the display label for a given property key. */ export function getPropKeyLabel(key: string) { // If the key has the format of '{type}.{value}', hide the '{type}.' prefix. // Don't use String.split here because value may contain '.'. return key.match(/^([^.]*\.)?(.*)$/)![2]; }
the_stack
import { HttpClientTestingModule } from '@angular/common/http/testing'; import { Component, ViewChild } from '@angular/core'; import { ComponentFixture, fakeAsync, TestBed, tick, waitForAsync, } from '@angular/core/testing'; import { DtDateAdapter } from '@dynatrace/barista-components/core'; import { DtIconModule } from '@dynatrace/barista-components/icon'; import { DtThemingModule } from '@dynatrace/barista-components/theming'; import { createComponent } from '@dynatrace/testing/browser'; import { DtCalendar, DtDatepickerModule } from '..'; describe('DtCalendar', () => { beforeEach( waitForAsync(() => { TestBed.configureTestingModule({ imports: [ DtDatepickerModule, DtThemingModule, HttpClientTestingModule, DtIconModule.forRoot({ svgIconLocation: `{{name}}.svg`, }), ], declarations: [ SimpleCalendarTestApp, SimpleCalendarWithoutStartDateTestApp, SimpleCalendarLimitedDateTestApp, ], }); TestBed.compileComponents(); }), ); describe('calendar with min and max dates', () => { let fixture: ComponentFixture<SimpleCalendarLimitedDateTestApp>; let component: SimpleCalendarLimitedDateTestApp; beforeEach(fakeAsync(() => { fixture = createComponent(SimpleCalendarLimitedDateTestApp); component = fixture.componentInstance; fixture.detectChanges(); })); it('should start with the min date if a date earlier than the min is provided as startAt date', () => { expect(component.calendar.startAt).toEqual(component.calendar.minDate); }); it('should set the min date as active if a date earlier than the min date is selected', () => { component.calendar.activeDate = new Date(2000, 1, 1); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(component.calendar.minDate); }); it('should start with the max date if a date later than the max is provided as startAt date', () => { component.startAt = new Date(2030, 1, 1); fixture.detectChanges(); expect(component.calendar.startAt).toEqual(component.calendar.maxDate); }); it('should set the max date as active if a date later than 
the max date is selected', () => { component.calendar.activeDate = new Date(2030, 1, 1); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(component.calendar.maxDate); }); }); describe('basic behavior', () => { let fixture: ComponentFixture<SimpleCalendarTestApp>; let component: SimpleCalendarTestApp; beforeEach(fakeAsync(() => { fixture = createComponent(SimpleCalendarTestApp); component = fixture.componentInstance; fixture.detectChanges(); })); describe('calendar header', () => { it("should go to the previous year when the '<<' icon button is clicked", () => { const prevYearBtn = fixture.debugElement.nativeElement.querySelector( '.dt-calendar-header-button-prev-year', ); prevYearBtn.click(); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(new Date(2019, 7, 31)); }); it("should go to the next year when the '>>' icon button is clicked", () => { const nextYearBtn = fixture.debugElement.nativeElement.querySelector( '.dt-calendar-header-button-next-year', ); nextYearBtn.click(); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(new Date(2021, 7, 31)); }); it("should go to the previous month when the '<' icon button is clicked", () => { const prevMonthBtn = fixture.debugElement.nativeElement.querySelector( '.dt-calendar-header-button-prev-month', ); prevMonthBtn.click(); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(new Date(2020, 6, 31)); }); it("should go to the next month when the '>' icon button is clicked and if the day does not exist, take the closest existing last day of the month", () => { const nextMonthBtn = fixture.debugElement.nativeElement.querySelector( '.dt-calendar-header-button-next-month', ); nextMonthBtn.click(); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(new Date(2020, 8, 30)); }); }); describe('calendar today button', () => { it("should show the today button if 'showTodayButton' is set to false ", () => { const todayBtn = 
fixture.debugElement.nativeElement.querySelector('.dt-today-button'); expect(todayBtn).not.toBeNull(); }); it("should not show the today button if 'showTodayButton' is set to false", () => { component.showTodayButton = false; fixture.detectChanges(); const todayBtn = fixture.debugElement.nativeElement.querySelector('.dt-today-button'); expect(todayBtn).toBeNull(); }); it("should correctly set today's date if the today button is clicked", fakeAsync(() => { const todayBtn = fixture.debugElement.nativeElement.querySelector('.dt-today-button'); todayBtn.click(); fixture.detectChanges(); tick(); expect(component.calendar.selected).toEqual( component._dateAdapter.today(), ); expect(component.calendar.activeDate).toEqual( component._dateAdapter.today(), ); })); }); describe('calendar date selection', () => { it('should correctly set the start date and active date if startAt is set', () => { expect(component.calendar.activeDate).toEqual(component.startAt); }); it('should emit a selectedChange event if the today button is clicked', () => { const todayBtn = fixture.debugElement.nativeElement.querySelector('.dt-today-button'); const changeSpy = jest.fn(); component.calendar.selectedChange.subscribe(changeSpy); todayBtn.click(); fixture.detectChanges(); fixture.detectChanges(); expect(changeSpy).toHaveBeenCalledTimes(1); }); it('should emit a selectedChange event if a date is selected', () => { const changeSpy = jest.fn(); component.calendar.selectedChange.subscribe(changeSpy); let selectedCell = { displayValue: '19', value: 19, rawValue: new Date(2020, 7, 19), ariaLabel: 'Aug 19, 2020', }; component.calendar._calendarBody._cellClicked(selectedCell); fixture.detectChanges(); expect(changeSpy).toHaveBeenCalledTimes(1); }); }); it('should correctly set the value and value label if a date is selected', fakeAsync(() => { const formattedStartDate = component._dateAdapter.format( component.startAt, { year: 'numeric', month: 'short', }, ); 
expect(component.calendar._label).toEqual(formattedStartDate); let selectedCell = { displayValue: '15', value: 15, rawValue: new Date(2020, 6, 15), ariaLabel: 'Jul 15, 2020', }; component.calendar._calendarBody._cellClicked(selectedCell); fixture.detectChanges(); expect(component.calendar.activeDate).toEqual(selectedCell.rawValue); const formattedValueLabel = component._dateAdapter.format( selectedCell.rawValue, { year: 'numeric', month: 'short', }, ); const formattedActiveDate = component._dateAdapter.format( component.calendar.activeDate, { year: 'numeric', month: 'short', }, ); expect(component.calendar._label).toEqual(formattedValueLabel); expect(component.calendar._label).toEqual(formattedActiveDate); })); }); describe('basic behavior when no start date is defined', () => { let fixture: ComponentFixture<SimpleCalendarWithoutStartDateTestApp>; let component: SimpleCalendarWithoutStartDateTestApp; beforeEach(fakeAsync(() => { fixture = createComponent(SimpleCalendarWithoutStartDateTestApp); component = fixture.componentInstance; fixture.detectChanges(); })); describe('calendar date selection', () => { it("should set today's date if startAt is not set", () => { expect(component.calendar.activeDate).not.toEqual( component.calendar.startAt, ); expect(component.calendar.activeDate.getDate()).toEqual( component._dateAdapter.today().getDate(), ); expect(component.calendar.activeDate.getMonth()).toEqual( component._dateAdapter.today().getMonth(), ); expect(component.calendar.activeDate.getFullYear()).toEqual( component._dateAdapter.today().getFullYear(), ); }); }); }); }); // ################################### // Testing components // ################################### @Component({ selector: 'dt-test-app', template: ` <dt-calendar [startAt]="startAt" [showTodayButton]="showTodayButton" ></dt-calendar> `, }) class SimpleCalendarTestApp { startAt = new Date(2020, 7, 31); showTodayButton = true; @ViewChild(DtCalendar) calendar: DtCalendar<any>; constructor(public 
_dateAdapter: DtDateAdapter<any>) {} } @Component({ selector: 'dt-test-today-app', template: ` <dt-calendar [startAt]="startAt"></dt-calendar> `, }) class SimpleCalendarWithoutStartDateTestApp { @ViewChild(DtCalendar) calendar: DtCalendar<Date>; constructor(public _dateAdapter: DtDateAdapter<Date>) {} } @Component({ selector: 'dt-test-min-max-app', template: ` <dt-calendar [startAt]="startAt" [minDate]="minDate" [maxDate]="maxDate" ></dt-calendar> `, }) class SimpleCalendarLimitedDateTestApp { @ViewChild(DtCalendar) calendar: DtCalendar<Date>; startAt = new Date(2020, 10, 15); minDate = new Date(2020, 11, 13); maxDate = new Date(2021, 1, 1); constructor(public _dateAdapter: DtDateAdapter<Date>) {} }
the_stack
import { module, test } from 'qunit'; import Keg, { KegRecord } from '@yavin/client/utils/classes/keg'; interface RecordValue extends Record<string, any> { id: number; description: string; meta: string; } class MyRecord implements RecordValue { declare id: number; declare description: string; declare meta: string; constructor(value: unknown) { Object.assign(this, value); } } let KegInstance: Keg<{ [K: string]: KegRecord; record: MyRecord }>; let Record1: MyRecord, Record2: MyRecord, Record3: MyRecord; let RawRecord1: RecordValue, RawRecord2: RecordValue, RawRecord3: RecordValue; module('Unit | Utils | Classes | keg', function (hooks) { hooks.beforeEach(function () { KegInstance = new Keg(); RawRecord1 = { id: 1, description: 'foo', meta: 'ember' }; RawRecord2 = { id: 2, description: 'bar', meta: 'bard' }; RawRecord3 = { id: 3, description: 'bar', meta: 'keg' }; Record1 = new MyRecord(RawRecord1); Record2 = new MyRecord(RawRecord2); Record3 = new MyRecord(RawRecord3); }); test('_getRecordKegForType returns a keg for a type', function (assert) { const recordKeg = KegInstance._getRecordKegForType('record'); assert.deepEqual(recordKeg, [], '_getRecordKegForType returns an empty array when called the first time'); //Mock a record insert const mockRecord = new MyRecord({ id: 1 }); recordKeg.push(mockRecord); assert.deepEqual( KegInstance._getRecordKegForType('record'), [mockRecord], '_getRecordKegForType returns the existing array when called after initially' ); }); test('_getIdIndexForType returns an id index for a type', function (assert) { const idIndex = KegInstance._getIdIndexForType('record'); assert.deepEqual(idIndex, {}, '_getIdIndexForType returns an empty object when called the first time'); //Mock a record insert const mockRecord = new MyRecord({ id: 1 }); idIndex[1] = mockRecord; assert.deepEqual( KegInstance._getIdIndexForType('record'), { 1: mockRecord }, '_getIdIndexForType returns the existing object when called after initially' ); }); test('reset clears 
all state of the keg', function (assert) { //@ts-expect-error - mock value KegInstance.recordKegs = { foo: 'bar' }; //@ts-expect-error - mock value KegInstance.idIndexes = { foo: 'bar' }; KegInstance.reset(); assert.deepEqual(KegInstance.recordKegs, {}, 'reset resets recordKegs'); assert.deepEqual(KegInstance.idIndexes, {}, 'reset resets idIndexes'); }); test('reset by type clears one model type from the keg', function (assert) { //@ts-expect-error - mock value KegInstance.recordKegs = { foo: 'bar', ham: 'spam' }; //@ts-expect-error - mock value KegInstance.idIndexes = { foo: 'bar', ham: 'spam' }; KegInstance.resetByType('foo'); assert.deepEqual( KegInstance.recordKegs, //@ts-expect-error - mock value { foo: [], ham: 'spam' }, 'resets foo type, but leaves ham alone in recordKegs' ); assert.deepEqual( KegInstance.idIndexes, //@ts-expect-error - mock value { foo: {}, ham: 'spam' }, 'resets foo type, but leaves ham alone in idIndexes' ); }); test('insert pushes a record into the keg', function (assert) { const insertedRecord = KegInstance.insert('record', Record1); assert.equal(Record1, insertedRecord, '`insert` returns the inserted record'); const foundRecord = KegInstance.getById('record', 1, 'default'); assert.equal(Record1, foundRecord, 'after inserting a record it can be found'); }); test('inserting a record with an existing id update the record in the keg', function (assert) { //Insert initial record const insertedRecord = KegInstance.insert('record', Record1); //Insert new record with same id const newRecord = new MyRecord({ ...RawRecord1, description: 'updated' }); KegInstance.insert('record', newRecord); let fetchedRecord = KegInstance.getById('record', 1); assert.deepEqual( KegInstance.all('record'), [newRecord], 'Inserting a record into the keg with an existing id does not add a new record' ); assert.equal(insertedRecord, fetchedRecord, 'After update fetched record still the same object'); }); test('insertMany pushes many records into the keg', function 
(assert) { const records = [Record1, Record2]; const pushedRecords = KegInstance.insertMany('record', records); assert.ok( pushedRecords.every((rec, idx) => rec === records[idx]), 'insertMany returns an array of the inserted records' ); const allRecords = KegInstance.all('record'); assert.ok( records.every((rec, idx) => rec === allRecords[idx]), 'The inserted records are the same as the fetched records' ); assert.ok( KegInstance.recordKegs.record?.every((rec, idx) => rec === records[idx]), 'The inserted records are registered in recordKeg' ); assert.deepEqual( KegInstance.idIndexes.record, { 'default.1': records[0], 'default.2': records[1], }, 'The inserted records are registered in idIndexes' ); }); test('insertMany updates keg records when provided records have the same id', function (assert) { const firstPush = KegInstance.insertMany('record', [Record1, Record2]); const secondPush = KegInstance.insertMany('record', [ new MyRecord({ ...RawRecord1, description: 'updated' }), Record3, new MyRecord({ id: 4, description: 'partially loaded record', partialData: true }), ]); assert.deepEqual( KegInstance.all('record'), [...firstPush, secondPush[1], secondPush[2]], 'Inserting records into the keg with an existing id does not add a new record' ); assert.equal(secondPush[0], firstPush[0], 'After update fetched record still the same object'); const fetchedRecord = KegInstance.getById('record', 1); assert.equal( fetchedRecord?.description, 'updated', 'Inserting a record into the keg with an existing id updates the record' ); assert.equal(fetchedRecord, firstPush[0], 'After updating a record the same object is return when fetching'); const thirdPush = KegInstance.insertMany('record', [new MyRecord({ id: 4, description: 'Fully loaded record' })]); assert.deepEqual( KegInstance.all('record'), [...firstPush, secondPush[1], ...thirdPush], 'Inserting a record into the keg with an existing id containing partial data does not add a new record' ); assert.notOk( //@ts-ignore 
KegInstance.getById('record', 4)?.partialData, 'Partial flag is removed when partial record is updated without flag in update set' ); assert.equal(secondPush[2], thirdPush[0], 'After update the returned record is the same object'); }); test('insertMany with identifierField option', function (assert) { const records = [Record1, Record2]; KegInstance.insertMany('record', records, { identifierField: 'description' }); assert.deepEqual( KegInstance.idIndexes.record, { 'default.foo': records[0], 'default.bar': records[1], }, 'The inserted records are registered in idIndexes using the `identifierField` option' ); assert.equal(KegInstance.getById('record', 'foo'), records[0], 'Record1 is fetched using the `identifierField`'); }); test('getById can be used to find a record by id', function (assert) { KegInstance.insert('record', Record1); const foundRecord = KegInstance.getById('record', 1); assert.ok(foundRecord, 'after pushing a record it can be found'); assert.equal(foundRecord?.id, RawRecord1.id, 'record has correct id'); assert.equal(foundRecord?.description, RawRecord1.description, 'record has correct description'); const missingRecord = KegInstance.getById('record', 22); assert.equal(missingRecord, undefined, 'getById returns undefined when given an id that is not present'); const missingType = KegInstance.getById('nonRegisteredType', 22); assert.equal(missingType, undefined, 'getById returns undefined when given a type that is not present'); }); test('getBy can be used to search by fields when an object is passed', function (assert) { KegInstance.insertMany('record', [Record1, Record2, Record3]); const foundRecords1 = KegInstance.getBy('record', { description: 'foo' }); assert.deepEqual( foundRecords1.map((r) => r.id), [1], 'getBy returns an array when a single record was found' ); let foundRecords2 = KegInstance.getBy('record', { description: 'bar' }); assert.deepEqual( foundRecords2.map((r) => r.id), [2, 3], 'getBy returns an array when several records were found' 
); let foundRecords3 = KegInstance.getBy('record', { description: 'bar', meta: 'keg', }); assert.deepEqual( foundRecords3.map((r) => r.id), [3], 'getBy returns an array of found records when it matches all key-value pairs' ); let foundRecords4 = KegInstance.getBy('record', { description: 'bar', meta: 'foo', }); assert.deepEqual( foundRecords4.map((r) => r.id), [], 'getBy returns an empty array when not all key-value pairs match' ); let foundRecords5 = KegInstance.getBy('record', { description: 'ba' }); assert.deepEqual( foundRecords5.map((r) => r.id), [], 'getBy returns an empty array when no records were found' ); let foundRecords6 = KegInstance.getBy('record', {}); assert.deepEqual( foundRecords6.map((r) => r.id), [1, 2, 3], 'getBy returns all records when an empty object is provided' ); let foundRecords7 = KegInstance.getBy('record', { meta: ['ember', 'keg'] }); assert.deepEqual( foundRecords7.map((r) => r.id), [1, 3], 'getBy returns all records when multiple values for a field is provided' ); let missingType = KegInstance.getBy('nonRegisteredType', { id: 1 }); assert.deepEqual(missingType, [], 'getBy returns an empty array when given a type that is not present'); }); test('getBy can be used to search by fields when a function is passed', function (assert) { KegInstance.insertMany('record', [Record1, Record2, Record3]); const foundRecords1 = KegInstance.getBy('record', (rec) => { return rec.id < 2; }); assert.deepEqual( foundRecords1.map((r) => r.id), [1], 'getBy returns an array when a single record was found' ); const foundRecords2 = KegInstance.getBy('record', (rec) => { return rec.description === 'bar'; }); assert.deepEqual( foundRecords2.map((r) => r.id), [2, 3], 'getBy returns an array when a several record was found' ); const foundRecords3 = KegInstance.getBy('record', () => false); assert.deepEqual( foundRecords3.map((r) => r.id), [], 'getBy returns an empty array when no records were found' ); }); test('all returns all records for a type', function 
(assert) { assert.deepEqual( KegInstance.all('record'), [], 'all returns an empty array if on records have been pushed for the provided type' ); const pushedRecords = KegInstance.insertMany('record', [Record1, Record2, Record3]); assert.deepEqual( KegInstance.all('record'), pushedRecords, 'all returns all records in the keg for the provided type' ); }); });
the_stack
import {csvParseRows, csvFormatRows} from 'd3-dsv'; import {range} from 'd3-array'; import {console as globalConsole} from 'global/window'; import assert from 'assert'; import {Analyzer, DATA_TYPES as AnalyzerDATA_TYPES} from 'type-analyzer'; import normalize from '@mapbox/geojson-normalize'; import {ALL_FIELD_TYPES, DATASET_FORMATS} from 'constants/default-settings'; import {notNullorUndefined, parseFieldValue} from 'utils/data-utils'; import KeplerGlSchema, {SavedMap, ParsedDataset} from 'schemas'; import {LoadedMap} from 'schemas/schema-manager'; import {GUIDES_FILE_FORMAT_DOC} from 'constants/user-guides'; import {hasOwnProperty, isPlainObject, toArray} from 'utils/utils'; import {Field} from 'utils/table-utils/kepler-table'; import {DataContainerInterface} from 'utils/table-utils/data-container-interface'; import {ProcessorResult, RowData} from './types'; import {Feature} from '@nebula.gl/edit-modes'; export const ACCEPTED_ANALYZER_TYPES = [ AnalyzerDATA_TYPES.DATE, AnalyzerDATA_TYPES.TIME, AnalyzerDATA_TYPES.DATETIME, AnalyzerDATA_TYPES.NUMBER, AnalyzerDATA_TYPES.INT, AnalyzerDATA_TYPES.FLOAT, AnalyzerDATA_TYPES.BOOLEAN, AnalyzerDATA_TYPES.STRING, AnalyzerDATA_TYPES.GEOMETRY, AnalyzerDATA_TYPES.GEOMETRY_FROM_STRING, AnalyzerDATA_TYPES.PAIR_GEOMETRY_FROM_STRING, AnalyzerDATA_TYPES.ZIPCODE, AnalyzerDATA_TYPES.ARRAY, AnalyzerDATA_TYPES.OBJECT ]; // if any of these value occurs in csv, parse it to null; // const CSV_NULLS = ['', 'null', 'NULL', 'Null', 'NaN', '/N']; // matches empty string export const CSV_NULLS = /^(null|NULL|Null|NaN|\/N||)$/; const IGNORE_DATA_TYPES = Object.keys(AnalyzerDATA_TYPES).filter( type => !ACCEPTED_ANALYZER_TYPES.includes(type) ); export const PARSE_FIELD_VALUE_FROM_STRING = { [ALL_FIELD_TYPES.boolean]: { valid: (d: unknown): boolean => typeof d === 'boolean', parse: (d: unknown): boolean => d === 'true' || d === 'True' || d === 'TRUE' || d === '1' }, [ALL_FIELD_TYPES.integer]: { // @ts-ignore valid: (d: unknown): boolean => 
parseInt(d, 10) === d, // @ts-ignore parse: (d: unknown): number => parseInt(d, 10) }, [ALL_FIELD_TYPES.timestamp]: { valid: (d: unknown, field: Field): boolean => ['x', 'X'].includes(field.format) ? typeof d === 'number' : typeof d === 'string', parse: (d: any, field: Field) => (['x', 'X'].includes(field.format) ? Number(d) : d) }, [ALL_FIELD_TYPES.real]: { // @ts-ignore valid: (d: unknown): boolean => parseFloat(d) === d, // Note this will result in NaN for some string parse: parseFloat } }; /** * Process csv data, output a data object with `{fields: [], rows: []}`. * The data object can be wrapped in a `dataset` and pass to [`addDataToMap`](../actions/actions.md#adddatatomap) * @param rawData raw csv string * @returns data object `{fields: [], rows: []}` can be passed to addDataToMaps * @public * @example * import {processCsvData} from 'kepler.gl/processors'; * * const testData = `gps_data.utc_timestamp,gps_data.lat,gps_data.lng,gps_data.types,epoch,has_result,id,time,begintrip_ts_utc,begintrip_ts_local,date * 2016-09-17 00:09:55,29.9900937,31.2590542,driver_analytics,1472688000000,False,1,2016-09-23T00:00:00.000Z,2016-10-01 09:41:39+00:00,2016-10-01 09:41:39+00:00,2016-09-23 * 2016-09-17 00:10:56,29.9927699,31.2461142,driver_analytics,1472688000000,False,2,2016-09-23T00:00:00.000Z,2016-10-01 09:46:37+00:00,2016-10-01 16:46:37+00:00,2016-09-23 * 2016-09-17 00:11:56,29.9907261,31.2312742,driver_analytics,1472688000000,False,3,2016-09-23T00:00:00.000Z,,,2016-09-23 * 2016-09-17 00:12:58,29.9870074,31.2175827,driver_analytics,1472688000000,False,4,2016-09-23T00:00:00.000Z,,,2016-09-23` * * const dataset = { * info: {id: 'test_data', label: 'My Csv'}, * data: processCsvData(testData) * }; * * dispatch(addDataToMap({ * datasets: [dataset], * options: {centerMap: true, readOnly: true} * })); */ export function processCsvData(rawData: unknown[][], header?: string[]): ProcessorResult { let rows: unknown[][] | undefined; let headerRow: string[] | undefined; if (typeof 
rawData === 'string') { const parsedRows: string[][] = csvParseRows(rawData); if (!Array.isArray(parsedRows) || parsedRows.length < 2) { // looks like an empty file, throw error to be catch throw new Error('process Csv Data Failed: CSV is empty'); } headerRow = parsedRows[0]; rows = parsedRows.slice(1); } else if (Array.isArray(rawData) && rawData.length) { rows = rawData; headerRow = header; if (!Array.isArray(headerRow)) { // if data is passed in as array of rows and missing header // assume first row is header // @ts-ignore headerRow = rawData[0]; rows = rawData.slice(1); } } if (!rows || !headerRow) { throw new Error('invalid input passed to processCsvData'); } // here we assume the csv file that people uploaded will have first row // as name of the column cleanUpFalsyCsvValue(rows); // No need to run type detection on every data point // here we get a list of none null values to run analyze on const sample = getSampleForTypeAnalyze({fields: headerRow, rows}); const fields = getFieldsFromData(sample, headerRow); const parsedRows = parseRowsByFields(rows, fields); return {fields, rows: parsedRows}; } /** * Parse rows of csv by analyzed field types. So that `'1'` -> `1`, `'True'` -> `true` * @param rows * @param fields */ export function parseRowsByFields(rows: any[][], fields: Field[]) { // Edit rows in place const geojsonFieldIdx = fields.findIndex(f => f.name === '_geojson'); fields.forEach(parseCsvRowsByFieldType.bind(null, rows, geojsonFieldIdx)); return rows; } /** * Getting sample data for analyzing field type. 
*/ export function getSampleForTypeAnalyze({ fields, rows, sampleCount = 50 }: { fields: string[]; rows: unknown[][]; sampleCount?: number; }): RowData { const total = Math.min(sampleCount, rows.length); // const fieldOrder = fields.map(f => f.name); const sample = range(0, total, 1).map(d => ({})); // collect sample data for each field fields.forEach((field, fieldIdx) => { // data counter let i = 0; // sample counter let j = 0; while (j < total) { if (i >= rows.length) { // if depleted data pool sample[j][field] = null; j++; } else if (notNullorUndefined(rows[i][fieldIdx])) { const value = rows[i][fieldIdx]; sample[j][field] = typeof value === 'string' ? value.trim() : value; j++; i++; } else { i++; } } }); return sample; } /** * Convert falsy value in csv including `'', 'null', 'NULL', 'Null', 'NaN'` to `null`, * so that type-analyzer won't detect it as string * * @param rows */ function cleanUpFalsyCsvValue(rows: unknown[][]): void { const re = new RegExp(CSV_NULLS, 'g'); for (let i = 0; i < rows.length; i++) { for (let j = 0; j < rows[i].length; j++) { // analyzer will set any fields to 'string' if there are empty values // which will be parsed as '' by d3.csv // here we parse empty data as null // TODO: create warning when deltect `CSV_NULLS` in the data if (typeof rows[i][j] === 'string' && (rows[i][j] as string).match(re)) { rows[i][j] = null; } } } } /** * Process uploaded csv file to parse value by field type * * @param rows * @param geoFieldIdx field index * @param field * @param i */ export function parseCsvRowsByFieldType( rows: unknown[][], geoFieldIdx: number, field: Field, i: number ): void { const parser = PARSE_FIELD_VALUE_FROM_STRING[field.type]; if (parser) { // check first not null value of it's already parsed const first = rows.find(r => notNullorUndefined(r[i])); if (!first || parser.valid(first[i], field)) { return; } rows.forEach(row => { // parse string value based on field type if (row[i] !== null) { row[i] = parser.parse(row[i], field); 
if ( geoFieldIdx > -1 && isPlainObject(row[geoFieldIdx]) && // @ts-ignore hasOwnProperty(row[geoFieldIdx], 'properties') ) { // @ts-ignore row[geoFieldIdx].properties[field.name] = row[i]; } } }); } } /** * Analyze field types from data in `string` format, e.g. uploaded csv. * Assign `type`, `fieldIdx` and `format` (timestamp only) to each field * * @param data array of row object * @param fieldOrder array of field names as string * @returns formatted fields * @public * @example * * import {getFieldsFromData} from 'kepler.gl/processors'; * const data = [{ * time: '2016-09-17 00:09:55', * value: '4', * surge: '1.2', * isTrip: 'true', * zeroOnes: '0' * }, { * time: '2016-09-17 00:30:08', * value: '3', * surge: null, * isTrip: 'false', * zeroOnes: '1' * }, { * time: null, * value: '2', * surge: '1.3', * isTrip: null, * zeroOnes: '1' * }]; * * const fieldOrder = ['time', 'value', 'surge', 'isTrip', 'zeroOnes']; * const fields = getFieldsFromData(data, fieldOrder); * // fields = [ * // {name: 'time', format: 'YYYY-M-D H:m:s', fieldIdx: 1, type: 'timestamp'}, * // {name: 'value', format: '', fieldIdx: 4, type: 'integer'}, * // {name: 'surge', format: '', fieldIdx: 5, type: 'real'}, * // {name: 'isTrip', format: '', fieldIdx: 6, type: 'boolean'}, * // {name: 'zeroOnes', format: '', fieldIdx: 7, type: 'integer'}]; * */ export function getFieldsFromData(data: RowData, fieldOrder: string[]): Field[] { // add a check for epoch timestamp const metadata = Analyzer.computeColMeta( data, [ {regex: /.*geojson|all_points/g, dataType: 'GEOMETRY'}, {regex: /.*census/g, dataType: 'STRING'} ], {ignoredDataTypes: IGNORE_DATA_TYPES} ); const {fieldByIndex} = renameDuplicateFields(fieldOrder); const result = fieldOrder.map((field, index) => { const name = fieldByIndex[index]; const fieldMeta = metadata.find(m => m.key === field); const {type, format} = fieldMeta || {}; return { name, id: name, displayName: name, format, fieldIdx: index, type: analyzerTypeToFieldType(type), analyzerType: 
type, valueAccessor: dc => d => { return dc.valueAt(d.index, index); } }; }); return result; } /** * pass in an array of field names, rename duplicated one * and return a map from old field index to new name * * @param fieldOrder * @returns new field name by index */ export function renameDuplicateFields( fieldOrder: string[] ): {allNames: string[]; fieldByIndex: string[]} { return fieldOrder.reduce<{allNames: string[]; fieldByIndex: string[]}>( (accu, field, i) => { const {allNames} = accu; let fieldName = field; // add a counter to duplicated names if (allNames.includes(field)) { let counter = 0; while (allNames.includes(`${field}-${counter}`)) { counter++; } fieldName = `${field}-${counter}`; } accu.fieldByIndex[i] = fieldName; accu.allNames.push(fieldName); return accu; }, {allNames: [], fieldByIndex: []} ); } /** * Convert type-analyzer output to kepler.gl field types * * @param aType * @returns corresponding type in `ALL_FIELD_TYPES` */ /* eslint-disable complexity */ export function analyzerTypeToFieldType(aType: string): string { const { DATE, TIME, DATETIME, NUMBER, INT, FLOAT, BOOLEAN, STRING, GEOMETRY, GEOMETRY_FROM_STRING, PAIR_GEOMETRY_FROM_STRING, ZIPCODE, ARRAY, OBJECT } = AnalyzerDATA_TYPES; // TODO: un recognized types // CURRENCY PERCENT NONE switch (aType) { case DATE: return ALL_FIELD_TYPES.date; case TIME: case DATETIME: return ALL_FIELD_TYPES.timestamp; case FLOAT: return ALL_FIELD_TYPES.real; case INT: return ALL_FIELD_TYPES.integer; case BOOLEAN: return ALL_FIELD_TYPES.boolean; case GEOMETRY: case GEOMETRY_FROM_STRING: case PAIR_GEOMETRY_FROM_STRING: case ARRAY: case OBJECT: // TODO: create a new data type for objects and arrays return ALL_FIELD_TYPES.geojson; case NUMBER: case STRING: case ZIPCODE: return ALL_FIELD_TYPES.string; default: globalConsole.warn(`Unsupported analyzer type: ${aType}`); return ALL_FIELD_TYPES.string; } } /* eslint-enable complexity */ /** * Process data where each row is an object, output can be passed to 
[`addDataToMap`](../actions/actions.md#adddatatomap) * NOTE: This function may mutate input. * @param rawData an array of row object, each object should have the same number of keys * @returns dataset containing `fields` and `rows` * @public * @example * import {addDataToMap} from 'kepler.gl/actions'; * import {processRowObject} from 'kepler.gl/processors'; * * const data = [ * {lat: 31.27, lng: 127.56, value: 3}, * {lat: 31.22, lng: 126.26, value: 1} * ]; * * dispatch(addDataToMap({ * datasets: { * info: {label: 'My Data', id: 'my_data'}, * data: processRowObject(data) * } * })); */ export function processRowObject(rawData: unknown[]): ProcessorResult { if (!Array.isArray(rawData)) { return null; } else if (!rawData.length) { // data is empty return { fields: [], rows: [] }; } const keys = Object.keys(rawData[0]); // [lat, lng, value] const rows = rawData.map(d => keys.map(key => d[key])); // [[31.27, 127.56, 3]] // row object an still contain values like `Null` or `N/A` cleanUpFalsyCsvValue(rows); return processCsvData(rows, keys); } /** * Process GeoJSON [`FeatureCollection`](http://wiki.geojson.org/GeoJSON_draft_version_6#FeatureCollection), * output a data object with `{fields: [], rows: []}`. * The data object can be wrapped in a `dataset` and passed to [`addDataToMap`](../actions/actions.md#adddatatomap) * NOTE: This function may mutate input. 
* * @param rawData raw geojson feature collection * @returns dataset containing `fields` and `rows` * @public * @example * import {addDataToMap} from 'kepler.gl/actions'; * import {processGeojson} from 'kepler.gl/processors'; * * const geojson = { * "type" : "FeatureCollection", * "features" : [{ * "type" : "Feature", * "properties" : { * "capacity" : "10", * "type" : "U-Rack" * }, * "geometry" : { * "type" : "Point", * "coordinates" : [ -71.073283, 42.417500 ] * } * }] * }; * * dispatch(addDataToMap({ * datasets: { * info: { * label: 'Sample Taxi Trips in New York City', * id: 'test_trip_data' * }, * data: processGeojson(geojson) * } * })); */ export function processGeojson(rawData: unknown): ProcessorResult { const normalizedGeojson = normalize(rawData); if (!normalizedGeojson || !Array.isArray(normalizedGeojson.features)) { const error = new Error( `Read File Failed: File is not a valid GeoJSON. Read more about [supported file format](${GUIDES_FILE_FORMAT_DOC})` ); throw error; // fail to normalize geojson } // getting all feature fields const allDataRows: Array<{_geojson: Feature} & keyof Feature> = []; for (let i = 0; i < normalizedGeojson.features.length; i++) { const f = normalizedGeojson.features[i]; if (f.geometry) { allDataRows.push({ // add feature to _geojson field _geojson: f, ...(f.properties || {}) }); } } // get all the field const fields = allDataRows.reduce<string[]>((accu, curr) => { Object.keys(curr).forEach(key => { if (!accu.includes(key)) { accu.push(key); } }); return accu; }, []); // make sure each feature has exact same fields allDataRows.forEach(d => { fields.forEach(f => { if (!(f in d)) { d[f] = null; d._geojson.properties[f] = null; } }); }); return processRowObject(allDataRows); } /** * On export data to csv * @param dataContainer * @param fields `dataset.fields` * @returns csv string */ export function formatCsv(data: DataContainerInterface, fields: Field[]): string { const columns = fields.map(f => f.displayName || f.name); const 
formattedData = [columns]; // parse geojson object as string for (const row of data.rows(true)) { formattedData.push(row.map((d, i) => parseFieldValue(d, fields[i].type))); } return csvFormatRows(formattedData); } /** * Validate input data, adding missing field types, rename duplicate columns */ export function validateInputData(data: Record<string, unknown>): ProcessorResult { if (!isPlainObject(data)) { assert('addDataToMap Error: dataset.data cannot be null'); return null; } else if (!Array.isArray(data.fields)) { assert('addDataToMap Error: expect dataset.data.fields to be an array'); return null; } else if (!Array.isArray(data.rows)) { assert('addDataToMap Error: expect dataset.data.rows to be an array'); return null; } const {fields, rows} = data; // check if all fields has name, format and type const allValid = fields.every((f, i) => { if (!isPlainObject(f)) { assert(`fields needs to be an array of object, but find ${typeof f}`); fields[i] = {}; } if (!f.name) { assert(`field.name is required but missing in ${JSON.stringify(f)}`); // assign a name fields[i].name = `column_${i}`; } if (!ALL_FIELD_TYPES[f.type]) { assert(`unknown field type ${f.type}`); return false; } if (!fields.every(field => field.analyzerType)) { assert('field missing analyzerType'); return false; } // check time format is correct based on first 10 not empty element if (f.type === ALL_FIELD_TYPES.timestamp) { const sample = findNonEmptyRowsAtField(rows, i, 10).map(r => ({ts: r[i]})); const analyzedType = Analyzer.computeColMeta(sample)[0]; return analyzedType && analyzedType.category === 'TIME' && analyzedType.format === f.format; } return true; }); if (allValid) { return {rows, fields}; } // if any field has missing type, recalculate it for everyone // because we simply lost faith in humanity const sampleData = getSampleForTypeAnalyze({ fields: fields.map(f => f.name), rows }); const fieldOrder = fields.map(f => f.name); const meta = getFieldsFromData(sampleData, fieldOrder); const 
updatedFields = fields.map((f, i) => ({ ...f, type: meta[i].type, format: meta[i].format, analyzerType: meta[i].analyzerType })); return {fields: updatedFields, rows}; } function findNonEmptyRowsAtField(rows: unknown[][], fieldIdx: number, total: number): any[] { const sample: any[] = []; let i = 0; while (sample.length < total && i < rows.length) { if (notNullorUndefined(rows[i]?.[fieldIdx])) { sample.push(rows[i]); } i++; } return sample; } /** * Process saved kepler.gl json to be pass to [`addDataToMap`](../actions/actions.md#adddatatomap). * The json object should contain `datasets` and `config`. * @param rawData * @returns datasets and config `{datasets: {}, config: {}}` * @public * @example * import {addDataToMap} from 'kepler.gl/actions'; * import {processKeplerglJSON} from 'kepler.gl/processors'; * * dispatch(addDataToMap(processKeplerglJSON(keplerGlJson))); */ export function processKeplerglJSON(rawData: SavedMap): LoadedMap | null { return rawData ? KeplerGlSchema.load(rawData.datasets, rawData.config) : null; } /** * Parse a single or an array of datasets saved using kepler.gl schema * @param rawData */ export function processKeplerglDataset(rawData: unknown): ParsedDataset | ParsedDataset[] | null { if (!rawData) { return null; } const results = KeplerGlSchema.parseSavedData(toArray(rawData)); if (!results) { return null; } return Array.isArray(rawData) ? 
results : results[0]; } export const DATASET_HANDLERS: { row: typeof processRowObject; geojson: typeof processGeojson; csv: typeof processCsvData; keplergl: typeof processKeplerglDataset; } = { [DATASET_FORMATS.row]: processRowObject, [DATASET_FORMATS.geojson]: processGeojson, [DATASET_FORMATS.csv]: processCsvData, [DATASET_FORMATS.keplergl]: processKeplerglDataset }; export const Processors: { processGeojson: typeof processGeojson; processCsvData: typeof processCsvData; processRowObject: typeof processRowObject; processKeplerglJSON: typeof processKeplerglJSON; processKeplerglDataset: typeof processKeplerglDataset; analyzerTypeToFieldType: typeof analyzerTypeToFieldType; getFieldsFromData: typeof getFieldsFromData; parseCsvRowsByFieldType: typeof parseCsvRowsByFieldType; formatCsv: typeof formatCsv; } = { processGeojson, processCsvData, processRowObject, processKeplerglJSON, processKeplerglDataset, analyzerTypeToFieldType, getFieldsFromData, parseCsvRowsByFieldType, formatCsv };
the_stack
/**
 * Ambient type declarations for the CodePush Cordova plugin: the package model
 * (IPackage / IRemotePackage / ILocalPackage), the acquisition SDK surface, and
 * the `window.codePush` plugin API with its sync options and statuses.
 */
declare module Http {
    // Supported HTTP methods for the acquisition requester.
    export const enum Verb {
        GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS, CONNECT, PATCH
    }

    // Minimal HTTP response shape: status plus optional raw body.
    export interface Response {
        statusCode: number;
        body?: string;
    }

    // Pluggable HTTP client; the 3-argument overload sends a request body.
    export interface Requester {
        request(verb: Verb, url: string, callback: Callback<Response>): void;
        request(verb: Verb, url: string, requestBody: string, callback: Callback<Response>): void;
    }
}

// Augments the global Window with the plugin entry point.
interface Window {
    codePush: CodePushCordovaPlugin;
}

/**
 * Defines a package. All fields are non-nullable, except when retrieving the currently running package on the first run of the app,
 * in which case only the appVersion is compulsory.
 *
 * !! THIS TYPE IS READ FROM NATIVE CODE AS WELL. ANY CHANGES TO THIS INTERFACE NEEDS TO BE UPDATED IN NATIVE CODE !!
 */
interface IPackage {
    deploymentKey: string;
    description: string;
    label: string;
    appVersion: string;
    isMandatory: boolean;
    packageHash: string;
    packageSize: number;
    failedApply: boolean;
}

/**
 * Defines a remote package, which represents an update package available for download.
 */
interface IRemotePackage extends IPackage {
    /**
     * The URL at which the package is available for download.
     */
    downloadUrl: string;

    /**
     * Downloads the package update from the CodePush service.
     *
     * @param downloadSuccess Called with one parameter, the downloaded package information, once the download completed successfully.
     * @param downloadError Optional callback invoked in case of an error.
     */
    download(downloadSuccess: SuccessCallback<ILocalPackage>, downloadError?: ErrorCallback): void;

    /**
     * Aborts the current download session, previously started with download().
     *
     * @param abortSuccess Optional callback invoked if the abort operation succeeded.
     * @param abortError Optional callback invoked in case of an error.
     */
    abortDownload(abortSuccess?: SuccessCallback<void>, abortError?: ErrorCallback): void;
}

/**
 * Defines a local package.
 *
 * !! THIS TYPE IS READ FROM NATIVE CODE AS WELL. ANY CHANGES TO THIS INTERFACE NEEDS TO BE UPDATED IN NATIVE CODE !!
 */
interface ILocalPackage extends IPackage {
    /**
     * The local storage path where this package is located.
     */
    localPath: string;

    /**
     * Indicates if the current application run is the first one after the package was applied.
     */
    isFirstRun: boolean;

    /**
     * Applies this package to the application. The application will be reloaded with this package and on every application launch this package will be loaded.
     * If the rollbackTimeout parameter is provided, the application will wait for a navigator.codePush.notifyApplicationReady() for the given number of milliseconds.
     * If navigator.codePush.notifyApplicationReady() is called before the time period specified by rollbackTimeout, the apply operation is considered a success.
     * Otherwise, the apply operation will be marked as failed, and the application is reverted to its previous version.
     *
     * @param applySuccess Callback invoked if the apply operation succeeded.
     * @param applyError Optional callback invoked in case of an error.
     * @param rollbackTimeout Optional time interval, in milliseconds, to wait for a notifyApplicationReady() call before marking the apply as failed and reverting to the previous version.
     */
    apply(applySuccess: SuccessCallback<void>, applyError?: ErrorCallback, rollbackTimeout?: number): void;
}

/**
 * Decomposed static side of RemotePackage.
 * For Class Decomposition guidelines see http://www.typescriptlang.org/Handbook#writing-dts-files-guidelines-and-specifics
 */
interface RemotePackage_Static {
    new (): IRemotePackage;
}

/**
 * Decomposed static side of LocalPackage.
 * For Class Decomposition guidelines see http://www.typescriptlang.org/Handbook#writing-dts-files-guidelines-and-specifics
 */
interface LocalPackage_Static {
    new (): ILocalPackage;
}

declare var RemotePackage: RemotePackage_Static;
declare var LocalPackage: LocalPackage_Static;

/**
 * Defines the JSON format of the current package information file.
 * This file is stored in the local storage of the device and persists between store updates and code-push updates.
 *
 * !! THIS FILE IS READ FROM NATIVE CODE AS WELL. ANY CHANGES TO THIS INTERFACE NEEDS TO BE UPDATED IN NATIVE CODE !!
 */
interface IPackageInfoMetadata extends ILocalPackage {
    nativeBuildTime: string;
}

// Server notification that a newer binary (store) version is required.
interface NativeUpdateNotification {
    updateAppVersion: boolean;   // Always true
    appVersion: string;
}

// Node-style callback: first argument is the error (if any), second the result.
interface Callback<T> { (error: Error, parameter: T): void; }
// Invoked on success, optionally carrying a result.
interface SuccessCallback<T> { (result?: T): void; }
// Invoked on failure, optionally carrying the error.
interface ErrorCallback { (error?: Error): void; }

// Connection settings for the CodePush acquisition service.
interface Configuration {
    serverUrl: string;
    deploymentKey: string;
    ignoreAppVersion?: boolean;
}

// Well-known status strings reported back to the CodePush server.
declare class AcquisitionStatus {
    static DeploymentSucceeded: string;
    static DeploymentFailed: string;
}

// Client for querying the CodePush server for updates and reporting status.
declare class AcquisitionManager {
    constructor(httpRequester: Http.Requester, configuration: Configuration);
    public queryUpdateWithCurrentPackage(currentPackage: IPackage, callback?: Callback<IRemotePackage | NativeUpdateNotification>): void;
    public reportStatus(status: string, message?: string, callback?: Callback<void>): void;
}

interface CodePushCordovaPlugin {

    /**
     * Get the current package information.
     *
     * @param packageSuccess Callback invoked with the currently deployed package information.
     * @param packageError Optional callback invoked in case of an error.
     */
    getCurrentPackage(packageSuccess: SuccessCallback<ILocalPackage>, packageError?: ErrorCallback): void;

    /**
     * Checks with the CodePush server if an update package is available for download.
     *
     * @param querySuccess Callback invoked in case of a successful response from the server.
     *                     The callback takes one RemotePackage parameter. A non-null package is a valid update.
     *                     A null package means the application is up to date for the current native application version.
     * @param queryError Optional callback invoked in case of an error.
     */
    checkForUpdate(querySuccess: SuccessCallback<IRemotePackage>, queryError?: ErrorCallback): void;

    /**
     * Notifies the plugin that the update operation succeeded and that the application is ready.
     * Calling this function is required if a rollbackTimeout parameter is used for your LocalPackage.apply() call.
     * If apply() is used without a rollbackTimeout, calling this function is a noop.
     *
     * @param notifySucceeded Optional callback invoked if the plugin was successfully notified.
     * @param notifyFailed Optional callback invoked in case of an error during notifying the plugin.
     */
    notifyApplicationReady(notifySucceeded?: SuccessCallback<void>, notifyFailed?: ErrorCallback): void;

    /**
     * Convenience method for installing updates in one method call.
     * This method is provided for simplicity, and its behavior can be replicated by using window.codePush.checkForUpdate(), RemotePackage's download() and LocalPackage's apply() methods.
     *
     * The algorithm of this method is the following:
     * - Checks for an update on the CodePush server.
     * - If an update is available
     *         - If the update is mandatory and the alertMessage is set in options, the user will be informed that the application will be updated to the latest version.
     *           The update package will then be downloaded and applied.
     *         - If the update is not mandatory and the confirmMessage is set in options, the user will be asked if they want to update to the latest version.
     *           If they decline, the syncCallback will be invoked with SyncStatus.UPDATE_IGNORED.
     *         - Otherwise, the update package will be downloaded and applied with no user interaction.
     * - If no update is available on the server, or if a previously rolled back update is available and the ignoreFailedUpdates is set to true, the syncCallback will be invoked with the SyncStatus.UP_TO_DATE.
     * - If an error occurs during checking for update, downloading or applying it, the syncCallback will be invoked with the SyncStatus.ERROR.
     *
     * @param syncCallback Optional callback to be called with the status of the sync operation.
     *                     The callback will be called only once, and the possible statuses are defined by the SyncStatus enum.
     * @param syncOptions Optional SyncOptions parameter configuring the behavior of the sync operation.
     *
     */
    sync(syncCallback?: SuccessCallback<SyncStatus>, syncOptions?: SyncOptions): void;
}

/**
 * Defines the possible result statuses of the window.codePush.sync operation.
 */
declare enum SyncStatus {
    /**
     * The application is up to date.
     */
    UP_TO_DATE,

    /**
     * An update is available, it has been downloaded, unzipped and copied to the deployment folder.
     * After the completion of the callback invoked with SyncStatus.APPLY_SUCCESS, the application will be reloaded with the updated code and resources.
     */
    APPLY_SUCCESS,

    /**
     * An optional update is available, but the user declined to install it. The update was not downloaded.
     */
    UPDATE_IGNORED,

    /**
     * An error happened during the sync operation. This might be an error while communicating with the server, downloading or unzipping the update.
     * The console logs should contain more information about what happened. No update has been applied in this case.
     */
    ERROR
}

/**
 * Defines the sync operation options.
 */
interface SyncOptions {
    /**
     * Optional time interval, in milliseconds, to wait for a notifyApplicationReady() call before marking the apply as failed and reverting to the previous version.
     * This is the rollbackTimeout parameter used for LocalPackage's apply() method call.
     */
    rollbackTimeout?: number;

    /**
     * Optional boolean flag. If set, previous updates which were rolled back will be ignored.
     */
    ignoreFailedUpdates?: boolean;

    /**
     * If a mandatory update is available and this option is set, the message will be displayed to the user in an alert dialog before downloading and installing the update.
     * The user will not be able to cancel the operation, since the update is mandatory.
     */
    mandatoryUpdateMessage?: string;

    /**
     * If an optional update is available and this option is set, the message will be displayed to the user in a confirmation dialog.
     * If the user confirms the update, it will be downloaded and installed. Otherwise, the update is not downloaded.
     */
    optionalUpdateMessage?: string;

    /**
     * The title of the dialog box used for interacting with the user in case of a mandatory or optional update.
     * This title will only be used if at least one of mandatoryUpdateMessage or optionalUpdateMessage options are set.
     */
    updateTitle?: string;

    /**
     * The label of the confirmation button in case of an optional update.
     */
    optionalInstallButtonLabel?: string;

    /**
     * The label of the cancel button in case of an optional update.
     */
    optionalIgnoreButtonLabel?: string;

    /**
     * The label of the continue button in case of a mandatory update.
     */
    mandatoryContinueButtonLabel?: string;

    /**
     * Flag indicating if the update description provided by the CodePush server should be displayed in the dialog box appended to the update message.
     */
    appendReleaseDescription?: boolean;

    /**
     * Optional prefix to add to the release description.
     */
    descriptionPrefix?: string;
}

/**
 * Defines the JSON format of the package diff manifest file.
 */
interface IDiffManifest {
    deletedFiles: string[];
}
the_stack
(global as any).pxt = pxt; import * as nodeutil from './nodeutil'; import * as fs from 'fs'; import * as path from 'path'; import * as child_process from 'child_process'; import { promisify } from "util"; import * as hid from './hid'; import U = pxt.Util; import Map = pxt.Map; // abstract over build engine export interface BuildEngine { id: string; updateEngineAsync: () => Promise<void>; setPlatformAsync: () => Promise<void>; buildAsync: () => Promise<void>; patchHexInfo: (extInfo: pxtc.ExtensionInfo) => pxtc.HexInfo; prepBuildDirAsync: () => Promise<void>; buildPath: string; appPath: string; moduleConfig: string; outputPath?: string; deployAsync?: (r: pxtc.CompileResult) => Promise<void>; } // abstract over C++ runtime target (currently the DAL) export interface TargetRuntime { includePath: string; } interface BuildCache { sha?: string; modSha?: string; } function noopAsync() { return Promise.resolve() } export const buildEngines: Map<BuildEngine> = { yotta: { id: "yotta", updateEngineAsync: () => runYottaAsync(["update"]), buildAsync: () => runYottaAsync(["build"]), setPlatformAsync: () => runYottaAsync(["target", pxt.appTarget.compileService.yottaTarget]), patchHexInfo: patchYottaHexInfo, prepBuildDirAsync: noopAsync, buildPath: "built/yt", moduleConfig: "module.json", deployAsync: msdDeployCoreAsync, appPath: "source" }, dockeryotta: { id: "dockeryotta", updateEngineAsync: () => runDockerAsync(["yotta", "update"]), buildAsync: () => runDockerAsync(["yotta", "build"]), setPlatformAsync: () => runDockerAsync(["yotta", "target", pxt.appTarget.compileService.yottaTarget]), patchHexInfo: patchYottaHexInfo, prepBuildDirAsync: noopAsync, buildPath: "built/dockeryt", moduleConfig: "module.json", deployAsync: msdDeployCoreAsync, appPath: "source" }, platformio: { id: "platformio", updateEngineAsync: noopAsync, buildAsync: () => runPlatformioAsync(["run"]), setPlatformAsync: noopAsync, patchHexInfo: patchPioHexInfo, prepBuildDirAsync: noopAsync, buildPath: "built/pio", 
moduleConfig: "platformio.ini", deployAsync: platformioDeployAsync, appPath: "src" }, codal: { id: "codal", updateEngineAsync: updateCodalBuildAsync, buildAsync: () => runBuildCmdAsync("python", "build.py"), setPlatformAsync: noopAsync, patchHexInfo: patchCodalHexInfo, prepBuildDirAsync: prepCodalBuildDirAsync, buildPath: "built/codal", moduleConfig: "codal.json", deployAsync: msdDeployCoreAsync, appPath: "pxtapp" }, dockercodal: { id: "dockercodal", updateEngineAsync: updateCodalBuildAsync, buildAsync: () => runDockerAsync(["python", "build.py"]), setPlatformAsync: noopAsync, patchHexInfo: patchCodalHexInfo, prepBuildDirAsync: prepCodalBuildDirAsync, buildPath: "built/dockercodal", moduleConfig: "codal.json", deployAsync: msdDeployCoreAsync, appPath: "pxtapp" }, dockermake: { id: "dockermake", updateEngineAsync: () => runBuildCmdAsync(nodeutil.addCmd("npm"), "install"), buildAsync: () => runDockerAsync(["make", "-j8"]), setPlatformAsync: noopAsync, patchHexInfo: patchDockermakeHexInfo, prepBuildDirAsync: noopAsync, buildPath: "built/dockermake", moduleConfig: "package.json", deployAsync: msdDeployCoreAsync, outputPath: "bld/pxt-app.elf", appPath: "pxtapp" }, dockercross: { id: "dockercross", updateEngineAsync: () => runBuildCmdAsync(nodeutil.addCmd("npm"), "install"), buildAsync: () => runDockerAsync(["make"]), setPlatformAsync: noopAsync, patchHexInfo: patchDockerCrossHexInfo, prepBuildDirAsync: noopAsync, buildPath: "built/dockercross", moduleConfig: "package.json", deployAsync: noopAsync, appPath: "pxtapp" }, dockerespidf: { id: "dockerespidf", updateEngineAsync: noopAsync, buildAsync: () => runDockerAsync(["make"]), setPlatformAsync: noopAsync, patchHexInfo: patchDockerEspIdfHexInfo, prepBuildDirAsync: noopAsync, buildPath: "built/dockerespidf", moduleConfig: "sdkconfig.defaults", deployAsync: noopAsync, appPath: "main" }, cs: { id: "cs", updateEngineAsync: noopAsync, buildAsync: () => runBuildCmdAsync(getCSharpCommand(), "-t:library", "-out:pxtapp.dll", 
"lib.cs"), setPlatformAsync: noopAsync, patchHexInfo: patchCSharpDll, prepBuildDirAsync: noopAsync, buildPath: "built/cs", moduleConfig: "module.json", deployAsync: buildFinalCsAsync, appPath: "pxtapp" }, } // once we have a different build engine, set this appropriately export let thisBuild = buildEngines['yotta'] export function setThisBuild(b: BuildEngine) { if (pxt.appTarget.compileService.dockerImage && !process.env["PXT_NODOCKER"]) { if (b === buildEngines["codal"]) b = buildEngines["dockercodal"]; if (b === buildEngines["yotta"]) b = buildEngines["dockeryotta"]; } pxt.debug(`set build engine: ${b.id}`) thisBuild = b; } function patchYottaHexInfo(extInfo: pxtc.ExtensionInfo) { let buildEngine = thisBuild let hexPath = buildEngine.buildPath + "/build/" + pxt.appTarget.compileService.yottaTarget.split("@")[0] + "/source/" + pxt.appTarget.compileService.yottaBinary; return { hex: fs.readFileSync(hexPath, "utf8").split(/\r?\n/) } } function patchCodalHexInfo(extInfo: pxtc.ExtensionInfo) { let bin = pxt.appTarget.compileService.codalBinary let hexPath = thisBuild.buildPath + "/build/" + bin + ".hex" return { hex: fs.readFileSync(hexPath, "utf8").split(/\r?\n/) } } function patchDockermakeHexInfo(extInfo: pxtc.ExtensionInfo) { let hexPath = thisBuild.buildPath + "/bld/pxt-app.hex" return { hex: fs.readFileSync(hexPath, "utf8").split(/\r?\n/) } } function patchDockerCrossHexInfo(extInfo: pxtc.ExtensionInfo) { let hexPath = thisBuild.buildPath + "/bld/all.tgz.b64" return { hex: fs.readFileSync(hexPath, "utf8").split(/\r?\n/) } } function patchDockerEspIdfHexInfo(extInfo: pxtc.ExtensionInfo) { let hexPath = thisBuild.buildPath + "/build/pxtapp.b64" return { hex: fs.readFileSync(hexPath, "utf8").split(/\r?\n/) } } function patchCSharpDll(extInfo: pxtc.ExtensionInfo) { let hexPath = thisBuild.buildPath + "/lib.cs" return { hex: [fs.readFileSync(hexPath, "utf8")] } } function pioFirmwareHex() { let buildEngine = buildEngines['platformio'] return buildEngine.buildPath + 
"/.pioenvs/myenv/firmware.hex" } function patchPioHexInfo(extInfo: pxtc.ExtensionInfo) { return { hex: fs.readFileSync(pioFirmwareHex(), "utf8").split(/\r?\n/) } } function platformioDeployAsync(r: pxtc.CompileResult) { if (pxt.appTarget.compile.useUF2) return msdDeployCoreAsync(r); else return platformioUploadAsync(r); } function platformioUploadAsync(r: pxtc.CompileResult) { // TODO maybe platformio has some option to do this? let buildEngine = buildEngines['platformio'] let prevHex = fs.readFileSync(pioFirmwareHex()) fs.writeFileSync(pioFirmwareHex(), r.outfiles[pxtc.BINARY_HEX]) return runPlatformioAsync(["run", "--target", "upload", "--target", "nobuild", "-v"]) .finally(() => { pxt.log('Restoring ' + pioFirmwareHex()) fs.writeFileSync(pioFirmwareHex(), prevHex) }) } export function buildHexAsync(buildEngine: BuildEngine, mainPkg: pxt.MainPackage, extInfo: pxtc.ExtensionInfo, forceBuild: boolean) { let tasks = Promise.resolve() let buildCachePath = buildEngine.buildPath + "/buildcache.json" let buildCache: BuildCache = {} if (fs.existsSync(buildCachePath)) { buildCache = nodeutil.readJson(buildCachePath) } if (!forceBuild && (buildCache.sha == extInfo.sha && !process.env["PXT_RUNTIME_DEV"])) { pxt.debug("Skipping C++ build.") return tasks } pxt.debug("writing build files to " + buildEngine.buildPath) let allFiles = U.clone(extInfo.generatedFiles) U.jsonCopyFrom(allFiles, extInfo.extensionFiles) let writeFiles = () => { for (let f of nodeutil.allFiles(buildEngine.buildPath + "/" + buildEngine.appPath, 8, true)) { let bn = f.slice(buildEngine.buildPath.length) bn = bn.replace(/\\/g, "/").replace(/^\//, "/") if (U.startsWith(bn, "/" + buildEngine.appPath + "/") && !allFiles[bn]) { pxt.log("removing stale " + bn) fs.unlinkSync(f) } } U.iterMap(allFiles, (fn, v) => { fn = buildEngine.buildPath + fn nodeutil.mkdirP(path.dirname(fn)) let existing: string = null if (fs.existsSync(fn)) existing = fs.readFileSync(fn, "utf8") if (existing !== v) 
nodeutil.writeFileSync(fn, v) }) } tasks = tasks .then(buildEngine.prepBuildDirAsync) .then(writeFiles) let saveCache = () => fs.writeFileSync(buildCachePath, JSON.stringify(buildCache, null, 4) + "\n") let modSha = U.sha256(extInfo.generatedFiles["/" + buildEngine.moduleConfig]) let needDal = false if (buildCache.modSha !== modSha || forceBuild) { tasks = tasks .then(buildEngine.setPlatformAsync) .then(buildEngine.updateEngineAsync) .then(() => { buildCache.sha = "" buildCache.modSha = modSha saveCache(); needDal = true }) } else { pxt.debug(`Skipping C++ build update.`) } tasks = tasks .then(buildEngine.buildAsync) .then(() => { buildCache.sha = extInfo.sha saveCache() if (needDal) buildDalConst(buildEngine, mainPkg, true); }) return tasks } function runYottaAsync(args: string[]) { let ypath: string = process.env["YOTTA_PATH"] let ytCommand = "yotta" let env = U.clone(process.env) if (/;[A-Z]:\\/.test(ypath)) { for (let pp of ypath.split(";")) { let q = path.join(pp, "yotta.exe") if (fs.existsSync(q)) { ytCommand = q env["PATH"] = env["PATH"] + ";" + ypath break } } } pxt.log("*** " + ytCommand + " " + args.join(" ")) let child = child_process.spawn("yotta", args, { cwd: thisBuild.buildPath, stdio: "inherit", env: env }) return new Promise<void>((resolve, reject) => { child.on("close", (code: number) => { if (code === 0) resolve() else reject(new Error("yotta " + args.join(" ") + ": exit code " + code)) }) }) } function runPlatformioAsync(args: string[]) { pxt.log("*** platformio " + args.join(" ")) let child = child_process.spawn("platformio", args, { cwd: thisBuild.buildPath, stdio: "inherit", env: process.env }) return new Promise<void>((resolve, reject) => { child.on("close", (code: number) => { if (code === 0) resolve() else reject(new Error("platformio " + args.join(" ") + ": exit code " + code)) }) }) } function runDockerAsync(args: string[]) { if (process.env["PXT_NODOCKER"] == "force") { const cmd = args.shift() return nodeutil.spawnAsync({ cmd, args, 
cwd: thisBuild.buildPath }) } else { let fullpath = process.cwd() + "/" + thisBuild.buildPath + "/" let cs = pxt.appTarget.compileService let dargs = cs.dockerArgs || ["-u", "build"] let mountArg = fullpath + ":/src" // this speeds up docker build a lot on macOS, // see https://docs.docker.com/docker-for-mac/osxfs-caching/ if (process.platform == "darwin") mountArg += ":delegated" return nodeutil.spawnAsync({ cmd: "docker", args: ["run", "--rm", "-v", mountArg, "-w", "/src"].concat(dargs).concat([cs.dockerImage]).concat(args), cwd: thisBuild.buildPath }) } } let parseCppInt = pxt.cpp.parseCppInt; export function codalGitAsync(...args: string[]) { return nodeutil.spawnAsync({ cmd: "git", args: args, cwd: thisBuild.buildPath }) } function prepCodalBuildDirAsync() { if (fs.existsSync(thisBuild.buildPath + "/.git/config")) return Promise.resolve() let cs = pxt.appTarget.compileService let pkg = "https://github.com/" + cs.githubCorePackage nodeutil.mkdirP("built") return nodeutil.runGitAsync("clone", pkg, thisBuild.buildPath) .then(() => codalGitAsync("checkout", cs.gittag)) } function runBuildCmdAsync(cmd: string, ...args: string[]) { return nodeutil.spawnAsync({ cmd, args, cwd: thisBuild.buildPath, }) } function updateCodalBuildAsync() { let cs = pxt.appTarget.compileService return codalGitAsync("checkout", cs.gittag) .then( () => /v\d+/.test(cs.gittag) ? 
Promise.resolve() : codalGitAsync("pull"), e => codalGitAsync("checkout", "master") .then(() => codalGitAsync("pull"))) .then(() => codalGitAsync("checkout", cs.gittag)) } // TODO: DAL specific code should be lifted out export function buildDalConst(buildEngine: BuildEngine, mainPkg: pxt.MainPackage, rebuild = false, create = false) { const constName = "dal.d.ts"; let constPath = constName; const config = mainPkg && mainPkg.config; const corePackage = config && config.dalDTS && config.dalDTS.corePackage; if (corePackage) constPath = path.join(corePackage, constName); let vals: Map<string> = {} let done: Map<string> = {} let excludeSyms: string[] = [] function expandInt(s: string): number { s = s.trim() let existing = U.lookup(vals, s) if (existing != null && existing != "?") s = existing let mm = /^\((.*)\)/.exec(s) if (mm) s = mm[1] let m = /^(\w+)\s*([\+\|])\s*(.*)$/.exec(s) if (m) { let k = expandInt(m[1]) if (k != null) return m[2] == "+" ? k + expandInt(m[3]) : k | expandInt(m[3]) } let pp = parseCppInt(s) if (pp != null) return pp return null } function extractConstants(fileName: string, src: string, dogenerate = false): string { let lineNo = 0 // let err = (s: string) => U.userError(`${fileName}(${lineNo}): ${s}\n`) let outp = "" let inEnum = false let enumVal = 0 let defineVal = (n: string, v: string) => { if (excludeSyms.some(s => U.startsWith(n, s))) return let parsed = expandInt(v) if (parsed != null) { v = parsed.toString() let curr = U.lookup(vals, n) if (curr == null || curr == v) { vals[n] = v if (dogenerate && !done[n]) { outp += ` ${n} = ${v},\n` done[n] = v } } else { vals[n] = "?" 
// TODO: DAL-specific code if (dogenerate && !/^MICROBIT_DISPLAY_(ROW|COLUMN)_COUNT|PXT_VTABLE_SHIFT$/.test(n)) pxt.log(`${fileName}(${lineNo}): #define conflict, ${n}`) } } } src.split(/\r?\n/).forEach(ln => { ++lineNo ln = ln.replace(/\/\/.*/, "").replace(/\/\*.*/g, "") let m = /^\s*#define\s+(\w+)\s+(.*)$/.exec(ln) if (m) { defineVal(m[1], m[2]) } if (inEnum && /}/.test(ln)) inEnum = false if (/^\s*enum\s+(\w+)/.test(ln)) { inEnum = true; enumVal = -1; } const shouldExpand = inEnum && (m = /^\s*(\w+)\s*(=\s*(.*?))?,?\s*$/.exec(ln)); if (shouldExpand) { let v = m[3] if (v) { enumVal = expandInt(v) if (enumVal == null) { pxt.log(`${fileName}(${lineNo}): invalid enum initializer, ${ln}`) inEnum = false return } } else { enumVal++ v = enumVal + "" } defineVal(m[1], v) } }) return outp } if (mainPkg && (create || (mainPkg.getFiles().indexOf(constName) >= 0 && (rebuild || !fs.existsSync(constName))))) { pxt.log(`rebuilding ${constName} into ${constPath}...`) let files: string[] = [] let foundConfig = false for (let d of mainPkg.sortedDeps()) { if (d.config.dalDTS) { if (d.config.dalDTS.includeDirs) for (let dn of d.config.dalDTS.includeDirs) { dn = buildEngine.buildPath + "/" + dn if (U.endsWith(dn, ".h")) files.push(dn) else { let here = nodeutil.allFiles(dn, 20).filter(fn => U.endsWith(fn, ".h")) U.pushRange(files, here) } } excludeSyms = d.config.dalDTS.excludePrefix || excludeSyms foundConfig = true } } if (!foundConfig) { let incPath = buildEngine.buildPath + "/yotta_modules/microbit-dal/inc/" if (!fs.existsSync(incPath)) incPath = buildEngine.buildPath + "/yotta_modules/codal/inc/"; if (!fs.existsSync(incPath)) incPath = buildEngine.buildPath if (!fs.existsSync(incPath)) U.userError("cannot find " + incPath); files = nodeutil.allFiles(incPath, 20) .filter(fn => U.endsWith(fn, ".h")) .filter(fn => fn.indexOf("/mbed-classic/") < 0) .filter(fn => fn.indexOf("/mbed-os/") < 0) } files.sort(U.strcmp) let fc: Map<string> = {} for (let fn of files) { if (U.endsWith(fn, 
"Config.h")) continue fc[fn] = fs.readFileSync(fn, "utf8") } files = Object.keys(fc) // pre-pass - detect conflicts for (let fn of files) { extractConstants(fn, fc[fn]) } // stabilize for (let fn of files) { extractConstants(fn, fc[fn]) } let consts = "// Auto-generated. Do not edit.\ndeclare const enum DAL {\n" for (let fn of files) { let v = extractConstants(fn, fc[fn], true) if (v) { consts += " // " + fn.replace(/\\/g, "/").replace(buildEngine.buildPath, "") + "\n" consts += v } } consts += "}\n" fs.writeFileSync(constPath, consts) } } const writeFileAsync: any = promisify(fs.writeFile) const cpExecAsync = promisify(child_process.exec); const readDirAsync = promisify(fs.readdir) function buildFinalCsAsync(res: ts.pxtc.CompileResult) { return nodeutil.spawnAsync({ cmd: getCSharpCommand(), args: ["-out:pxtapp.exe", "binary.cs"], cwd: "built", }) } function getCSharpCommand() { return process.platform == "win32" ? "mcs.bat" : "mcs"; } function msdDeployCoreAsync(res: ts.pxtc.CompileResult): Promise<void> { const firmwareName = [pxtc.BINARY_UF2, pxtc.BINARY_HEX, pxtc.BINARY_ELF].filter(f => !!res.outfiles[f])[0]; if (!firmwareName) { // something went wrong heres pxt.reportError("compile", `firmware missing from built files (${Object.keys(res.outfiles).join(', ')})`) return Promise.resolve(); } const firmware = res.outfiles[firmwareName]; const encoding = firmwareName == pxtc.BINARY_HEX ? 
"utf8" : "base64"; function copyDeployAsync() { return getBoardDrivesAsync() .then(drives => filterDrives(drives)) .then(drives => { if (drives.length == 0) throw new Error("cannot find any drives to deploy to"); pxt.log(`copying ${firmwareName} to ` + drives.join(", ")); const writeHexFile = (drivename: string) => { return writeFileAsync(path.join(drivename, firmwareName), firmware, encoding) .then(() => pxt.debug(" wrote to " + drivename)) .catch((e: Error) => { throw new Error(`failed writing to ${drivename}; ${e.message}`); }) }; return U.promiseMapAll(drives, d => writeHexFile(d)) .then(() => drives.length); }).then(() => { }); } function hidDeployAsync() { const f = firmware const blocks = pxtc.UF2.parseFile(U.stringToUint8Array(atob(f))) return hid.initAsync() .then(dev => dev.flashAsync(blocks)) } let p = Promise.resolve(); if (pxt.appTarget.compile && pxt.appTarget.compile.useUF2 && !pxt.appTarget.serial.noDeploy && hid.isInstalled(true)) { // try hid or simply bail out p = p.then(() => hidDeployAsync()) .catch(e => copyDeployAsync()); } else { p = p.then(() => copyDeployAsync()) } return p; } function getBoardDrivesAsync(): Promise<string[]> { if (process.platform == "win32") { const rx = new RegExp("^([A-Z]:)\\s+(\\d+).* " + pxt.appTarget.compile.deployDrives) return cpExecAsync("wmic PATH Win32_LogicalDisk get DeviceID, VolumeName, FileSystem, DriveType") .then(({ stdout, stderr }) => { let res: string[] = []; stdout .split(/\n/) .forEach(ln => { let m = rx.exec(ln); if (m && m[2] == "2") { res.push(m[1] + "/"); } } ); return res; }); } else if (process.platform == "darwin") { const rx = new RegExp(pxt.appTarget.compile.deployDrives) return readDirAsync("/Volumes") .then(lst => lst.filter(s => rx.test(s)).map(s => "/Volumes/" + s + "/")) } else if (process.platform == "linux") { const rx = new RegExp(pxt.appTarget.compile.deployDrives) const user = process.env["USER"] if (nodeutil.existsDirSync(`/media/${user}`)) return readDirAsync(`/media/${user}`) 
.then(lst => lst.filter(s => rx.test(s)).map(s => `/media/${user}/${s}/`)) return Promise.resolve([]); } else { return Promise.resolve([]) } } function filterDrives(drives: string[]): string[] { const marker = pxt.appTarget.compile.deployFileMarker; if (!marker) return drives; return drives.filter(d => { try { return fs.existsSync(path.join(d, marker)); } catch (e) { return false; } }); }
the_stack
import { ComparisonOperator, MathExpression, Metric, MetricOptions, Statistic, TreatMissingData } from '@aws-cdk/aws-cloudwatch';
import { Rule, RuleTargetInput, Schedule } from '@aws-cdk/aws-events';
import { SfnStateMachine } from '@aws-cdk/aws-events-targets';
import { IGrantable, IPrincipal } from '@aws-cdk/aws-iam';
import { FunctionProps, IFunction, Tracing } from '@aws-cdk/aws-lambda';
import { SqsEventSource } from '@aws-cdk/aws-lambda-event-sources';
import { RetentionDays } from '@aws-cdk/aws-logs';
import { BlockPublicAccess, IBucket } from '@aws-cdk/aws-s3';
import { BucketDeployment, Source } from '@aws-cdk/aws-s3-deployment';
import { IQueue, Queue, QueueEncryption } from '@aws-cdk/aws-sqs';
import { StateMachine, JsonPath, Choice, Succeed, Condition, Map, TaskInput, IntegrationPattern } from '@aws-cdk/aws-stepfunctions';
import { CallAwsService, LambdaInvoke, StepFunctionsStartExecution } from '@aws-cdk/aws-stepfunctions-tasks';
import { Construct, Duration, Stack, ArnFormat } from '@aws-cdk/core';
import { Repository } from '../../codeartifact/repository';
import { lambdaFunctionUrl, sqsQueueUrl } from '../../deep-link';
import { Monitoring } from '../../monitoring';
import { PackageTagConfig } from '../../package-tag';
import { RUNBOOK_URL } from '../../runbook-url';
import { S3StorageFactory } from '../../s3/storage';
import { TempFile } from '../../temp-file';
import type { PackageLinkConfig } from '../../webapp';
import { gravitonLambdaIfAvailable } from '../_lambda-architecture';
import { Orchestration } from '../orchestration';
import { STORAGE_KEY_PREFIX, METADATA_KEY_SUFFIX, PACKAGE_KEY_SUFFIX } from '../shared/constants';
import { MetricName, METRICS_NAMESPACE } from './constants';
import { Ingestion as Handler } from './ingestion';
import { ReIngest } from './re-ingest';

export interface IngestionProps {
  /**
   * The bucket in which ingested objects are due to be inserted.
   */
  readonly bucket: IBucket;

  /**
   * The CodeArtifact repository to which packages should be published. This is
   * the ConstructHub internal CodeArtifact repository, if one exists.
   */
  readonly codeArtifact?: Repository;

  /**
   * The monitoring handler to register alarms with.
   */
  readonly monitoring: Monitoring;

  /**
   * The backend orchestration to invoke once the package metadata has been
   * successfully registered.
   */
  readonly orchestration: Orchestration;

  /**
   * How long to retain the CloudWatch logs.
   *
   * @default RetentionDays.TEN_YEARS
   */
  readonly logRetention?: RetentionDays;

  /**
   * Configuration for custom package page links.
   */
  readonly packageLinks?: PackageLinkConfig[];

  /**
   * Serialized configuration for custom package tags.
   */
  readonly packageTags?: PackageTagConfig[];

  /**
   * How frequently all packages should get fully reprocessed.
   *
   * See the operator runbook for more information about reprocessing.
   * @see https://github.com/cdklabs/construct-hub/blob/main/docs/operator-runbook.md
   *
   * @default - never
   */
  readonly reprocessFrequency?: Duration;
}

/**
 * The ingestion function receives messages from discovery integrations and
 * processes their payloads into the provided S3 Bucket.
 *
 * This function is also an `IGrantable`, so that it can be granted permissions
 * to read from the source S3 buckets.
 */
export class Ingestion extends Construct implements IGrantable {
  public readonly grantPrincipal: IPrincipal;

  /**
   * The SQS queue that triggers the ingestion function.
   */
  public readonly queue: IQueue;

  /**
   * The ingestion dead letter queue, which will hold messages that failed
   * ingestion one too many times, so that poison pills don't endlessly consume
   * resources.
   */
  public readonly deadLetterQueue: IQueue;

  // Shared retention period for the ingestion queue, its DLQ and the reprocess queue.
  public readonly queueRetentionPeriod = Duration.days(14);

  // The underlying ingestion Lambda function.
  public readonly function: IFunction;

  public constructor(scope: Construct, id: string, props: IngestionProps) {
    super(scope, id);

    this.deadLetterQueue = new Queue(this, 'DLQ', {
      encryption: QueueEncryption.KMS_MANAGED,
      retentionPeriod: this.queueRetentionPeriod,
      visibilityTimeout: Duration.minutes(15),
    });

    // Main ingestion queue; messages failing 5 receives land in the DLQ.
    this.queue = new Queue(this, 'Queue', {
      deadLetterQueue: {
        maxReceiveCount: 5,
        queue: this.deadLetterQueue,
      },
      encryption: QueueEncryption.KMS_MANAGED,
      retentionPeriod: this.queueRetentionPeriod,
      visibilityTimeout: Duration.minutes(15),
    });

    // Package links/tags configuration is shipped to the handler via a
    // dedicated config bucket rather than environment variables.
    const configFilename = 'config.json';
    const config = new TempFile(configFilename, JSON.stringify({
      packageLinks: props.packageLinks ?? [],
      packageTags: props.packageTags ?? [],
    }));

    const storageFactory = S3StorageFactory.getOrCreate(this);
    const configBucket = storageFactory.newBucket(this, 'ConfigBucket', {
      blockPublicAccess: BlockPublicAccess.BLOCK_ALL,
      enforceSSL: true,
      versioned: true,
    });

    new BucketDeployment(this, 'DeployIngestionConfiguration', {
      sources: [Source.asset(config.dir)],
      destinationBucket: configBucket,
    });

    const environment: FunctionProps['environment'] = {
      AWS_EMF_ENVIRONMENT: 'Local',
      BUCKET_NAME: props.bucket.bucketName,
      CONFIG_BUCKET_NAME: configBucket.bucketName,
      CONFIG_FILE_KEY: configFilename,
      STATE_MACHINE_ARN: props.orchestration.stateMachine.stateMachineArn,
    };

    if (props.codeArtifact) {
      environment.CODE_ARTIFACT_REPOSITORY_ENDPOINT = props.codeArtifact.publishingRepositoryNpmEndpoint;
      environment.CODE_ARTIFACT_DOMAIN_NAME = props.codeArtifact.repositoryDomainName;
      environment.CODE_ARTIFACT_DOMAIN_OWNER = props.codeArtifact.repositoryDomainOwner;
    }

    const handler = new Handler(this, 'Default', {
      description: '[ConstructHub/Ingestion] Ingests new package versions into the Construct Hub',
      environment,
      logRetention: props.logRetention ?? RetentionDays.TEN_YEARS,
      memorySize: 10_240, // Currently the maximum possible setting
      timeout: Duration.minutes(15),
      tracing: Tracing.ACTIVE,
    });
    this.function = handler;

    configBucket.grantRead(handler);
    props.bucket.grantWrite(this.function);
    props.codeArtifact?.grantPublishToRepository(handler);
    props.orchestration.stateMachine.grantStartExecution(this.function);

    this.function.addEventSource(new SqsEventSource(this.queue, { batchSize: 1 }));
    // This event source is disabled, and can be used to re-process dead-letter-queue messages
    this.function.addEventSource(new SqsEventSource(this.deadLetterQueue, { batchSize: 1, enabled: false }));

    // Reprocess workflow
    const reprocessQueue = new Queue(this, 'ReprocessQueue', {
      deadLetterQueue: {
        maxReceiveCount: 5,
        queue: this.deadLetterQueue,
      },
      encryption: QueueEncryption.KMS_MANAGED,
      retentionPeriod: this.queueRetentionPeriod,
      // Visibility timeout of 15 minutes matches the Lambda maximum execution time.
      visibilityTimeout: Duration.minutes(15),
    });
    props.bucket.grantRead(this.function, `${STORAGE_KEY_PREFIX}*${PACKAGE_KEY_SUFFIX}`);
    this.function.addEventSource(new SqsEventSource(reprocessQueue, { batchSize: 1 }));
    const reprocessWorkflow = new ReprocessIngestionWorkflow(this, 'ReprocessWorkflow', { bucket: props.bucket, queue: reprocessQueue });

    // Run the reprocess workflow on a schedule, when a reprocess frequency is configured.
    const updatePeriod = props.reprocessFrequency;
    if (updatePeriod) {
      const rule = new Rule(this, 'ReprocessCronJob', {
        schedule: Schedule.rate(updatePeriod),
        description: 'Periodically reprocess all packages',
      });
      rule.addTarget(new SfnStateMachine(reprocessWorkflow.stateMachine, {
        input: RuleTargetInput.fromObject({
          comment: 'Scheduled reprocessing event from cron job.',
        }),
      }));
    }

    this.grantPrincipal = this.function.grantPrincipal;

    // Alarm when the DLQ holds any message (visible or in-flight).
    props.monitoring.addLowSeverityAlarm(
      'Ingestion Dead-Letter Queue not empty',
      new MathExpression({
        expression: 'm1 + m2',
        usingMetrics: {
          m1: this.deadLetterQueue.metricApproximateNumberOfMessagesVisible({ period: Duration.minutes(1) }),
          m2: this.deadLetterQueue.metricApproximateNumberOfMessagesNotVisible({ period: Duration.minutes(1) }),
        },
      }).createAlarm(this, 'DLQAlarm', {
        alarmName: `${this.node.path}/DLQNotEmpty`,
        alarmDescription: [
          'The dead-letter queue for the Ingestion function is not empty!',
          '',
          `RunBook: ${RUNBOOK_URL}`,
          '',
          `Direct link to the queue: ${sqsQueueUrl(this.deadLetterQueue)}`,
          `Direct link to the function: ${lambdaFunctionUrl(this.function)}`,
        ].join('\n'),
        comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
        evaluationPeriods: 1,
        threshold: 1,
        // SQS does not emit metrics if the queue has been empty for a while, which is GOOD.
        treatMissingData: TreatMissingData.NOT_BREACHING,
      }),
    );
    props.monitoring.addHighSeverityAlarm(
      'Ingestion failures',
      this.function.metricErrors().createAlarm(this, 'FailureAlarm', {
        alarmName: `${this.node.path}/Failure`,
        alarmDescription: [
          'The Ingestion function is failing!',
          '',
          `RunBook: ${RUNBOOK_URL}`,
          '',
          `Direct link to the function: ${lambdaFunctionUrl(this.function)}`,
        ].join('\n'),
        comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
        evaluationPeriods: 2,
        threshold: 1,
        // Lambda only emits metrics when the function is invoked. No invocation => no errors.
        treatMissingData: TreatMissingData.NOT_BREACHING,
      }),
    );
  }

  // Count of packages for which a license file was found.
  public metricFoundLicenseFile(opts?: MetricOptions): Metric {
    return new Metric({
      period: Duration.minutes(5),
      statistic: Statistic.SUM,
      ...opts,
      metricName: MetricName.FOUND_LICENSE_FILE,
      namespace: METRICS_NAMESPACE,
    });
  }

  // Count of packages rejected because their license is not eligible.
  public metricIneligibleLicense(opts?: MetricOptions): Metric {
    return new Metric({
      period: Duration.minutes(5),
      statistic: Statistic.SUM,
      ...opts,
      metricName: MetricName.INELIGIBLE_LICENSE,
      namespace: METRICS_NAMESPACE,
    });
  }

  // Count of packages rejected because their assembly could not be processed.
  public metricInvalidAssembly(opts?: MetricOptions): Metric {
    return new Metric({
      period: Duration.minutes(5),
      statistic: Statistic.SUM,
      ...opts,
      metricName: MetricName.INVALID_ASSEMBLY,
      namespace: METRICS_NAMESPACE,
    });
  }

  // Count of packages rejected because their tarball could not be processed.
  public metricInvalidTarball(opts?: MetricOptions): Metric {
    return new Metric({
      period: Duration.minutes(5),
      statistic: Statistic.SUM,
      ...opts,
      metricName: MetricName.INVALID_TARBALL,
      namespace: METRICS_NAMESPACE,
    });
  }

  /**
   * This metric is the total count of packages that were rejected due to
   * mismatched identity (name, version, license) between the `package.json`
   * file and the `.jsii` attribute.
   */
  public metricMismatchedIdentityRejections(opts?: MetricOptions): Metric {
    return new Metric({
      period: Duration.minutes(5),
      statistic: Statistic.SUM,
      ...opts,
      metricName: MetricName.MISMATCHED_IDENTITY_REJECTIONS,
      namespace: METRICS_NAMESPACE,
    });
  }
}

interface ReprocessIngestionWorkflowProps {
  readonly bucket: IBucket;
  readonly queue: IQueue;
}

/**
 * A StepFunctions State Machine to reprocess every currently indexed package
 * through the ingestion function. This should not be frequently required, but
 * can come in handy at times.
 *
 * For more information, refer to the runbook at
 * https://github.com/cdklabs/construct-hub/blob/main/docs/operator-runbook.md
 */
class ReprocessIngestionWorkflow extends Construct {
  public readonly stateMachine: StateMachine;

  public constructor(scope: Construct, id: string, props: ReprocessIngestionWorkflowProps) {
    super(scope, id);

    const lambdaFunction = new ReIngest(this, 'Function', {
      architecture: gravitonLambdaIfAvailable(this),
      description: '[ConstructHub/Ingestion/ReIngest] The function used to reprocess packages through ingestion',
      environment: { BUCKET_NAME: props.bucket.bucketName, QUEUE_URL: props.queue.queueUrl },
      memorySize: 10_240,
      tracing: Tracing.ACTIVE,
      timeout: Duration.minutes(3),
    });

    props.queue.grantSendMessages(lambdaFunction);
    props.bucket.grantRead(lambdaFunction, `${STORAGE_KEY_PREFIX}*${METADATA_KEY_SUFFIX}`);
    props.bucket.grantRead(lambdaFunction, `${STORAGE_KEY_PREFIX}*${PACKAGE_KEY_SUFFIX}`);

    // Need to physical-name the state machine so it can self-invoke.
    const stateMachineName = stateMachineNameFrom(this.node.path);

    // List one page of the storage prefix: a follow-up page when a
    // ContinuationToken is present on the input, the first page otherwise.
    const listBucket = new Choice(this, 'Has a ContinuationToken?')
      .when(Condition.isPresent('$.ContinuationToken'), new CallAwsService(this, 'S3.ListObjectsV2(NextPage)', {
        service: 's3',
        action: 'listObjectsV2',
        iamAction: 's3:ListBucket',
        iamResources: [props.bucket.bucketArn],
        parameters: {
          Bucket: props.bucket.bucketName,
          ContinuationToken: JsonPath.stringAt('$.ContinuationToken'),
          Prefix: STORAGE_KEY_PREFIX,
        },
        resultPath: '$.response',
      }).addRetry({ errors: ['S3.SdkClientException'] }))
      .otherwise(new CallAwsService(this, 'S3.ListObjectsV2(FirstPage)', {
        service: 's3',
        action: 'listObjectsV2',
        iamAction: 's3:ListBucket',
        iamResources: [props.bucket.bucketArn],
        parameters: {
          Bucket: props.bucket.bucketName,
          Prefix: STORAGE_KEY_PREFIX,
        },
        resultPath: '$.response',
      }).addRetry({ errors: ['S3.SdkClientException'] })).afterwards();

    // Fan out over the listed objects, re-ingesting only metadata objects.
    const process = new Map(this, 'Process Result', {
      itemsPath: '$.response.Contents',
      resultPath: JsonPath.DISCARD,
    }).iterator(
      new Choice(this, 'Is metadata object?')
        .when(
          Condition.stringMatches('$.Key', `*${METADATA_KEY_SUFFIX}`),
          new LambdaInvoke(this, 'Send for reprocessing', { lambdaFunction })
            // Ample retries here... We should never fail because of throttling....
            .addRetry({ errors: ['Lambda.TooManyRequestsException'], backoffRate: 1.1, interval: Duration.minutes(1), maxAttempts: 30 }),
        )
        .otherwise(new Succeed(this, 'Nothing to do')),
    );

    // When more pages remain, the execution "continues as new" by starting a
    // fresh execution of itself carrying the NextContinuationToken, which
    // keeps each execution's history well below the StepFunctions limit.
    listBucket.next(
      new Choice(this, 'Is there more?')
        .when(
          Condition.isPresent('$.response.NextContinuationToken'),
          new StepFunctionsStartExecution(this, 'Continue as new', {
            associateWithParent: true,
            stateMachine: StateMachine.fromStateMachineArn(this, 'ThisStateMachine', Stack.of(this).formatArn({
              arnFormat: ArnFormat.COLON_RESOURCE_NAME,
              service: 'states',
              resource: 'stateMachine',
              resourceName: stateMachineName,
            })),
            input: TaskInput.fromObject({ ContinuationToken: JsonPath.stringAt('$.response.NextContinuationToken') }),
            integrationPattern: IntegrationPattern.REQUEST_RESPONSE,
            resultPath: JsonPath.DISCARD,
          }).addRetry({ errors: ['StepFunctions.ExecutionLimitExceeded'] }),
        ).afterwards({ includeOtherwise: true })
        .next(process),
    );

    this.stateMachine = new StateMachine(this, 'StateMachine', {
      definition: listBucket,
      stateMachineName,
      timeout: Duration.hours(1),
    });

    props.bucket.grantRead(this.stateMachine);
    props.queue.grantSendMessages(this.stateMachine);
  }
}

/**
 * This turns a node path into a valid state machine name, to try and improve
 * the StepFunction's AWS console experience while minimizing the risk for
 * collisions.
 */
function stateMachineNameFrom(nodePath: string): string {
  // Poor man's replace all...
  return nodePath.split(/[^a-z0-9+!@.()=_'-]+/i).join('.');
}
the_stack
import * as fs from 'fs'; import * as path from 'path'; import { system, filesystem } from 'gluegun'; // eslint-disable-next-line // @ts-ignore import * as pgtools from 'pgtools'; const pgtoolsCreateDB = pgtools.createdb; const pgtoolsDropDB = pgtools.dropdb; import { callWarthogCLI, createDB, dropDB, spyOnStd } from '../helpers'; import { setTestServerEnvironmentVariables } from '../server-vars'; const root = filesystem.path(__dirname, '../../../'); const GENERATED_FOLDER = path.join(__dirname, '../../../tmp/cli-tests'); describe('cli functional tests', () => { const spy = spyOnStd(); // Gives us access to whatever is written to stdout as part of the CLI command const openMock = jest.fn(); beforeAll(async () => { await filesystem.dirAsync(GENERATED_FOLDER); // cleanup test artifacts jest.mock('open', () => openMock); }); beforeEach(() => { setTestServerEnvironmentVariables(); spy.clear(); }); afterAll(() => { filesystem.remove(GENERATED_FOLDER); // cleanup test artifacts filesystem.remove(path.join(__dirname, 'tmp')); openMock.mockReset(); }); // This test actually calls the CLI via a system call. This won't count towards test coverage // but it's the most thorough way we can actually check to see if everything is wired up correctly test('spin up an actual process to test the full cli is wired up', async done => { expect.assertions(2); // Construct the environment variables here so that they're passed into cli command const env = { ...process.env }; const output = await system.run( 'node ' + filesystem.path(root, 'bin', 'warthog') + ' --version', { env } ); // TODO: should we bother with this since we don't update the version in package.json? 
expect(output).toContain('0.0.0-development'); // This makes sure we're actually getting the version command and not the standard "help" command, which also includes the version expect(output).not.toContain('help'); done(); }); test('outputs help', async done => { await callWarthogCLI('--help'); const stdout = spy.getStdOutErr(); expect(stdout).toContain('generate (g)'); done(); }); test('generates models', async done => { expect.assertions(23); await callWarthogCLI( `generate user name! nickname numLogins:int! verified:bool! registeredAt:date balance:float! meta:json! --folder ${GENERATED_FOLDER}` ); const stdout = spy.getStdOutErr(); let fileContents; expect(stdout).toContain(`Generated file at ${GENERATED_FOLDER}/user.model.ts`); fileContents = filesystem.read(`${GENERATED_FOLDER}/user.model.ts`); expect(fileContents).toContain('export class User'); expect(fileContents).toContain('@StringField()'); expect(fileContents).toContain('name!: string;'); // This also checks that prettier was run to remove trailing comma expect(fileContents).toContain('@StringField({ nullable: true })'); expect(fileContents).toContain('nickname?: string;'); expect(fileContents).toContain('@IntField()'); expect(fileContents).toContain('numLogins!: number;'); expect(fileContents).toContain('@BooleanField()'); expect(fileContents).toContain('verified!: boolean;'); // This also checks that prettier was run to remove trailing comma expect(fileContents).toContain('@DateField({ nullable: true })'); expect(fileContents).toContain('registeredAt?: Date;'); expect(fileContents).toContain('@FloatField()'); expect(fileContents).toContain('balance!: number;'); // Generator should dynamically add these imports expect(fileContents).toContain('BooleanField,'); expect(fileContents).toContain('DateField,'); expect(fileContents).toContain('FloatField,'); expect(fileContents).toContain('IntField,'); expect(fileContents).toContain('JSONField,'); expect(stdout).toContain(`Generated file at 
${GENERATED_FOLDER}/user.service.ts`); fileContents = filesystem.read(`${GENERATED_FOLDER}/user.service.ts`); expect(fileContents).toContain("@Service('UserService')"); expect(stdout).toContain(`Generated file at ${GENERATED_FOLDER}/user.resolver.ts`); fileContents = filesystem.read(`${GENERATED_FOLDER}/user.resolver.ts`); expect(fileContents).toContain('this.service.find<UserWhereInput>'); done(); }); test('generates a shell of a file of no params specified', async done => { expect.assertions(9); await callWarthogCLI(`generate empty_class --folder ${GENERATED_FOLDER}`); const stdout = spy.getStdOutErr(); expect(stdout).toContain(`Generated file at ${GENERATED_FOLDER}/empty-class.model.ts`); const fileContents = filesystem.read(`${GENERATED_FOLDER}/empty-class.model.ts`); expect(fileContents).toContain('export class EmptyClass extends BaseModel'); expect(fileContents).toContain('@StringField({ nullable: true })'); expect(fileContents).toContain('fieldName?: string;'); // Generator should NOT dynamically add these imports expect(fileContents).not.toContain('BooleanField'); expect(fileContents).not.toContain('DateField'); expect(fileContents).not.toContain('FloatField'); expect(fileContents).not.toContain('IntField'); expect(fileContents).not.toContain('JSONField'); done(); }); test('generates to a dynamic path', async done => { expect.assertions(2); await callWarthogCLI('generate empty_class --folder ' + GENERATED_FOLDER + '/${camelName}'); const stdout = spy.getStdOutErr(); // Note the camel cased "emptyClass below" const file = `${GENERATED_FOLDER}/emptyClass/empty-class.model.ts`; expect(stdout).toContain(`Generated file at ${file}`); const fileContents = filesystem.read(file); expect(fileContents).toContain('export class EmptyClass extends BaseModel'); done(); }); test('requires name for db:create', async done => { expect.assertions(1); // process.env.PGUSER = 'postgres'; await callWarthogCLI('db:create', { WARTHOG_DB_DATABASE: '' }); const stdout = 
spy.getStdOutErr(); expect(stdout).toContain('Database name is required'); done(); }); test('successfully creates a database', async done => { pgtools.createdb = jest.fn().mockImplementation((config: any, dbname: string, cb: Function) => { cb(null, { success: true }); }); await callWarthogCLI('db:create'); const stdout = spy.getStdOutErr(); expect(stdout).toContain("Database 'warthog-test' created!"); pgtools.createdb = pgtoolsCreateDB; done(); }); test('throws an error if pg library cant create DB', async done => { pgtools.createdb = jest.fn().mockImplementation((config: any, dbname: string, cb: Function) => { cb({ message: 'duplicate database' }, null); }); await callWarthogCLI('db:create'); const stdout = spy.getStdOutErr(); expect(stdout).toContain("Database 'warthog-test' already exists"); pgtools.createdb = pgtoolsCreateDB; done(); }); test('db:drop: throws an error if database does not exist', async done => { pgtools.dropdb = jest.fn().mockImplementation((config: any, dbname: string, cb: Function) => { cb({ name: 'invalid_catalog_name' }, null); }); await callWarthogCLI('db:drop'); const stdout = spy.getStdOutErr(); expect(stdout).toContain("Database 'warthog-test' does not exist"); pgtools.dropdb = pgtoolsDropDB; done(); }); test('db:drop success', async done => { pgtools.dropdb = jest.fn().mockImplementation((config: any, dbname: string, cb: Function) => { cb(null, { success: true }); }); await callWarthogCLI('db:drop'); const stdout = spy.getStdOutErr(); expect(stdout).toContain("Database 'warthog-test' dropped!"); pgtools.dropdb = pgtoolsDropDB; done(); }); test('db:drop success', async done => { await callWarthogCLI('db:migrate:generate'); const stdout = spy.getStdOutErr(); expect(stdout).toContain('"name" option is required'); spy.clear(); done(); }); test('generates and runs migrations', async done => { const migrationDBName = 'warthog-test-generate-migrations'; // Set environment variables for a test server that writes to a separate test DB and does 
NOT autogenerate files setTestServerEnvironmentVariables({ WARTHOG_DB_DATABASE: migrationDBName, WARTHOG_DB_SYNCHRONIZE: 'false' }); await allowError( dropDB(migrationDBName), 'DropDB will likely fail since DB might not be there' ); await createDB(migrationDBName); await callWarthogCLI('db:migrate:generate --name cli_test_db_migration'); const stdout = spy.getStdOutErr(); expect(stdout).toContain('-CliTestDbMigration.ts'); expect(stdout).toContain('has been generated successfully.'); const migrationDir = String(process.env.WARTHOG_DB_MIGRATIONS_DIR); const migrationFileName = fs.readdirSync(migrationDir)[0]; const migrationContents = fs.readFileSync(path.join(migrationDir, migrationFileName), 'utf-8'); expect(migrationContents).toContain('CREATE TABLE "kitchen_sinks"'); expect(migrationContents).toContain('CREATE TABLE "dishs"'); expect(migrationContents).toContain('DROP TABLE "dishs"'); expect(migrationContents).toContain('DROP TABLE "kitchen_sinks"'); done(); }); test('warthog (with no command)', async done => { await callWarthogCLI(''); const stdout = spy.getStdOutErr(); expect(stdout).toContain('Warthog: GraphQL API Framework'); done(); }); test('warthog playground', async done => { await callWarthogCLI('playground'); expect(openMock).toBeCalledWith('http://localhost:4000/playground', { wait: false }); done(); }); test('codegen creates correct files', async done => { const folder = './tmp/codegen'; filesystem.remove(folder); process.env.WARTHOG_GENERATED_FOLDER = folder; await callWarthogCLI('codegen'); // TODO: how much file content validation should we do here? 
const bindingContents = filesystem.read(`${folder}/binding.ts`); expect(bindingContents).toContain('export interface Binding'); const classContents = filesystem.read(`${folder}/classes.ts`); expect(classContents).toContain('export enum KitchenSinkOrderByEnum'); const indexContents = filesystem.read(`${folder}/index.ts`); expect(indexContents).toContain("export * from './classes';"); const ormConfigContents = filesystem.read(`${folder}/ormconfig.ts`); expect(ormConfigContents).toContain('module.exports = getBaseConfig();'); const schemaContents = filesystem.read(`${folder}/schema.graphql`); expect(schemaContents).toContain('input KitchenSinkWhereInput'); filesystem.remove(folder); done(); }); test('warthog new', async (done: Function) => { const tmpFolder = path.join(__dirname, 'tmp'); // delete folder first await callWarthogCLI('new foo', { WARTHOG_CLI_GENERATE_PATH: tmpFolder }); const packageJson = require(path.join(__dirname, 'tmp', 'package.json')); // eslint-disable-line const caretDep = /^\^\d+/; // ex: "^4" expect(packageJson.dependencies['dotenv']).toMatch(caretDep); expect(packageJson.dependencies['reflect-metadata']).toMatch(caretDep); expect(packageJson.dependencies['warthog']).toMatch(caretDep); expect(packageJson.devDependencies['@types/jest']).toMatch(caretDep); expect(packageJson.devDependencies['dotenvi']).toMatch(caretDep); expect(packageJson.devDependencies['jest']).toMatch(caretDep); expect(packageJson.devDependencies['ts-jest']).toMatch(caretDep); expect(packageJson.devDependencies['ts-node']).toMatch(caretDep); expect(packageJson.devDependencies['typescript']).toMatch(caretDep); filesystem.remove(tmpFolder); done(); }); }); async function allowError(promise: Promise<unknown>, msg: string) { try { await promise; } catch (error) { console.log(`Allowing error [${msg}]`, error.message); } }
the_stack
import { isString } from '@vue/shared'
import { ForParseResult } from './transforms/vFor'
import {
  CREATE_VNODE,
  WITH_DIRECTIVES,
  RENDER_SLOT,
  CREATE_SLOTS,
  RENDER_LIST,
  OPEN_BLOCK,
  CREATE_BLOCK,
  FRAGMENT
} from './runtimeHelpers'
import { PropsExpression } from './transforms/transformElement'

// Vue template is a platform-agnostic superset of HTML (syntax only).
// More namespaces like SVG and MathML are declared by platform specific
// compilers.
export type Namespace = number

// Only the HTML namespace is declared here; platform compilers extend this.
export const enum Namespaces {
  HTML
}

// Tag for every AST node kind. The first group covers template nodes, the
// `containers` group covers nodes that wrap other nodes, and the `codegen`
// group covers the JavaScript AST used for render-function generation.
// NOTE: this is a const enum — member order defines the numeric values, so
// never reorder or insert in the middle.
export const enum NodeTypes {
  ROOT,
  ELEMENT,
  TEXT,
  COMMENT,
  SIMPLE_EXPRESSION,
  INTERPOLATION,
  ATTRIBUTE,
  DIRECTIVE,
  // containers
  COMPOUND_EXPRESSION,
  IF,
  IF_BRANCH,
  FOR,
  TEXT_CALL,
  // codegen
  JS_CALL_EXPRESSION,
  JS_OBJECT_EXPRESSION,
  JS_PROPERTY,
  JS_ARRAY_EXPRESSION,
  JS_FUNCTION_EXPRESSION,
  JS_SEQUENCE_EXPRESSION,
  JS_CONDITIONAL_EXPRESSION,
  JS_CACHE_EXPRESSION
}

// Discriminates what kind of element an ELEMENT node represents (plain tag,
// component, <slot>, <template>, portal or suspense).
export const enum ElementTypes {
  ELEMENT,
  COMPONENT,
  SLOT,
  TEMPLATE,
  PORTAL,
  SUSPENSE
}

// Base shape shared by every AST node: a type tag plus its source location.
export interface Node {
  type: NodeTypes
  loc: SourceLocation
}

// The node's range. The `start` is inclusive and `end` is exclusive.
// [start, end)
export interface SourceLocation {
  start: Position
  end: Position
  source: string
}

export interface Position {
  offset: number // from start of file
  line: number
  column: number
}

// Nodes that can contain template children.
export type ParentNode = RootNode | ElementNode | IfBranchNode | ForNode

export type ExpressionNode = SimpleExpressionNode | CompoundExpressionNode

// Everything that may appear as a child inside a template.
export type TemplateChildNode =
  | ElementNode
  | InterpolationNode
  | CompoundExpressionNode
  | TextNode
  | CommentNode
  | IfNode
  | ForNode
  | TextCallNode

export interface RootNode extends Node {
  type: NodeTypes.ROOT
  children: TemplateChildNode[]
  helpers: symbol[]
  components: string[]
  directives: string[]
  hoists: JSChildNode[]
  cached: number
  codegenNode: TemplateChildNode | JSChildNode | undefined
}

// ELEMENT nodes are a union discriminated by `tagType`.
export type ElementNode =
  | PlainElementNode
  | ComponentNode
  | SlotOutletNode
  | TemplateNode
  | PortalNode
  | SuspenseNode

export interface BaseElementNode extends Node {
  type: NodeTypes.ELEMENT
  ns: Namespace
  tag: string
  tagType: ElementTypes
  isSelfClosing: boolean
  props: Array<AttributeNode | DirectiveNode>
  children: TemplateChildNode[]
  codegenNode: CallExpression | SimpleExpressionNode | undefined
}

export interface PlainElementNode extends BaseElementNode {
  tagType: ElementTypes.ELEMENT
  codegenNode:
    | ElementCodegenNode
    | undefined
    | SimpleExpressionNode // only when hoisted
}

export interface ComponentNode extends BaseElementNode {
  tagType: ElementTypes.COMPONENT
  codegenNode: ComponentCodegenNode | undefined
}

export interface SlotOutletNode extends BaseElementNode {
  tagType: ElementTypes.SLOT
  codegenNode: SlotOutletCodegenNode | undefined
}

export interface TemplateNode extends BaseElementNode {
  tagType: ElementTypes.TEMPLATE
  codegenNode:
    | ElementCodegenNode
    | CodegenNodeWithDirective<ElementCodegenNode>
    | undefined
}

export interface PortalNode extends BaseElementNode {
  tagType: ElementTypes.PORTAL
  codegenNode: ElementCodegenNode | undefined
}

export interface SuspenseNode extends BaseElementNode {
  tagType: ElementTypes.SUSPENSE
  codegenNode: ElementCodegenNode | undefined
}

export interface TextNode extends Node {
  type: NodeTypes.TEXT
  content: string
  isEmpty: boolean
}

export interface CommentNode extends Node {
  type: NodeTypes.COMMENT
  content: string
}

export interface AttributeNode extends Node {
  type: NodeTypes.ATTRIBUTE
  name: string
  value: TextNode | undefined
}

export interface DirectiveNode extends Node {
  type: NodeTypes.DIRECTIVE
  name: string
  exp: ExpressionNode | undefined
  arg: ExpressionNode | undefined
  modifiers: string[]
  // optional property to cache the expression parse result for v-for
  parseResult?: ForParseResult
}

export interface SimpleExpressionNode extends Node {
  type: NodeTypes.SIMPLE_EXPRESSION
  content: string
  isStatic: boolean
  isConstant: boolean
  // an expression parsed as the params of a function will track
  // the identifiers declared inside the function body.
  identifiers?: string[]
}

export interface InterpolationNode extends Node {
  type: NodeTypes.INTERPOLATION
  content: ExpressionNode
}

export interface CompoundExpressionNode extends Node {
  type: NodeTypes.COMPOUND_EXPRESSION
  children: (
    | SimpleExpressionNode
    | InterpolationNode
    | TextNode
    | string
    | symbol)[]
  // an expression parsed as the params of a function will track
  // the identifiers declared inside the function body.
  identifiers?: string[]
}

export interface IfNode extends Node {
  type: NodeTypes.IF
  branches: IfBranchNode[]
  codegenNode: IfCodegenNode
}

export interface IfBranchNode extends Node {
  type: NodeTypes.IF_BRANCH
  condition: ExpressionNode | undefined // else
  children: TemplateChildNode[]
}

export interface ForNode extends Node {
  type: NodeTypes.FOR
  source: ExpressionNode
  valueAlias: ExpressionNode | undefined
  keyAlias: ExpressionNode | undefined
  objectIndexAlias: ExpressionNode | undefined
  children: TemplateChildNode[]
  codegenNode: ForCodegenNode
}

export interface TextCallNode extends Node {
  type: NodeTypes.TEXT_CALL
  content: TextNode | InterpolationNode | CompoundExpressionNode
  codegenNode: CallExpression
}

// We also include a number of JavaScript AST nodes for code generation.
// The AST is an intentionally minimal subset just to meet the exact needs of
// Vue render function generation.
export type JSChildNode =
  | CallExpression
  | ObjectExpression
  | ArrayExpression
  | ExpressionNode
  | FunctionExpression
  | ConditionalExpression
  | SequenceExpression
  | CacheExpression

export interface CallExpression extends Node {
  type: NodeTypes.JS_CALL_EXPRESSION
  callee: string | symbol
  arguments: (
    | string
    | symbol
    | JSChildNode
    | TemplateChildNode
    | TemplateChildNode[])[]
}

export interface ObjectExpression extends Node {
  type: NodeTypes.JS_OBJECT_EXPRESSION
  properties: Array<Property>
}

export interface Property extends Node {
  type: NodeTypes.JS_PROPERTY
  key: ExpressionNode
  value: JSChildNode
}

export interface ArrayExpression extends Node {
  type: NodeTypes.JS_ARRAY_EXPRESSION
  elements: Array<string | JSChildNode>
}

export interface FunctionExpression extends Node {
  type: NodeTypes.JS_FUNCTION_EXPRESSION
  params: ExpressionNode | ExpressionNode[] | undefined
  returns: TemplateChildNode | TemplateChildNode[] | JSChildNode
  newline: boolean
}

export interface SequenceExpression extends Node {
  type: NodeTypes.JS_SEQUENCE_EXPRESSION
  expressions: JSChildNode[]
}

export interface ConditionalExpression extends Node {
  type: NodeTypes.JS_CONDITIONAL_EXPRESSION
  test: ExpressionNode
  consequent: JSChildNode
  alternate: JSChildNode
}

export interface CacheExpression extends Node {
  type: NodeTypes.JS_CACHE_EXPRESSION
  index: number
  value: JSChildNode
}

// Codegen Node Types ----------------------------------------------------------
// The interfaces below refine the generic JS AST nodes above into the exact
// call/argument shapes the code generator emits.

// createVNode(...)
export interface PlainElementCodegenNode extends CallExpression {
  callee: typeof CREATE_VNODE | typeof CREATE_BLOCK
  arguments:  // tag, props, children, patchFlag, dynamicProps
    | [string | symbol]
    | [string | symbol, PropsExpression]
    | [string | symbol, 'null' | PropsExpression, TemplateChildNode[]]
    | [
        string | symbol,
        'null' | PropsExpression,
        'null' | TemplateChildNode[],
        string
      ]
    | [
        string | symbol,
        'null' | PropsExpression,
        'null' | TemplateChildNode[],
        string,
        string
      ]
}

export type ElementCodegenNode =
  | PlainElementCodegenNode
  | CodegenNodeWithDirective<PlainElementCodegenNode>

// createVNode(...)
export interface PlainComponentCodegenNode extends CallExpression {
  callee: typeof CREATE_VNODE | typeof CREATE_BLOCK
  arguments:  // Comp, props, slots, patchFlag, dynamicProps
    | [string | symbol]
    | [string | symbol, PropsExpression]
    | [string | symbol, 'null' | PropsExpression, SlotsExpression]
    | [
        string | symbol,
        'null' | PropsExpression,
        'null' | SlotsExpression,
        string
      ]
    | [
        string | symbol,
        'null' | PropsExpression,
        'null' | SlotsExpression,
        string,
        string
      ]
}

export type ComponentCodegenNode =
  | PlainComponentCodegenNode
  | CodegenNodeWithDirective<PlainComponentCodegenNode>

export type SlotsExpression = SlotsObjectExpression | DynamicSlotsExpression

// { foo: () => [...] }
export interface SlotsObjectExpression extends ObjectExpression {
  properties: SlotsObjectProperty[]
}

export interface SlotsObjectProperty extends Property {
  value: SlotFunctionExpression
}

export interface SlotFunctionExpression extends FunctionExpression {
  returns: TemplateChildNode[]
}

// createSlots({ ... }, [
//   foo ? () => [] : undefined,
//   renderList(list, i => () => [i])
// ])
export interface DynamicSlotsExpression extends CallExpression {
  callee: typeof CREATE_SLOTS
  arguments: [SlotsObjectExpression, DynamicSlotEntries]
}

export interface DynamicSlotEntries extends ArrayExpression {
  elements: (ConditionalDynamicSlotNode | ListDynamicSlotNode)[]
}

export interface ConditionalDynamicSlotNode extends ConditionalExpression {
  consequent: DynamicSlotNode
  alternate: DynamicSlotNode | SimpleExpressionNode
}

export interface ListDynamicSlotNode extends CallExpression {
  callee: typeof RENDER_LIST
  arguments: [ExpressionNode, ListDynamicSlotIterator]
}

export interface ListDynamicSlotIterator extends FunctionExpression {
  returns: DynamicSlotNode
}

export interface DynamicSlotNode extends ObjectExpression {
  properties: [Property, DynamicSlotFnProperty]
}

export interface DynamicSlotFnProperty extends Property {
  value: SlotFunctionExpression
}

// withDirectives(createVNode(...), [
//   [_directive_foo, someValue],
//   [_directive_bar, someValue, "arg", { mod: true }]
// ])
export interface CodegenNodeWithDirective<T extends CallExpression>
  extends CallExpression {
  callee: typeof WITH_DIRECTIVES
  arguments: [T, DirectiveArguments]
}

export interface DirectiveArguments extends ArrayExpression {
  elements: DirectiveArgumentNode[]
}

export interface DirectiveArgumentNode extends ArrayExpression {
  elements:  // dir, exp, arg, modifiers
    | [string]
    | [string, ExpressionNode]
    | [string, ExpressionNode, ExpressionNode]
    | [string, ExpressionNode, ExpressionNode, ObjectExpression]
}

// renderSlot(...)
export interface SlotOutletCodegenNode extends CallExpression {
  callee: typeof RENDER_SLOT
  arguments:  // $slots, name, props, fallback
    | [string, string | ExpressionNode]
    | [string, string | ExpressionNode, PropsExpression]
    | [
        string,
        string | ExpressionNode,
        PropsExpression | '{}',
        TemplateChildNode[]
      ]
}

export type BlockCodegenNode =
  | ElementCodegenNode
  | ComponentCodegenNode
  | SlotOutletCodegenNode

export interface IfCodegenNode extends SequenceExpression {
  expressions: [OpenBlockExpression, IfConditionalExpression]
}

export interface IfConditionalExpression extends ConditionalExpression {
  consequent: BlockCodegenNode
  alternate: BlockCodegenNode | IfConditionalExpression
}

export interface ForCodegenNode extends SequenceExpression {
  expressions: [OpenBlockExpression, ForBlockCodegenNode]
}

export interface ForBlockCodegenNode extends CallExpression {
  callee: typeof CREATE_BLOCK
  arguments: [typeof FRAGMENT, 'null', ForRenderListExpression, string]
}

export interface ForRenderListExpression extends CallExpression {
  callee: typeof RENDER_LIST
  arguments: [ExpressionNode, ForIteratorExpression]
}

export interface ForIteratorExpression extends FunctionExpression {
  returns: BlockCodegenNode
}

export interface OpenBlockExpression extends CallExpression {
  callee: typeof OPEN_BLOCK
  arguments: []
}

// AST Utilities ---------------------------------------------------------------

// Some expressions, e.g. sequence and conditional expressions, are never
// associated with template nodes, so their source locations are just a stub.
// Container types like CompoundExpression also don't need a real location.
export const locStub: SourceLocation = { source: '', start: { line: 1, column: 1, offset: 0 }, end: { line: 1, column: 1, offset: 0 } } export function createArrayExpression( elements: ArrayExpression['elements'], loc: SourceLocation = locStub ): ArrayExpression { return { type: NodeTypes.JS_ARRAY_EXPRESSION, loc, elements } } export function createObjectExpression( properties: ObjectExpression['properties'], loc: SourceLocation = locStub ): ObjectExpression { return { type: NodeTypes.JS_OBJECT_EXPRESSION, loc, properties } } export function createObjectProperty( key: Property['key'] | string, value: Property['value'] ): Property { return { type: NodeTypes.JS_PROPERTY, loc: locStub, key: isString(key) ? createSimpleExpression(key, true) : key, value } } export function createSimpleExpression( content: SimpleExpressionNode['content'], isStatic: SimpleExpressionNode['isStatic'], loc: SourceLocation = locStub, isConstant: boolean = false ): SimpleExpressionNode { return { type: NodeTypes.SIMPLE_EXPRESSION, loc, isConstant, content, isStatic } } export function createInterpolation( content: InterpolationNode['content'] | string, loc: SourceLocation ): InterpolationNode { return { type: NodeTypes.INTERPOLATION, loc, content: isString(content) ? createSimpleExpression(content, false, loc) : content } } export function createCompoundExpression( children: CompoundExpressionNode['children'], loc: SourceLocation = locStub ): CompoundExpressionNode { return { type: NodeTypes.COMPOUND_EXPRESSION, loc, children } } type InferCodegenNodeType<T> = T extends | typeof CREATE_VNODE | typeof CREATE_BLOCK ? PlainElementCodegenNode | PlainComponentCodegenNode : T extends typeof WITH_DIRECTIVES ? | CodegenNodeWithDirective<PlainElementCodegenNode> | CodegenNodeWithDirective<PlainComponentCodegenNode> : T extends typeof RENDER_SLOT ? 
SlotOutletCodegenNode : CallExpression export function createCallExpression<T extends CallExpression['callee']>( callee: T, args: CallExpression['arguments'] = [], loc: SourceLocation = locStub ): InferCodegenNodeType<T> { return { type: NodeTypes.JS_CALL_EXPRESSION, loc, callee, arguments: args } as any } export function createFunctionExpression( params: FunctionExpression['params'], returns: FunctionExpression['returns'], newline: boolean = false, loc: SourceLocation = locStub ): FunctionExpression { return { type: NodeTypes.JS_FUNCTION_EXPRESSION, params, returns, newline, loc } } export function createSequenceExpression( expressions: SequenceExpression['expressions'] ): SequenceExpression { return { type: NodeTypes.JS_SEQUENCE_EXPRESSION, expressions, loc: locStub } } export function createConditionalExpression( test: ConditionalExpression['test'], consequent: ConditionalExpression['consequent'], alternate: ConditionalExpression['alternate'] ): ConditionalExpression { return { type: NodeTypes.JS_CONDITIONAL_EXPRESSION, test, consequent, alternate, loc: locStub } } export function createCacheExpression( index: number, value: JSChildNode ): CacheExpression { return { type: NodeTypes.JS_CACHE_EXPRESSION, index, value, loc: locStub } }
the_stack
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
import * as msRest from "@azure/ms-rest-js";

export { BaseResource, CloudError };

/**
 * Describes scaling information of a SKU.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface SkuCapacity {
  /** The minimum capacity. */
  readonly minimum?: number;
  /** The maximum capacity that can be set. */
  readonly maximum?: number;
  /** The default capacity. */
  readonly default?: number;
  /** The scale type applicable to the sku. Possible values: 'Automatic', 'Manual', 'None'. */
  readonly scaleType?: SkuCapacityScaleType;
}

/**
 * Describes the SKU capabilities object.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface SkuCapability {
  /** The capability name. */
  readonly name?: string;
  /** The capability value. */
  readonly value?: string;
}

/**
 * Describes metadata for SKU cost info.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface SkuCost {
  /** The meter used for this part of a SKU's cost. */
  readonly meterID?: string;
  /** The multiplier for the meter ID. */
  readonly quantity?: number;
  /** The overall duration represented by the quantity. */
  readonly extendedUnit?: string;
}

/**
 * Describes restrictions which would prevent a SKU from being used.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface SkuRestrictions {
  /** The type of restrictions. Possible values: 'location', 'zone'. */
  readonly type?: ResourceSkuRestrictionsType;
  /**
   * The value of restrictions. If the restriction type is set to location,
   * this would be the different locations where the SKU is restricted.
   */
  readonly values?: string[];
  /** The reason for restriction. Possible values: 'QuotaId', 'NotAvailableForSubscription'. */
  readonly reasonCode?: ResourceSkuRestrictionsReasonCode;
}

/**
 * Details of a commitment plan SKU.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface CatalogSku {
  /** Resource type name. */
  readonly resourceType?: string;
  /** SKU name. */
  readonly name?: string;
  /** SKU tier. */
  readonly tier?: string;
  /** Regions where the SKU is available. */
  readonly locations?: string[];
  /** SKU scaling information. */
  readonly capacity?: SkuCapacity;
  /** The capability information for the specified SKU. */
  readonly capabilities?: SkuCapability[];
  /** The cost information for the specified SKU. */
  readonly costs?: SkuCost[];
  /** Restrictions which would prevent a SKU from being used. Empty if there are no restrictions. */
  readonly restrictions?: SkuRestrictions[];
}

/**
 * Common properties of an ARM resource.
 */
export interface Resource extends BaseResource {
  /** Resource Id. Populated by the server; not serialized in requests. */
  readonly id?: string;
  /** Resource name. Populated by the server; not serialized in requests. */
  readonly name?: string;
  /** Resource location. */
  location: string;
  /** Resource type. Populated by the server; not serialized in requests. */
  readonly type?: string;
  /** User-defined tags for the resource. */
  tags?: { [propertyName: string]: string };
}

/**
 * Properties of an Azure ML commitment association.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface CommitmentAssociationProperties {
  /** The ID of the resource this association points to, such as the ARM ID of an Azure ML web service. */
  readonly associatedResourceId?: string;
  /** The ARM ID of the parent Azure ML commitment plan. */
  readonly commitmentPlanId?: string;
  /** The date at which this commitment association was created, in ISO 8601 format. */
  readonly creationDate?: Date;
}

/**
 * Represents the association between a commitment plan and some other
 * resource, such as a Machine Learning web service.
 */
export interface CommitmentAssociation extends Resource {
  /** An entity tag used to enforce optimistic concurrency. */
  etag?: string;
  /** The properties of the commitment association resource. */
  properties?: CommitmentAssociationProperties;
}

/**
 * The SKU of a resource.
 */
export interface ResourceSku {
  /**
   * The scale-out capacity of the resource. 1 is 1x, 2 is 2x, etc.
   * This impacts the quantities and cost of any commitment plan resource.
   */
  capacity?: number;
  /** The SKU name. Along with tier, uniquely identifies the SKU. */
  name?: string;
  /** The SKU tier. Along with name, uniquely identifies the SKU. */
  tier?: string;
}

/**
 * Specifies the destination Azure ML commitment plan for a move operation.
 */
export interface MoveCommitmentAssociationRequest {
  /** The ARM ID of the commitment plan to re-parent the commitment association to. */
  destinationPlanId?: string;
}

/**
 * The properties of a commitment plan which may be updated via PATCH.
 */
export interface CommitmentPlanPatchPayload {
  /** User-defined tags for the commitment plan. */
  tags?: { [propertyName: string]: string };
  /** The commitment plan SKU. */
  sku?: ResourceSku;
}

/**
 * Represents the quantity a commitment plan provides of a metered resource.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface PlanQuantity {
  /** The quantity added to the commitment plan at an interval specified by its allowance frequency. */
  readonly allowance?: number;
  /** The quantity available to the plan the last time usage was calculated. */
  readonly amount?: number;
  /** The Azure meter for usage against included quantities. */
  readonly includedQuantityMeter?: string;
  /** The Azure meter for usage which exceeds included quantities. */
  readonly overageMeter?: string;
}

/**
 * Properties of an Azure ML commitment plan.
 * All properties are populated by the server and are not serialized in requests.
 */
export interface CommitmentPlanProperties {
  /** Indicates whether usage beyond the commitment plan's included quantities will be charged. */
  readonly chargeForOverage?: boolean;
  /** Indicates whether the commitment plan will incur a charge. */
  readonly chargeForPlan?: boolean;
  /** The date at which this commitment plan was created, in ISO 8601 format. */
  readonly creationDate?: Date;
  /** The included resource quantities this plan gives you. */
  readonly includedQuantities?: { [propertyName: string]: PlanQuantity };
  /** The maximum number of commitment associations that can be children of this commitment plan. */
  readonly maxAssociationLimit?: number;
  /** The maximum scale-out capacity for this commitment plan. */
  readonly maxCapacityLimit?: number;
  /** The minimum scale-out capacity for this commitment plan. */
  readonly minCapacityLimit?: number;
  /** The Azure meter which will be used to charge for this commitment plan. */
  readonly planMeter?: string;
  /** The frequency at which this commitment plan's included quantities are refilled. */
  readonly refillFrequencyInDays?: number;
  /**
   * Indicates whether this commitment plan will be moved into a suspended state
   * if usage goes beyond the commitment plan's included quantities.
   */
  readonly suspendPlanOnOverage?: boolean;
}

/**
 * An Azure ML commitment plan resource.
 */
export interface CommitmentPlan extends Resource {
  /** An entity tag used to enforce optimistic concurrency. */
  etag?: string;
  /** The commitment plan properties. Populated by the server; not serialized in requests. */
  readonly properties?: CommitmentPlanProperties;
  /** The commitment plan SKU. */
  sku?: ResourceSku;
}

/**
 * Represents historical information about usage of the Azure resources
 * associated with a commitment plan.
 */
export interface PlanUsageHistory {
  /** Overage incurred as a result of deleting a commitment plan. */
  planDeletionOverage?: { [propertyName: string]: number };
  /** Overage incurred as a result of migrating a commitment plan from one SKU to another. */
  planMigrationOverage?: { [propertyName: string]: number };
  /** Included quantities remaining after usage against the plan's associated resources was calculated. */
  planQuantitiesAfterUsage?: { [propertyName: string]: number };
  /** Included quantities remaining before usage against the plan's associated resources was calculated. */
  planQuantitiesBeforeUsage?: { [propertyName: string]: number };
  /** Usage against the plan's associated resources which was not covered by included quantities (overage). */
  planUsageOverage?: { [propertyName: string]: number };
  /** Usage against the commitment plan's associated resources. */
  usage?: { [propertyName: string]: number };
  /** The date of usage, in ISO 8601 format. */
  usageDate?: Date;
}

/** Optional Parameters. */
export interface CommitmentAssociationsListOptionalParams extends msRest.RequestOptionsBase {
  /** Continuation token for pagination. */
  skipToken?: string;
}

/** Optional Parameters. */
export interface CommitmentPlansListOptionalParams extends msRest.RequestOptionsBase {
  /** Continuation token for pagination. */
  skipToken?: string;
}

/** Optional Parameters. */
export interface CommitmentPlansListInResourceGroupOptionalParams extends msRest.RequestOptionsBase {
  /** Continuation token for pagination. */
  skipToken?: string;
}

/** Optional Parameters. */
export interface UsageHistoryListOptionalParams extends msRest.RequestOptionsBase {
  /** Continuation token for pagination. */
  skipToken?: string;
}

/** Client options for AzureMLCommitmentPlansManagementClient. */
export interface AzureMLCommitmentPlansManagementClientOptions extends AzureServiceClientOptions {
  /** Base URI for the service. */
  baseUri?: string;
}

/** The list of commitment plan SKUs. */
export interface SkuListResult extends Array<CatalogSku> {
}

/** A page of commitment association resources. */
export interface CommitmentAssociationListResult extends Array<CommitmentAssociation> {
  /** A URI to retrieve the next page of results. */
  nextLink?: string;
}

/** A page of commitment plan resources. */
export interface CommitmentPlanListResult extends Array<CommitmentPlan> {
  /** A URI to retrieve the next page of results. */
  nextLink?: string;
}

/** A page of usage history. */
export interface PlanUsageHistoryListResult extends Array<PlanUsageHistory> {
  /** A URI to retrieve the next page of results. */
  nextLink?: string;
}

/** Defines values for SkuCapacityScaleType: 'Automatic', 'Manual', 'None'. */
export type SkuCapacityScaleType = 'Automatic' | 'Manual' | 'None';

/** Defines values for ResourceSkuRestrictionsType: 'location', 'zone'. */
export type ResourceSkuRestrictionsType = 'location' | 'zone';

/** Defines values for ResourceSkuRestrictionsReasonCode: 'QuotaId', 'NotAvailableForSubscription'. */
export type ResourceSkuRestrictionsReasonCode = 'QuotaId' | 'NotAvailableForSubscription';

/** Contains response data for the list operation. */
export type SkusListResponse = SkuListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: SkuListResult;
  };
};

/** Contains response data for the get operation. */
export type CommitmentAssociationsGetResponse = CommitmentAssociation & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentAssociation;
  };
};

/** Contains response data for the list operation. */
export type CommitmentAssociationsListResponse = CommitmentAssociationListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentAssociationListResult;
  };
};

/** Contains response data for the move operation. */
export type CommitmentAssociationsMoveResponse = CommitmentAssociation & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentAssociation;
  };
};

/** Contains response data for the listNext operation. */
export type CommitmentAssociationsListNextResponse = CommitmentAssociationListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentAssociationListResult;
  };
};

/** Contains response data for the get operation. */
export type CommitmentPlansGetResponse = CommitmentPlan & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlan;
  };
};

/** Contains response data for the createOrUpdate operation. */
export type CommitmentPlansCreateOrUpdateResponse = CommitmentPlan & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlan;
  };
};

/** Contains response data for the patch operation. */
export type CommitmentPlansPatchResponse = CommitmentPlan & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlan;
  };
};

/** Contains response data for the list operation. */
export type CommitmentPlansListResponse = CommitmentPlanListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlanListResult;
  };
};

/** Contains response data for the listInResourceGroup operation. */
export type CommitmentPlansListInResourceGroupResponse = CommitmentPlanListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlanListResult;
  };
};

/** Contains response data for the listNext operation. */
export type CommitmentPlansListNextResponse = CommitmentPlanListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlanListResult;
  };
};

/** Contains response data for the listInResourceGroupNext operation. */
export type CommitmentPlansListInResourceGroupNextResponse = CommitmentPlanListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: CommitmentPlanListResult;
  };
};

/** Contains response data for the list operation. */
export type UsageHistoryListResponse = PlanUsageHistoryListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: PlanUsageHistoryListResult;
  };
};

/** Contains response data for the listNext operation. */
export type UsageHistoryListNextResponse = PlanUsageHistoryListResult & {
  /** The underlying HTTP response, with the body as text and as parsed JSON/XML. */
  _response: msRest.HttpResponse & {
    bodyAsText: string;
    parsedBody: PlanUsageHistoryListResult;
  };
};
the_stack
import { MessagePackage } from '../transport_strategy/api_transport_base';
import * as coreState from '../../core_state';
import { getDefaultRequestHandler, actionMap } from './api_protocol_base';
import {ApiPath, ApiPolicyDelegate, Endpoint} from '../shapes';
const rvmBus = require('../../rvm/rvm_message_bus').rvmMessageBus;
// retrieve permission setting from registry
import { GetDesktopOwnerSettings } from '../../rvm/rvm_message_bus';
import { writeToLog } from '../../log';
import {app as electronApp} from 'electron';

// Cache of configUrl => permission object retrieved from the RVM.
// A configUrl mapped to boolean true means the RVM request succeeded but
// did not return permissions.
const configUrlPermissionsMap : { [url: string]: any } = {};

// Policy key that can be set as a default for all applications. Checked
// ONLY IF permissions for a particular URL are not defined.
const CONFIG_URL_WILDCARD = 'default';

// Desktop-owner-settings key for the RVM request timeout, in ms.
const DESKTOP_OWNER_SETTINGS_TIMEOUT: string = 'desktop-owner-settings-timeout';
let desktopOwnerSettingsTimeout: number = 2000; // in ms
let applicationSettingsEnabled: boolean = false; // true if applicationSettings is defined in desktop owner settings

type ApiPolicy = {
    // For backwards compatibility, policyName can itself be a single config URL.
    [policyName: string]: {
        // Wildcard URL patterns. If missing, policyName should be a single URL (no wildcard).
        // NOTE(review): `[string]` is a one-element tuple type; `string[]` was likely intended — verify.
        urls?: [string];
        permissions: any;
    }
};
let apiPolicies: ApiPolicy; // policies for all APIs

// Outcome of a policy lookup for an action.
enum POLICY_AUTH_RESULT {
    Allowed = 1,
    Denied,
    NotDefined // config URL not found in policy. Check window options instead
}

// Per-API delegates that apply custom permission checks beyond a boolean flag.
const delegateMap: Map<ApiPath, ApiPolicyDelegate> = new Map();

/**
 * Returns the dotted API path registered for an action in actionMap,
 * or '' when the action is not registered.
 */
function getApiPath(action: string) : string {
    return actionMap[action] ? actionMap[action].apiPath : '';
}

/**
 * Returns the default permission for an action. Actions that are unknown,
 * or registered without an explicit defaultPermission, default to allowed.
 */
function getApiDefaultPermission(action: string) : boolean {
    if (actionMap[action]) {
        return actionMap[action].defaultPermission === undefined || actionMap[action].defaultPermission;
    } else {
        return true;
    }
}

/**
 * Finds the policy entry matching a config URL. Match order: the wildcard
 * entry (only when url is the wildcard itself), then a policy's `urls`
 * patterns, then the policy name itself treated as a URL pattern.
 * Returns undefined when no policy matches or no policies are loaded.
 */
function searchPolicyByConfigUrl(url: string): any {
    writeToLog(1, `searchPolicyByConfigUrl ${url}`, true);
    if (apiPolicies) {
        for (const policyName of Object.keys(apiPolicies)) {
            const policy = apiPolicies[policyName];
            if (url === CONFIG_URL_WILDCARD && url === policyName) {
                return policy;
            } else if (Array.isArray(policy.urls) && electronApp.matchesURL(url, policy.urls)) {
                writeToLog(1, `searchPolicyByConfigUrl matched by policy name ${policyName}`, true);
                return policy;
            } else if (electronApp.matchesURL(url, [policyName])) {
                writeToLog(1, `searchPolicyByConfigUrl matched by policy name ${policyName}`, true);
                return policy;
            }
        }
    } else {
        writeToLog('error', 'searchPolicyByConfigUrl: missing API policies');
    }
}

/**
 * Checks if an action is allowed based on permission definitions.
 *
 * Example of permission definitions:
 *
 *   "permissions": {
 *       "System": { "launchExternalProcess": true },
 *       "System": { "Clipboard" : { "availableFormats": true } },
 *       "Window": { "getNativeId": true },
 *       { "readRegistryValue": {
 *           "enabled": true,
 *           "registryKeys": [ "HKEY_CURRENT_USER\\Software\\OpenFin\\RVM", "HKEY_CURRENT_USER\\Software\\Oracle" ]
 *       } }
 *   }
 *
 * @param apiPath dotted API path such as 'Window.getNativeId' (the original
 *        doc described it as an array; the code splits a string on '.')
 * @param permissionDefinitions permissions options
 * @param payload API message payload
 * @param defaultPermission default permission for the API
 * @returns {boolean} true means permitted
 */
function checkPermissionDefinitions(apiPath: ApiPath, permissionDefinitions: any, payload: any, defaultPermission: boolean) : boolean {
    let permitted: boolean = defaultPermission;
    if (permissionDefinitions) {
        // Walk the definitions object one path segment at a time.
        const parts: string[] = apiPath.split('.');
        const levels: number = parts.length;
        let level: number;
        let lastValue: any = permissionDefinitions;
        for (level = 0; level < levels; level += 1) {
            const part: string = parts[level];
            if (lastValue.hasOwnProperty(part)) {
                lastValue = lastValue[part];
            } else {
                break;
            }
        }
        writeToLog(1, `checkPermissionDefinitions level ${level} ${apiPath}`, true);
        if (level === levels) { // the full path was found in the definitions
            if (typeof lastValue === 'boolean') {
                // simple true or false
                permitted = lastValue;
            } else if (delegateMap.has(apiPath)) {
                // a registered delegate decides based on the settings object
                writeToLog(1, `checkPermissionDefinitions calling delegate ${apiPath}`, true);
                permitted = delegateMap.get(apiPath).checkPermissions({apiPath, permissionSettings: lastValue, payload});
            } else {
                writeToLog(1, `checkPermissionDefinitions api path not defined ${apiPath}`, true);
            }
        }
    }
    return permitted;
}

/**
 * Authorize the action for a window sending the action based on window
 * options, recursively consulting parent app options while still allowed.
 *
 * @param windowOpts window options
 * @param parentUuid uuid of parent app
 * @param action in message
 * @param payload API message payload
 * @returns {boolean} true if authorized (note: synchronous, not a Promise
 *          as the original doc claimed)
 */
function authorizeActionFromWindowOptions(windowOpts: any, parentUuid: string, action: string, payload: any): boolean {
    windowOpts = windowOpts || {}; // todo Is this really needed?
    const { uuid, name, permissions } = windowOpts;
    const logSuffix = `'${action}' for ${uuid} ${name}`;
    writeToLog(1, `authorizeAction ${logSuffix}`, true);
    const apiPath: ApiPath = getApiPath(action);
    let allowed: boolean = getApiDefaultPermission(action);
    if (apiPath) { // if listed in the map, has to be checked
        if (permissions) {
            allowed = checkPermissionDefinitions(apiPath, permissions, payload, allowed);
        }
    }
    if (allowed && parentUuid) { // check parent if there is one
        const parentObject = coreState.getAppByUuid(parentUuid);
        if (parentObject) {
            const parentOpts = parentObject._options;
            if (parentOpts) {
                writeToLog(1, `authorizeAction checks parent ${parentUuid} ${logSuffix}`, true);
                // Recurse up the parent chain; any ancestor can deny.
                allowed = authorizeActionFromWindowOptions(parentOpts, parentObject.parentUuid, action, payload);
            } else {
                writeToLog(1, `authorizeAction missing parent options ${parentUuid} ${logSuffix}`, true);
            }
        } else {
            writeToLog(1, `authorizeAction missing parent ${parentUuid} ${logSuffix}`, true);
        }
    }
    return allowed;
}

/**
 * Authorize the action for a window sending the action based on policies
 * supplied by the RVM. Falls back to the wildcard policy when the window's
 * config URL has no permissions of its own.
 *
 * @param windowOpts window options
 * @param action action in message
 * @param payload API message payload
 * @returns {Promise<POLICY_AUTH_RESULT>} resolves with the policy decision;
 *          rejects when the RVM permission query fails.
 *          NOTE(review): rejects with `false` rather than an Error — the
 *          rejection reason carries no context; consider reject(new Error(...)).
 */
function authorizeActionFromPolicy(windowOpts: any, action: string, payload: any): Promise<POLICY_AUTH_RESULT> {
    const { uuid, name } = windowOpts;
    const logSuffix = `'${action}' for ${uuid} ${name}`;
    writeToLog(1, `authorizeActionFromPolicy ${logSuffix}`, true);
    const apiPath: ApiPath = getApiPath(action);
    return new Promise((resolve, reject) => {
        if (applicationSettingsEnabled === true) {
            const configUrl = coreState.getConfigUrlByUuid(uuid);
            const defaultPermission: boolean = getApiDefaultPermission(action);
            if (configUrl) {
                writeToLog(1, `authorizeActionFromPolicy checking with config url ${configUrl} ${logSuffix}`, true);
                requestAppPermissions(configUrl).then((resultByUrl: any) => {
                    if (resultByUrl.permissions) {
                        resolve(checkPermissionDefinitions(apiPath, resultByUrl.permissions, payload, defaultPermission)
                            ? POLICY_AUTH_RESULT.Allowed : POLICY_AUTH_RESULT.Denied);
                    } else {
                        // check default permissions defined with CONFIG_URL_WILDCARD
                        writeToLog(1, `authorizeActionFromPolicy checking with RVM ${CONFIG_URL_WILDCARD} ${logSuffix}`, true);
                        requestAppPermissions(CONFIG_URL_WILDCARD).then((resultByDefault: any) => {
                            if (resultByDefault.permissions) {
                                resolve(checkPermissionDefinitions(apiPath, resultByDefault.permissions, payload, defaultPermission)
                                    ? POLICY_AUTH_RESULT.Allowed : POLICY_AUTH_RESULT.Denied);
                            } else {
                                resolve(POLICY_AUTH_RESULT.NotDefined); // config URL not defined in policy
                            }
                        }).catch((error: any) => {
                            writeToLog(1, `authorizeActionFromPolicy query for permissions failed ${CONFIG_URL_WILDCARD} ${logSuffix} ${error}`, true);
                            reject(false);
                        });
                    }
                }).catch((error: any) => {
                    writeToLog(1, `authorizeActionFromPolicy query for permissions failed ${configUrl} ${logSuffix} ${error}`, true);
                    reject(false);
                });
            } else {
                writeToLog(1, `authorizeActionFromPolicy configUrl not defined ${logSuffix}`, true);
                resolve(POLICY_AUTH_RESULT.NotDefined); // config URL not defined in policy
            }
        } else {
            writeToLog(1, `authorizeActionFromPolicy applicationSettingsEnabled ${applicationSettingsEnabled} ${logSuffix}`, true);
            resolve(POLICY_AUTH_RESULT.NotDefined); // config URL not defined in policy
        }
    });
}

/**
 * Message pre-processor.
 *
 * @param msg message package to check
 * @param next function to call if ok to proceed
 */
function apiPolicyPreProcessor(msg: MessagePackage, next: () => void): void {
    const { identity, data, nack } = msg;
    const {action, payload} = data;
    const apiPath: ApiPath = getApiPath(action);
    const errorMessage = 'Rejected, action is not authorized. 
See: https://developers.openfin.co/docs/api-security'; if (typeof identity === 'object' && apiPath) { // only check if included in the map const { uuid, name } = identity; const logSuffix = `'${action}' from ${uuid} ${name}`; writeToLog(1, `apiPolicyPreProcessor ${logSuffix}`, true); let originWindow = coreState.getRoutingInfoByUuidFrame(uuid, name); if (!originWindow && identity.entityType === 'iframe') { const info = coreState.getInfoByUuidFrame(identity); if (info && info.parent) { originWindow = coreState.getRoutingInfoByUuidFrame(info.parent.uuid, info.parent.name); } } if (originWindow) { const appObject = coreState.getAppByUuid(uuid); // parentUuid for child windows is uuid of the app const parentUuid = uuid === name ? appObject.parentUuid : uuid; authorizeActionFromPolicy(originWindow._options, action, payload). then((result: POLICY_AUTH_RESULT) => { if (result === POLICY_AUTH_RESULT.Denied) { writeToLog(1, `apiPolicyPreProcessor rejecting from policy ${logSuffix}`, true); nack(errorMessage); } else { if (result === POLICY_AUTH_RESULT.Allowed) { writeToLog(1, `apiPolicyPreProcessor allowed from policy, still need to check window options ${logSuffix}`, true); } if (authorizeActionFromWindowOptions(originWindow._options, parentUuid, action, payload)) { next(); } else { writeToLog(1, `apiPolicyPreProcessor rejecting from win opts ${logSuffix}`, true); nack(errorMessage); } } }).catch(() => { writeToLog(1, `apiPolicyPreProcessor rejecting from error ${logSuffix}`, true); nack(errorMessage); }); } else { writeToLog(1, `apiPolicyPreProcessor missing origin window ${logSuffix}`, true); next(); } } else { next(); } } /** * Get application permissions from cache or RVM * * @param configUrl url of startup manifest * @returns {Promise<any>} resolves with permissions defined in application assets; * reject if request to RVM failed */ function requestAppPermissions(configUrl: string): Promise<any> { writeToLog(1, `requestAppPermissions ${configUrl}`, true); return new 
Promise((resolve, reject) => { if (configUrlPermissionsMap[configUrl]) { writeToLog(1, 'requestAppPermissions cached', true); resolve(configUrlPermissionsMap[configUrl]); } else { const policy = searchPolicyByConfigUrl(configUrl); if (policy && policy.permissions) { configUrlPermissionsMap[configUrl] = {permissions: policy.permissions}; } else { configUrlPermissionsMap[configUrl] = {}; } resolve(configUrlPermissionsMap[configUrl]); } }); } function registerDelegate(apiPath: ApiPath, delegate: ApiPolicyDelegate) { writeToLog(1, `register API policy delegate ${apiPath}`, true); delegateMap.set(apiPath, delegate); } // Example response from RVM // { "broadcast": false, "messageId": "735267b6-0a12-4524-bf12-00ea556d74aa", "payload": // { "action": "get-desktop-owner-settings", "applicationSettingsExists": false, // "desktopOwnerFileExists": true, "payload": { }, "success": true }, "topic": "application" } function retrieveAPIPolicyContent(): Promise<any> { writeToLog(1, 'retrieveAPIPolicyContent', true); return new Promise((resolve, reject) => { const msg: GetDesktopOwnerSettings = { topic: 'application', action: 'get-desktop-owner-settings', sourceUrl: 'https://openfin.co', // ignored by RVM if isGlobal is true isGlobal: true, // get all polices timeToLive: desktopOwnerSettingsTimeout / 1000 }; rvmBus.publish(msg, (rvmResponse: any) => { writeToLog('info', `requestAppPermissions from RVM ${JSON.stringify(rvmResponse)} `); if (rvmResponse.payload && rvmResponse.payload.success === true && rvmResponse.payload.desktopOwnerFileExists === true && rvmResponse.payload.payload) { if (rvmResponse.payload.applicationSettingsExists === true) { resolve(rvmResponse.payload.payload); } else if (rvmResponse.payload.applicationSettingsExists === false) { writeToLog('info', 'requestAppPermissions applicationSettings not set in desktop-owner-settings'); reject(rvmResponse); } else if (Object.keys(rvmResponse.payload.payload).length > 0) { // older versions of RVM do not set 
applicationSettingsExists // accept only if it is not empty resolve(rvmResponse.payload.payload); } else { writeToLog('info', 'requestAppPermissions applicationSettings not set in desktop-owner-settings'); reject(rvmResponse); } } else { writeToLog('error', `requestAppPermissions from RVM failed ${JSON.stringify(rvmResponse)}`); reject(rvmResponse); // false indicates request to RVM failed } }, false); }); } writeToLog('info', `Installing API policy PreProcessor ${JSON.stringify(coreState.getStartManifest())}`); getDefaultRequestHandler().addPreProcessor(apiPolicyPreProcessor); if (coreState.argo[DESKTOP_OWNER_SETTINGS_TIMEOUT]) { desktopOwnerSettingsTimeout = Number(coreState.argo[DESKTOP_OWNER_SETTINGS_TIMEOUT]); writeToLog(1, `desktopOwnerSettingsTimeout ${desktopOwnerSettingsTimeout}`, true); } for (const key of Object.keys(actionMap)) { const endPoint: Endpoint = actionMap[key]; if (endPoint.apiPolicyDelegate) { registerDelegate(endPoint.apiPath, endPoint.apiPolicyDelegate); } } retrieveAPIPolicyContent().then((content: ApiPolicy) => { apiPolicies = content; if (apiPolicies) { applicationSettingsEnabled = true; if (!apiPolicies.hasOwnProperty(CONFIG_URL_WILDCARD)) { apiPolicies[CONFIG_URL_WILDCARD] = { permissions: {} }; writeToLog('info', 'default policy missing, setting to {} '); } } else { writeToLog(1, 'API policy not defined in desktopOwnerSettings', true); } }).catch(e => { writeToLog(1, `Error retrieveAPIPolicies ${JSON.stringify(e)}`, true); }); export {apiPolicyPreProcessor};
the_stack
import {IVpc} from '@aws-cdk/aws-ec2'; import { ApplicationListener, ApplicationLoadBalancer, ApplicationProtocol, ApplicationTargetGroup, Protocol, } from '@aws-cdk/aws-elasticloadbalancingv2'; import {Construct} from '@aws-cdk/core'; import { HealthCheckConfig, HealthMonitor, IMonitorableFleet, Limit, HealthMonitorProps, } from './health-monitor'; /** * This class is responsible for managing the statistics for all the * load balancers created in this construct. It is also responsible to search * for the finding the first Load balancer/Listener which can accomodate the * worker-fleet based on its size. * * A typical load balancer hierarchy looks like following: * |__ Load Balancer 1 * | |____________Listener 1 * | | |_______Target Group 1 ------- Target/Fleet * | | |_______Target Group 2 ------- Target/Fleet * | | * | |____________Listener 2 * | |_______Target Group 1 ------- Target/Fleet * | |_______Target Group 2 ------- Target/Fleet * | * |__ Load Balancer 2 * |____________Listener 1 * | |_______Target Group 1 ------- Target/Fleet * | |_______Target Group 2 ------- Target/Fleet * | * |____________Listener 2 * |_______Target Group 1 ------- Target/Fleet * |_______Target Group 2 ------- Target/Fleet * * Components: * 1. LoadBalancerFactory: This is the root node of the tree. It contains the * map of load balancer to its managers. It is responsible for creating a * new load balancer if required. It delegates the registerFleet calls to * downstream and returns parent load balancer, listener and target group * of the registered fleet if the registration was successful * * 2. LoadBalancerManager: This class manages a single load balancer. It * contains a map of all the listeners->manager. It also contains the component * counts like listener, target group and target count. It delegates the * registration call to downstream listeners and updates the stats when * the registration is successful. 
It returns the parent listener and * target group on successful registration. * * 3. ListenerManager: This class managers a single Listener. It contains a map * of all of its target groups to its associated fleet. It also contains the * component counts. It returns the target group on registration. */ export class LoadBalancerFactory { public static readonly DEFAULT_LISTENERS_PER_APPLICATION_LOAD_BALANCER = 50; public static readonly DEFAULT_TARGETS_PER_APPLICATION_LOAD_BALANCER = 1000; public static readonly DEFAULT_TARGET_GROUPS_PER_ACTION_ON_APPLICATION_LOAD_BALANCER = 5; public static readonly DEFAULT_TARGET_GROUPS_PER_APPLICATION_LOAD_BALANCER = 100; public static getAccountLimit( limitName: string, defaultValue: number, elbAccountLimits?: Limit[]): number { if (!elbAccountLimits) { return defaultValue; } const foundLimit = elbAccountLimits.find(limit => limit.name === limitName); if (!foundLimit) { return defaultValue; } return foundLimit.max; } private readonly vpc: IVpc; private readonly healthMonitorScope: Construct; private loadBalancerMap = new Map<ApplicationLoadBalancer, LoadBalancerManager>(); constructor( healthMonitorScope: Construct, vpc: IVpc) { this.healthMonitorScope = healthMonitorScope; this.vpc = vpc; } /** * This method scans all the load balancers and its listeners and registers the fleet * to the load balancer and/or listener which can accommodate it. * This method also updates the statistics for the given fleet size. * If the registration is successful, it then returns the load balancer, listener * and target group to which the fleet was registered. 
* * @param fleet * @param healthCheckConfig * @param elbAccountLimits */ public registerWorkerFleet( fleet: IMonitorableFleet, healthCheckConfig: HealthCheckConfig, healthMonitorProps: HealthMonitorProps): { loadBalancer: ApplicationLoadBalancer, listener: ApplicationListener, targetGroup: ApplicationTargetGroup } { let loadBalancerParent = null; let listenerParent = null; let targetGroupParent = null; // iterate through each load balancer and try registering to each one. for (const [loadBalancer, loadBalancerMeta] of this.loadBalancerMap.entries()) { try { const {listener, targetGroup} = loadBalancerMeta.registerWorkerFleet( loadBalancer, fleet, healthCheckConfig, healthMonitorProps); loadBalancerParent = loadBalancer; listenerParent = listener; targetGroupParent = targetGroup; break; } catch (e) { // suppress all AWSLimitExhaustedError, we will scale in case of this error /* istanbul ignore next */ if (!(e instanceof AWSLimitExhaustedError)) { /* istanbul ignore next */ throw e; } } } // Check if fleet was not registered. 
if (!loadBalancerParent) { // If this section is reached, no load balancer was found which could // accommodate fleet, create a new one and register loadBalancerParent = this.createLoadBalancer( this.healthMonitorScope, this.loadBalancerMap.size, healthMonitorProps); const loadBalancerManager = new LoadBalancerManager(); // Add it to the map this.loadBalancerMap.set(loadBalancerParent, loadBalancerManager); // try registering the fleet to the new load balancer try { const {listener, targetGroup} = loadBalancerManager.registerWorkerFleet( loadBalancerParent, fleet, healthCheckConfig, healthMonitorProps); listenerParent = listener; targetGroupParent = targetGroup; } catch (e) { throw e; } } /* istanbul ignore next */ if (!loadBalancerParent || !listenerParent || !targetGroupParent) { /* istanbul ignore next */ throw new Error('Fleet registered successfully but a parent was found null'); } return { loadBalancer: loadBalancerParent, listener: listenerParent, targetGroup: targetGroupParent, }; } /** * Following method creates a new load balancer within the given scope. * * @param scope * @param loadBalancerindex */ private createLoadBalancer(scope: Construct, loadBalancerindex: number, healthMonitorProps: HealthMonitorProps, ): ApplicationLoadBalancer { const loadBalancer = new ApplicationLoadBalancer(scope, `ALB_${loadBalancerindex}`, { vpc: this.vpc, internetFacing: false, vpcSubnets: healthMonitorProps.vpcSubnets, deletionProtection: healthMonitorProps.deletionProtection ?? true, securityGroup: healthMonitorProps.securityGroup, }); // Enabling dropping of invalid HTTP header fields on the load balancer to prevent http smuggling attacks. loadBalancer.setAttribute('routing.http.drop_invalid_header_fields.enabled', 'true'); return loadBalancer; } } /** * This class manages the properties of a single load balancer and its statistics. * It is also responsible to scan through all the listeners registered under it * and register the given fleet. 
*/ class LoadBalancerManager { private listenerMap: Map<ApplicationListener, ListenerManager> = new Map(); private loadBalancerComponentCount = new LoadBalancerComponentStats(); /** * This method scans all the listeners of this load balancer and registers the fleet * to one which can accomodate it. * This method also updates the statistics for the given fleet size. * If the registration is successful, it then returns the listener * and target group to which the fleet was registered. * * @param loadBalancer * @param fleet * @param healthCheckConfig * @param elbAccountLimits */ public registerWorkerFleet( loadBalancer: ApplicationLoadBalancer, fleet: IMonitorableFleet, healthCheckConfig: HealthCheckConfig, healthMonitorProps: HealthMonitorProps) { // this initializes with 0 and keeps the track of all components // newly added down the hierarchy. const statsDelta = new LoadBalancerComponentStats(); // Do all the load balancer level service limit checks first // check for target limit in load balancer const targetPerLoadBalancerLimit = LoadBalancerFactory.getAccountLimit('targets-per-application-load-balancer', LoadBalancerFactory.DEFAULT_TARGETS_PER_APPLICATION_LOAD_BALANCER, healthMonitorProps.elbAccountLimits); if ((this.loadBalancerComponentCount.targetCount + fleet.targetCapacity) > targetPerLoadBalancerLimit) { throw new AWSLimitExhaustedError('AWS service limit "targets-per-application-load-balancer" reached. Limit: ' + targetPerLoadBalancerLimit); } // check for target group limit in load balancer const targetGroupsPerLoadBalancerLimit = LoadBalancerFactory.getAccountLimit('target-groups-per-application-load-balancer', LoadBalancerFactory.DEFAULT_TARGET_GROUPS_PER_APPLICATION_LOAD_BALANCER, healthMonitorProps.elbAccountLimits); if ((this.loadBalancerComponentCount.targetGroupCount + 1) > targetGroupsPerLoadBalancerLimit) { throw new AWSLimitExhaustedError('AWS service limit "target-groups-per-application-load-balancer" reached. 
Limit: ' + targetGroupsPerLoadBalancerLimit); } let listenerParent = null; let targetGroupParent = null; // try registering to each listener. for (const [listener, listenerMeta] of this.listenerMap.entries()) { try { const {componentsAdded, targetGroup} = listenerMeta.registerWorkerFleet( loadBalancer, listener, fleet, healthCheckConfig, healthMonitorProps); statsDelta.add(componentsAdded); listenerParent = listener; targetGroupParent = targetGroup; break; } catch (e) { // suppress all AWSLimitExhaustedError, we will scale in case of this error /* istanbul ignore next */ if (!(e instanceof AWSLimitExhaustedError)) { /* istanbul ignore next */ throw e; } } } /* istanbul ignore next */ if (!listenerParent) { // If this section is reached, no listener was found which could accommodate fleet // create new listener and register const listenersPerLoadBalancerLimit = LoadBalancerFactory.getAccountLimit('listeners-per-application-load-balancer', LoadBalancerFactory.DEFAULT_LISTENERS_PER_APPLICATION_LOAD_BALANCER, healthMonitorProps.elbAccountLimits); if ((this.loadBalancerComponentCount.listenerCount + 1) > listenersPerLoadBalancerLimit) { throw new AWSLimitExhaustedError('AWS service limit "listeners-per-application-load-balancer" reached. 
Limit: ' + listenersPerLoadBalancerLimit); } listenerParent = this.createListener(fleet.targetScope, loadBalancer); const listenerManager = new ListenerManager(); this.listenerMap.set(listenerParent, listenerManager); statsDelta.add(new LoadBalancerComponentStats(1, 0, 0)); try { const {componentsAdded, targetGroup} = listenerManager.registerWorkerFleet( loadBalancer, listenerParent, fleet, healthCheckConfig, healthMonitorProps); targetGroupParent = targetGroup; statsDelta.add(componentsAdded); } catch (e) { throw e; } } // update the current load balancer's stats this.loadBalancerComponentCount.add(statsDelta); return { componentsAdded: statsDelta, listener: listenerParent, targetGroup: targetGroupParent, }; } /** * Following method creates a new listener in the fleet's scope and * registers it to the given load balancer. * * @param scope * @param loadBalancer */ private createListener(scope: Construct, loadBalancer: ApplicationLoadBalancer): ApplicationListener { return new ApplicationListener(scope, 'Listener', { port: HealthMonitor.LOAD_BALANCER_LISTENING_PORT + this.listenerMap.size, // dummy port for load balancing protocol: ApplicationProtocol.HTTP, loadBalancer, open: false, }); } } /** * This class manages the properties of a single listener and all the components * under its hierarchy. * It is also responsible to create a new target group and register the given fleet. */ class ListenerManager { private targetMap: Map<ApplicationTargetGroup, IMonitorableFleet> = new Map(); private listenerComponentCount = new LoadBalancerComponentStats(); /** * This method scans all the listeners of this load balancer and registers the fleet * to one which can accommodate it. * This method also updates the statistics for the given fleet size. * If the registration is successful, it then returns the target group * to which the fleet was registered. 
* * @param loadBalancer * @param listener * @param fleet * @param healthCheckConfig * @param elbAccountLimits */ public registerWorkerFleet( loadBalancer: ApplicationLoadBalancer, listener: ApplicationListener, fleet: IMonitorableFleet, healthCheckConfig: HealthCheckConfig, healthMonitorProps: HealthMonitorProps) { const componentsAdded = new LoadBalancerComponentStats(); // Do all listener level service limit checks // check for target limit in listener const targetGroupPerLoadBalancerLimit = LoadBalancerFactory.getAccountLimit('target-groups-per-action-on-application-load-balancer', LoadBalancerFactory.DEFAULT_TARGET_GROUPS_PER_ACTION_ON_APPLICATION_LOAD_BALANCER, healthMonitorProps.elbAccountLimits); if ((this.listenerComponentCount.targetGroupCount + 1) > targetGroupPerLoadBalancerLimit) { throw new AWSLimitExhaustedError('AWS service limit "target-groups-per-action-on-application-load-balancer" reached. Limit: ' + targetGroupPerLoadBalancerLimit); } // latest version of CDK does not support 'forwardConfig' in listener rule yet. This means // we cannot add multiple target groups to a single listener. Adding this check till this // feature is supported. if (this.listenerComponentCount.targetGroupCount > 0) { throw new AWSLimitExhaustedError('Unable to add more than 1 Target Group to Listener.'); } // Create a new target group const targetGroup = this.createTargetGroup( fleet.targetScope, loadBalancer, listener, fleet, healthCheckConfig); this.targetMap.set(targetGroup, fleet); // update the listener stats componentsAdded.targetGroupCount++; componentsAdded.targetCount += fleet.targetCapacity; // update the current listener's stats this.listenerComponentCount.add(componentsAdded); return { componentsAdded, targetGroup, }; } /** * Following method creates a new new target group in the fleet's scope and * registers it to the given listener. 
* * @param scope * @param loadBalancer * @param listener * @param monitorableFleet * @param healthCheckConfig */ private createTargetGroup( scope: Construct, loadBalancer: ApplicationLoadBalancer, listener: ApplicationListener, monitorableFleet: IMonitorableFleet, healthCheckConfig: HealthCheckConfig): ApplicationTargetGroup { const targetGroup = new ApplicationTargetGroup(scope, 'TargetGroup', { port: HealthMonitor.LOAD_BALANCER_LISTENING_PORT, // dummy port for load balancing protocol: ApplicationProtocol.HTTP, targets: [monitorableFleet.targetToMonitor], healthCheck: { port: healthCheckConfig.port ? healthCheckConfig.port.toString() : HealthMonitor.LOAD_BALANCER_LISTENING_PORT.toString(), interval: healthCheckConfig.interval || HealthMonitor.DEFAULT_HEALTH_CHECK_INTERVAL, healthyThresholdCount: healthCheckConfig.instanceHealthyThresholdCount || HealthMonitor.DEFAULT_HEALTHY_HOST_THRESHOLD, unhealthyThresholdCount: healthCheckConfig.instanceUnhealthyThresholdCount || HealthMonitor.DEFAULT_UNHEALTHY_HOST_THRESHOLD, protocol: Protocol.HTTP, }, vpc: loadBalancer.vpc, }); listener.addTargetGroups('TargetGroup', { targetGroups: [targetGroup], }); return targetGroup; } } /** * This class contains the statistics of all the nested load balancer * components like listener count, target group count and target count. * This statistics object will be associated with each load balancer * and listener for tracking the count of components. 
*/ class LoadBalancerComponentStats { public listenerCount: number; public targetGroupCount: number; public targetCount: number; constructor( listenerCount: number = 0, targetGroupCount: number = 0, targetCount: number = 0) { this.listenerCount = listenerCount; this.targetGroupCount = targetGroupCount; this.targetCount = targetCount; } public add(operand: LoadBalancerComponentStats) { this.listenerCount += operand.listenerCount; this.targetGroupCount += operand.targetGroupCount; this.targetCount += operand.targetCount; } } export class AWSLimitExhaustedError extends Error { constructor(message: string) { super(message); } }
the_stack
import { fixture, elementUpdated, expect, html, oneEvent, } from '@open-wc/testing'; import { spy } from 'sinon'; import '../sync/sp-split-button.js'; import { SplitButton } from '..'; import splitButtonDefault, { field, more, } from '../stories/split-button-cta.stories.js'; import { MenuItem } from '@spectrum-web-components/menu'; import { arrowDownEvent } from '../../../test/testing-helpers.js'; import { TemplateResult } from '@spectrum-web-components/base'; // wrap in div method function wrapInDiv(storyArgument: TemplateResult): TemplateResult { return html` <div>${storyArgument}</div> `; } const deprecatedMenu = (): TemplateResult => html` <sp-menu> <sp-menu-item>Option 1</sp-menu-item> <sp-menu-item>Option Extended</sp-menu-item> <sp-menu-item>Short</sp-menu-item> </sp-menu> `; describe('Splitbutton', () => { it('loads [type="field"] splitbutton accessibly', async () => { const test = await fixture<HTMLDivElement>( wrapInDiv(field(splitButtonDefault.args)) ); const el1 = test.querySelector('sp-split-button') as SplitButton; const el2 = test.querySelector('sp-split-button[left]') as SplitButton; await elementUpdated(el1); await elementUpdated(el2); await expect(el1).to.be.accessible(); await expect(el2).to.be.accessible(); }); it('loads [type="field"] splitbutton accessibly with deprecated syntax', async () => { const test = await fixture<HTMLDivElement>(html` <div> <sp-split-button>${deprecatedMenu()}</sp-split-button> <sp-split-button left>${deprecatedMenu()}</sp-split-button> </div> `); const el1 = test.querySelector('sp-split-button') as SplitButton; const el2 = test.querySelector('sp-split-button[left]') as SplitButton; await elementUpdated(el1); await elementUpdated(el2); await expect(el1).to.be.accessible(); await expect(el2).to.be.accessible(); }); it('loads [type="more"] splitbutton accessibly', async () => { const test = await fixture<HTMLDivElement>( wrapInDiv(more({ ...splitButtonDefault.args, ...more.args })) ); const el1 = 
test.querySelector('sp-split-button') as SplitButton; const el2 = test.querySelector('sp-split-button[left]') as SplitButton; await elementUpdated(el1); await elementUpdated(el2); await expect(el1).to.be.accessible(); await expect(el2).to.be.accessible(); }); it('loads [type="more"] splitbutton accessibly with deprecated syntax', async () => { const test = await fixture<HTMLDivElement>(html` <div> <sp-split-button type="more"> ${deprecatedMenu()} </sp-split-button> <sp-split-button type="more" left> ${deprecatedMenu()} </sp-split-button> </div> `); const el1 = test.querySelector('sp-split-button') as SplitButton; const el2 = test.querySelector('sp-split-button[left]') as SplitButton; await elementUpdated(el1); await elementUpdated(el2); await expect(el1).to.be.accessible(); await expect(el2).to.be.accessible(); }); it('receives "focus()"', async () => { const test = await fixture<HTMLDivElement>( wrapInDiv(field(splitButtonDefault.args)) ); const el1 = test.querySelector('sp-split-button') as SplitButton; const el2 = test.querySelector('sp-split-button[left]') as SplitButton; const el1FocusElement = el1.focusElement; const el2FocusElement = el2.shadowRoot.querySelector( '.trigger' ) as HTMLElement; await elementUpdated(el1); await elementUpdated(el2); el1.focus(); await elementUpdated(el1); expect(document.activeElement).to.equal(el1); expect(el1.shadowRoot.activeElement).to.equal(el1FocusElement); el2.focus(); await elementUpdated(el2); expect(document.activeElement).to.equal(el2); expect(el2.shadowRoot.activeElement).to.equal(el2FocusElement); }); it('[type="field"] manages `selectedItem`', async () => { const test = await fixture<HTMLDivElement>( wrapInDiv(field(splitButtonDefault.args)) ); const el = test.querySelector('sp-split-button') as SplitButton; await elementUpdated(el); expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(el.open).to.be.false; const item3 = el.querySelector('sp-menu-item:nth-child(3)') as MenuItem; const root = el.shadowRoot 
? el.shadowRoot : el; const toggleButton = root.querySelector( '.trigger' ) as HTMLButtonElement; const opened = oneEvent(el, 'sp-opened'); toggleButton.click(); await opened; await elementUpdated(el); expect(el.open).to.be.true; const closed = oneEvent(el, 'sp-closed'); item3.click(); await closed; await elementUpdated(el); expect(el.selectedItem?.itemText).to.equal('Short'); expect(el.open).to.be.false; }); it('[type="more"] manages `selectedItem.itemText`', async () => { const test = await fixture<HTMLDivElement>( wrapInDiv(more({ ...splitButtonDefault.args, ...more.args })) ); const el = test.querySelector('sp-split-button') as SplitButton; await elementUpdated(el); expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(el.open).to.be.false; const item3 = el.querySelector('sp-menu-item:nth-child(3)') as MenuItem; const root = el.shadowRoot ? el.shadowRoot : el; const toggleButton = root.querySelector( '.trigger' ) as HTMLButtonElement; const opened = oneEvent(el, 'sp-opened'); toggleButton.click(); await opened; await elementUpdated(el); expect(el.open).to.be.true; const closed = oneEvent(el, 'sp-closed'); item3.click(); await closed; await elementUpdated(el); expect(el.open).to.be.false; expect(el.selectedItem?.itemText).to.equal('Option 1'); }); it('passes click events as [type="field"]', async () => { const firstItemSpy = spy(); const secondItemSpy = spy(); const thirdItemSpy = spy(); const test = await fixture<HTMLDivElement>( wrapInDiv( field(splitButtonDefault.args, { firstItemHandler: (): void => firstItemSpy(), secondItemHandler: (): void => secondItemSpy(), thirdItemHandler: (): void => thirdItemSpy(), }) ) ); const el = test.querySelector('sp-split-button') as SplitButton; await elementUpdated(el); expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(el.open).to.be.false; const item1 = el.querySelector('sp-menu-item:nth-child(1)') as MenuItem; const item2 = el.querySelector('sp-menu-item:nth-child(2)') as MenuItem; const item3 = 
el.querySelector('sp-menu-item:nth-child(3)') as MenuItem; const root = el.shadowRoot ? el.shadowRoot : el; const main = root.querySelector('#button') as HTMLButtonElement; main.click(); await elementUpdated(el); expect(firstItemSpy.called, 'first called').to.be.true; expect(firstItemSpy.calledOnce, 'first calledOnce').to.be.true; const trigger = root.querySelector('.trigger') as HTMLButtonElement; let opened = oneEvent(el, 'sp-opened'); trigger.click(); await opened; await elementUpdated(el); expect(el.open, 'open').to.be.true; let closed = oneEvent(el, 'sp-closed'); item3.click(); await closed; await elementUpdated(el); expect(el.open, 'not open').to.be.false; expect(thirdItemSpy.called, 'third called').to.be.true; expect(thirdItemSpy.calledOnce, 'third calledOnce').to.be.true; main.click(); await elementUpdated(el); expect(el.open).to.be.false; expect(el.selectedItem?.itemText).to.equal('Short'); expect(thirdItemSpy.called, 'third called, still').to.be.true; expect(thirdItemSpy.callCount, 'third callCount').to.equal(2); expect(thirdItemSpy.calledTwice, 'third calledTwice').to.be.true; trigger.focus(); opened = oneEvent(el, 'sp-opened'); trigger.dispatchEvent(arrowDownEvent); await opened; await elementUpdated(el); expect(el.open, 'reopened').to.be.true; closed = oneEvent(el, 'sp-closed'); item2.click(); await closed; await elementUpdated(el); main.click(); await elementUpdated(el); expect(el.open).to.be.false; expect(el.selectedItem?.itemText).to.equal('Option Extended'); expect(secondItemSpy.called, 'second called').to.be.true; expect(secondItemSpy.calledTwice, 'second twice').to.be.true; opened = oneEvent(el, 'sp-opened'); trigger.click(); await opened; await elementUpdated(el); expect(el.open, 'opened again').to.be.true; closed = oneEvent(el, 'sp-closed'); item1.click(); await closed; await elementUpdated(el); main.click(); await elementUpdated(el); expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(firstItemSpy.called, 'first called, 
sill').to.be.true; expect(firstItemSpy.callCount, 'first callCount').to.equal(3); }); it('passes click events as [type="more"]', async () => { const firstItemSpy = spy(); const secondItemSpy = spy(); const thirdItemSpy = spy(); const test = await fixture<HTMLDivElement>( wrapInDiv( more( { ...splitButtonDefault.args, ...more.args }, { firstItemHandler: (): void => firstItemSpy(), secondItemHandler: (): void => secondItemSpy(), thirdItemHandler: (): void => thirdItemSpy(), } ) ) ); const el = test.querySelector('sp-split-button') as SplitButton; await elementUpdated(el); expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(el.open).to.be.false; const item2 = el.querySelector('sp-menu-item:nth-child(2)') as MenuItem; const item3 = el.querySelector('sp-menu-item:nth-child(3)') as MenuItem; const root = el.shadowRoot ? el.shadowRoot : el; const main = root.querySelector('#button') as HTMLButtonElement; main.click(); await elementUpdated(el); expect(firstItemSpy.called, '1st called').to.be.true; expect(firstItemSpy.calledOnce, '1st called once').to.be.true; const trigger = root.querySelector('.trigger') as HTMLButtonElement; let opened = oneEvent(el, 'sp-opened'); trigger.click(); await opened; await elementUpdated(el); expect(el.open).to.be.true; let closed = oneEvent(el, 'sp-closed'); item3.click(); await closed; await elementUpdated(el); expect(el.open, 'not open').to.be.false; expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(thirdItemSpy.called, '3rd called').to.be.true; expect(thirdItemSpy.calledOnce, '3rd called once').to.be.true; opened = oneEvent(el, 'sp-opened'); trigger.click(); await opened; await elementUpdated(el); expect(el.open).to.be.true; closed = oneEvent(el, 'sp-closed'); item2.click(); await closed; await elementUpdated(el); expect(el.open).to.be.false; expect(el.selectedItem?.itemText).to.equal('Option 1'); expect(secondItemSpy.called, '2nd called').to.be.true; expect(secondItemSpy.calledOnce, '2nd called once').to.be.true; 
main.click(); await elementUpdated(el); expect(firstItemSpy.called).to.be.true; expect(firstItemSpy.calledTwice, '1st called twice').to.be.true; }); });
the_stack
import React, { memo, useCallback, useEffect, useState } from 'react'; import { ReflexContainer, ReflexSplitter, ReflexElement } from 'react-reflex'; import { Button, Checkbox, Dropdown, Form, Spin, Tooltip } from 'antd'; import hotkeys from 'hotkeys-js'; import cn from 'classnames'; // @Components import { DebugEvents } from 'ui/components/CodeDebugger/DebugEvents'; import { CodeEditor } from 'ui/components/CodeEditor/CodeEditor'; // @Icons import CaretRightOutlined from '@ant-design/icons/lib/icons/CaretRightOutlined'; import UnorderedListOutlined from '@ant-design/icons/lib/icons/UnorderedListOutlined'; // @Styles import 'react-reflex/styles.css'; import styles from './CodeDebugger.module.less'; import { Event as RecentEvent } from '../../../lib/services/events'; import { SyntaxHighlighterAsync } from 'lib/components/SyntaxHighlighter/SyntaxHighlighter'; import { CloseOutlined, CodeOutlined, LoadingOutlined, DownloadOutlined, EyeFilled, EyeInvisibleFilled } from '@ant-design/icons'; export interface CodeDebuggerProps { /** * Run handler, async. 
* That function takes form values and returns response or error * */ run: (values: FormValues) => any; /** * @deprecated * Prop to make code field hidden, visible by default * */ codeFieldVisible?: boolean; /** * Prop to customize label of code field, `Code` by default * */ codeFieldLabel?: string; /** * Additional className for wrap div * */ className?: string; /** * InitialValue for code field * */ defaultCodeValue?: string; /** * Code field change handler * */ handleCodeChange?: (value: string | object) => void; /** * Close modal for cases with custom close button * */ handleClose?: () => void; /** * Callback for the `save` button */ handleSaveCode?: () => void; } export interface FormValues { object: string; code: string; } interface CalculationResult { code: 'error' | 'success'; format: string | null; message: string; } const CodeDebugger = ({ className, codeFieldLabel = 'Table Name Expression', defaultCodeValue, handleCodeChange, handleClose, handleSaveCode: _handleSaveCode, run }: CodeDebuggerProps) => { const [isCodeSaved, setIsCodeSaved] = useState<boolean>(true); const [objectInitialValue, setObjectInitialValue] = useState<string>(''); const [isEventsVisible, setEventsVisible] = useState<boolean>(false); const [calcResult, setCalcResult] = useState<CalculationResult>(); const [runIsLoading, setRunIsLoading] = useState<boolean>(false); const [showInputEditor, setShowInputEditor] = useState<boolean>(true); const [showCodeEditor, setShowCodeEditor] = useState<boolean>(true); const [showOutput, setShowOutput] = useState<boolean>(false); const [form] = Form.useForm(); const toggleInputEditor = useCallback(() => { setShowInputEditor((val) => !val); }, []); const toggleCodeEditor = useCallback(() => { setShowCodeEditor((val) => !val); }, []); const toggleOutput = useCallback(() => { setShowOutput((val) => !val); }, []); const handleChange = (name: 'object' | 'code') => (value: string | object) => { form.setFieldsValue({ [name]: value ? 
value : '' }); if (name === 'code' && handleCodeChange) { handleCodeChange(value); isCodeSaved && setIsCodeSaved(false); } }; const handleSaveCode = () => { _handleSaveCode(); setIsCodeSaved(true); }; const handleRun = async (values: FormValues) => { setShowOutput(true); setRunIsLoading(true); try { const response = await run(values); setCalcResult({ code: 'success', format: response.format, message: response.result }); } catch (error) { setCalcResult({ code: 'error', format: error?._response?.format, message: error?.message ?? 'Error' }); } finally { setRunIsLoading(false); } }; const handleEventClick = (event: RecentEvent) => () => { handleChange('object')(JSON.stringify(event, null, 2)); setEventsVisible(false); }; const handleSwitchEventsVisible = () => setEventsVisible((isEventsVisible) => !isEventsVisible); const handleCloseEvents = useCallback((e) => { if ( !e.target.closest('.ant-dropdown') && !e.target.closest('#events-button') ) { setEventsVisible(false); } }, []); useEffect(() => { if (defaultCodeValue) { form.setFieldsValue({ code: defaultCodeValue }); } }, [defaultCodeValue]); useEffect(() => { document.body.addEventListener('click', handleCloseEvents); return () => document.body.removeEventListener('click', handleCloseEvents); }, [handleCloseEvents]); return ( <div className={cn( className, 'flex flex-col items-stretch h-screen max-h-full pt-4;' )} > <div className="w-full mb-2"> <Controls inputChecked={showInputEditor} codeChecked={showCodeEditor} outputChecked={showOutput} toggleInput={toggleInputEditor} toggleCode={toggleCodeEditor} toggleOutput={toggleOutput} handleExit={handleClose} handleSave={handleSaveCode} handleRun={form.submit} /> </div> <Form form={form} className="flex-auto relative" id="inputs" onFinish={handleRun} > <ReflexContainer orientation="vertical"> {showInputEditor && ( <ReflexElement> <SectionWithLabel label="Event JSON" htmlFor="object"> <Form.Item className={`${styles.field} w-full`} name="object"> <CodeEditor initialValue={ 
form.getFieldValue('object') ?? objectInitialValue } language={'json'} handleChange={handleChange('object')} hotkeysOverrides={{ onCmdCtrlEnter: form.submit, onCmdCtrlI: toggleInputEditor, onCmdCtrlU: toggleCodeEditor }} /> </Form.Item> </SectionWithLabel> <Dropdown forceRender className="absolute right-4 bottom-3" placement="topRight" overlay={<DebugEvents handleClick={handleEventClick} />} trigger={['click']} visible={isEventsVisible} > <Button size="small" type="link" icon={<UnorderedListOutlined />} id="events-button" onClick={handleSwitchEventsVisible} > Copy Recent Event </Button> </Dropdown> </ReflexElement> )} {showInputEditor && ( <ReflexSplitter propagate className={`${styles.splitter}`} /> )} {showCodeEditor && ( <ReflexElement> <SectionWithLabel label={`${codeFieldLabel}`} labelClassName={isCodeSaved ? '' : styles.saveIndicator} htmlFor="code" > <Form.Item className={`${styles.field} pl-2`} colon={false} name="code" > <CodeEditor initialValue={ form.getFieldValue('code') ?? defaultCodeValue } language="javascript" enableLineNumbers reRenderEditorOnInitialValueChange={false} handleChange={handleChange('code')} hotkeysOverrides={{ onCmdCtrlEnter: form.submit, onCmdCtrlI: toggleInputEditor, onCmdCtrlU: toggleCodeEditor }} /> </Form.Item> </SectionWithLabel> </ReflexElement> )} {showCodeEditor && showOutput && ( <ReflexSplitter propagate className={`${styles.splitter}`} /> )} {showOutput && ( <ReflexElement> <SectionWithLabel label="Result"> <div className={`h-full box-border font-mono list-none px-2 pt-1 m-0 ${styles.darkenBackground}`} > <p className={cn('flex flex-col w-full h-full m-0', { [styles.itemError]: calcResult?.code === 'error', [styles.itemSuccess]: calcResult?.code === 'success' })} > <strong className={cn( `absolute top-1 right-2 flex-shrink-0 text-xs` )} > {runIsLoading ? ( <Spin indicator={ <LoadingOutlined style={{ fontSize: 15 }} spin /> } /> ) : ( `${calcResult?.code ?? 
''}` )} </strong> {calcResult && ( <span className={`flex-auto min-w-0 text-xs`}> {calcResult.code === 'error' ? ( calcResult.message ) : ( <SyntaxHighlighterAsync language="json" className={`h-full w-full overflow-auto ${styles.darkenBackground} ${styles.syntaxHighlighter} ${styles.withSmallScrollbar}`} > {calcResult.message} {/* { // 'safdasfs afdasfasdgasgdfags gasgafasdf asfafasdfasf afdasfdafdda sfasfadsfas fasfafsdasfafas' JSON.stringify( JSON.parse(calcResult.message), null, 2 ) } */} </SyntaxHighlighterAsync> )} </span> )} </p> </div> </SectionWithLabel> </ReflexElement> )} </ReflexContainer> {/** * Elements below keep the form values when the inputs are unmounted. * Keep these elements out of the ReflexContainer, otherwise they will break the layout. * */} {!showInputEditor && ( <Form.Item className={`hidden`} name="object"> <CodeEditor initialValue={form.getFieldValue('object') ?? objectInitialValue} language={'json'} handleChange={handleChange('object')} hotkeysOverrides={{ onCmdCtrlEnter: form.submit, onCmdCtrlI: toggleInputEditor, onCmdCtrlU: toggleCodeEditor }} /> </Form.Item> )} {!showCodeEditor && ( <Form.Item className={`hidden`} name="code"> <CodeEditor initialValue={form.getFieldValue('code') ?? defaultCodeValue} language={'json'} handleChange={handleChange('code')} hotkeysOverrides={{ onCmdCtrlEnter: form.submit, onCmdCtrlI: toggleInputEditor, onCmdCtrlU: toggleCodeEditor }} /> </Form.Item> )} </Form> </div> ); }; CodeDebugger.displayName = 'CodeDebugger'; export { CodeDebugger }; /** * Controls */ const OS_CMD_BUTTON = navigator.userAgent.includes('Mac') ? 
'⌘' : 'Ctrl'; type ControlsProps = { inputChecked: boolean; codeChecked: boolean; outputChecked: boolean; toggleInput: () => void; toggleCode: () => void; toggleOutput: () => void; handleExit: () => void; handleSave: () => void; handleRun: () => void; }; const ControlsComponent: React.FC<ControlsProps> = ({ inputChecked, codeChecked, outputChecked, toggleInput, toggleCode, toggleOutput, handleExit, handleSave, handleRun }) => { useEffect(() => { const handleToggleInput = () => { toggleInput(); return false; // to prevent browsers' default behaviour }; const handleToggleCode = () => { toggleCode(); return false; }; const handleToggleOutput = () => { toggleOutput(); return false; }; const _handleExit = () => { handleExit(); }; const _handleSave = (e: KeyboardEvent) => { e.preventDefault(); handleSave(); return false; }; const _handleRun = (e: KeyboardEvent) => { e.stopPropagation(); handleRun(); return false; }; hotkeys.filter = () => true; // to enable hotkeys everywhere, even in input fields hotkeys('cmd+i,ctrl+i', handleToggleInput); hotkeys('cmd+u,ctrl+u', handleToggleCode); hotkeys('cmd+o,ctrl+o', handleToggleOutput); hotkeys('escape', _handleExit); hotkeys('cmd+s,ctrl+s', _handleSave); hotkeys('cmd+enter,ctrl+enter', _handleRun); return () => { hotkeys.unbind('cmd+i,ctrl+i', handleToggleInput); hotkeys.unbind('cmd+u,ctrl+u', handleToggleCode); hotkeys.unbind('cmd+o,ctrl+o', handleToggleOutput); hotkeys.unbind('escape', _handleExit); hotkeys.unbind('cmd+s,ctrl+s', _handleSave); hotkeys.unbind('cmd+enter,ctrl+enter', _handleRun); }; }, []); return ( <div className="flex items-stretch w-full h-full"> <Button size="middle" className="flex-grow-0" onClick={handleExit}> <CloseOutlined className={styles.adaptiveIcon} /> <span className={`${styles.adaptiveLabel} ${styles.noMargins}`}> {'Close'} </span> </Button> <div className="flex justify-center items-center flex-auto min-w-0"> <Tooltip title={`${OS_CMD_BUTTON}+I`} mouseEnterDelay={1}> <Checkbox 
checked={inputChecked} className={cn( 'relative', styles.checkbox, styles.hideAntdCheckbox, styles.checkboxLabel, { [styles.checkboxChecked]: inputChecked } )} onClick={toggleInput} > <i className="block absolute left-0.5"> {inputChecked ? <EyeFilled /> : <EyeInvisibleFilled />} </i> <span className={styles.adaptiveIcon}>{'{ }'}</span> <span className={`${styles.adaptiveLabel} ${styles.noMargins}`}> {'Input'} </span> </Checkbox> </Tooltip> <Tooltip title={`${OS_CMD_BUTTON}+U`} mouseEnterDelay={1}> <Checkbox checked={codeChecked} className={cn( 'relative', styles.checkbox, styles.hideAntdCheckbox, styles.checkboxLabel, { [styles.checkboxChecked]: codeChecked } )} onClick={toggleCode} > <i className="block absolute left-0.5"> {codeChecked ? <EyeFilled /> : <EyeInvisibleFilled />} </i> <span className={styles.adaptiveIcon}>{'</>'}</span> <span className={`${styles.adaptiveLabel} ${styles.noMargins}`}> {'Expression'} </span> </Checkbox> </Tooltip> <Tooltip title={`${OS_CMD_BUTTON}+O`} mouseEnterDelay={1}> <Checkbox checked={outputChecked} className={cn( 'relative', styles.checkbox, styles.hideAntdCheckbox, styles.checkboxLabel, { [styles.checkboxChecked]: outputChecked } )} onClick={toggleOutput} > <i className="block absolute left-0.5"> {outputChecked ? 
<EyeFilled /> : <EyeInvisibleFilled />} </i> <CodeOutlined className={styles.adaptiveIcon} /> <span className={`${styles.adaptiveLabel} ${styles.noMargins}`}> {'Result'} </span> </Checkbox> </Tooltip> </div> <div className="flex-grow-0 ant-btn-group"> <Tooltip title={`${OS_CMD_BUTTON}+↵`} mouseEnterDelay={1}> <Button size="middle" type="primary" icon={<CaretRightOutlined />} className={`${styles.buttonGreen}`} onClick={handleRun} > <span className={`${styles.adaptiveLabel}`}>{'Run'}</span> </Button> </Tooltip> <Tooltip title={`${OS_CMD_BUTTON}+S`} mouseEnterDelay={1}> <Button size="middle" type="primary" icon={<DownloadOutlined />}> <span className={`${styles.adaptiveLabel}`}>{'Save'}</span> </Button> </Tooltip> </div> </div> ); }; const Controls = memo(ControlsComponent); type SectionProps = { label: string; labelClassName?: string; htmlFor?: string; }; const SectionWithLabel: React.FC<SectionProps> = ({ label, labelClassName, htmlFor, children }) => { return ( <div className={`relative w-full h-full overflow-hidden pt-7 rounded-md ${styles.darkenBackground}`} > <label className={`absolute top-1 left-2 z-10 ${styles.label} ${ labelClassName ?? '' }`} htmlFor={htmlFor} > {label} </label> {children} </div> ); };
the_stack
import React, { Component } from 'react'; import { Dimensions, Image, ListRenderItem, ScrollView, StyleSheet, Text, TouchableOpacity, View } from 'react-native'; import { ImageData, ZoomCarouselProps } from './types'; import { CarouselController, MultiCarousel } from '../MultiCarousel'; import { PhotoSwipe } from './PhotoSwipe.web'; import { Modal } from '../Modal'; import FSI18n, { translationKeys } from '@brandingbrand/fsi18n'; const componentTranslationKeys = translationKeys.flagship.zoomCarousel.actions; const zoomTranslationKey = FSI18n.string(componentTranslationKeys.fullscreen.actionBtn); const searchIcon = require('../../../assets/images/search.png'); let ZOOM_CAROUSEL_ID = 0; type ImageGetSize = ( uri: string, success: (width: number, height: number) => void, failure: (error: any) => void ) => any; // @ts-ignore @types/react-native does not correctly define Image.getSize as a static method. const getSize: ImageGetSize = Image.getSize.bind(Image); export interface ImageSize { width: number; height: number; } export interface ZoomCarouselStateType { isZooming: boolean; currentIndex: number; imageSizes: ImageSize[]; imageWidth: number; imageHeight: number; screenWidth: number; } const S = StyleSheet.create({ carouselContainer: { flex: 1, flexBasis: 'auto' }, searchIcon: { width: 25, height: 25 }, zoomButtonContainer: { position: 'absolute', right: 50, bottom: 30, zIndex: 101 }, zoomButton: { opacity: 0.5 }, goToNext: { position: 'absolute', top: '50%', right: 0, zIndex: 100, marginTop: -15, padding: 10 }, goToPrev: { position: 'absolute', top: '50%', left: 0, zIndex: 100, marginTop: -15, padding: 10 }, buttonPrevIcon: { width: 25, height: 25, borderTopWidth: 2, borderLeftWidth: 2, borderColor: 'black', transform: [ { rotate: '-45deg' } ] }, buttonNextIcon: { width: 25, height: 25, borderTopWidth: 2, borderRightWidth: 2, borderColor: 'black', transform: [ { rotate: '45deg' } ] }, fullHeight: { height: '100%' }, thumbnailImg: { width: '100%', height: '100%' }, 
thumbnail: { marginRight: 10, width: 50, height: 50 }, thumbnailContainer: { margin: 10 }, thumbnailSelected: { borderWidth: 3, borderColor: 'red' }, imageCounter: { position: 'absolute', right: 0, top: 0 } }); export class ZoomCarousel extends Component<ZoomCarouselProps, ZoomCarouselStateType> { static defaultProps: ZoomCarouselProps = { images: [], peekSize: 0, gapSize: 0 }; multiCarousel?: CarouselController; id: number; constructor(props: ZoomCarouselProps) { super(props); const screenWidth = Dimensions.get('window').width; const peekSize = props.peekSize || 0; const gapSize = props.gapSize || 0; this.id = ZOOM_CAROUSEL_ID++; const itemWidth = props.centerMode ? screenWidth - 2 * peekSize - gapSize : screenWidth - peekSize; const imageWidth = itemWidth - gapSize; const imageHeight = itemWidth - gapSize; this.state = { isZooming: false, currentIndex: 0, imageSizes: [], imageWidth, imageHeight, screenWidth }; // get the image ratio props.images.forEach((img, i) => { const uri = img.src.uri; if (uri) { getSize( uri, (width, height) => { const imageSizes = this.state.imageSizes; imageSizes[i] = { width, height }; this.setState({ imageSizes }); }, err => { console.warn('image getSize failed', err); } ); } }); } goToNext = () => { this.multiCarousel?.goToNext(); } goToPrev = () => { this.multiCarousel?.goToPrev(); } openZoom = () => { this.setState({ isZooming: true }); } closeZoom = () => { this.setState({ isZooming: false }); } handleSlideChange = ({ currentIndex, nextIndex }: any) => { this.setState({ currentIndex: nextIndex }); } handleZoomCarouselChange = (pswp: any) => { const currentIndex = pswp.getCurrentIndex(); this.setState({ currentIndex }); this.multiCarousel?.goTo(currentIndex); } handleLayoutChange = (e: any) => { const { centerMode, peekSize = 0, gapSize = 0 } = this.props; const screenWidth = Dimensions.get('window').width; const itemWidth = centerMode ? 
screenWidth - 2 * peekSize - gapSize : screenWidth - peekSize; const imageWidth = itemWidth - gapSize; const imageHeight = itemWidth - gapSize; this.setState({ imageWidth, imageHeight, screenWidth }); } goTo = (i: number) => { this.multiCarousel?.goTo(i); } handleThumbPress = (i: number) => () => { this.goTo(i); } extractMultiCarousel = (controller: CarouselController) => { this.multiCarousel = controller; } itemUpdated = (oldItem: ImageData, newItem: ImageData, index: number, changed: () => void) => { if ( newItem.src && (newItem.src.uri ? newItem.src.uri !== oldItem.src.uri : newItem.src !== oldItem.src) ) { changed(); } } renderImage: ListRenderItem<ImageData> = ({ item, index }) => { return ( <View style={this.props.fillContainer ? S.fullHeight : null}> {(this.props.renderImageWeb && this.props.renderImageWeb(item, index)) || ( <Image source={item.src} resizeMode='contain' style={{ width: this.state.imageWidth, height: this.state.imageHeight }} /> )} </View> ); } renderImageCounter = () => { const total: number = (this.props.images && this.props.images.length) || 0; const currentIndex = this.state.currentIndex + 1; return ( <View style={this.props.imageCounterStyle || S.imageCounter}> <Text>{`${currentIndex}/${total}`}</Text> </View> ); } renderPhotoSwipe = () => ( <PhotoSwipe isOpen={this.state.isZooming} items={this.props.images .map(img => img.zoomSrc || img.src) .map((img, i) => ({ src: img.uri || img, w: this.state.screenWidth, h: this.state.imageSizes[i] ? (this.state.screenWidth * this.state.imageSizes[i].height) / this.state.imageSizes[i].width : this.state.imageHeight }))} options={{ loop: false, fullscreenEl: false, shareEl: false, captionEl: false, history: false, closeOnScroll: false, index: this.state.currentIndex }} afterChange={this.handleZoomCarouselChange} onClose={this.closeZoom} /> ) renderCustomModal = () => this.props.renderModalContent ? 
( <Modal visible={this.state.isZooming} transparent={true}> {this.props.renderModalContent(this.closeZoom)} </Modal> ) : ( this.renderPhotoSwipe() ) renderThumbnails = () => ( <ScrollView horizontal={true} contentContainerStyle={[S.thumbnailContainer, this.props.thumbnailContainerStyle]} > {this.props.images.map((img, i) => ( <TouchableOpacity key={i} style={[ S.thumbnail, this.props.thumbnailStyle, this.state.currentIndex === i && S.thumbnailSelected ]} onPress={this.handleThumbPress(i)} accessibilityRole={'button'} accessibilityLabel={FSI18n.string(componentTranslationKeys.focus.actionBtn)} > <Image resizeMode='cover' source={img.src} style={S.thumbnailImg} /> </TouchableOpacity> ))} </ScrollView> ) // tslint:disable-next-line: cyclomatic-complexity render(): JSX.Element { const { peekSize = 0, gapSize = 0 } = this.props; return ( <View style={this.props.contentContainerStyle || S.carouselContainer} onLayout={this.handleLayoutChange} > <View style={this.props.imageContainerStyle || S.carouselContainer}> <div id={`zoom-carousel-${this.id}`} style={this.props.fillContainer ? { height: '100%' } : undefined} > <MultiCarousel carouselController={this.extractMultiCarousel} onSlideChange={this.handleSlideChange} peekSize={peekSize + (this.props.centerMode ? gapSize / 2 : 0)} itemsPerPage={1} data={this.props.images} renderItem={this.renderImage} showArrow={this.props.showArrow} dotStyle={this.props.dotStyle} dotActiveStyle={this.props.dotActiveStyle} pageIndicatorStyle={this.props.pageIndicatorStyle} renderPageIndicator={this.props.renderPageIndicator} centerMode={this.props.centerMode} style={this.props.fillContainer ? S.fullHeight : null} nextArrowOnBlur={this.props.nextArrowOnBlur} hidePageIndicator={this.props.hidePageIndicator} hideOverflow={this.props.hideOverflow} /> {!this.props.hideZoomButton && ( <View style={[S.zoomButtonContainer, this.props.zoomButtonStyle]}> {this.props.renderZoomButton ? 
( this.props.renderZoomButton(this.openZoom) ) : ( <TouchableOpacity style={S.zoomButton} onPress={this.openZoom} accessibilityRole={'button'} accessibilityLabel={zoomTranslationKey} > <Image style={S.searchIcon} source={searchIcon} /> </TouchableOpacity> )} </View> )} {this.renderCustomModal()} </div> </View> {this.props.showThumbnails && (this.props.renderThumbnails ? this.props.renderThumbnails(this.state.currentIndex, this.goTo) : this.renderThumbnails())} {this.props.showImageCounter && (this.props.renderImageCounter ? this.props.renderImageCounter(this.state.currentIndex) : this.renderImageCounter())} </View> ); } }
the_stack
import { expect } from "chai"; import Long from "long"; import { execSync } from "child_process"; import { Flags } from "df/common/flags"; import * as testProtos from "df/protoc-gen-ts/tests/test1"; import { suite, test } from "df/testing"; const flags = { reserializerLocation: Flags.string("reserializer-location") }; suite(__filename, { parallel: true }, () => { suite("single-field non-repeated reserialization", { parallel: true }, () => { const testCases = [ // double_field testProtos.TestMessage.create({ doubleField: Number.NaN }), testProtos.TestMessage.create({ doubleField: Number.NEGATIVE_INFINITY }), testProtos.TestMessage.create({ doubleField: 4.940656458412465441765687928682213723651e-324 }), testProtos.TestMessage.create({ doubleField: 35.6 }), testProtos.TestMessage.create({ doubleField: 1.797693134862315708145274237317043567981e308 }), testProtos.TestMessage.create({ doubleField: Number.POSITIVE_INFINITY }), // float_field testProtos.TestMessage.create({ floatField: Number.NaN }), testProtos.TestMessage.create({ floatField: Number.NEGATIVE_INFINITY }), testProtos.TestMessage.create({ floatField: 1.40129846432481707092372958328991613128e-45 }), testProtos.TestMessage.create({ floatField: 3.4028234663852885981170418348451692544e38 }), testProtos.TestMessage.create({ floatField: Number.POSITIVE_INFINITY }), // int64_field testProtos.TestMessage.create({ int64Field: Long.MIN_VALUE }), testProtos.TestMessage.create({ int64Field: Long.MAX_VALUE }), // uint64_field testProtos.TestMessage.create({ uint64Field: Long.UZERO }), testProtos.TestMessage.create({ uint64Field: Long.MAX_UNSIGNED_VALUE }), // int32_field testProtos.TestMessage.create({ int32Field: -2147483648 }), testProtos.TestMessage.create({ int32Field: 2147483647 }), // fixed64_field testProtos.TestMessage.create({ fixed64Field: Long.UZERO }), testProtos.TestMessage.create({ fixed64Field: Long.MAX_UNSIGNED_VALUE }), // fixed32_field testProtos.TestMessage.create({ fixed32Field: 0 }), // bool_field 
testProtos.TestMessage.create({ boolField: false }), testProtos.TestMessage.create({ boolField: true }), // string_field testProtos.TestMessage.create({ stringField: "" }), testProtos.TestMessage.create({ stringField: "hello world" }), // message_field testProtos.TestMessage.create({ messageField: testProtos.TestMessage.create({}) }), testProtos.TestMessage.create({ messageField: testProtos.TestMessage.create({ stringField: "hello world" }) }), // bytes_field testProtos.TestMessage.create({ bytesField: new Uint8Array([]) }), testProtos.TestMessage.create({ bytesField: new Uint8Array([0x5, 0xff]) }), // uint32_field testProtos.TestMessage.create({ uint32Field: 0 }), testProtos.TestMessage.create({ uint32Field: 4294967295 }), // enum_field testProtos.TestMessage.create({ enumField: testProtos.TestEnum.VAL0 }), testProtos.TestMessage.create({ enumField: testProtos.TestEnum.VAL1 }), // sfixed32_field testProtos.TestMessage.create({ sfixed32Field: -2147483648 }), testProtos.TestMessage.create({ sfixed32Field: 2147483647 }), // sfixed64_field testProtos.TestMessage.create({ sfixed64Field: Long.MIN_VALUE }), testProtos.TestMessage.create({ sfixed64Field: Long.MAX_VALUE }), // sint32_field testProtos.TestMessage.create({ sint32Field: -2147483648 }), testProtos.TestMessage.create({ sint32Field: 2147483647 }), testProtos.TestMessage.create({ fixed32Field: 4294967295 }), // sint64_field testProtos.TestMessage.create({ sint64Field: Long.MIN_VALUE }), testProtos.TestMessage.create({ sint64Field: Long.MAX_VALUE }), // oneof_int32_field testProtos.TestMessage.create({ oneof: { field: "oneofInt32Field", value: 0 } }), testProtos.TestMessage.create({ oneof: { field: "oneofInt32Field", value: 1234 } }), // oneof_string_field testProtos.TestMessage.create({ oneof: { field: "oneofStringField", value: "" } }), testProtos.TestMessage.create({ oneof: { field: "oneofStringField", value: "hello world" } }) ]; for (const input of testCases) { test(`reserialized 
${JSON.stringify(input.toJson())}`, () => { expect(input.serialize()).eql(reserialize("TestMessage", input.serialize())); expect(testProtos.TestMessage.deserialize(input.serialize())).eql(input); }); } }); suite("single-field repeated packed reserialization", { parallel: true }, () => { const testCases = [ // double_field testProtos.TestRepeatedMessage.create({ doubleField: [ Number.NaN, Number.NEGATIVE_INFINITY, 4.940656458412465441765687928682213723651e-324, 35.6, Number.POSITIVE_INFINITY ] }), // float_field testProtos.TestRepeatedMessage.create({ floatField: [ Number.NaN, Number.NEGATIVE_INFINITY, 1.40129846432481707092372958328991613128e-45, 35.5, Number.POSITIVE_INFINITY ] }), // int32_field testProtos.TestRepeatedMessage.create({ int32Field: [-100, 99, 0, 76, 10231862] }), // uint32_field testProtos.TestRepeatedMessage.create({ uint32Field: [89, 3, 67, 0, 213131] }), // sint32_field testProtos.TestRepeatedMessage.create({ sint32Field: [-21332, 323, 555, 0, -23123] }), // fixed32_field testProtos.TestRepeatedMessage.create({ fixed32Field: [1232, 0, 51232, 222] }), // sfixed32_field testProtos.TestRepeatedMessage.create({ sfixed32Field: [-13279, 3232, 0, -231] }), // int64_field testProtos.TestRepeatedMessage.create({ int64Field: [Long.fromNumber(12323), Long.ZERO, Long.fromNumber(-121927)] }), // uint64_field testProtos.TestRepeatedMessage.create({ uint64Field: [Long.fromNumber(12323, true), Long.UZERO, Long.fromNumber(172168261, true)] }), // sint64_field testProtos.TestRepeatedMessage.create({ sint64Field: [Long.fromNumber(1212), Long.ZERO, Long.fromNumber(-1271333)] }), // fixed64_field testProtos.TestRepeatedMessage.create({ fixed64Field: [Long.fromNumber(1323, true), Long.fromNumber(0, true)] }), // sfixed64_field testProtos.TestRepeatedMessage.create({ sfixed64Field: [ Long.fromNumber(-1821921), Long.fromNumber(-1), Long.fromNumber(12121982172) ] }), // bool_field testProtos.TestRepeatedMessage.create({ boolField: [true, false, false, true, true] }), // 
enum_field testProtos.TestRepeatedMessage.create({ enumField: [testProtos.TestEnum.VAL1, testProtos.TestEnum.VAL0, testProtos.TestEnum.VAL2] }), // string_field testProtos.TestRepeatedMessage.create({ stringField: ["", "foo", "bar"] }), // message_field testProtos.TestRepeatedMessage.create({ messageField: [ testProtos.TestMessage.create({ stringField: "one" }), testProtos.TestMessage.create({ stringField: "two" }) ] }), // bytes_field testProtos.TestRepeatedMessage.create({ bytesField: [ Uint8Array.from([5, 8, 19, 33]), Uint8Array.from([50]), Uint8Array.from([89, 0]) ] }) ]; for (const input of testCases) { test(`reserialized ${JSON.stringify(input.toJson())}`, () => { expect(input.serialize()).eql(reserialize("TestRepeatedMessage", input.serialize())); expect(testProtos.TestRepeatedMessage.deserialize(input.serialize())).eql(input); }); } }); suite("single-field repeated unpacked reserialization", { parallel: true }, () => { const testCases = [ // double_field testProtos.TestUnpackedRepeatedMessage.create({ doubleField: [ Number.NaN, Number.NEGATIVE_INFINITY, 4.940656458412465441765687928682213723651e-324, 35.6, Number.POSITIVE_INFINITY ] }), // float_field testProtos.TestUnpackedRepeatedMessage.create({ floatField: [ Number.NaN, Number.NEGATIVE_INFINITY, 1.40129846432481707092372958328991613128e-45, 35.5, Number.POSITIVE_INFINITY ] }), // int32_field testProtos.TestUnpackedRepeatedMessage.create({ int32Field: [-100, 99, 0, 76, 10231862] }), // uint32_field testProtos.TestUnpackedRepeatedMessage.create({ uint32Field: [89, 3, 67, 0, 213131] }), // sint32_field testProtos.TestUnpackedRepeatedMessage.create({ sint32Field: [-21332, 323, 555, 0, -23123] }), // fixed32_field testProtos.TestUnpackedRepeatedMessage.create({ fixed32Field: [1232, 0, 51232, 222] }), // sfixed32_field testProtos.TestUnpackedRepeatedMessage.create({ sfixed32Field: [-13279, 3232, 0, -231] }), // int64_field testProtos.TestUnpackedRepeatedMessage.create({ int64Field: [Long.fromNumber(12323), 
Long.ZERO, Long.fromNumber(-121927)] }), // uint64_field testProtos.TestUnpackedRepeatedMessage.create({ uint64Field: [Long.fromNumber(12323, true), Long.UZERO, Long.fromNumber(172168261, true)] }), // sint64_field testProtos.TestUnpackedRepeatedMessage.create({ sint64Field: [Long.fromNumber(1212), Long.ZERO, Long.fromNumber(-1271333)] }), // fixed64_field testProtos.TestUnpackedRepeatedMessage.create({ fixed64Field: [Long.fromNumber(1323, true), Long.fromNumber(0, true)] }), // sfixed64_field testProtos.TestUnpackedRepeatedMessage.create({ sfixed64Field: [ Long.fromNumber(-1821921), Long.fromNumber(-1), Long.fromNumber(12121982172) ] }), // bool_field testProtos.TestUnpackedRepeatedMessage.create({ boolField: [true, false, false, true, true] }), // enum_field testProtos.TestUnpackedRepeatedMessage.create({ enumField: [testProtos.TestEnum.VAL1, testProtos.TestEnum.VAL0, testProtos.TestEnum.VAL2] }), // string_field testProtos.TestUnpackedRepeatedMessage.create({ stringField: ["", "foo", "bar"] }), // message_field testProtos.TestUnpackedRepeatedMessage.create({ messageField: [ testProtos.TestMessage.create({ stringField: "one" }), testProtos.TestMessage.create({ stringField: "two" }) ] }), // bytes_field testProtos.TestUnpackedRepeatedMessage.create({ bytesField: [ Uint8Array.from([5, 8, 19, 33]), Uint8Array.from([50]), Uint8Array.from([89, 0]) ] }) ]; for (const input of testCases) { test(`reserialized ${JSON.stringify(input.toJson())}`, () => { expect(input.serialize()).eql( reserialize("TestUnpackedRepeatedMessage", input.serialize()) ); expect(testProtos.TestUnpackedRepeatedMessage.deserialize(input.serialize())).eql(input); }); } }); suite("field type compatibility", { parallel: true }, () => { test("repeated Messages are concatenated when interpreted as singular", () => { expect( testProtos.SingleConcatenatedMessageWrapper.deserialize( testProtos.RepeatedConcatenatedMessageWrapper.create({ concatenatedMessage: [ testProtos.ConcatenatedMessage.create({ 
int32Field: 45, stringField: ["foo", "bar"], uint32Field: [78, 0] }), testProtos.ConcatenatedMessage.create({ int32Field: -89, stringField: ["baz"], uint32Field: [1, 100] }) ] }).serialize() ) ).eql( testProtos.SingleConcatenatedMessageWrapper.create({ concatenatedMessage: testProtos.ConcatenatedMessage.create({ int32Field: -89, stringField: ["foo", "bar", "baz"], uint32Field: [78, 0, 1, 100] }) }) ); }); }); suite("unknown values", { parallel: true }, () => { test("unknown fields ignored", () => { expect( testProtos.FieldSubsetMessage.deserialize( testProtos.FieldSupersetMessage.create({ int32Field: 89 }).serialize() ) ).eql(testProtos.FieldSubsetMessage.create({})); }); // This behaviour is language-dependent. test("unknown enum values retained", () => { expect( testProtos.FieldSubsetMessage.deserialize( testProtos.FieldSupersetMessage.create({ enumField: testProtos.FieldSupersetMessage.SupersetEnum.VAL_1 }).serialize() ) ).eql( testProtos.FieldSubsetMessage.create({ enumField: 1 }) ); }); }); suite("json support", { parallel: true }, () => { test("singular fields", () => { expect( testProtos.TestMessage.create({ doubleField: 45.8, floatField: Number.NaN, int64Field: Long.MAX_VALUE, uint64Field: Long.UONE, int32Field: 190323, fixed64Field: Long.fromNumber(12988, true), fixed32Field: 4173723, boolField: true, stringField: "hello world", messageField: testProtos.TestMessage.create({ stringField: "byeeee" }), bytesField: Uint8Array.from([5, 78, 93, 101]), uint32Field: 12455, enumField: testProtos.TestEnum.VAL1, sfixed32Field: -135131, sfixed64Field: Long.fromValue(-9102713712), mapField: new Map([ ["hello", 5], ["goodbye", 0] ]) }).toJson() ).eql({ doubleField: 45.8, floatField: "NaN", int64Field: "9223372036854775807", uint64Field: "1", int32Field: 190323, fixed64Field: "12988", fixed32Field: 4173723, boolField: true, stringField: "hello world", messageField: { stringField: "byeeee" }, bytesField: "BU5dZQ==", uint32Field: 12455, enumField: "VAL1", sfixed32Field: 
-135131, sfixed64Field: "-9102713712", mapField: { hello: 5, goodbye: 0 } }); }); test("repeated fields", () => { expect( testProtos.TestRepeatedMessage.create({ doubleField: [45.8, 78.1], floatField: [Number.NaN, Number.NEGATIVE_INFINITY], int64Field: [Long.MAX_VALUE], uint64Field: [Long.UONE], int32Field: [190323, -18278], fixed64Field: [Long.fromNumber(12988, true)], fixed32Field: [4173723], boolField: [true, false, false, true], stringField: ["hello", "world"], messageField: [ testProtos.TestMessage.create({ stringField: "byeeee" }), testProtos.TestMessage.create({ stringField: "wow" }) ], bytesField: [Uint8Array.from([5, 78, 93, 101]), Uint8Array.from([7, 121, 1])], uint32Field: [12455], enumField: [testProtos.TestEnum.VAL0, testProtos.TestEnum.VAL1], sfixed32Field: [-135131], sfixed64Field: [Long.fromValue(-9102713712)] }).toJson() ).eql({ doubleField: [45.8, 78.1], floatField: ["NaN", "-Infinity"], int64Field: ["9223372036854775807"], uint64Field: ["1"], int32Field: [190323, -18278], fixed64Field: ["12988"], fixed32Field: [4173723], boolField: [true, false, false, true], stringField: ["hello", "world"], messageField: [ { stringField: "byeeee" }, { stringField: "wow" } ], bytesField: ["BU5dZQ==", "B3kB"], uint32Field: [12455], enumField: ["VAL0", "VAL1"], sfixed32Field: [-135131], sfixed64Field: ["-9102713712"] }); }); }); }); function reserialize( messageType: "TestMessage" | "TestRepeatedMessage" | "TestUnpackedRepeatedMessage", bytes: Uint8Array ): Uint8Array { const base64EncodedBytes = Buffer.from(bytes).toString("base64"); const returnedBase64EncodedBytes = execSync( `../${flags.reserializerLocation.get()} --proto_type=${messageType} --base64_proto_value=${base64EncodedBytes}` ).toString(); return Buffer.from(returnedBase64EncodedBytes, "base64"); }
the_stack
import { ClipInput, ClipParams, clipToPaths, clipToPolyTree } from "./clipFunctions"; import { ClipperError } from "./ClipperError"; import { hiRange } from "./constants"; import { ClipType, EndType, JoinType, NativeClipperLibLoadedFormat, NativeClipperLibRequestedFormat, PointInPolygonResult, PolyFillType } from "./enums"; import * as functions from "./functions"; import { IntPoint } from "./IntPoint"; import { IntRect } from "./IntRect"; import { NativeClipperLibInstance } from "./native/NativeClipperLibInstance"; import { OffsetInput, OffsetParams, offsetToPaths, offsetToPolyTree } from "./offsetFunctions"; import { Path, ReadonlyPath } from "./Path"; import { Paths, ReadonlyPaths } from "./Paths"; import { PolyNode } from "./PolyNode"; import { PolyTree } from "./PolyTree"; // export types export { ClipType, EndType, JoinType, PolyFillType, NativeClipperLibLoadedFormat, NativeClipperLibRequestedFormat, PointInPolygonResult, PolyNode, PolyTree, IntPoint, IntRect, Path, ReadonlyPath, Paths, ReadonlyPaths, ClipInput, ClipParams, OffsetInput, OffsetParams, ClipperError }; let wasmModule: NativeClipperLibInstance | undefined | Error; let asmJsModule: NativeClipperLibInstance | undefined; /** * A wrapper for the Native Clipper Library instance with all the operations available. */ export class ClipperLibWrapper { /** * Max coordinate value (both positive and negative). */ static readonly hiRange = hiRange; /** * Native library instance. */ readonly instance: NativeClipperLibInstance; /** * Native library format. */ readonly format: NativeClipperLibLoadedFormat; /** * Internal constructor. Use loadNativeClipperLibInstanceAsync instead. * * @param instance * @param format */ constructor(instance: NativeClipperLibInstance, format: NativeClipperLibLoadedFormat) { this.format = format; this.instance = instance; } /** * Performs a polygon clipping (boolean) operation, returning the resulting Paths or throwing an error if failed. 
* * The solution parameter in this case is a Paths or PolyTree structure. The Paths structure is simpler than the PolyTree structure. Because of this it is * quicker to populate and hence clipping performance is a little better (it's roughly 10% faster). However, the PolyTree data structure provides more * information about the returned paths which may be important to users. Firstly, the PolyTree structure preserves nested parent-child polygon relationships * (ie outer polygons owning/containing holes and holes owning/containing other outer polygons etc). Also, only the PolyTree structure can differentiate * between open and closed paths since each PolyNode has an IsOpen property. (The Path structure has no member indicating whether it's open or closed.) * For this reason, when open paths are passed to a Clipper object, the user must use a PolyTree object as the solution parameter, otherwise an exception * will be raised. * * When a PolyTree object is used in a clipping operation on open paths, two ancilliary functions have been provided to quickly separate out open and * closed paths from the solution - OpenPathsFromPolyTree and ClosedPathsFromPolyTree. PolyTreeToPaths is also available to convert path data to a Paths * structure (irrespective of whether they're open or closed). * * There are several things to note about the solution paths returned: * - they aren't in any specific order * - they should never overlap or be self-intersecting (but see notes on rounding) * - holes will be oriented opposite outer polygons * - the solution fill type can be considered either EvenOdd or NonZero since it will comply with either filling rule * - polygons may rarely share a common edge (though this is now very rare as of version 6) * * @param params - clipping operation data * @return {Paths} - the resulting Paths. 
*/ clipToPaths(params: ClipParams): Paths | undefined { return clipToPaths(this.instance, params); } /** * Performs a polygon clipping (boolean) operation, returning the resulting PolyTree or throwing an error if failed. * * The solution parameter in this case is a Paths or PolyTree structure. The Paths structure is simpler than the PolyTree structure. Because of this it is * quicker to populate and hence clipping performance is a little better (it's roughly 10% faster). However, the PolyTree data structure provides more * information about the returned paths which may be important to users. Firstly, the PolyTree structure preserves nested parent-child polygon relationships * (ie outer polygons owning/containing holes and holes owning/containing other outer polygons etc). Also, only the PolyTree structure can differentiate * between open and closed paths since each PolyNode has an IsOpen property. (The Path structure has no member indicating whether it's open or closed.) * For this reason, when open paths are passed to a Clipper object, the user must use a PolyTree object as the solution parameter, otherwise an exception * will be raised. * * When a PolyTree object is used in a clipping operation on open paths, two ancilliary functions have been provided to quickly separate out open and * closed paths from the solution - OpenPathsFromPolyTree and ClosedPathsFromPolyTree. PolyTreeToPaths is also available to convert path data to a Paths * structure (irrespective of whether they're open or closed). 
* * There are several things to note about the solution paths returned: * - they aren't in any specific order * - they should never overlap or be self-intersecting (but see notes on rounding) * - holes will be oriented opposite outer polygons * - the solution fill type can be considered either EvenOdd or NonZero since it will comply with either filling rule * - polygons may rarely share a common edge (though this is now very rare as of version 6) * * @param params - clipping operation data * @return {PolyTree} - the resulting PolyTree or undefined. */ clipToPolyTree(params: ClipParams): PolyTree | undefined { return clipToPolyTree(this.instance, params); } /** * Performs a polygon offset operation, returning the resulting Paths or undefined if failed. * * This method encapsulates the process of offsetting (inflating/deflating) both open and closed paths using a number of different join types * and end types. * * Preconditions for offsetting: * 1. The orientations of closed paths must be consistent such that outer polygons share the same orientation, and any holes have the opposite orientation * (ie non-zero filling). Open paths must be oriented with closed outer polygons. * 2. Polygons must not self-intersect. * * Limitations: * When offsetting, small artefacts may appear where polygons overlap. To avoid these artefacts, offset overlapping polygons separately. * * @param params - offset operation params * @return {Paths|undefined} - the resulting Paths or undefined if failed. */ offsetToPaths(params: OffsetParams): Paths | undefined { return offsetToPaths(this.instance, params); } /** * Performs a polygon offset operation, returning the resulting PolyTree or undefined if failed. * * This method encapsulates the process of offsetting (inflating/deflating) both open and closed paths using a number of different join types * and end types. * * Preconditions for offsetting: * 1. 
The orientations of closed paths must be consistent such that outer polygons share the same orientation, and any holes have the opposite orientation * (ie non-zero filling). Open paths must be oriented with closed outer polygons. * 2. Polygons must not self-intersect. * * Limitations: * When offsetting, small artefacts may appear where polygons overlap. To avoid these artefacts, offset overlapping polygons separately. * * @param params - offset operation params * @return {PolyTree|undefined} - the resulting PolyTree or undefined if failed. */ offsetToPolyTree(params: OffsetParams): PolyTree | undefined { return offsetToPolyTree(this.instance, params); } //noinspection JSMethodCanBeStatic /** * This function returns the area of the supplied polygon. It's assumed that the path is closed and does not self-intersect. Depending on orientation, * this value may be positive or negative. If Orientation is true, then the area will be positive and conversely, if Orientation is false, then the * area will be negative. * * @param path - The path * @return {number} - Area */ area(path: ReadonlyPath): number { return functions.area(path); } /** * Removes vertices: * - that join co-linear edges, or join edges that are almost co-linear (such that if the vertex was moved no more than the specified distance the edges * would be co-linear) * - that are within the specified distance of an adjacent vertex * - that are within the specified distance of a semi-adjacent vertex together with their out-lying vertices * * Vertices are semi-adjacent when they are separated by a single (out-lying) vertex. * * The distance parameter's default value is approximately √2 so that a vertex will be removed when adjacent or semi-adjacent vertices having their * corresponding X and Y coordinates differing by no more than 1 unit. (If the egdes are semi-adjacent the out-lying vertex will be removed too.) 
* * @param path - The path to clean * @param distance - How close points need to be before they are cleaned * @return {Path} - The cleaned path */ cleanPolygon(path: ReadonlyPath, distance = 1.1415): Path { return functions.cleanPolygon(this.instance, path, distance); } /** * Removes vertices: * - that join co-linear edges, or join edges that are almost co-linear (such that if the vertex was moved no more than the specified distance the edges * would be co-linear) * - that are within the specified distance of an adjacent vertex * - that are within the specified distance of a semi-adjacent vertex together with their out-lying vertices * * Vertices are semi-adjacent when they are separated by a single (out-lying) vertex. * * The distance parameter's default value is approximately √2 so that a vertex will be removed when adjacent or semi-adjacent vertices having their * corresponding X and Y coordinates differing by no more than 1 unit. (If the egdes are semi-adjacent the out-lying vertex will be removed too.) * * @param paths - The paths to clean * @param distance - How close points need to be before they are cleaned * @return {Paths} - The cleaned paths */ cleanPolygons(paths: ReadonlyPaths, distance = 1.1415): Paths { return functions.cleanPolygons(this.instance, paths, distance); } //noinspection JSMethodCanBeStatic /** * This function filters out open paths from the PolyTree structure and returns only closed paths in a Paths structure. * * @param polyTree * @return {Paths} */ closedPathsFromPolyTree(polyTree: PolyTree): Paths { return functions.closedPathsFromPolyTree(polyTree); } /** * Minkowski Difference is performed by subtracting each point in a polygon from the set of points in an open or closed path. A key feature of Minkowski * Difference is that when it's applied to two polygons, the resulting polygon will contain the coordinate space origin whenever the two polygons touch or * overlap. (This function is often used to determine when polygons collide.) 
* * @param poly1 * @param poly2 * @return {Paths} */ minkowskiDiff(poly1: ReadonlyPath, poly2: ReadonlyPath): Paths { return functions.minkowskiDiff(this.instance, poly1, poly2); } /** * Minkowski Addition is performed by adding each point in a polygon 'pattern' to the set of points in an open or closed path. The resulting polygon * (or polygons) defines the region that the 'pattern' would pass over in moving from the beginning to the end of the 'path'. * * @param pattern * @param path * @param pathIsClosed * @return {Paths} */ minkowskiSumPath(pattern: ReadonlyPath, path: ReadonlyPath, pathIsClosed: boolean): Paths { return functions.minkowskiSumPath(this.instance, pattern, path, pathIsClosed); } /** * Minkowski Addition is performed by adding each point in a polygon 'pattern' to the set of points in an open or closed path. The resulting polygon * (or polygons) defines the region that the 'pattern' would pass over in moving from the beginning to the end of the 'path'. * * @param pattern * @param paths * @param pathIsClosed * @return {Paths} */ minkowskiSumPaths(pattern: ReadonlyPath, paths: ReadonlyPaths, pathIsClosed: boolean): Paths { return functions.minkowskiSumPaths(this.instance, pattern, paths, pathIsClosed); } //noinspection JSMethodCanBeStatic /** * This function filters out closed paths from the PolyTree structure and returns only open paths in a Paths structure. * * @param polyTree * @return {ReadonlyPath[]} */ openPathsFromPolyTree(polyTree: PolyTree): ReadonlyPath[] { return functions.openPathsFromPolyTree(polyTree); } //noinspection JSMethodCanBeStatic /** * Orientation is only important to closed paths. Given that vertices are declared in a specific order, orientation refers to the direction (clockwise or * counter-clockwise) that these vertices progress around a closed path. * * Orientation is also dependent on axis direction: * - On Y-axis positive upward displays, orientation will return true if the polygon's orientation is counter-clockwise. 
* - On Y-axis positive downward displays, orientation will return true if the polygon's orientation is clockwise. * * Notes: * - Self-intersecting polygons have indeterminate orientations in which case this function won't return a meaningful value. * - The majority of 2D graphic display libraries (eg GDI, GDI+, XLib, Cairo, AGG, Graphics32) and even the SVG file format have their coordinate origins * at the top-left corner of their respective viewports with their Y axes increasing downward. However, some display libraries (eg Quartz, OpenGL) have their * coordinate origins undefined or in the classic bottom-left position with their Y axes increasing upward. * - For Non-Zero filled polygons, the orientation of holes must be opposite that of outer polygons. * - For closed paths (polygons) in the solution returned by the clip method, their orientations will always be true for outer polygons and false * for hole polygons (unless the reverseSolution property has been enabled). * * @param path - Path * @return {boolean} */ orientation(path: ReadonlyPath): boolean { return functions.orientation(path); } //noinspection JSMethodCanBeStatic /** * Returns PointInPolygonResult.Outside when false, PointInPolygonResult.OnBoundary when point is on poly and PointInPolygonResult.Inside when point is in * poly. * * It's assumed that 'poly' is closed and does not self-intersect. * * @param point * @param path * @return {PointInPolygonResult} */ pointInPolygon(point: Readonly<IntPoint>, path: ReadonlyPath): PointInPolygonResult { return functions.pointInPolygon(point, path); } //noinspection JSMethodCanBeStatic /** * This function converts a PolyTree structure into a Paths structure. * * @param polyTree * @return {Paths} */ polyTreeToPaths(polyTree: PolyTree): Paths { return functions.polyTreeToPaths(polyTree); } //noinspection JSMethodCanBeStatic /** * Reverses the vertex order (and hence orientation) in the specified path. 
* * @param path - Path to reverse, which gets overwritten rather than copied */ reversePath(path: Path): void { functions.reversePath(path); } //noinspection JSMethodCanBeStatic /** * Reverses the vertex order (and hence orientation) in each contained path. * * @param paths - Paths to reverse, which get overwritten rather than copied */ reversePaths(paths: Paths): void { functions.reversePaths(paths); } /** * Removes self-intersections from the supplied polygon (by performing a boolean union operation using the nominated PolyFillType). * Polygons with non-contiguous duplicate vertices (ie 'touching') will be split into two polygons. * * Note: There's currently no guarantee that polygons will be strictly simple since 'simplifying' is still a work in progress. * * @param path * @param fillType * @return {Paths} - The solution */ simplifyPolygon(path: ReadonlyPath, fillType: PolyFillType = PolyFillType.EvenOdd): Paths { return functions.simplifyPolygon(this.instance, path, fillType); } /** * Removes self-intersections from the supplied polygons (by performing a boolean union operation using the nominated PolyFillType). * Polygons with non-contiguous duplicate vertices (ie 'vertices are touching') will be split into two polygons. * * Note: There's currently no guarantee that polygons will be strictly simple since 'simplifying' is still a work in progress. * * @param paths * @param fillType * @return {Paths} - The solution */ simplifyPolygons(paths: ReadonlyPaths, fillType: PolyFillType = PolyFillType.EvenOdd): Paths { return functions.simplifyPolygons(this.instance, paths, fillType); } //noinspection JSMethodCanBeStatic /** * Scales a path by multiplying all its points by a number and then rounding them. 
* * @param path - Path to scale * @param scale - Scale multiplier * @return {Path} - The scaled path */ scalePath(path: ReadonlyPath, scale: number): Path { return functions.scalePath(path, scale); } //noinspection JSMethodCanBeStatic /** * Scales all inner paths by multiplying all its points by a number and then rounding them. * * @param paths - Paths to scale * @param scale - Scale multiplier * @return {Paths} - The scaled paths */ scalePaths(paths: ReadonlyPaths, scale: number): Paths { return functions.scalePaths(paths, scale); } } /** * Asynchronously tries to load a new native instance of the clipper library to be shared across all method invocations. * * @param format - Format to load, either WasmThenAsmJs, WasmOnly or AsmJsOnly. * @return {Promise<ClipperLibWrapper>} - Promise that resolves with the wrapper instance. */ export const loadNativeClipperLibInstanceAsync = async ( format: NativeClipperLibRequestedFormat ): Promise<ClipperLibWrapper> => { // TODO: in the future use these methods instead https://github.com/jedisct1/libsodium.js/issues/94 let tryWasm; let tryAsmJs; switch (format) { case NativeClipperLibRequestedFormat.WasmWithAsmJsFallback: tryWasm = true; tryAsmJs = true; break; case NativeClipperLibRequestedFormat.WasmOnly: tryWasm = true; tryAsmJs = false; break; case NativeClipperLibRequestedFormat.AsmJsOnly: tryWasm = false; tryAsmJs = true; break; default: throw new ClipperError("unknown native clipper format"); } function getModuleAsync( initModule: (overrides: object) => NativeClipperLibInstance | undefined ): Promise<NativeClipperLibInstance> { return new Promise<NativeClipperLibInstance>((resolve, reject) => { let finalModule: NativeClipperLibInstance | undefined; //noinspection JSUnusedLocalSymbols const moduleOverrides = { noExitRuntime: true, preRun() { if (finalModule) { resolve(finalModule); } else { setTimeout(() => { resolve(finalModule!); }, 1); } }, quit(code: number, err: Error) { reject(err); } }; finalModule = 
initModule(moduleOverrides); }); } if (tryWasm) { if (wasmModule instanceof Error) { // skip } else if (wasmModule === undefined) { try { const initModule = require("./wasm/clipper-wasm").init; wasmModule = await getModuleAsync(initModule); return new ClipperLibWrapper(wasmModule, NativeClipperLibLoadedFormat.Wasm); } catch (err) { wasmModule = err; } } else { return new ClipperLibWrapper(wasmModule, NativeClipperLibLoadedFormat.Wasm); } } if (tryAsmJs) { if (asmJsModule instanceof Error) { // skip } else if (asmJsModule === undefined) { try { const initModule = require("./wasm/clipper").init; asmJsModule = await getModuleAsync(initModule); return new ClipperLibWrapper(asmJsModule, NativeClipperLibLoadedFormat.AsmJs); } catch (err) { asmJsModule = err; } } else { return new ClipperLibWrapper(asmJsModule, NativeClipperLibLoadedFormat.AsmJs); } } throw new ClipperError("could not load native clipper in the desired format"); };
the_stack
import {reactive, autorun, StopFunction} from '@lume/variable' import {WebGLRenderer} from 'three/src/renderers/WebGLRenderer.js' import {BasicShadowMap, PCFSoftShadowMap, PCFShadowMap} from 'three/src/constants.js' import {PMREMGenerator} from 'three/src/extras/PMREMGenerator.js' import {TextureLoader} from 'three/src/loaders/TextureLoader.js' import {VRButton} from 'three/examples/jsm/webxr/VRButton.js' // TODO import {ARButton} from 'three/examples/jsm/webxr/ARButton.js' import type {Scene} from '../core/Scene.js' import type {Texture} from 'three/src/textures/Texture.js' interface SceneState { renderer: WebGLRenderer pmremgen?: PMREMGenerator backgroundIsEquirectangular?: boolean hasBackground?: boolean hasEnvironment?: boolean sizeChangeHandler: () => void } const sceneStates = new WeakMap<Scene, SceneState>() let instance: WebglRendererThree | null = null let isCreatingSingleton = false /** @typedef {'pcf' | 'pcfsoft' | 'basic'} ShadowMapTypeString */ export type ShadowMapTypeString = 'pcf' | 'pcfsoft' | 'basic' /** * @internal * A singleton responsible for setting up and * drawing a WebGL scene for a given core/Scene using Three.js */ @reactive export class WebglRendererThree { static singleton() { if (instance) return instance else { try { isCreatingSingleton = true return (instance = new WebglRendererThree()) } catch (e) { throw e } finally { isCreatingSingleton = false } } } private constructor() { if (!isCreatingSingleton) throw new Error('class is a singleton, use the static .singleton() method to get an instance') } @reactive localClippingEnabled = false disposers: StopFunction[] = [] initialize(scene: Scene) { let sceneState = sceneStates.get(scene) if (sceneState) return // TODO: options controlled by HTML attributes on scene elements. const renderer = new WebGLRenderer({ // TODO: how do we change alpha:true to alpha:false after the fact? 
alpha: true, premultipliedAlpha: true, antialias: true, }) this.disposers.push( autorun(() => { renderer.localClippingEnabled = this.localClippingEnabled }), ) // TODO: make some of the renderer options configurable by property/attribute. // Needs to be enabled first for it to work? If so, we need to destroy // and reinitialize renderes to toggle between XR or non-XR scenes. renderer.xr.enabled = true renderer.setPixelRatio(window.devicePixelRatio) renderer.shadowMap.enabled = true renderer.shadowMap.type = PCFSoftShadowMap // default PCFShadowMap sceneStates.set( scene, (sceneState = { renderer, sizeChangeHandler: () => this.updateResolution(scene), }), ) this.updateResolution(scene) scene.on('sizechange', sceneState.sizeChangeHandler) // TODO? Maybe the html/scene.js element should be responsible for // making this, so that DOM logic is encapsulated there? scene._glLayer!.appendChild(renderer.domElement) } uninitialize(scene: Scene) { const sceneState = sceneStates.get(scene) if (!sceneState) return scene.off('sizechange', sceneState.sizeChangeHandler) scene._glLayer?.removeChild(sceneState.renderer.domElement) sceneState.renderer.dispose() sceneState.pmremgen?.dispose() sceneStates.delete(scene) } drawScene(scene: Scene) { const sceneState = sceneStates.get(scene) if (!sceneState) throw new ReferenceError('Can not draw scene. Scene state should be initialized first.') const {renderer} = sceneState renderer.render(scene.three, scene.threeCamera) } // TODO FIXME This is tied to the `sizechange` event of Scene, which means // camera and renderer resize happens outside of the animation loop, but as // with _calcSize, we want to see if we can put this in the animation loop // as well. Putting this logic in the loop depends on putting _calcSize in // the loop. #66 updateResolution(scene: Scene) { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to update resolution. 
Scene state should be initialized first.') scene._updateCameraAspect() scene._updateCameraPerspective() scene._updateCameraProjection() const {x, y} = scene.calculatedSize state.renderer.setSize(x, y) scene.needsUpdate() } setClearColor(scene: Scene, color: any, opacity: number) { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to set clear color. Scene state should be initialized first.') state.renderer.setClearColor(color, opacity) } setClearAlpha(scene: Scene, opacity: number) { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to set clear alpha. Scene state should be initialized first.') state.renderer.setClearAlpha(opacity) } setShadowMapType(scene: Scene, type: ShadowMapTypeString | null) { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to set clear alpha. Scene state should be initialized first.') // default if (!type) { state.renderer.shadowMap.type = PCFShadowMap return } // TODO shouldn't need a cast here. Bug on TypeScript: https://github.com/microsoft/TypeScript/issues/32054 type = type.toLowerCase() as ShadowMapTypeString if (type == 'pcf') { state.renderer.shadowMap.type = PCFShadowMap } else if (type == 'pcfsoft') { state.renderer.shadowMap.type = PCFSoftShadowMap } else if (type == 'basic') { state.renderer.shadowMap.type = BasicShadowMap } } #bgVersion = 0 /** * @method enableBackground - Enable background texture handling for the given scene. * @param {Scene} scene - The given scene. * @param {boolean} isEquirectangular - True if the background is equirectangular (to use as an environment map), false for a static background image. * @param {(t: Texture | undefined) => void} cb - A callback that is called * when the background mechanics are done loading. The Callback receives the * background Texture instance. 
*/ enableBackground(scene: Scene, isEquirectangular: boolean, cb: (tex: Texture | undefined) => void): void { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Internal error: Scene not registered with WebGLRendererThree.') this.#bgVersion += 1 state.backgroundIsEquirectangular = isEquirectangular if (isEquirectangular) { // Load the PMREM machinery only if needed. if (!state.pmremgen) { state.pmremgen = new PMREMGenerator(state.renderer) state.pmremgen.compileCubemapShader() } } state.hasBackground = true this.#loadBackgroundTexture(scene, cb) } /** * @method disableBackground - Disable background for the given scene. * @param {Scene} scene - The given scene. */ disableBackground(scene: Scene): void { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Internal error: Scene not registered with WebGLRendererThree.') this.#bgVersion += 1 if (!state.hasBackground && !state.hasEnvironment) { state.pmremgen?.dispose() state.pmremgen = undefined } } /** * @private * @method #loadBackgroundTexture - Load the background texture for the given scene. * @param {Scene} scene - The given scene. * @param {(t: Texture | undefined) => void} cb - Callback called when the * texture is done loading. It receives the Texture, or undefined if loading * was canceled or if other issues. */ #loadBackgroundTexture(scene: Scene, cb: (texture: Texture) => void): void { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Internal error: Scene not registered with WebGLRendererThree.') const version = this.#bgVersion new TextureLoader().load(scene.background ?? 
'', tex => { // In case state changed during load, ignore a loaded texture that // corresponds to previous state: if (version !== this.#bgVersion) return if (state.backgroundIsEquirectangular) { cb(state.pmremgen!.fromEquirectangular(tex).texture) } else { cb(tex) } }) } #envVersion = 0 /** * @method enableEnvironment - Enable environment texture handling for the given scene. * @param {Scene} scene - The given scene. * @param {(t: Texture | undefined) => void} cb - A callback that is called * when the environment mechanics are done loading. The Callback receives the * background Texture instance. */ enableEnvironment(scene: Scene, cb: (tex: Texture) => void): void { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Internal error: Scene not registered with WebGLRendererThree.') this.#envVersion += 1 // Load the PMREM machinery only if needed. if (!state.pmremgen) { state.pmremgen = new PMREMGenerator(state.renderer) state.pmremgen.compileCubemapShader() } state.hasEnvironment = true this.#loadEnvironmentTexture(scene, cb) } /** * @method disableEnvironment - Disable the environment map for the given scene. * @param {Scene} scene - The given scene. */ disableEnvironment(scene: Scene): void { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Internal error: Scene not registered with WebGLRendererThree.') this.#envVersion += 1 if (!state.hasBackground && !state.hasEnvironment) { state.pmremgen?.dispose() state.pmremgen = undefined } } /** * @private * @method #loadEnvironmentTexture - Load the environment texture for the given scene. * @param {Scene} scene - The given scene. * @param {(t: Texture | undefined) => void} cb - Callback called when the * texture is done loading. It receives the Texture. 
*/ #loadEnvironmentTexture(scene: Scene, cb: (texture: Texture) => void): void { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Internal error: Scene not registered with WebGLRendererThree.') const version = this.#envVersion new TextureLoader().load(scene.environment ?? '', tex => { // In case state changed during load, ignore a loaded texture that // corresponds to previous state: if (version !== this.#envVersion) return cb(state.pmremgen!.fromEquirectangular(tex).texture) tex.dispose() // Three.js demos do this. Not sure if it is really needed. }) } requestFrame(scene: Scene, fn: FrameRequestCallback) { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to request frame. Scene state should be initialized first.') const {renderer} = state if (renderer.setAnimationLoop) // >= r94 renderer.setAnimationLoop(fn) else if (renderer.animate) // < r94 renderer.animate(fn as () => void) } // TODO: at the moment this has only been tested toggling it on // once. Should we be able to turn it off too (f.e. the vr attribute is removed)? // TODO Update to WebXR (WebXRManager in Three) enableVR(scene: Scene, enable: boolean) { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to enable VR. Scene state should be initialized first.') const {renderer} = state renderer.xr.enabled = enable } // TODO the UI here should be configurable via HTML // TODO Update to WebXR createDefaultVRButton(scene: Scene): HTMLElement { const state = sceneStates.get(scene) if (!state) throw new ReferenceError('Unable to create VR button. Scene state should be initialized first.') const {renderer} = state return VRButton.createButton(renderer) } } export function releaseWebGLRendererThree() { instance = null }
the_stack
import React from 'react' import { extractNameFromKey, createNonClashingNameAsync } from '@remix-ui/helper' import Gists from 'gists' import { customAction } from '@remixproject/plugin-api/lib/file-system/file-panel/type' import { displayNotification, displayPopUp, fetchDirectoryError, fetchDirectoryRequest, fetchDirectorySuccess, focusElement, fsInitializationCompleted, hidePopUp, removeInputFieldSuccess, setCurrentWorkspace, setExpandPath, setMode, setWorkspaces } from './payload' import { listenOnPluginEvents, listenOnProviderEvents } from './events' import { createWorkspaceTemplate, getWorkspaces, loadWorkspacePreset, setPlugin } from './workspace' export * from './events' export * from './workspace' const QueryParams = require('../../../../../../apps/remix-ide/src/lib/query-params') const queryParams = new QueryParams() let plugin, dispatch: React.Dispatch<any> export const initWorkspace = (filePanelPlugin) => async (reducerDispatch: React.Dispatch<any>) => { if (filePanelPlugin) { plugin = filePanelPlugin dispatch = reducerDispatch setPlugin(plugin, dispatch) const workspaceProvider = filePanelPlugin.fileProviders.workspace const localhostProvider = filePanelPlugin.fileProviders.localhost const params = queryParams.get() const workspaces = await getWorkspaces() || [] dispatch(setWorkspaces(workspaces)) if (params.gist) { await createWorkspaceTemplate('gist-sample', 'gist-template') plugin.setWorkspace({ name: 'gist-sample', isLocalhost: false }) dispatch(setCurrentWorkspace('gist-sample')) await loadWorkspacePreset('gist-template') } else if (params.code || params.url) { await createWorkspaceTemplate('code-sample', 'code-template') plugin.setWorkspace({ name: 'code-sample', isLocalhost: false }) dispatch(setCurrentWorkspace('code-sample')) const filePath = await loadWorkspacePreset('code-template') plugin.on('editor', 'editorMounted', () => plugin.fileManager.openFile(filePath)) } else { if (workspaces.length === 0) { await 
createWorkspaceTemplate('default_workspace', 'default-template') plugin.setWorkspace({ name: 'default_workspace', isLocalhost: false }) dispatch(setCurrentWorkspace('default_workspace')) await loadWorkspacePreset('default-template') } else { if (workspaces.length > 0) { workspaceProvider.setWorkspace(workspaces[workspaces.length - 1]) plugin.setWorkspace({ name: workspaces[workspaces.length - 1], isLocalhost: false }) dispatch(setCurrentWorkspace(workspaces[workspaces.length - 1])) } } } listenOnPluginEvents(plugin) listenOnProviderEvents(workspaceProvider)(dispatch) listenOnProviderEvents(localhostProvider)(dispatch) dispatch(setMode('browser')) plugin.setWorkspaces(await getWorkspaces()) dispatch(fsInitializationCompleted()) plugin.emit('workspaceInitializationCompleted') } } export const fetchDirectory = async (path: string) => { const provider = plugin.fileManager.currentFileProvider() const promise = new Promise((resolve) => { provider.resolveDirectory(path, (error, fileTree) => { if (error) console.error(error) resolve(fileTree) }) }) dispatch(fetchDirectoryRequest(promise)) promise.then((fileTree) => { dispatch(fetchDirectorySuccess(path, fileTree)) }).catch((error) => { dispatch(fetchDirectoryError({ error })) }) return promise } export const removeInputField = async (path: string) => { dispatch(removeInputFieldSuccess(path)) } export const publishToGist = async (path?: string, type?: string) => { // If 'id' is not defined, it is not a gist update but a creation so we have to take the files from the browser explorer. const folder = path || '/' const id = type === 'gist' ? extractNameFromKey(path).split('-')[1] : null try { const packaged = await packageGistFiles(folder) // check for token const config = plugin.registry.get('config').api const accessToken = config.get('settings/gist-access-token') if (!accessToken) { dispatch(displayNotification('Authorize Token', 'Remix requires an access token (which includes gists creation permission). 
Please go to the settings tab to create one.', 'Close', null, () => {})) } else { const description = 'Created using remix-ide: Realtime Ethereum Contract Compiler and Runtime. \n Load this file by pasting this gists URL or ID at https://remix.ethereum.org/#version=' + queryParams.get().version + '&optimize=' + queryParams.get().optimize + '&runs=' + queryParams.get().runs + '&gist=' const gists = new Gists({ token: accessToken }) if (id) { const originalFileList = await getOriginalFiles(id) // Telling the GIST API to remove files const updatedFileList = Object.keys(packaged) const allItems = Object.keys(originalFileList) .filter(fileName => updatedFileList.indexOf(fileName) === -1) .reduce((acc, deleteFileName) => ({ ...acc, [deleteFileName]: null }), originalFileList) // adding new files updatedFileList.forEach((file) => { const _items = file.split('/') const _fileName = _items[_items.length - 1] allItems[_fileName] = packaged[file] }) dispatch(displayPopUp('Saving gist (' + id + ') ...')) gists.edit({ description: description, public: true, files: allItems, id: id }, (error, result) => { handleGistResponse(error, result) if (!error) { for (const key in allItems) { if (allItems[key] === null) delete allItems[key] } } }) } else { // id is not existing, need to create a new gist dispatch(displayPopUp('Creating a new gist ...')) gists.create({ description: description, public: true, files: packaged }, (error, result) => { handleGistResponse(error, result) }) } } } catch (error) { console.log(error) dispatch(displayNotification('Publish to gist Failed', 'Failed to create gist: ' + error.message, 'Close', null, async () => {})) } } export const clearPopUp = async () => { dispatch(hidePopUp()) } export const createNewFile = async (path: string, rootDir: string) => { const fileManager = plugin.fileManager const newName = await createNonClashingNameAsync(path, fileManager) const createFile = await fileManager.writeFile(newName, '') if (!createFile) { return 
dispatch(displayPopUp('Failed to create file ' + newName)) } else { const path = newName.indexOf(rootDir + '/') === 0 ? newName.replace(rootDir + '/', '') : newName await fileManager.open(path) setFocusElement([{ key: path, type: 'file' }]) } } export const setFocusElement = async (elements: { key: string, type: 'file' | 'folder' | 'gist' }[]) => { dispatch(focusElement(elements)) } export const createNewFolder = async (path: string, rootDir: string) => { const fileManager = plugin.fileManager const dirName = path + '/' const exists = await fileManager.exists(dirName) if (exists) { return dispatch(displayNotification('Rename File Failed', `A file or folder ${extractNameFromKey(path)} already exists at this location. Please choose a different name.`, 'Close', null, () => {})) } await fileManager.mkdir(dirName) path = path.indexOf(rootDir + '/') === 0 ? path.replace(rootDir + '/', '') : path dispatch(focusElement([{ key: path, type: 'folder' }])) } export const deletePath = async (path: string[]) => { const fileManager = plugin.fileManager for (const p of path) { try { await fileManager.remove(p) } catch (e) { const isDir = await fileManager.isDirectory(p) dispatch(displayPopUp(`Failed to remove ${isDir ? 'folder' : 'file'} ${p}.`)) } } } export const renamePath = async (oldPath: string, newPath: string) => { const fileManager = plugin.fileManager const exists = await fileManager.exists(newPath) if (exists) { dispatch(displayNotification('Rename File Failed', `A file or folder ${extractNameFromKey(newPath)} already exists at this location. Please choose a different name.`, 'Close', null, () => {})) } else { await fileManager.rename(oldPath, newPath) } } export const copyFile = async (src: string, dest: string) => { const fileManager = plugin.fileManager try { fileManager.copyFile(src, dest) } catch (error) { dispatch(displayPopUp('Oops! An error ocurred while performing copyFile operation.' 
+ error)) } } export const copyFolder = async (src: string, dest: string) => { const fileManager = plugin.fileManager try { fileManager.copyDir(src, dest) } catch (error) { dispatch(displayPopUp('Oops! An error ocurred while performing copyDir operation.' + error)) } } export const runScript = async (path: string) => { const provider = plugin.fileManager.currentFileProvider() provider.get(path, (error, content: string) => { if (error) { return dispatch(displayPopUp(error)) } plugin.call('scriptRunner', 'execute', content) }) } export const emitContextMenuEvent = async (cmd: customAction) => { plugin.call(cmd.id, cmd.name, cmd) } export const handleClickFile = async (path: string, type: 'file' | 'folder' | 'gist') => { plugin.fileManager.open(path) dispatch(focusElement([{ key: path, type }])) } export const handleExpandPath = (paths: string[]) => { dispatch(setExpandPath(paths)) } const packageGistFiles = (directory) => { return new Promise((resolve, reject) => { const workspaceProvider = plugin.fileProviders.workspace const isFile = workspaceProvider.isFile(directory) const ret = {} if (isFile) { try { workspaceProvider.get(directory, (error, content) => { if (error) throw new Error('An error ocurred while getting file content. ' + directory) if (/^\s+$/.test(content) || !content.length) { content = '// this line is added to create a gist. Empty file is not allowed.' } directory = directory.replace(/\//g, '...') ret[directory] = { content } return resolve(ret) }) } catch (e) { return reject(e) } } else { try { (async () => { await workspaceProvider.copyFolderToJson(directory, ({ path, content }) => { if (/^\s+$/.test(content) || !content.length) { content = '// this line is added to create a gist. Empty file is not allowed.' 
} if (path.indexOf('gist-') === 0) { path = path.split('/') path.shift() path = path.join('/') } path = path.replace(/\//g, '...') ret[path] = { content } }) resolve(ret) })() } catch (e) { return reject(e) } } }) } const handleGistResponse = (error, data) => { if (error) { dispatch(displayNotification('Publish to gist Failed', 'Failed to manage gist: ' + error, 'Close', null)) } else { if (data.html_url) { dispatch(displayNotification('Gist is ready', `The gist is at ${data.html_url}. Would you like to open it in a new window?`, 'OK', 'Cancel', () => { window.open(data.html_url, '_blank') }, () => {})) } else { const error = JSON.stringify(data.errors, null, '\t') || '' const message = data.message === 'Not Found' ? data.message + '. Please make sure the API token has right to create a gist.' : data.message dispatch(displayNotification('Publish to gist Failed', message + ' ' + data.documentation_url + ' ' + error, 'Close', null)) } } } /** * This function is to get the original content of given gist * @params id is the gist id to fetch */ const getOriginalFiles = async (id) => { if (!id) { return [] } const url = `https://api.github.com/gists/${id}` const res = await fetch(url) const data = await res.json() return data.files || [] }
the_stack
import { isServiceOrientedApp } from './soa-component';
import { resolveAssetsPath } from '../libs/iso-libs';
import * as deepmerge from 'deepmerge';
import { IConfigParseResult } from '../libs/config-parse-result';
import {IPlugin, forwardChildIamRoleStatements} from '../libs/plugin';
import { PARSER_MODES } from '../libs/parser';
import extractDomain from 'extract-domain';

/**
 * Parameters that apply to the whole Plugin, passed by other plugins
 */
export interface ISoaPlugin {
    /**
     * the stage is the environment to apply
     */
    stage: string,
    /**
     * one of the [[PARSER_MODES]]
     */
    parserMode: string,
    /**
     * path to a directory where we put the final bundles
     */
    buildPath: string,
    /**
     * path to the main config file
     */
    configFilePath: string
}

/**
 * A Plugin to detect SinglePage-App-Components
 *
 * Produces an IPlugin whose `process` turns a service-oriented-app component
 * (plus its children's parse results) into one IConfigParseResult: webpack
 * configs for a server bundle and a client bundle, merged serverless configs
 * (IAM role, optional CloudFront/Route53 domain, offline plugin), and a list
 * of post-build hooks.
 *
 * @param props
 */
export const SoaPlugin = (props: ISoaPlugin): IPlugin => {
    //console.log("configFilePath: " , props.configFilePath);
    const result: IPlugin = {
        // identify Isomorphic-App-Components
        applies: (component): boolean => {
            return isServiceOrientedApp(component);
        },

        // convert the component into configuration parts
        process: (
            component: any,
            childConfigs: Array<IConfigParseResult>,
            infrastructureMode: string | undefined
        ): IConfigParseResult => {
            console.log("services: ", component.services);
            const path = require('path');

            // we use the hardcoded name `server` as name
            const serverName = "server";
            const serverBuildPath = path.join(require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(), props.buildPath);

            // the service-oriented app has a server application
            // Webpack config for the server-side bundle (entry: packaged soa-server.js).
            const serverWebPack = require("../../../infrastructure-scripts/dist/infra-comp-utils/webpack-libs").complementWebpackConfig(
                require("../../../infrastructure-scripts/dist/infra-comp-utils/webpack-libs").createServerWebpackConfig(
                    "./"+path.join("node_modules", "infrastructure-components", "dist" , "assets", "soa-server.js"), //entryPath: string,
                    serverBuildPath, //use the buildpath from the parent plugin
                    serverName, // name of the server
                    {
                        // module aliases resolved into the bundle
                        __CONFIG_FILE_PATH__: require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").pathToConfigFile(props.configFilePath), // replace the IsoConfig-Placeholder with the real path to the main-config-bundle

                        // required of data-layer, makes the context match!
                        "infrastructure-components": path.join(
                            require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(),
                            "node_modules", "infrastructure-components", "dist", "index.js"),

                        // required of the routed-app
                        "react-router-dom": path.join(
                            require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(),
                            "node_modules", "react-router-dom"),

                        // required of the data-layer / apollo
                        "react-apollo": path.join(
                            require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(),
                            "node_modules", "react-apollo"),
                    }, {
                        // compile-time string replacements injected into the bundle
                        __SERVICEORIENTED_ID__: `"${component.instanceId}"`,
                        __ISOFFLINE__: props.parserMode === PARSER_MODES.MODE_START,
                        //__ASSETS_PATH__: `"${component.assetsPath}"`,
                        __DATALAYER_ID__: `"${component.dataLayerId}"`,
                        /*__RESOLVED_ASSETS_PATH__: `"${resolveAssetsPath(
                            component.buildPath,
                            serverName,
                            component.assetsPath )
                        }"`*/
                        // TODO add replacements of datalayers here!
                    },
                ),
                props.parserMode === PARSER_MODES.MODE_DEPLOY //isProd
            );

            const webappBuildPath = path.join(require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(), props.buildPath);

            // Webpack config for the client-side (browser) bundle (entry: packaged soa.js).
            const soaWebPack = require("../../../infrastructure-scripts/dist/infra-comp-utils/webpack-libs")
                .complementWebpackConfig(require("../../../infrastructure-scripts/dist/infra-comp-utils/webpack-libs")
                    .createClientWebpackConfig(
                        "./"+path.join("node_modules", "infrastructure-components", "dist" , "assets", "soa.js"), //entryPath: string,
                        path.join(require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(), props.buildPath), //use the buildpath from the parent plugin
                        component.id, //appName
                        undefined, //assetsPath
                        undefined, // stagePath: TODO take from Environment!
                        {
                            __CONFIG_FILE_PATH__: require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").pathToConfigFile(props.configFilePath), // replace the IsoConfig-Placeholder with the real path to the main-config-bundle

                            // required of the routed-app
                            "react-router-dom": path.join(
                                require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(),
                                "node_modules", "react-router-dom"),

                            // required of the data-layer / apollo
                            "react-apollo": path.join(
                                require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(),
                                "node_modules", "react-apollo"),
                        }, {
                        }
                    ),
                    props.parserMode === PARSER_MODES.MODE_DEPLOY //isProd
                );

            // provide all client configs in a flat list
            const webpackConfigs: any = childConfigs.reduce((result, config) => result.concat(config.webpackConfigs), []);

            // Post-build hook; currently a no-op (body is commented out).
            const copyAssetsPostBuild = () => {
                //console.log("check for >>copyAssetsPostBuild<<");

                /* if (props.parserMode !== PARSER_MODES.MODE_DOMAIN && props.parserMode !== PARSER_MODES.MODE_DEPLOY) {
                    // always copy the assets, unless we setup the domain
                    console.log("copyAssetsPostBuild: now copy the assets!");

                    webpackConfigs.map(config => require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").copyAssets( config.output.path, path.join(serverBuildPath, serverName, component.assetsPath)));
                } else {
                    // delete the assets folder for we don't want to include all these bundled files in the deployment-package
                    const rimraf = require("rimraf");
                    rimraf.sync(path.join(serverBuildPath, serverName, component.assetsPath));
                } */
            };

            // Flatten the environments of all child configs into one list.
            const environments = childConfigs.reduce((result, config) => (result !== undefined ? result : []).concat(config.environments !== undefined ? config.environments : []), []);

            // check whether we already created the domain of this environment
            const deployedDomain = process.env[`DOMAIN_${props.stage}`] !== undefined;

            // First defined domain/certArn among the children wins.
            const domain = childConfigs.map(config => config.domain).reduce((result, domain) => result !== undefined ? result : domain, undefined);
            const certArn = childConfigs.map(config => config.certArn).reduce((result, certArn) => result !== undefined ? result : certArn, undefined);

            // Only use a stage path when deploying WITHOUT a custom domain.
            // NOTE(review): `domain == undefined` is loose equality (also matches null) — confirm intentional.
            const stagePath = props.parserMode === PARSER_MODES.MODE_DEPLOY && domain == undefined && environments !== undefined && environments.length > 0 ? environments[0].name : undefined;

            // Post-build hook: write the SPA's index.html, injecting the GraphQL
            // endpoint and basename derived from the deployed service endpoint.
            const createHtml = ( { serviceEndpoints }) => {
                //console.log("check for >>copyAssetsPostBuild<<");
                //if (props.parserMode == PARSER_MODES.MODE_BUILD) {
                console.log("write the index.html!");
                console.log("serviceEndpoints: ", serviceEndpoints);

                // we need to get rid of the path of the endpoint
                const servicePath = serviceEndpoints && serviceEndpoints.length > 0 ? (
                    stagePath ?
                        // when we have a stagePath, we can remove anything behind it
                        serviceEndpoints[0].substr(0, serviceEndpoints[0].indexOf(stagePath)+stagePath.length) :
                        // when we don't have a stagePath - TODO
                        serviceEndpoints[0]
                ) : undefined;

                console.log ("servicePath: " , servicePath);

                // TODO this should not be hard-coded
                const graphqlUrl = component.dataLayerId ? (
                    props.parserMode === PARSER_MODES.MODE_START ? "http://localhost:3001/query" : servicePath+"/query"
                ) : undefined;

                //region: 'localhost',
                //endpoint: 'http://localhost:8000',

                require('fs').writeFileSync(path.join(webappBuildPath, component.stackName, "index.html"),
                    `<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="utf-8" />
    <title>${component.stackName}</title>
    <style>
        body {
            display: block;
            margin: 0px;
        }
    </style>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<script>
    ${graphqlUrl !== undefined ? `window.__GRAPHQL__ ="${graphqlUrl}"` : ""};
    ${servicePath !== undefined ? `window.__BASENAME__ ="${servicePath}"` : ""};
</script>
<script src="${component.stackName}.bundle.js"></script>
</body>
</html>`);
            };

            // Post-build hook: bust the CloudFront cache after a deployment to an
            // already-created domain.
            const invalidateCloudFrontCache = () => {
                if (deployedDomain && props.parserMode === PARSER_MODES.MODE_DEPLOY) {
                    require("../../../infrastructure-scripts/dist/infra-comp-utils/sls-libs").invalidateCloudFrontCache(domain);
                }
            }

            // NOTE(review): falls back to `{}` (an object) when no domain is set;
            // downstream string interpolation of that fallback would yield
            // "[object Object]" — confirm it is never interpolated in that case.
            const hostedZoneName = domain !== undefined ? extractDomain(domain.toString()) : {};

            /** post build function to write to the .env file that the domain has been deployed */
            const writeDomainEnv = () => {
                //console.log("check for >>writeDomainEnv<<");

                // we only write to the .env file when we are in domain mode, i.e. this script creates the domain
                // and we did not yet deployed the domain previously
                if (!deployedDomain && props.parserMode === PARSER_MODES.MODE_DOMAIN) {
                    require('fs').appendFileSync(
                        path.join(
                            require("../../../infrastructure-scripts/dist/infra-comp-utils/system-libs").currentAbsolutePath(), ".env"),
                        `\nDOMAIN_${props.stage}=TRUE`
                    );
                }
            };

            /*
            const postDeploy = async () => {
                //console.log("check for >>showStaticPageName<<");
                if (props.parserMode === PARSER_MODES.MODE_DEPLOY) {
                    await require('../libs/scripts-libs').fetchData("deploy", {
                        proj: component.stackName,
                        envi: props.stage,
                        domain: domain,
                        endp: `http://${component.stackName}-${props.stage}.s3-website-${component.region}.amazonaws.com`
                    });

                    console.log(`Your SinglePageApp is now available at: http://${component.stackName}-${props.stage}.s3-website-${component.region}.amazonaws.com`);
                }
            };*/

            // Post-build hook: run the serverless deployment when creating the domain.
            async function deployWithDomain() {
                // start the sls-config
                if (props.parserMode === PARSER_MODES.MODE_DOMAIN) {
                    await require("../../../infrastructure-scripts/dist/infra-comp-utils/sls-libs").deploySls(component.stackName);
                }
            }

            // IAM statements forwarded from children plus the component's own ones.
            const additionalStatements: Array<any> = forwardChildIamRoleStatements(childConfigs).concat(
                component.iamRoleStatements ? component.iamRoleStatements : []
            );

            // Assign the dedicated role (defined below) to the server function.
            const iamRoleAssignment = {
                functions: {}
            };
            iamRoleAssignment.functions[serverName] = {
                role: "ServiceOrientedAppLambdaRole"
            }

            // CloudFormation: the Lambda execution role with logs/cloudwatch/S3
            // access plus any additional statements collected above. The quoted
            // string values use the serverless variable/escaping conventions the
            // downstream config writer expects.
            const iamPermissions = {
                resources: {
                    Resources: {
                        ServiceOrientedAppLambdaRole: {
                            Type: "AWS::IAM::Role",
                            Properties: {
                                RoleName: "${self:service}-${self:provider.stage, env:STAGE, 'dev'}-ServiceOrientedAppLambdaRole",
                                AssumeRolePolicyDocument: {
                                    Version: '"2012-10-17"',
                                    Statement: [
                                        {
                                            Effect: "Allow",
                                            Principal: {
                                                Service: ["lambda.amazonaws.com"]
                                            },
                                            Action: "sts:AssumeRole"
                                        }
                                    ]
                                },
                                Policies: [
                                    {
                                        PolicyName: "${self:service}-${self:provider.stage, env:STAGE, 'dev'}-ServiceOrientedAppLambdaPolicy",
                                        PolicyDocument: {
                                            Version: '"2012-10-17"',
                                            Statement: [
                                                {
                                                    Effect: "Allow",
                                                    Action: [
                                                        '"logs:*"',
                                                        '"cloudwatch:*"'
                                                    ],
                                                    Resource: '"*"'
                                                },
                                                {
                                                    Effect: "Allow",
                                                    Action: [
                                                        "s3:Get*",
                                                        "s3:List*",
                                                        "s3:Put*",
                                                        "s3:Delete*"
                                                    ],
                                                    Resource: {
                                                        "Fn::Join": '["", ["arn:aws:s3:::", {"Ref": "StaticBucket" }, "/*"]]'
                                                    }
                                                },
                                            ].concat(additionalStatements)
                                        }
                                    }
                                ]
                            }
                        },
                    },
                }
            }

            // TODO this should rather be put into DataLayer-Plugin!!!
            const dataLayerService = component.dataLayerId !== undefined ? [{
                method: "ANY",
                path: "query"
            }] : [];

            /**
             * ONLY add the domain config if we are in domain mode!
             * TODO once the domain has been added, we need to add this with every deployment
             */
            const domainConfig = (props.parserMode === PARSER_MODES.MODE_DOMAIN || deployedDomain) && domain !== undefined && certArn !== undefined ? {
                // required of the SPA-domain-alias
                provider: {
                    customDomainName: domain,
                    hostedZoneName: hostedZoneName,
                    certArn: certArn
                },
                resources: {
                    Resources: {
                        WebAppCloudFrontDistribution: {
                            Type: "AWS::CloudFront::Distribution",
                            Properties: {
                                DistributionConfig: {
                                    Origins: [
                                        {
                                            DomainName: "${self:provider.staticBucket}.s3.amazonaws.com",
                                            Id: component.stackName,
                                            CustomOriginConfig: {
                                                HTTPPort: 80,
                                                HTTPSPort: 443,
                                                OriginProtocolPolicy: "https-only",
                                            }
                                        }
                                    ],
                                    Enabled: "'true'",
                                    DefaultRootObject: "index.html",
                                    // SPA routing: serve index.html for unknown paths.
                                    CustomErrorResponses: [{
                                        ErrorCode: 404,
                                        ResponseCode: 200,
                                        ResponsePagePath: "/index.html"
                                    }],
                                    DefaultCacheBehavior: {
                                        AllowedMethods: [
                                            "DELETE",
                                            "GET",
                                            "HEAD",
                                            "OPTIONS",
                                            "PATCH",
                                            "POST",
                                            "PUT"
                                        ],
                                        TargetOriginId: component.stackName,
                                        ForwardedValues: {
                                            QueryString: "'false'",
                                            Cookies: {
                                                Forward: "none"
                                            }
                                        },
                                        ViewerProtocolPolicy: "redirect-to-https"
                                    },
                                    ViewerCertificate: {
                                        AcmCertificateArn: "${self:provider.certArn}",
                                        SslSupportMethod: "sni-only",
                                    },
                                    Aliases: ["${self:provider.customDomainName}"]
                                }
                            }
                        },
                        DnsRecord: {
                            Type: "AWS::Route53::RecordSet",
                            Properties: {
                                AliasTarget: {
                                    DNSName: "!GetAtt WebAppCloudFrontDistribution.DomainName",
                                    // fixed hosted-zone id of CloudFront aliases
                                    HostedZoneId: "Z2FDTNDATAQYW2"
                                },
                                HostedZoneName: "${self:provider.hostedZoneName}.",
                                Name: "${self:provider.customDomainName}.",
                                Type: "'A'"
                            }
                        }
                    },
                    Outputs: {
                        WebAppCloudFrontDistributionOutput: {
                            Value: {
                                "Fn::GetAtt": "[ WebAppCloudFrontDistribution, DomainName ]"
                            }
                        }
                    }
                }
            } : {};

            // Expose the static bucket to the Lambda through an env variable.
            const envS3Config = {
                provider: {
                    environment: {
                        BUCKET_ID: "${self:provider.staticBucket}",
                    }
                }
            };

            return {
                stackType: "SOA",
                slsConfigs: deepmerge.all([
                    require("../../../infrastructure-scripts/dist/infra-comp-utils/sls-libs").toSoaSlsConfig(
                        component.stackName,
                        serverName,
                        component.buildPath,
                        component.assetsPath,
                        component.region,
                        dataLayerService.concat(component.services)
                    ),

                    // the datalayer (maybe a child-config) must load before the plugin serverless-offline!
                    ...childConfigs.map(config => config.slsConfigs),

                    // # allows running the stack locally on the dev-machine
                    {
                        plugins: ["serverless-offline", "serverless-pseudo-parameters"],
                        custom: {
                            "serverless-offline": {
                                host: "0.0.0.0",
                                port: "${self:provider.port, env:PORT, '3001'}"
                            }
                        }
                    },

                    domainConfig,

                    // add the IAM-Role-Statements
                    iamPermissions,

                    // assign the role
                    iamRoleAssignment,

                    // set the bucket as an env
                    envS3Config
                ]),

                // add the server config
                webpackConfigs: webpackConfigs.concat([soaWebPack, serverWebPack]),

                postBuilds: childConfigs.reduce((result, config) => result.concat(config.postBuilds),
                    [createHtml, writeDomainEnv, copyAssetsPostBuild, deployWithDomain, invalidateCloudFrontCache /*, postDeploy*/]),

                iamRoleStatements: [],
                environments: environments,
                stackName: component.stackName,
                assetsPath: undefined,
                buildPath: component.buildPath,
                region: component.region,
                domain: domain,
                certArn: certArn,
                supportOfflineStart: true,
                supportCreateDomain: true
            }
        }
    }

    return result;
};
the_stack
import * as child_process from "child_process";
import * as fs from "fs";
import * as yaml from "js-yaml";
import * as os from "os";
import * as path from "path";
import * as readline from "readline";
import * as shell_quote from "shell-quote";
import * as tmp from "tmp";
import * as util from "util";
import * as vscode from "vscode";

import * as extension from "../../../../extension";
import * as requests from "../../../requests";
import * as utils from "../../../utils";

import { rosApi } from "../../../../ros/ros";
import { env } from "../../../../extension";

const promisifiedExec = util.promisify(child_process.exec);

// One resolved node invocation extracted from `roslaunch --args`:
// the executable, its arguments, and the environment to launch it with.
interface ILaunchRequest {
    nodeName: string;
    executable: string;
    arguments: string[];
    cwd: string;
    env: { [key: string]: string };
}

/**
 * Debug-configuration provider that expands a ROS .launch/.test target into
 * one debug session per node: it ensures a ROS core is running, loads the
 * launch file's parameters via rosparam, asks roslaunch for the node list and
 * each node's command line, and then spawns a Python or C++ debug session for
 * each node.
 */
export class LaunchResolver implements vscode.DebugConfigurationProvider {
    // tslint:disable-next-line: max-line-length
    public async resolveDebugConfigurationWithSubstitutedVariables(folder: vscode.WorkspaceFolder | undefined, config: requests.ILaunchRequest, token?: vscode.CancellationToken) {
        // Only absolute .launch / .test targets are supported.
        if (!path.isAbsolute(config.target) || (path.extname(config.target) !== ".launch" && path.extname(config.target) !== ".test")) {
            throw new Error("Launch request requires an absolute path as target.");
        }

        const delay = ms => new Promise(res => setTimeout(res, ms));

        // Manage the status of the ROS core, starting one if not present
        // The ROS core will continue to run until the VSCode window is closed
        if (await rosApi.getCoreStatus() == false) {
            console.log("ROS Core is not active, attempting to start automatically");
            rosApi.startCore();

            // Wait for the core to start up to a timeout
            const timeout_ms: number = 30000;
            const interval_ms: number = 100;
            let timeWaited: number = 0;
            while (await rosApi.getCoreStatus() == false &&
                timeWaited < timeout_ms) {
                timeWaited += interval_ms;
                await delay(interval_ms);
            }

            console.log("Waited " + timeWaited + " for ROS Core to start");

            if (timeWaited >= timeout_ms) {
                throw new Error('Timed out (' + timeWaited / 1000 + ' seconds) waiting for ROS Core to start. Start ROSCore manually to avoid this error.');
            }
        }

        const rosExecOptions: child_process.ExecOptions = {
            env: await extension.resolvedEnv(),
        };

        // Dump the launch file's parameters as YAML so they can be loaded
        // onto the parameter server.
        let result = await promisifiedExec(`roslaunch --dump-params ${config.target}`, rosExecOptions);
        if (result.stderr) {
            throw (new Error(`Error from roslaunch:\r\n ${result.stderr}`));
        } else if (result.stdout.length == 0) {
            throw (new Error(`roslaunch unexpectedly produced no output, please test by running \"roslaunch --dump-params ${config.target}\" in a ros terminal.`));
        }

        const parameters = Object.keys(yaml.load(result.stdout));
        if (parameters && parameters.length) {
            // only call into rosparam when necessary
            const tmpFile = tmp.fileSync();
            // NOTE(review): fire-and-forget — the temp file write and
            // `rosparam load` run in a callback that is not awaited, so the
            // node launches below may race it, and a `throw error` here is an
            // unhandled rejection. TODO confirm and consider awaiting.
            fs.writeFile(`${tmpFile.name}`, result.stdout, async (error) => {
                if (error) {
                    throw error;
                }
                await promisifiedExec(`rosparam load ${tmpFile.name}`, rosExecOptions);
                tmpFile.removeCallback();
            });
        }

        // Ask roslaunch for the node list (one node name per line).
        result = await promisifiedExec(`roslaunch --nodes ${config.target}`, rosExecOptions);
        if (result.stderr) {
            throw (new Error(`Error from roslaunch:\r\n ${result.stderr}`));
        } else if (result.stdout.length == 0) {
            throw (new Error(`roslaunch unexpectedly produced no output, please test by running \"roslaunch --dump-params ${config.target}\" in a ros terminal.`));
        }

        const nodes = result.stdout.trim().split(os.EOL);
        // Resolve each node's concrete command line in parallel, then start
        // one debug session per node.
        await Promise.all(nodes.map((node: string) => {
            return promisifiedExec(`roslaunch --args ${node} ${config.target}`, rosExecOptions);
        })).then((commands: Array<{ stdout: string; stderr: string; }>) => {
            commands.forEach((command, index) => {
                const launchRequest = this.generateLaunchRequest(nodes[index], command.stdout);
                this.executeLaunchRequest(launchRequest, false);
            });
        });
        // @todo: error handling for Promise.all

        // Return null as we have spawned new debug requests
        return null;
    }

    /**
     * Parse one `roslaunch --args` command line into an ILaunchRequest:
     * leading NAME=VALUE tokens become the env map, the next token is the
     * executable, and the remainder are its arguments. Windows output needs
     * extra unwrapping (see ros/ros_comm#1809).
     */
    private generateLaunchRequest(nodeName: string, command: string): ILaunchRequest {
        let parsedArgs: shell_quote.ParseEntry[];
        const isWindows = os.platform() === "win32";
        if (isWindows) {
            // https://github.com/ros/ros_comm/pull/1809
            // escape backslash in file path
            parsedArgs = shell_quote.parse(command.replace(/[\\]/g, "\\$&"));
            // on Windows the actual command is the third token of the outer
            // command line; re-parse it with backslashes escaped again
            parsedArgs = shell_quote.parse(parsedArgs[2].toString().replace(/[\\]/g, "\\$&"));
        } else {
            parsedArgs = shell_quote.parse(command);
        }

        const envConfig: { [key: string]: string; } = {};
        // Consume leading environment assignments (NAME=VALUE) until the
        // first token without '=' (the executable); loop exits via `break`.
        while (parsedArgs) {
            // https://github.com/ros/ros_comm/pull/1809
            if (isWindows && parsedArgs[0].toString() === "set") {
                parsedArgs.shift();
            }
            if (parsedArgs[0].toString().includes("=")) {
                const arg = parsedArgs.shift().toString();
                envConfig[arg.substring(0, arg.indexOf("="))] = arg.substring(arg.indexOf("=") + 1);

                // https://github.com/ros/ros_comm/pull/1809
                // "&&" is treated as Object
                if (isWindows && parsedArgs[0] instanceof Object) {
                    parsedArgs.shift();
                }
            } else {
                break;
            }
        }
        const request: ILaunchRequest = {
            nodeName: nodeName,
            executable: parsedArgs.shift().toString(),
            arguments: parsedArgs.map((arg) => {
                return arg.toString();
            }),
            cwd: ".",
            // merge the extension's ROS environment with the node-specific one
            env: {
                ...extension.env,
                ...envConfig,
            },
        };
        return request;
    }

    /**
     * Start a debug session for one resolved node. On Windows, dispatch on
     * the executable name (python vs .exe); elsewhere, read the script's
     * shebang line to choose between the Python and cppdbg debuggers.
     */
    private async executeLaunchRequest(request: ILaunchRequest, stopOnEntry: boolean) {
        let debugConfig: ICppvsdbgLaunchConfiguration | ICppdbgLaunchConfiguration | IPythonLaunchConfiguration;
        if (os.platform() === "win32") {
            if (request.executable.toLowerCase().endsWith("python") || request.executable.toLowerCase().endsWith("python.exe")) {
                // the real program is the script passed to the interpreter
                const pythonScript: string = request.arguments.shift();
                const pythonLaunchConfig: IPythonLaunchConfiguration = {
                    name: request.nodeName,
                    type: "python",
                    request: "launch",
                    program: pythonScript,
                    args: request.arguments,
                    env: request.env,
                    stopOnEntry: stopOnEntry,
                    justMyCode: false,
                };
                debugConfig = pythonLaunchConfig;
            } else if (request.executable.endsWith(".exe")) {
                interface ICppEnvConfig {
                    name: string;
                    value: string;
                }
                // cppvsdbg expects env as a list of { name, value } pairs
                const envConfigs: ICppEnvConfig[] = [];
                for (const key in request.env) {
                    if (request.env.hasOwnProperty(key)) {
                        envConfigs.push({
                            name: key,
                            value: request.env[key],
                        });
                    }
                }
                const cppvsdbgLaunchConfig: ICppvsdbgLaunchConfiguration = {
                    name: request.nodeName,
                    type: "cppvsdbg",
                    request: "launch",
                    cwd: ".",
                    program: request.executable,
                    args: request.arguments,
                    environment: envConfigs,
                    stopAtEntry: stopOnEntry,
                };
                debugConfig = cppvsdbgLaunchConfig;
            }
            if (!debugConfig) {
                throw (new Error(`Failed to create a debug configuration!`));
            }
            const launched = await vscode.debug.startDebugging(undefined, debugConfig);
            if (!launched) {
                throw (new Error(`Failed to start debug session!`));
            }
        } else {
            try {
                // this should be guaranteed by roslaunch
                fs.accessSync(request.executable, fs.constants.X_OK);
            } catch (errNotExecutable) {
                throw (new Error(`Error! ${request.executable} is not executable!`));
            }

            try {
                // need to be readable to check shebang line
                fs.accessSync(request.executable, fs.constants.R_OK);
            } catch (errNotReadable) {
                throw (new Error(`Error! ${request.executable} is not readable!`));
            }

            const fileStream = fs.createReadStream(request.executable);
            const rl = readline.createInterface({
                input: fileStream,
                crlfDelay: Infinity,
            });

            // we only want to read 1 line to check for shebang line
            let linesToRead: number = 1;
            rl.on("line", async (line) => {
                if (linesToRead <= 0) {
                    return;
                }
                linesToRead--;
                if (!linesToRead) {
                    rl.close();
                }

                // look for Python in shebang line
                if (line.startsWith("#!") && line.toLowerCase().indexOf("python") !== -1) {
                    const pythonLaunchConfig: IPythonLaunchConfiguration = {
                        name: request.nodeName,
                        type: "python",
                        request: "launch",
                        program: request.executable,
                        args: request.arguments,
                        env: request.env,
                        stopOnEntry: stopOnEntry,
                        justMyCode: false,
                    };
                    debugConfig = pythonLaunchConfig;
                } else {
                    interface ICppEnvConfig {
                        name: string;
                        value: string;
                    }
                    // cppdbg expects env as a list of { name, value } pairs
                    const envConfigs: ICppEnvConfig[] = [];
                    for (const key in request.env) {
                        if (request.env.hasOwnProperty(key)) {
                            envConfigs.push({
                                name: key,
                                value: request.env[key],
                            });
                        }
                    }
                    const cppdbgLaunchConfig: ICppdbgLaunchConfiguration = {
                        name: request.nodeName,
                        type: "cppdbg",
                        request: "launch",
                        cwd: ".",
                        program: request.executable,
                        args: request.arguments,
                        environment: envConfigs,
                        stopAtEntry: stopOnEntry,
                        setupCommands: [
                            {
                                text: "-enable-pretty-printing",
                                description: "Enable pretty-printing for gdb",
                                ignoreFailures: true
                            }
                        ]
                    };
                    debugConfig = cppdbgLaunchConfig;
                }
                if (!debugConfig) {
                    throw (new Error(`Failed to create a debug configuration!`));
                }
                const launched = await vscode.debug.startDebugging(undefined, debugConfig);
                if (!launched) {
                    throw (new Error(`Failed to start debug session!`));
                }
            });
        }
    }
}
the_stack
import { reaction, autorun, isObservable, configure } from "mobx" import { types, getSnapshot, applySnapshot, onPatch, applyPatch, unprotect, detach, resolveIdentifier, getRoot, cast, SnapshotOut, IAnyModelType, Instance, SnapshotOrInstance, isAlive, destroy, castToReferenceSnapshot, tryReference, isValidReference, isStateTreeNode, addDisposer } from "../../src" test("it should support prefixed paths in maps", () => { const User = types.model({ id: types.identifier, name: types.string }) const UserStore = types.model({ user: types.reference(User), users: types.map(User) }) const store = UserStore.create({ user: "17", users: { "17": { id: "17", name: "Michel" }, "18": { id: "18", name: "Veria" } } }) unprotect(store) expect(store.users.get("17")!.name).toBe("Michel") expect(store.users.get("18")!.name).toBe("Veria") expect(store.user.name).toBe("Michel") store.user = store.users.get("18")! expect(store.user.name).toBe("Veria") store.users.get("18")!.name = "Noa" expect(store.user.name).toBe("Noa") expect(getSnapshot(store)).toEqual({ user: "18", users: { "17": { id: "17", name: "Michel" }, "18": { id: "18", name: "Noa" } } } as SnapshotOut<typeof store>) }) test("it should support prefixed paths in arrays", () => { const User = types.model({ id: types.identifier, name: types.string }) const UserStore = types.model({ user: types.reference(User), users: types.array(User) }) const store = UserStore.create({ user: "17", users: [ { id: "17", name: "Michel" }, { id: "18", name: "Veria" } ] }) unprotect(store) expect(store.users[0].name).toBe("Michel") expect(store.users[1].name).toBe("Veria") expect(store.user.name).toBe("Michel") store.user = store.users[1] expect(store.user.name).toBe("Veria") store.users[1].name = "Noa" expect(store.user.name).toBe("Noa") expect(getSnapshot(store)).toEqual({ user: "18", users: [ { id: "17", name: "Michel" }, { id: "18", name: "Noa" } ] } as SnapshotOut<typeof store>) }) if (process.env.NODE_ENV !== "production") { test("identifiers are 
required", () => { const Todo = types.model({ id: types.identifier }) expect(Todo.is({})).toBe(false) expect(Todo.is({ id: "x" })).toBe(true) expect(() => (Todo.create as any)()).toThrowError( " `undefined` is not assignable to type: `identifier` (Value is not a valid identifier, expected a string)" ) }) test("identifiers cannot be modified", () => { const Todo = types.model({ id: types.identifier }) const todo = Todo.create({ id: "x" }) unprotect(todo) expect(() => (todo.id = "stuff")).toThrowError( "[mobx-state-tree] Tried to change identifier from 'x' to 'stuff'. Changing identifiers is not allowed." ) expect(() => applySnapshot(todo, { id: "stuff" })).toThrowError( "[mobx-state-tree] Tried to change identifier from 'x' to 'stuff'. Changing identifiers is not allowed." ) }) } test("it should resolve refs during creation, when using path", () => { const values: number[] = [] const Book = types.model({ id: types.identifier, price: types.number }) const BookEntry = types .model({ book: types.reference(Book) }) .views((self) => ({ get price() { return self.book.price * 2 } })) const Store = types.model({ books: types.array(Book), entries: types.optional(types.array(BookEntry), []) }) const s = Store.create({ books: [{ id: "3", price: 2 }] }) unprotect(s) reaction( () => s.entries.reduce((a, e) => a + e.price, 0), (v) => values.push(v) ) s.entries.push({ book: castToReferenceSnapshot(s.books[0]) }) expect(s.entries[0].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(4) const entry = BookEntry.create({ book: castToReferenceSnapshot(s.books[0]) }) // N.B. ref is initially not resolvable! 
s.entries.push(entry) expect(s.entries[1].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(8) expect(values).toEqual([4, 8]) }) test("it should resolve refs over late types", () => { const Book = types.model({ id: types.identifier, price: types.number }) const BookEntry = types .model({ book: types.reference(types.late(() => Book)) }) .views((self) => ({ get price() { return self.book.price * 2 } })) const Store = types.model({ books: types.array(Book), entries: types.array(BookEntry) }) const s = Store.create({ books: [{ id: "3", price: 2 }] }) unprotect(s) s.entries.push({ book: castToReferenceSnapshot(s.books[0]) }) expect(s.entries[0].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(4) }) test("it should resolve refs during creation, when using generic reference", () => { const values: number[] = [] const Book = types.model({ id: types.identifier, price: types.number }) const BookEntry = types .model({ book: types.reference(Book) }) .views((self) => ({ get price() { return self.book.price * 2 } })) const Store = types.model({ books: types.array(Book), entries: types.optional(types.array(BookEntry), []) }) const s = Store.create({ books: [{ id: "3", price: 2 }] }) unprotect(s) reaction( () => s.entries.reduce((a, e) => a + e.price, 0), (v) => values.push(v) ) s.entries.push({ book: castToReferenceSnapshot(s.books[0]) }) expect(s.entries[0].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(4) const entry = BookEntry.create({ book: castToReferenceSnapshot(s.books[0]) }) // can refer to book, even when not part of tree yet expect(getSnapshot(entry)).toEqual({ book: "3" }) s.entries.push(entry) expect(values).toEqual([4, 8]) }) test("identifiers should support subtypes of types.string and types.number", () => { const M = types.model({ id: types.refinement(types.identifierNumber, (n) => n > 5) }) expect(M.is({})).toBe(false) expect(M.is({ id: "test" })).toBe(false) expect(M.is({ id: "6" })).toBe(false) 
expect(M.is({ id: "4" })).toBe(false) expect(M.is({ id: 6 })).toBe(true) expect(M.is({ id: 4 })).toBe(false) const S = types.model({ mies: types.map(M), ref: types.reference(M) }) const s = S.create({ mies: { "7": { id: 7 } }, ref: "7" }) expect(s.mies.get("7")).toBeTruthy() expect(s.ref).toBe(s.mies.get("7")) }) test("string identifiers should not accept numbers", () => { const F = types.model({ id: types.identifier }) expect(F.is({ id: "4" })).toBe(true) expect(F.is({ id: 4 })).toBe(false) const F2 = types.model({ id: types.identifier }) expect(F2.is({ id: "4" })).toBe(true) expect(F2.is({ id: 4 })).toBe(false) }) test("122 - identifiers should support numbers as well", () => { const F = types.model({ id: types.identifierNumber }) expect( F.create({ id: 3 }).id ).toBe(3) expect(F.is({ id: 4 })).toBe(true) expect(F.is({ id: "4" })).toBe(false) expect(F.is({ id: "bla" })).toBe(false) }) test("self reference with a late type", () => { const Book = types.model("Book", { id: types.identifier, genre: types.string, reference: types.reference(types.late((): IAnyModelType => Book)) }) const Store = types .model("Store", { books: types.array(Book) }) .actions((self) => { function addBook(book: SnapshotOrInstance<typeof Book>) { self.books.push(book) } return { addBook } }) const s = Store.create({ books: [{ id: "1", genre: "thriller", reference: "" }] }) const book2 = Book.create({ id: "2", genre: "romance", reference: castToReferenceSnapshot(s.books[0]) }) s.addBook(book2) expect((s.books[1].reference as Instance<typeof Book>).genre).toBe("thriller") }) test("when applying a snapshot, reference should resolve correctly if value added after", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.reference(Box), boxes: types.array(Box) }) expect(() => Factory.create({ selected: 1, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) ).not.toThrow() }) test("it should fail when reference 
snapshot is ambiguous", () => { const Box = types.model("Box", { id: types.identifierNumber, name: types.string }) const Arrow = types.model("Arrow", { id: types.identifierNumber, name: types.string }) const BoxOrArrow = types.union(Box, Arrow) const Factory = types.model({ selected: types.reference(BoxOrArrow), boxes: types.array(Box), arrows: types.array(Arrow) }) const store = Factory.create({ selected: 2, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ], arrows: [{ id: 2, name: "arrow" }] }) expect(() => { // tslint:disable-next-line:no-unused-expression store.selected // store.boxes[1] // throws because it can't know if you mean a box or an arrow! }).toThrowError( "[mobx-state-tree] Cannot resolve a reference to type '(Box | Arrow)' with id: '2' unambigously, there are multiple candidates: /boxes/1, /arrows/0" ) unprotect(store) // first update the reference, than create a new matching item! Ref becomes ambigous now... store.selected = 1 as any // valid assignment expect(store.selected).toBe(store.boxes[0]) // unambigous identifier let err!: Error autorun(() => store.selected, { onError(e) { err = e } }) expect(store.selected).toBe(store.boxes[0]) // unambigous identifier store.arrows.push({ id: 1, name: "oops" }) expect(err.message).toBe( "[mobx-state-tree] Cannot resolve a reference to type '(Box | Arrow)' with id: '1' unambigously, there are multiple candidates: /boxes/0, /arrows/1" ) }) test("it should support array of references", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.array(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: [], boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(() => { store.selected.push(store.boxes[0]) }).not.toThrow() expect(getSnapshot(store.selected)).toEqual([1]) expect(() => { store.selected.push(store.boxes[1]) }).not.toThrow() 
expect(getSnapshot(store.selected)).toEqual([1, 2]) }) test("it should restore array of references from snapshot", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.array(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: [1, 2], boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(store.selected[0] === store.boxes[0]).toEqual(true) expect(store.selected[1] === store.boxes[1]).toEqual(true) }) test("it should support map of references", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.map(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: {}, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(() => { store.selected.set("from", store.boxes[0]) }).not.toThrow() expect(getSnapshot(store.selected)).toEqual({ from: 1 }) expect(() => { store.selected.set("to", store.boxes[1]) }).not.toThrow() expect(getSnapshot(store.selected)).toEqual({ from: 1, to: 2 }) }) test("it should restore map of references from snapshot", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.map(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: { from: 1, to: 2 }, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(store.selected.get("from") === store.boxes[0]).toEqual(true) expect(store.selected.get("to") === store.boxes[1]).toEqual(true) }) test("it should support relative lookups", () => { const Node = types.model({ id: types.identifierNumber, children: types.optional(types.array(types.late((): IAnyModelType => Node)), []) }) const root = Node.create({ id: 1, children: [ { id: 2, children: [ { id: 4 } ] }, { id: 3 } ] }) 
unprotect(root) expect(getSnapshot(root)).toEqual({ id: 1, children: [ { id: 2, children: [{ id: 4, children: [] }] }, { id: 3, children: [] } ] }) expect(resolveIdentifier(Node, root, 1)).toBe(root) expect(resolveIdentifier(Node, root, 4)).toBe(root.children[0].children[0]) expect(resolveIdentifier(Node, root.children[0].children[0], 3)).toBe(root.children[1]) const n2 = detach(root.children[0]) unprotect(n2) expect(resolveIdentifier(Node, n2, 2)).toBe(n2) expect(resolveIdentifier(Node, root, 2)).toBe(undefined) expect(resolveIdentifier(Node, root, 4)).toBe(undefined) expect(resolveIdentifier(Node, n2, 3)).toBe(undefined) expect(resolveIdentifier(Node, n2, 4)).toBe(n2.children[0]) expect(resolveIdentifier(Node, n2.children[0], 2)).toBe(n2) const n5 = Node.create({ id: 5 }) expect(resolveIdentifier(Node, n5, 4)).toBe(undefined) n2.children.push(n5) expect(resolveIdentifier(Node, n5, 4)).toBe(n2.children[0]) expect(resolveIdentifier(Node, n2.children[0], 5)).toBe(n5) }) test("References are non-nullable by default", () => { const Todo = types.model({ id: types.identifierNumber }) const Store = types.model({ todo: types.maybe(Todo), ref: types.reference(Todo), maybeRef: types.maybe(types.reference(Todo)) }) expect(Store.is({})).toBe(false) expect(Store.is({ ref: 3 })).toBe(true) expect(Store.is({ ref: null })).toBe(false) expect(Store.is({ ref: undefined })).toBe(false) expect(Store.is({ ref: 3, maybeRef: 3 })).toBe(true) expect(Store.is({ ref: 3, maybeRef: undefined })).toBe(true) let store = Store.create({ todo: { id: 3 }, ref: 3 }) expect(store.ref).toBe(store.todo) expect(store.maybeRef).toBe(undefined) store = Store.create({ todo: { id: 3 }, ref: 4 }) unprotect(store) if (process.env.NODE_ENV !== "production") { expect(store.maybeRef).toBe(undefined) expect(() => store.ref).toThrow( "[mobx-state-tree] Failed to resolve reference '4' to type 'AnonymousModel' (from node: /ref)" ) store.maybeRef = 3 as any // valid assignment expect(store.maybeRef).toBe(store.todo) 
store.maybeRef = 4 as any // valid assignment expect(() => store.maybeRef).toThrow( "[mobx-state-tree] Failed to resolve reference '4' to type 'AnonymousModel' (from node: /maybeRef)" ) store.maybeRef = undefined expect(store.maybeRef).toBe(undefined) expect(() => ((store as any).ref = undefined)).toThrow(/Error while converting/) } }) test("References are described properly", () => { const Todo = types.model({ id: types.identifierNumber }) const Store = types.model({ todo: types.maybe(Todo), ref: types.reference(Todo), maybeRef: types.maybe(types.reference(Todo)) }) expect(Store.describe()).toBe( "{ todo: ({ id: identifierNumber } | undefined?); ref: reference(AnonymousModel); maybeRef: (reference(AnonymousModel) | undefined?) }" ) }) test("References in recursive structures", () => { const Folder = types.model("Folder", { id: types.identifierNumber, name: types.string, files: types.array(types.string) }) const Tree = types .model("Tree", { // sadly, this becomes any, and further untypeable... 
children: types.array(types.late((): IAnyModelType => Tree)), data: types.maybeNull(types.reference(Folder)) }) .actions((self) => { function addFolder(data: SnapshotOrInstance<typeof Folder>) { const folder3 = Folder.create(data) getRoot<typeof Storage>(self).putFolderHelper(folder3) self.children.push( Tree.create({ data: castToReferenceSnapshot(folder3), children: [] }) ) } return { addFolder } }) const Storage = types .model("Storage", { objects: types.map(Folder), tree: Tree }) .actions((self) => ({ putFolderHelper(aFolder: SnapshotOrInstance<typeof Folder>) { self.objects.put(aFolder) } })) const store = Storage.create({ objects: {}, tree: { children: [], data: null } }) const folder = { id: 1, name: "Folder 1", files: ["a.jpg", "b.jpg"] } store.tree.addFolder(folder) expect(getSnapshot(store)).toEqual({ objects: { "1": { files: ["a.jpg", "b.jpg"], id: 1, name: "Folder 1" } }, tree: { children: [ { children: [], data: 1 } ], data: null } }) expect(store.objects.get("1")).toBe(store.tree.children[0].data) const folder2 = { id: 2, name: "Folder 2", files: ["c.jpg", "d.jpg"] } store.tree.children[0].addFolder(folder2) expect(getSnapshot(store)).toEqual({ objects: { "1": { files: ["a.jpg", "b.jpg"], id: 1, name: "Folder 1" }, "2": { files: ["c.jpg", "d.jpg"], id: 2, name: "Folder 2" } }, tree: { children: [ { children: [ { children: [], data: 2 } ], data: 1 } ], data: null } }) expect(store.objects.get("1")).toBe(store.tree.children[0].data) expect(store.objects.get("2")).toBe(store.tree.children[0].children[0].data) }) test("it should applyPatch references in array", () => { const Item = types.model("Item", { id: types.identifier, name: types.string }) const Folder = types .model("Folder", { id: types.identifier, objects: types.map(Item), hovers: types.array(types.reference(Item)) }) .actions((self) => { function addObject(anItem: typeof Item.Type) { self.objects.put(anItem) } function addHover(anItem: typeof Item.Type) { self.hovers.push(anItem) } function 
removeHover(anItem: typeof Item.Type) { self.hovers.remove(anItem) } return { addObject, addHover, removeHover } }) const folder = Folder.create({ id: "folder 1", objects: {}, hovers: [] }) folder.addObject({ id: "item 1", name: "item name 1" }) const item = folder.objects.get("item 1")! const snapshot = getSnapshot(folder) const newStore = Folder.create(snapshot) onPatch(folder, (data) => { applyPatch(newStore, data) }) folder.addHover(item) expect(getSnapshot(newStore)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) folder.removeHover(item) expect(getSnapshot(newStore)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: [] }) }) test("it should applySnapshot references in array", () => { const Item = types.model("Item", { id: types.identifier, name: types.string }) const Folder = types.model("Folder", { id: types.identifier, objects: types.map(Item), hovers: types.array(types.reference(Item)) }) const folder = Folder.create({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) const snapshot = JSON.parse(JSON.stringify(getSnapshot(folder))) expect(snapshot).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) snapshot.hovers = [] applySnapshot(folder, snapshot) expect(getSnapshot(folder)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: [] }) snapshot.hovers = ["item 1"] applySnapshot(folder, snapshot) expect(getSnapshot(folder)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) }) test("array of references should work fine", () => { const B = types.model("Block", { id: types.identifier }) const S = types .model("Store", { blocks: types.array(B), blockRefs: types.array(types.reference(B)) }) .actions((self) => { return { order() { const res = 
self.blockRefs.slice() self.blockRefs.replace([res[1], res[0]]) } } }) const a = S.create({ blocks: [{ id: "1" }, { id: "2" }], blockRefs: ["1", "2"] }) a.order() expect(a.blocks[0].id).toBe("1") expect(a.blockRefs[0].id).toBe("2") }) test("should serialize references correctly", () => { const M = types.model({ id: types.identifierNumber }) const S = types.model({ mies: types.map(M), ref: types.maybe(types.reference(M)) }) const s = S.create({ mies: { 7: { id: 7 } } }) unprotect(s) expect(Array.from(s.mies.keys())).toEqual(["7"]) expect(s.mies.get("7")!.id).toBe(7) expect(s.mies.get(7 as any)).toBe(s.mies.get("7")) // maps automatically normalizes the key s.mies.put({ id: 8 }) expect(Array.from(s.mies.keys())).toEqual(["7", "8"]) s.ref = 8 as any expect(s.ref!.id).toBe(8) // resolved from number expect(getSnapshot(s).ref).toBe(8) // ref serialized as number s.ref = "7" as any // resolved from string expect(s.ref!.id).toBe(7) // resolved from string expect(getSnapshot(s).ref).toBe("7") // ref serialized as string (number would be ok as well) s.ref = s.mies.get("8")! 
expect(s.ref.id).toBe(8) // resolved from instance expect(getSnapshot(s).ref).toBe(8) // ref serialized as number s.ref = "9" as any // unresolvable expect(getSnapshot(s).ref).toBe("9") // snapshot preserved as it was unresolvable s.mies.set(9 as any, { id: 9 }) expect(Array.from(s.mies.keys())).toEqual(["7", "8", "9"]) expect(s.mies.get("9")!.id).toBe(9) expect(getSnapshot(s).ref).toBe("9") // ref serialized as string (number would be ok as well) }) test("#1052 - Reference returns destroyed model after subtree replacing", () => { const Todo = types.model("Todo", { id: types.identifierNumber, title: types.string }) const Todos = types.model("Todos", { items: types.array(Todo) }) const Store = types .model("Store", { todos: Todos, last: types.maybe(types.reference(Todo)), lastWithId: types.maybe(types.reference(Todo)), counter: -1 }) .actions((self) => ({ load() { self.counter++ self.todos = Todos.create({ items: [ { id: 1, title: "Get Coffee " + self.counter }, { id: 2, title: "Write simpler code " + self.counter } ] }) }, select(todo: Instance<typeof Todo>) { self.last = todo self.lastWithId = todo.id as any } })) const store = Store.create({ todos: {} }) store.load() expect(store.last).toBe(undefined) expect(store.lastWithId).toBe(undefined) const reactionFn = jest.fn() const reactionDisposer = reaction(() => store.last, reactionFn) const reactionFn2 = jest.fn() const reactionDisposer2 = reaction(() => store.lastWithId, reactionFn2) try { store.select(store.todos.items[0]) expect(isAlive(store.last!)).toBe(true) expect(isObservable(store.last)).toBe(true) expect(reactionFn).toHaveBeenCalledTimes(1) expect(store.last!.title).toBe("Get Coffee 0") expect(isAlive(store.lastWithId!)).toBe(true) expect(isObservable(store.lastWithId)).toBe(true) expect(reactionFn2).toHaveBeenCalledTimes(1) expect(store.lastWithId!.title).toBe("Get Coffee 0") store.load() expect(isAlive(store.last!)).toBe(true) expect(isObservable(store.last)).toBe(true) 
expect(reactionFn).toHaveBeenCalledTimes(2) expect(store.last!.title).toBe("Get Coffee 1") expect(isAlive(store.lastWithId!)).toBe(true) expect(isObservable(store.lastWithId)).toBe(true) expect(reactionFn2).toHaveBeenCalledTimes(2) expect(store.lastWithId!.title).toBe("Get Coffee 1") } finally { reactionDisposer() reactionDisposer2() } }) test("#1080 - does not crash trying to resolve a reference to a destroyed+recreated model", () => { configure({ useProxies: "never" }) const Branch = types.model("Branch", { id: types.identifierNumber, name: types.string }) const User = types.model("User", { id: types.identifierNumber, email: types.maybeNull(types.string), branches: types.maybeNull(types.array(Branch)) }) const BranchStore = types .model("BranchStore", { activeBranch: types.maybeNull(types.reference(Branch)) }) .actions((self) => ({ setActiveBranch(branchId: any) { self.activeBranch = branchId } })) const RootStore = types .model("RootStore", { user: types.maybeNull(User), branchStore: types.maybeNull(BranchStore) }) .actions((self) => ({ setUser(snapshot: typeof userSnapshot) { self.user = cast(snapshot) }, setBranchStore(snapshot: typeof branchStoreSnapshot) { self.branchStore = cast(snapshot) }, destroyUser() { destroy(self.user!) }, destroyBranchStore() { destroy(self.branchStore!) 
} })) const userSnapshot = { id: 1, email: "test@test.com", branches: [ { id: 1, name: "Branch 1" }, { id: 2, name: "Branch 2" } ] } const branchStoreSnapshot = {} const rootStore = RootStore.create({ user: userSnapshot, branchStore: branchStoreSnapshot }) rootStore.branchStore!.setActiveBranch(1) expect(rootStore.branchStore!.activeBranch).toEqual({ id: 1, name: "Branch 1" }) rootStore.destroyUser() rootStore.destroyBranchStore() rootStore.setUser(userSnapshot) rootStore.setBranchStore(branchStoreSnapshot) rootStore.branchStore!.setActiveBranch(2) expect(rootStore.branchStore!.activeBranch).toEqual({ id: 2, name: "Branch 2" }) }) test("tryReference / isValidReference", () => { const Todo = types.model({ id: types.identifier }) const TodoStore = types .model({ todos: types.array(Todo), ref1: types.maybe(types.reference(Todo)), ref2: types.maybeNull(types.reference(Todo)), ref3: types.maybe(types.reference(Todo)) }) .actions((self) => ({ clearRef3() { self.ref3 = undefined }, afterCreate() { addDisposer( self, reaction( () => isValidReference(() => self.ref3), (valid) => { if (!valid) { this.clearRef3() } }, { fireImmediately: true } ) ) } })) const store = TodoStore.create({ todos: [{ id: "1" }, { id: "2" }, { id: "3" }] }) expect(tryReference(() => store.ref1)).toBeUndefined() expect(tryReference(() => store.ref2)).toBeUndefined() expect(isValidReference(() => store.ref1)).toBe(false) expect(isValidReference(() => store.ref2)).toBe(false) unprotect(store) store.ref1 = store.todos[0] store.ref2 = store.todos[1] store.ref3 = store.todos[2] expect(isStateTreeNode(store.ref1)).toBe(true) expect(isStateTreeNode(store.ref2)).toBe(true) expect(tryReference(() => store.ref1)).toBeDefined() expect(tryReference(() => store.ref2)).toBeDefined() expect(isValidReference(() => store.ref1)).toBe(true) expect(isValidReference(() => store.ref2)).toBe(true) store.todos = cast([]) expect(tryReference(() => store.ref1)).toBeUndefined() expect(tryReference(() => 
store.ref2)).toBeUndefined() expect(isValidReference(() => store.ref1)).toBe(false) expect(isValidReference(() => store.ref2)).toBe(false) // the reaction should have triggered and set this to undefined expect(store.ref3).toBe(undefined) expect(() => tryReference(() => 5 as any)).toThrowError( "The reference to be checked is not one of node, null or undefined" ) expect(() => isValidReference(() => 5 as any)).toThrowError( "The reference to be checked is not one of node, null or undefined" ) }) test("#1162 - reference to union", () => { const M1 = types.model({ id: types.identifier, type: types.string, sum: types.string }) const M2 = types.model({ id: types.identifier, type: types.string, data: types.string }) const AnyModel = types.union( { dispatcher(snapshot) { switch (snapshot.type) { case "type1": return M1 case "type2": return M2 default: throw new Error() } } }, M1, M2 ) const Store = types.model({ arr: types.array(AnyModel), selected: types.reference(AnyModel) }) const s = Store.create({ selected: "num1", arr: [ { id: "num1", type: "type1", sum: "1" }, { id: "num2", type: "type1", sum: "2" }, { id: "num3", type: "type2", data: "3" } ] }) unprotect(s) expect(s.selected.id).toBe("num1") expect(s.selected.type).toBe("type1") expect((s.selected as Instance<typeof M1>).sum).toBe("1") s.selected = "num2" as any expect(s.selected.id).toBe("num2") expect(s.selected.type).toBe("type1") expect((s.selected as Instance<typeof M1>).sum).toBe("2") s.selected = "num3" as any expect(s.selected.id).toBe("num3") expect(s.selected.type).toBe("type2") expect((s.selected as Instance<typeof M2>).data).toBe("3") })
the_stack
import * as assert from "assert";

// based on file https://github.com/microsoft/vscode/blob/master/src/vs/base/test/common/filters.test.ts

/* eslint-disable no-console */

import {
  createMatches, fuzzyScore, fuzzyScoreGraceful, fuzzyScoreGracefulAggressive, FuzzyScorer, IFilter, IMatch,
  matchesCamelCase, matchesContiguousSubString, matchesFuzzy, matchesFuzzy2, matchesPrefix, matchesStrictPrefix,
  matchesSubString, matchesWords, or,
} from "../../appui-abstract/utils/filter/filters";
import { equalsIgnoreCase, startsWithIgnoreCase } from "../../appui-abstract/utils/filter/strings";

// Assert that `filter` does NOT match `word` against `wordToMatchAgainst`.
function filterNotOk(filter: IFilter, word: string, wordToMatchAgainst: string) {
  assert(!filter(word, wordToMatchAgainst), `${word} matched ${wordToMatchAgainst}`);
}

// Assert that `filter` matches; when `highlights` is given, also assert the
// exact highlight ranges produced.
function filterOk(filter: IFilter, word: string, wordToMatchAgainst: string, highlights?: IMatch[]) {
  const r = filter(word, wordToMatchAgainst);
  assert(r, `${word} didn't match ${wordToMatchAgainst}`);
  if (highlights) {
    assert.deepStrictEqual(r, highlights);
  }
}

describe("Filters", () => {
  // `or` must short-circuit: stop calling sub-filters after the first hit.
  it("or", () => {
    let filter: IFilter;
    let counters: number[];
    // Stub filter that bumps counters[i] on each call and "matches" iff r is
    // true (the boolean is cast — only truthiness matters to `or`).
    const newFilter = function (i: number, r: boolean): IFilter {
      return function (): IMatch[] {
        counters[i]++;
        return r as any;
      };
    };
    counters = [0, 0];
    filter = or(newFilter(0, false), newFilter(1, false));
    filterNotOk(filter, "anything", "anything");
    assert.deepStrictEqual(counters, [1, 1]);
    counters = [0, 0];
    filter = or(newFilter(0, true), newFilter(1, false));
    filterOk(filter, "anything", "anything");
    assert.deepStrictEqual(counters, [1, 0]);
    counters = [0, 0];
    filter = or(newFilter(0, true), newFilter(1, true));
    filterOk(filter, "anything", "anything");
    assert.deepStrictEqual(counters, [1, 0]);
    counters = [0, 0];
    filter = or(newFilter(0, false), newFilter(1, true));
    filterOk(filter, "anything", "anything");
    assert.deepStrictEqual(counters, [1, 1]);
  });

  // Combined prefix + camelCase + substring filter.
  it("fuzzySeparateFilter", function () {
    const fuzzySeparateFilter = or(matchesPrefix, matchesCamelCase, matchesSubString);
    filterOk(fuzzySeparateFilter, "", "");
    filterOk(fuzzySeparateFilter, "", "anything", []);
    filterOk(fuzzySeparateFilter, "alpha", "alpha", [{ start: 0, end: 5 }]);
    filterOk(fuzzySeparateFilter, "alpha", "alphasomething", [{ start: 0, end: 5 }]);
    filterNotOk(fuzzySeparateFilter, "alpha", "alp");
    filterOk(fuzzySeparateFilter, "a", "alpha", [{ start: 0, end: 1 }]);
    filterNotOk(fuzzySeparateFilter, "x", "alpha");
    filterOk(fuzzySeparateFilter, "A", "alpha", [{ start: 0, end: 1 }]);
    filterOk(fuzzySeparateFilter, "AlPh", "alPHA", [{ start: 0, end: 4 }]);
    filterOk(fuzzySeparateFilter, "gp", "Git: Pull", [{ start: 0, end: 1 }, { start: 5, end: 6 }]);
    filterOk(fuzzySeparateFilter, "g p", "Git: Pull", [{ start: 0, end: 1 }, { start: 4, end: 6 }]);
    filterOk(fuzzySeparateFilter, "gipu", "Git: Pull", [{ start: 0, end: 2 }, { start: 5, end: 7 }]);
    filterOk(fuzzySeparateFilter, "gp", "Category: Git: Pull", [{ start: 10, end: 11 }, { start: 15, end: 16 }]);
    filterOk(fuzzySeparateFilter, "g p", "Category: Git: Pull", [{ start: 10, end: 11 }, { start: 14, end: 16 }]);
    filterOk(fuzzySeparateFilter, "gipu", "Category: Git: Pull", [{ start: 10, end: 12 }, { start: 15, end: 17 }]);
  });

  it("PrefixFilter - case sensitive", function () {
    filterNotOk(matchesStrictPrefix, "", "");
    filterOk(matchesStrictPrefix, "", "anything", []);
    filterOk(matchesStrictPrefix, "alpha", "alpha", [{ start: 0, end: 5 }]);
    filterOk(matchesStrictPrefix, "alpha", "alphasomething", [{ start: 0, end: 5 }]);
    filterNotOk(matchesStrictPrefix, "alpha", "alp");
    filterOk(matchesStrictPrefix, "a", "alpha", [{ start: 0, end: 1 }]);
    filterNotOk(matchesStrictPrefix, "x", "alpha");
    filterNotOk(matchesStrictPrefix, "A", "alpha");
    filterNotOk(matchesStrictPrefix, "AlPh", "alPHA");
  });

  it("PrefixFilter - ignore case", function () {
    filterOk(matchesPrefix, "alpha", "alpha", [{ start: 0, end: 5 }]);
    filterOk(matchesPrefix, "alpha", "alphasomething", [{ start: 0, end: 5 }]);
    filterNotOk(matchesPrefix, "alpha", "alp");
    filterOk(matchesPrefix, "a", "alpha", [{ start: 0, end: 1 }]);
    // Non-ASCII case folding must work too.
    filterOk(matchesPrefix, "ä", "Älpha", [{ start: 0, end: 1 }]);
    filterNotOk(matchesPrefix, "x", "alpha");
    filterOk(matchesPrefix, "A", "alpha", [{ start: 0, end: 1 }]);
    filterOk(matchesPrefix, "AlPh", "alPHA", [{ start: 0, end: 4 }]);
    filterNotOk(matchesPrefix, "T", "4"); // see https://github.com/Microsoft/vscode/issues/22401
  });

  it("CamelCaseFilter", () => {
    filterNotOk(matchesCamelCase, "", "");
    filterOk(matchesCamelCase, "", "anything", []);
    filterOk(matchesCamelCase, "alpha", "alpha", [{ start: 0, end: 5 }]);
    filterOk(matchesCamelCase, "AlPhA", "alpha", [{ start: 0, end: 5 }]);
    filterOk(matchesCamelCase, "alpha", "alphasomething", [{ start: 0, end: 5 }]);
    filterNotOk(matchesCamelCase, "alpha", "alp");
    filterOk(matchesCamelCase, "c", "CamelCaseRocks", [
      { start: 0, end: 1 },
    ]);
    filterOk(matchesCamelCase, "cc", "CamelCaseRocks", [
      { start: 0, end: 1 },
      { start: 5, end: 6 },
    ]);
    filterOk(matchesCamelCase, "ccr", "CamelCaseRocks", [
      { start: 0, end: 1 },
      { start: 5, end: 6 },
      { start: 9, end: 10 },
    ]);
    filterOk(matchesCamelCase, "cacr", "CamelCaseRocks", [
      { start: 0, end: 2 },
      { start: 5, end: 6 },
      { start: 9, end: 10 },
    ]);
    filterOk(matchesCamelCase, "cacar", "CamelCaseRocks", [
      { start: 0, end: 2 },
      { start: 5, end: 7 },
      { start: 9, end: 10 },
    ]);
    filterOk(matchesCamelCase, "ccarocks", "CamelCaseRocks", [
      { start: 0, end: 1 },
      { start: 5, end: 7 },
      { start: 9, end: 14 },
    ]);
    filterOk(matchesCamelCase, "cr", "CamelCaseRocks", [
      { start: 0, end: 1 },
      { start: 9, end: 10 },
    ]);
    filterOk(matchesCamelCase, "fba", "FooBarAbe", [
      { start: 0, end: 1 },
      { start: 3, end: 5 },
    ]);
    filterOk(matchesCamelCase, "fbar", "FooBarAbe", [
      { start: 0, end: 1 },
      { start: 3, end: 6 },
    ]);
    filterOk(matchesCamelCase, "fbara", "FooBarAbe", [
      { start: 0, end: 1 },
      { start: 3, end: 7 },
    ]);
    filterOk(matchesCamelCase, "fbaa", "FooBarAbe", [
      { start: 0, end: 1 },
      { start: 3, end: 5 },
      { start: 6, end: 7 },
    ]);
    filterOk(matchesCamelCase, "fbaab", "FooBarAbe", [
      { start: 0, end: 1 },
      { start: 3, end: 5 },
      { start: 6, end: 8 },
    ]);
    filterOk(matchesCamelCase, "c2d", "canvasCreation2D", [
      { start: 0, end: 1 },
      { start: 14, end: 16 },
    ]);
    filterOk(matchesCamelCase, "cce", "_canvasCreationEvent", [
      { start: 1, end: 2 },
      { start: 7, end: 8 },
      { start: 15, end: 16 },
    ]);
  });

  it("CamelCaseFilter - #19256", function () {
    assert(matchesCamelCase("Debug Console", "Open: Debug Console"));
    assert(matchesCamelCase("Debug console", "Open: Debug Console"));
    assert(matchesCamelCase("debug console", "Open: Debug Console"));
  });

  it("matchesContiguousSubString", () => {
    filterOk(matchesContiguousSubString, "cela", "cancelAnimationFrame()", [
      { start: 3, end: 7 },
    ]);
  });

  it("matchesSubString", () => {
    filterOk(matchesSubString, "cmm", "cancelAnimationFrame()", [
      { start: 0, end: 1 },
      { start: 9, end: 10 },
      { start: 18, end: 19 },
    ]);
    filterOk(matchesSubString, "abc", "abcabc", [
      { start: 0, end: 3 },
    ]);
    filterOk(matchesSubString, "abc", "aaabbbccc", [
      { start: 0, end: 1 },
      { start: 3, end: 4 },
      { start: 6, end: 7 },
    ]);
  });

  // Pathological input must fail fast, not blow up in backtracking.
  it("matchesSubString performance (#35346)", function () {
    filterNotOk(matchesSubString, "aaaaaaaaaaaaaaaaaaaax", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
  });

  it("WordFilter", () => {
    filterOk(matchesWords, "alpha", "alpha", [{ start: 0, end: 5 }]);
    filterOk(matchesWords, "alpha", "alphasomething", [{ start: 0, end: 5 }]);
    filterNotOk(matchesWords, "alpha", "alp");
    filterOk(matchesWords, "a", "alpha", [{ start: 0, end: 1 }]);
    filterNotOk(matchesWords, "x", "alpha");
    filterOk(matchesWords, "A", "alpha", [{ start: 0, end: 1 }]);
    filterOk(matchesWords, "AlPh", "alPHA", [{ start: 0, end: 4 }]);
    assert(matchesWords("Debug Console", "Open: Debug Console"));
    filterOk(matchesWords, "gp", "Git: Pull", [{ start: 0, end: 1 }, { start: 5, end: 6 }]);
    filterOk(matchesWords, "g p", "Git: Pull", [{ start: 0, end: 1 }, { start: 3, end: 4 }, { start: 5, end: 6 }]);
    filterOk(matchesWords, "gipu", "Git: Pull", [{ start: 0, end: 2 }, { start: 5, end: 7 }]);
    filterOk(matchesWords, "gp", "Category: Git: Pull", [{ start: 10, end: 11 }, { start: 15, end: 16 }]);
    filterOk(matchesWords, "g p", "Category: Git: Pull", [{ start: 10, end: 11 }, { start: 13, end: 14 }, { start: 15, end: 16 }]);
    filterOk(matchesWords, "gipu", "Category: Git: Pull", [{ start: 10, end: 12 }, { start: 15, end: 17 }]);
    filterNotOk(matchesWords, "it", "Git: Pull");
    filterNotOk(matchesWords, "ll", "Git: Pull");
    // Non-Latin scripts and umlauts count as word characters.
    filterOk(matchesWords, "git: プル", "git: プル", [{ start: 0, end: 7 }]);
    filterOk(matchesWords, "git プル", "git: プル", [{ start: 0, end: 4 }, { start: 5, end: 7 }]);
    filterOk(matchesWords, "öäk", "Öhm: Älles Klar", [{ start: 0, end: 1 }, { start: 5, end: 6 }, { start: 11, end: 12 }]);
    // assert.ok(matchesWords('gipu', 'Category: Git: Pull', true) === null);
    // assert.deepEqual(matchesWords('pu', 'Category: Git: Pull', true), [{ start: 15, end: 17 }]);
    filterOk(matchesWords, "bar", "foo-bar");
    filterOk(matchesWords, "bar test", "foo-bar test");
    filterOk(matchesWords, "fbt", "foo-bar test");
    filterOk(matchesWords, "bar test", "foo-bar (test)");
    filterOk(matchesWords, "foo bar", "foo (bar)");
    filterNotOk(matchesWords, "bar est", "foo-bar test");
    filterNotOk(matchesWords, "fo ar", "foo-bar test");
    filterNotOk(matchesWords, "for", "foo-bar test");
    filterOk(matchesWords, "foo bar", "foo-bar");
    filterOk(matchesWords, "foo bar", "123 foo-bar 456");
    filterOk(matchesWords, "foo+bar", "foo-bar");
    filterOk(matchesWords, "foo-bar", "foo bar");
    filterOk(matchesWords, "foo:bar", "foo:bar");
  });

  // Runs `filter` on pattern/word and checks the produced highlights by
  // rendering the word with a '^' before every highlighted character and
  // comparing against `decoratedWord` (undefined means "must not match").
  function assertMatches(pattern: string, word: string, decoratedWord: string | undefined, filter: FuzzyScorer, opts: { patternPos?: number, wordPos?: number, firstMatchCanBeWeak?: boolean } = {}) {
    const r = filter(pattern, pattern.toLowerCase(), opts.patternPos || 0, word, word.toLowerCase(), opts.wordPos || 0, opts.firstMatchCanBeWeak || false);
    assert.ok(!decoratedWord === !r);
    if (r) {
      const matches = createMatches(r);
      let actualWord = "";
      let pos = 0;
      for (const match of matches) {
        actualWord += word.substring(pos, match.start);
        actualWord += `^${word.substring(match.start, match.end).split("").join("^")}`;
        pos = match.end;
      }
      actualWord += word.substring(pos);
      assert.strictEqual(actualWord, decoratedWord);
    }
  }

  it("fuzzyScore, #23215", function () {
    assertMatches("tit", "win.tit", "win.^t^i^t", fuzzyScore);
    assertMatches("title", "win.title", "win.^t^i^t^l^e", fuzzyScore);
    assertMatches("WordCla", "WordCharacterClassifier", "^W^o^r^dCharacter^C^l^assifier", fuzzyScore);
    assertMatches("WordCCla", "WordCharacterClassifier", "^W^o^r^d^Character^C^l^assifier", fuzzyScore);
  });

  it("fuzzyScore, #23332", function () {
    assertMatches("dete", '"editor.quickSuggestionsDelay"', undefined, fuzzyScore);
  });

  it("fuzzyScore, #23190", function () {
    assertMatches("c:\\do", "& 'C:\\Documents and Settings'", "& '^C^:^\\^D^ocuments and Settings'", fuzzyScore);
    assertMatches("c:\\do", "& 'c:\\Documents and Settings'", "& '^c^:^\\^D^ocuments and Settings'", fuzzyScore);
  });

  it("fuzzyScore, #23581", function () {
    assertMatches("close", "css.lint.importStatement", "^css.^lint.imp^ort^Stat^ement", fuzzyScore);
    assertMatches("close", "css.colorDecorators.enable", "^css.co^l^orDecorator^s.^enable", fuzzyScore);
    assertMatches("close", "workbench.quickOpen.closeOnFocusOut", "workbench.quickOpen.^c^l^o^s^eOnFocusOut", fuzzyScore);
    assertTopScore(fuzzyScore, "close", 2, "css.lint.importStatement", "css.colorDecorators.enable", "workbench.quickOpen.closeOnFocusOut");
  });

  it("fuzzyScore, #23458", function () {
    assertMatches("highlight", "editorHoverHighlight", "editorHover^H^i^g^h^l^i^g^h^t", fuzzyScore);
    assertMatches("hhighlight", "editorHoverHighlight", "editor^Hover^H^i^g^h^l^i^g^h^t", fuzzyScore);
    assertMatches("dhhighlight", "editorHoverHighlight", undefined, fuzzyScore);
  });

  it("fuzzyScore, #23746", function () {
    assertMatches("-moz", "-moz-foo", "^-^m^o^z-foo",
      fuzzyScore);
    assertMatches("moz", "-moz-foo", "-^m^o^z-foo", fuzzyScore);
    assertMatches("moz", "-moz-animation", "-^m^o^z-animation", fuzzyScore);
    assertMatches("moza", "-moz-animation", "-^m^o^z-^animation", fuzzyScore);
  });

  // Core fuzzyScore behavior: camel humps, separators, and rejections.
  it("fuzzyScore", () => {
    assertMatches("ab", "abA", "^a^bA", fuzzyScore);
    assertMatches("ccm", "cacmelCase", "^ca^c^melCase", fuzzyScore);
    assertMatches("bti", "the_black_knight", undefined, fuzzyScore);
    assertMatches("ccm", "camelCase", undefined, fuzzyScore);
    assertMatches("cmcm", "camelCase", undefined, fuzzyScore);
    assertMatches("BK", "the_black_knight", "the_^black_^knight", fuzzyScore);
    assertMatches("KeyboardLayout=", "KeyboardLayout", undefined, fuzzyScore);
    assertMatches("LLL", "SVisualLoggerLogsList", "SVisual^Logger^Logs^List", fuzzyScore);
    assertMatches("LLLL", "SVilLoLosLi", undefined, fuzzyScore);
    assertMatches("LLLL", "SVisualLoggerLogsList", undefined, fuzzyScore);
    assertMatches("TEdit", "TextEdit", "^Text^E^d^i^t", fuzzyScore);
    assertMatches("TEdit", "TextEditor", "^Text^E^d^i^tor", fuzzyScore);
    assertMatches("TEdit", "Textedit", "^T^exte^d^i^t", fuzzyScore);
    assertMatches("TEdit", "text_edit", "^text_^e^d^i^t", fuzzyScore);
    assertMatches("TEditDit", "TextEditorDecorationType", "^Text^E^d^i^tor^Decorat^ion^Type", fuzzyScore);
    assertMatches("TEdit", "TextEditorDecorationType", "^Text^E^d^i^torDecorationType", fuzzyScore);
    assertMatches("Tedit", "TextEdit", "^Text^E^d^i^t", fuzzyScore);
    assertMatches("ba", "?AB?", undefined, fuzzyScore);
    assertMatches("bkn", "the_black_knight", "the_^black_^k^night", fuzzyScore);
    assertMatches("bt", "the_black_knight", "the_^black_knigh^t", fuzzyScore);
    assertMatches("ccm", "camelCasecm", "^camel^Casec^m", fuzzyScore);
    assertMatches("fdm", "findModel", "^fin^d^Model", fuzzyScore);
    assertMatches("fob", "foobar", "^f^oo^bar", fuzzyScore);
    assertMatches("fobz", "foobar", undefined, fuzzyScore);
    assertMatches("foobar", "foobar", "^f^o^o^b^a^r", fuzzyScore);
    assertMatches("form",
      "editor.formatOnSave", "editor.^f^o^r^matOnSave", fuzzyScore);
    assertMatches("g p", "Git: Pull", "^Git:^ ^Pull", fuzzyScore);
    assertMatches("g p", "Git: Pull", "^Git:^ ^Pull", fuzzyScore);
    assertMatches("gip", "Git: Pull", "^G^it: ^Pull", fuzzyScore);
    assertMatches("gip", "Git: Pull", "^G^it: ^Pull", fuzzyScore);
    assertMatches("gp", "Git: Pull", "^Git: ^Pull", fuzzyScore);
    assertMatches("gp", "Git_Git_Pull", "^Git_Git_^Pull", fuzzyScore);
    assertMatches("is", "ImportStatement", "^Import^Statement", fuzzyScore);
    assertMatches("is", "isValid", "^i^sValid", fuzzyScore);
    assertMatches("lowrd", "lowWord", "^l^o^wWo^r^d", fuzzyScore);
    assertMatches("myvable", "myvariable", "^m^y^v^aria^b^l^e", fuzzyScore);
    assertMatches("no", "", undefined, fuzzyScore);
    assertMatches("no", "match", undefined, fuzzyScore);
    assertMatches("ob", "foobar", undefined, fuzzyScore);
    assertMatches("sl", "SVisualLoggerLogsList", "^SVisual^LoggerLogsList", fuzzyScore);
    assertMatches("sllll", "SVisualLoggerLogsList", "^SVisua^l^Logger^Logs^List", fuzzyScore);
    assertMatches("Three", "HTMLHRElement", undefined, fuzzyScore);
    assertMatches("Three", "Three", "^T^h^r^e^e", fuzzyScore);
    assertMatches("fo", "barfoo", undefined, fuzzyScore);
    // Word-separator variants: _, space, ., /, \ all start a new "word".
    assertMatches("fo", "bar_foo", "bar_^f^oo", fuzzyScore);
    assertMatches("fo", "bar_Foo", "bar_^F^oo", fuzzyScore);
    assertMatches("fo", "bar foo", "bar ^f^oo", fuzzyScore);
    assertMatches("fo", "bar.foo", "bar.^f^oo", fuzzyScore);
    assertMatches("fo", "bar/foo", "bar/^f^oo", fuzzyScore);
    assertMatches("fo", "bar\\foo", "bar\\^f^oo", fuzzyScore);
  });

  it("fuzzyScore (first match can be weak)", function () {
    assertMatches("Three", "HTMLHRElement", "H^TML^H^R^El^ement", fuzzyScore, { firstMatchCanBeWeak: true });
    assertMatches("tor", "constructor", "construc^t^o^r", fuzzyScore, { firstMatchCanBeWeak: true });
    assertMatches("ur", "constructor", "constr^ucto^r", fuzzyScore, { firstMatchCanBeWeak: true });
    assertTopScore(fuzzyScore, "tor", 2, "constructor", "Thor", "cTor");
  });

  it("fuzzyScore, many matches", function () {
    assertMatches(
      "aaaaaa",
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
      "^a^a^a^a^a^aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
      fuzzyScore
    );
  });

  // Regression guard against catastrophic backtracking.
  it("Freeze when fjfj -> jfjf, https://github.com/microsoft/vscode/issues/91807", function () {
    assertMatches(
      "jfjfj",
      "fjfjfjfjfjfjfjfjfjfjfj",
      undefined,
      fuzzyScore
    );
    assertMatches(
      "jfjfjfjfjfjfjfjfjfj",
      "fjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfj",
      undefined,
      fuzzyScore
    );
    assertMatches(
      "jfjfjfjfjfjfjfjfjfjjfjfjfjfjfjfjfjfjfjjfjfjfjfjfjfjfjfjfjjfjfjfjfjfjfjfjfjfjjfjfjfjfjfjfjfjfjfjjfjfjfjfjfjfjfjfjfj",
      "fjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfj",
      undefined,
      fuzzyScore
    );
    assertMatches(
      "jfjfjfjfjfjfjfjfjfj",
      "fJfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfj",
      "f^J^f^j^f^j^f^j^f^j^f^j^f^j^f^j^f^j^f^jfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfj", // strong match
      fuzzyScore
    );
    assertMatches(
      "jfjfjfjfjfjfjfjfjfj",
      "fjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfj",
      "f^j^f^j^f^j^f^j^f^j^f^j^f^j^f^j^f^j^f^jfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfjfj", // any match
      fuzzyScore,
      { firstMatchCanBeWeak: true }
    );
  });

  it("fuzzyScore, issue #26423", function () {
    assertMatches("baba", "abababab", undefined, fuzzyScore);
    assertMatches(
      "fsfsfs",
      "dsafdsafdsafdsafdsafdsafdsafasdfdsa",
      undefined,
      fuzzyScore
    );
    assertMatches(
      "fsfsfsfsfsfsfsf",
      "dsafdsafdsafdsafdsafdsafdsafasdfdsafdsafdsafdsafdsfdsafdsfdfdfasdnfdsajfndsjnafjndsajlknfdsa",
      undefined,
      fuzzyScore
    );
  });

  it("Fuzzy IntelliSense matching vs Haxe metadata completion, #26995", function () {
    assertMatches("f", ":Foo", ":^Foo", fuzzyScore);
    assertMatches("f", ":foo", ":^foo", fuzzyScore);
  });

  it("Separator only match should not be weak #79558", function () {
    assertMatches(".", "foo.bar", "foo^.bar", fuzzyScore);
  });

  it("Cannot set property '1' of undefined, #26511", function () {
    const word = new Array<void>(123).join("a");
    const pattern = new Array<void>(120).join("a");
    fuzzyScore(pattern, pattern.toLowerCase(), 0, word, word.toLowerCase(), 0, false);
    assert.ok(true); // must not explode
  });

  // NOTE(review): leading-space counts inside the string literals below may
  // have been collapsed by whitespace mangling — the upstream vscode test
  // uses two leading spaces with patternPos: 2; verify against the original.
  it("Vscode 1.12 no longer obeys 'sortText' in completion items (from language server), #26096", function () {
    assertMatches(" ", " group", undefined, fuzzyScore, { patternPos: 2 });
    assertMatches(" g", " group", " ^group", fuzzyScore, { patternPos: 2 });
    assertMatches("g", " group", " ^group", fuzzyScore);
    assertMatches("g g", " groupGroup", undefined, fuzzyScore);
    assertMatches("g g", " group Group", " ^group^ ^Group", fuzzyScore);
    assertMatches(" g g", " group Group", " ^group^ ^Group", fuzzyScore, { patternPos: 1 });
    assertMatches("zz", "zzGroup", "^z^zGroup", fuzzyScore);
    assertMatches("zzg", "zzGroup", "^z^z^Group", fuzzyScore);
    assertMatches("g", "zzGroup", "zz^Group", fuzzyScore);
  });

  it("patternPos isn't working correctly #79815", function () {
    assertMatches(":p".substr(1), "prop", "^prop", fuzzyScore, { patternPos: 0 });
    assertMatches(":p", "prop", "^prop", fuzzyScore, { patternPos: 1 });
    assertMatches(":p", "prop", undefined, fuzzyScore, { patternPos: 2 });
    assertMatches(":p", "proP", "pro^P", fuzzyScore, { patternPos: 1, wordPos: 1 });
    assertMatches(":p", "aprop", "a^prop", fuzzyScore, { patternPos: 1, firstMatchCanBeWeak: true });
    assertMatches(":p", "aprop", undefined, fuzzyScore, { patternPos: 1, firstMatchCanBeWeak: false });
  });

  // Scores every word with `filter` and asserts the index of the best-scoring
  // one equals `expected` (ties keep the earlier word).
  function assertTopScore(filter: typeof fuzzyScore, pattern: string, expected: number, ...words: string[]) {
    let topScore = -(100 * 10);
    let topIdx = 0;
    for (let i = 0; i < words.length; i++) {
      const word = words[i];
      const m = filter(pattern, pattern.toLowerCase(), 0, word, word.toLowerCase(), 0, false);
      if (m) {
        const [score] = m;
        if (score > topScore) {
          topScore = score;
          topIdx = i;
        }
      }
    }
    assert.strictEqual(topIdx, expected, `${pattern} -> actual=${words[topIdx]} <> expected=${words[expected]}`);
  }

  it("topScore - fuzzyScore", function () {
    assertTopScore(fuzzyScore, "cons", 2, "ArrayBufferConstructor", "Console", "console");
    assertTopScore(fuzzyScore, "Foo", 1, "foo", "Foo", "foo"); // #24904
    assertTopScore(fuzzyScore, "onMess", 1, "onmessage", "onMessage", "onThisMegaEscape");
    assertTopScore(fuzzyScore, "CC", 1, "camelCase", "CamelCase");
    assertTopScore(fuzzyScore, "cC", 0, "camelCase", "CamelCase");
    // assertTopScore(fuzzyScore, 'cC', 1, 'ccfoo', 'camelCase');
    // assertTopScore(fuzzyScore, 'cC', 1, 'ccfoo', 'camelCase', 'foo-cC-bar');
    // issue #17836
    // assertTopScore(fuzzyScore, 'TEdit', 1, 'TextEditorDecorationType', 'TextEdit', 'TextEditor');
    assertTopScore(fuzzyScore, "p", 4, "parse", "posix", "pafdsa", "path", "p");
    assertTopScore(fuzzyScore, "pa", 0, "parse", "pafdsa", "path");
    // issue #14583
    assertTopScore(fuzzyScore, "log", 3, "HTMLOptGroupElement", "ScrollLogicalPosition", "SVGFEMorphologyElement", "log", "logger");
    assertTopScore(fuzzyScore, "e", 2, "AbstractWorker", "ActiveXObject", "else");
    // issue #14446
    assertTopScore(fuzzyScore, "workbench.sideb", 1, "workbench.editor.defaultSideBySideLayout", "workbench.sideBar.location");
    // issue #11423
    assertTopScore(fuzzyScore, "editor.r", 2, "diffEditor.renderSideBySide", "editor.overviewRulerlanes", "editor.renderControlCharacter", "editor.renderWhitespace");
    // assertTopScore(fuzzyScore, 'editor.R', 1, 'diffEditor.renderSideBySide', 'editor.overviewRulerlanes', 'editor.renderControlCharacter', 'editor.renderWhitespace');
    // assertTopScore(fuzzyScore, 'Editor.r', 0, 'diffEditor.renderSideBySide', 'editor.overviewRulerlanes', 'editor.renderControlCharacter', 'editor.renderWhitespace');
    assertTopScore(fuzzyScore, "-mo", 1, "-ms-ime-mode", "-moz-columns");
    // // dupe, issue #14861
    assertTopScore(fuzzyScore, "convertModelPosition", 0, "convertModelPositionToViewPosition", "convertViewToModelPosition");
    // // dupe, issue #14942
    assertTopScore(fuzzyScore, "is", 0, "isValidViewletId", "import statement");
    assertTopScore(fuzzyScore, "title", 1, "files.trimTrailingWhitespace", "window.title");
    assertTopScore(fuzzyScore, "const", 1, "constructor", "const", "cuOnstrul");
  });

  it("Unexpected suggestion scoring, #28791", function () {
    assertTopScore(fuzzyScore, "_lines", 1, "_lineStarts", "_lines");
    assertTopScore(fuzzyScore, "_lines", 1, "_lineS", "_lines");
    assertTopScore(fuzzyScore, "_lineS", 0, "_lineS", "_lines");
  });

  it("HTML closing tag proposal filtered out #38880", function () {
    assertMatches("\t\t<", "\t\t</body>", "^\t^\t^</body>", fuzzyScore, { patternPos: 0 });
    assertMatches("\t\t<", "\t\t</body>", "\t\t^</body>", fuzzyScore, { patternPos: 2 });
    assertMatches("\t<", "\t</body>", "\t^</body>", fuzzyScore, { patternPos: 1 });
  });

  // The "graceful" variants retry with permuted patterns when the strict
  // scorer fails; the aggressive one also tries camel-hump-first.
  it("fuzzyScoreGraceful", () => {
    assertMatches("rlut", "result", undefined, fuzzyScore);
    assertMatches("rlut", "result", "^res^u^l^t", fuzzyScoreGraceful);
    assertMatches("cno", "console", "^co^ns^ole", fuzzyScore);
    assertMatches("cno", "console", "^co^ns^ole", fuzzyScoreGraceful);
    assertMatches("cno", "console", "^c^o^nsole", fuzzyScoreGracefulAggressive);
    assertMatches("cno", "co_new", "^c^o_^new", fuzzyScoreGraceful);
    assertMatches("cno", "co_new", "^c^o_^new", fuzzyScoreGracefulAggressive);
  });

  it("List highlight filter: Not all characters from match are highlighterd #66923", () => {
    assertMatches("foo", "barbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar_foo", "barbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar_^f^o^o", fuzzyScore);
  });

  it("Autocompletion is matched against truncated filterText to 54 characters #74133", () => {
    assertMatches(
      "foo",
      "ffffffffffffffffffffffffffffbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar_foo",
      "ffffffffffffffffffffffffffffbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar_^f^o^o",
      fuzzyScore
    );
    assertMatches(
      "foo",
      "Gffffffffffffffffffffffffffffbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar_foo",
      undefined,
      fuzzyScore
    );
  });

  it('"Go to Symbol" with the exact method name doesn\'t work as expected #84787', function () {
    const match = fuzzyScore(":get", ":get", 1, "get", "get", 0, true);
    assert.ok(Boolean(match));
  });

  it("Suggestion is not highlighted #85826", function () {
    assertMatches("SemanticTokens", "SemanticTokensEdits", "^S^e^m^a^n^t^i^c^T^o^k^e^n^sEdits", fuzzyScore);
    assertMatches("SemanticTokens", "SemanticTokensEdits", "^S^e^m^a^n^t^i^c^T^o^k^e^n^sEdits", fuzzyScoreGracefulAggressive);
  });

  // Fixture: patterns applied to a corpus of DOM type names; the two
  // count-based tests below pin the total number of matches.
  const patterns = ["ecti", "Element", "element", "Head", "head"];

  const data = [
    "HTMLAllCollection","HTMLAnchorElement","HTMLAppletElement","HTMLAreaElement","HTMLAreasCollection",
    "HTMLAudioElement","HTMLBRElement","HTMLBaseElement","HTMLBaseFontElement","HTMLBodyElement","HTMLBodyElementEventMap","HTMLButtonElement","HTMLCanvasElement",
    "HTMLCollection","HTMLCollectionBase","HTMLCollectionOf","HTMLDListElement","HTMLDataElement","HTMLDataListElement","HTMLDirectoryElement","HTMLDivElement","HTMLDocument",
    "HTMLElement","HTMLElementEventMap","HTMLElementTagNameMap","HTMLEmbedElement","HTMLFieldSetElement","HTMLFontElement","HTMLFormControlsCollection","HTMLFormElement",
    "HTMLFrameElement","HTMLFrameElementEventMap","HTMLFrameSetElement","HTMLFrameSetElementEventMap","HTMLHRElement","HTMLHeadElement","HTMLHeadingElement","HTMLHtmlElement",
    "HTMLIFrameElement","HTMLIFrameElementEventMap","HTMLImageElement","HTMLInputElement","HTMLLIElement","HTMLLabelElement","HTMLLegendElement","HTMLLinkElement",
    "HTMLMapElement","HTMLMarqueeElement","HTMLMarqueeElementEventMap","HTMLMediaElement","HTMLMediaElementEventMap","HTMLMenuElement","HTMLMetaElement","HTMLMeterElement",
    "HTMLModElement","HTMLOListElement","HTMLObjectElement","HTMLOptGroupElement","HTMLOptionElement","HTMLOptionsCollection","HTMLOutputElement","HTMLParagraphElement",
    "HTMLParamElement","HTMLPictureElement","HTMLPreElement","HTMLProgressElement","HTMLQuoteElement","HTMLScriptElement","HTMLSelectElement","HTMLSlotElement",
    "HTMLSourceElement","HTMLSpanElement","HTMLStyleElement","HTMLTableAlignment","HTMLTableCaptionElement","HTMLTableCellElement","HTMLTableColElement",
    "HTMLTableDataCellElement","HTMLTableElement","HTMLTableHeaderCellElement","HTMLTableRowElement","HTMLTableSectionElement","HTMLTemplateElement",
    "HTMLTextAreaElement","HTMLTimeElement","HTMLTitleElement","HTMLTrackElement","HTMLUListElement","HTMLUnknownElement","HTMLVideoElement","HTMLVideoElementEventMap",
  ];

  // Total number of highlight ranges produced by `match` over every
  // (pattern, data item) pair; non-matches contribute 0.
  function countMatches(match: IFilter, testPatterns: string[]) {
    let count = 0;
    for (const pattern of testPatterns) {
      for (const item of data) {
        count += (match(pattern, item)?.length??0);
      }
    }
    return count;
  }

  it("matches fuzzy simple", function () {
    filterOk(matchesFuzzy, "par", "HTMLParagraphElement", [{start:4,end:7}]);
  });

  it("matches fuzzy", function () {
    const matches = countMatches(matchesFuzzy, patterns);
    assert.strictEqual(matches, 178);
  });

  it("matches fuzzy2 simple", function () {
    filterOk(matchesFuzzy2, "tab", "HTMLLabelElement", [{start:1,end:2},{start:5,end:7}]);
    filterOk(matchesFuzzy2, "tab", "HTMLTableSectionElement", [{start:4,end:7}]);
  });

  it("matches fuzzy2", function () {
    const matches = countMatches(matchesFuzzy2, patterns);
    assert.strictEqual(matches, 184);
  });

  describe("string tests", () => {
    it("startsWithIgnoreCase", function () {
      assert.ok(startsWithIgnoreCase("A", "a") === true);
      assert.ok(startsWithIgnoreCase("B", "a") === false);
      assert.ok(startsWithIgnoreCase("short", "veryLong") === false);
    });

    it("equalsIgnoreCase", function () {
      assert.ok(equalsIgnoreCase("A", "a") === true);
      assert.ok(equalsIgnoreCase("BBBBB", "bbbbb") === true);
      assert.ok(equalsIgnoreCase("BbbBB", "bBBbb") === true);
      assert.ok(equalsIgnoreCase("dog", "cat") === false);
    });

    it("matchesWords returns null", function () {
      assert.ok(matchesWords("A", "") === null);
    });

    it("matchesCamelCase returns null", function () {
      assert.ok(matchesCamelCase("A", "") === null);
      assert.ok(matchesCamelCase("A", " ") === null);
      assert.ok(matchesCamelCase("A", "aAaAaAaAaAaAaAaA") === null);
      assert.ok(matchesCamelCase("A", "NOTACAMELCASEWORD") === null);
      assert.ok(matchesCamelCase("A", "This-Word-is-over-sixty-characters-long-and-will-not-be-processed-too-long") === null);
    });
  });
});
the_stack
declare class AbortController { constructor(); readonly signal: AbortSignal; abort(): void; } declare class AbortSignal extends EventTarget { constructor(); static abort(): AbortSignal; readonly aborted: boolean; } interface BasicImageTransformations { /** * Maximum width in image pixels. The value must be an integer. */ width?: number; /** * Maximum height in image pixels. The value must be an integer. */ height?: number; /** * Resizing mode as a string. It affects interpretation of width and height * options: * - scale-down: Similar to contain, but the image is never enlarged. If * the image is larger than given width or height, it will be resized. * Otherwise its original size will be kept. * - contain: Resizes to maximum size that fits within the given width and * height. If only a single dimension is given (e.g. only width), the * image will be shrunk or enlarged to exactly match that dimension. * Aspect ratio is always preserved. * - cover: Resizes (shrinks or enlarges) to fill the entire area of width * and height. If the image has an aspect ratio different from the ratio * of width and height, it will be cropped to fit. * - crop: The image will shrunk and cropped to fit within the area * specified by width and height. The image won’t be enlarged. For images * smaller than the given dimensions it’s the same as scale-down. For * images larger than the given dimensions, it’s the same as cover. * - pad: Resizes to the maximum size that fits within the given width and * height, and then fills the remaining area with a background color * (white by default). Use of this mode is not recommended, as the same * effect can be more efficiently achieved with the contain mode and the * CSS object-fit: contain property. */ fit?: "scale-down" | "contain" | "cover" | "crop" | "pad"; /** * When cropping with fit: "cover", this defines the side or point that should * be left uncropped. 
The value is either a string * "left", "right", "top", "bottom" or "center" (the default), * or an object {x, y} containing focal point coordinates in the original * image expressed as fractions ranging from 0.0 (top or left) to 1.0 * (bottom or right), 0.5 being the center. {fit: "cover", gravity: "top"} will * crop bottom or left and right sides as necessary, but won’t crop anything * from the top. {fit: "cover", gravity: {x:0.5, y:0.2}} will crop each side to * preserve as much as possible around a point at 20% of the height of the * source image. */ gravity?: "left" | "right" | "top" | "bottom" | "center" | BasicImageTransformationsGravityCoordinates; /** * Background color to add underneath the image. Applies only to images with * transparency (such as PNG). Accepts any CSS color (#RRGGBB, rgba(…), * hsl(…), etc.) */ background?: string; /** * Number of degrees (90, 180, 270) to rotate the image by. width and height * options refer to axes after rotation. */ rotate?: 0 | 90 | 180 | 270 | 360; } interface BasicImageTransformationsGravityCoordinates { x: number; y: number; } declare class Blob { constructor(bits?: BlobBits, options?: BlobOptions); readonly size: number; readonly type: string; slice(start?: number, end?: number, type?: string): Blob; arrayBuffer(): Promise<ArrayBuffer>; text(): Promise<string>; stream(): ReadableStream; } declare type BlobBits = (ArrayBuffer | string | Blob)[]; interface BlobOptions { type?: string; } declare abstract class Body { readonly body: ReadableStream | null; readonly bodyUsed: boolean; arrayBuffer(): Promise<ArrayBuffer>; text(): Promise<string>; json<T>(): Promise<T>; formData(): Promise<FormData>; blob(): Promise<Blob>; } declare type BodyInitializer = ReadableStream | string | ArrayBuffer | Blob | URLSearchParams | FormData; declare abstract class Cache { delete(request: Request | string, options?: CacheQueryOptions): Promise<boolean>; match(request: Request | string, options?: CacheQueryOptions): Promise<Response | 
undefined>;
  put(request: Request | string, response: Response): Promise<void>;
}

interface CacheQueryOptions {
  ignoreMethod?: boolean;
}

/** Entry point to the Cache API; `default` is the shared cache. */
declare abstract class CacheStorage {
  open(cacheName: string): Promise<Cache>;
  readonly default: Cache;
}

interface CfRequestInitializerDict extends Omit<RequestInitializerDict, "cf"> {
  cf?: RequestInitCfProperties;
}

/** Event fired when a WebSocket connection closes. */
declare class CloseEvent extends Event {
  constructor(type: string, initializer: CloseEventInitializer);
  readonly code: number;
  readonly reason: string;
  readonly wasClean: boolean;
}

interface CloseEventInitializer {
  code?: number;
  reason?: string;
  wasClean?: boolean;
}

/** An HTML comment node as seen by HTMLRewriter handlers. */
declare abstract class Comment {
  text: string;
  readonly removed: boolean;
  before(content: Content, options?: ContentOptions): Comment;
  after(content: Content, options?: ContentOptions): Comment;
  replace(content: Content, options?: ContentOptions): Comment;
  remove(): Comment;
}

interface Console {
  debug(...data: any[]): void;
  error(...data: any[]): void;
  info(...data: any[]): void;
  log(...data: any[]): void;
  warn(...data: any[]): void;
}

declare type Content = string | ReadableStream | Response;

interface ContentOptions {
  html?: boolean;
}

/** Web Crypto entry point. */
declare abstract class Crypto {
  readonly subtle: SubtleCrypto;
  getRandomValues(buffer: ArrayBufferView): ArrayBufferView;
  randomUUID(): string;
}

declare abstract class CryptoKey {
  readonly type: string;
  readonly extractable: boolean;
  readonly algorithm: CryptoKeyAlgorithmVariant;
  readonly usages: string[];
}

interface CryptoKeyAesKeyAlgorithm {
  name: string;
  length: number;
}

declare type CryptoKeyAlgorithmVariant =
  | CryptoKeyKeyAlgorithm
  | CryptoKeyAesKeyAlgorithm
  | CryptoKeyHmacKeyAlgorithm
  | CryptoKeyRsaKeyAlgorithm
  | CryptoKeyEllipticKeyAlgorithm
  | CryptoKeyVoprfKeyAlgorithm
  | CryptoKeyOprfKeyAlgorithm;

interface CryptoKeyEllipticKeyAlgorithm {
  name: string;
  namedCurve: string;
}

interface CryptoKeyHmacKeyAlgorithm {
  name: string;
  hash: CryptoKeyKeyAlgorithm;
  length: number;
}

interface CryptoKeyKeyAlgorithm {
name: string;
}

interface CryptoKeyOprfKeyAlgorithm {
  name: string;
  namedCurve: string;
}

interface CryptoKeyPair {
  publicKey: CryptoKey;
  privateKey: CryptoKey;
}

interface CryptoKeyRsaKeyAlgorithm {
  name: string;
  modulusLength: number;
  publicExponent: ArrayBuffer;
  hash?: CryptoKeyKeyAlgorithm;
}

interface CryptoKeyVoprfKeyAlgorithm {
  name: string;
  hash: CryptoKeyKeyAlgorithm;
  namedCurve: string;
}

/** DOM-standard exception type, with its legacy numeric error-code constants. */
declare class DOMException extends Error {
  constructor(message?: string, name?: string);
  readonly code: number;
  static readonly INDEX_SIZE_ERR: number;
  static readonly DOMSTRING_SIZE_ERR: number;
  static readonly HIERARCHY_REQUEST_ERR: number;
  static readonly WRONG_DOCUMENT_ERR: number;
  static readonly INVALID_CHARACTER_ERR: number;
  static readonly NO_DATA_ALLOWED_ERR: number;
  static readonly NO_MODIFICATION_ALLOWED_ERR: number;
  static readonly NOT_FOUND_ERR: number;
  static readonly NOT_SUPPORTED_ERR: number;
  static readonly INUSE_ATTRIBUTE_ERR: number;
  static readonly INVALID_STATE_ERR: number;
  static readonly SYNTAX_ERR: number;
  static readonly INVALID_MODIFICATION_ERR: number;
  static readonly NAMESPACE_ERR: number;
  static readonly INVALID_ACCESS_ERR: number;
  static readonly VALIDATION_ERR: number;
  static readonly TYPE_MISMATCH_ERR: number;
  static readonly SECURITY_ERR: number;
  static readonly NETWORK_ERR: number;
  static readonly ABORT_ERR: number;
  static readonly URL_MISMATCH_ERR: number;
  static readonly QUOTA_EXCEEDED_ERR: number;
  static readonly TIMEOUT_ERR: number;
  static readonly INVALID_NODE_TYPE_ERR: number;
  static readonly DATA_CLONE_ERR: number;
}

/** The doctype of a document processed by HTMLRewriter. */
declare abstract class Doctype {
  readonly name: string | null;
  readonly publicId: string | null;
  readonly systemId: string | null;
}

declare abstract class DocumentEnd {
  append(content: Content, options?: ContentOptions): DocumentEnd;
}

/** Contract implemented by a Durable Object class. */
interface DurableObject {
  fetch(request: Request): Promise<Response>;
}

/** Opaque identifier of a Durable Object instance. */
declare abstract class DurableObjectId {
  toString(): string;
  equals(other: DurableObjectId): boolean;
  readonly
name?: string;
}

/** Binding used to mint ids and obtain stubs for Durable Object instances. */
declare abstract class DurableObjectNamespace {
  newUniqueId(options?: DurableObjectNamespaceNewUniqueIdOptions): DurableObjectId;
  idFromName(name: string): DurableObjectId;
  idFromString(id: string): DurableObjectId;
  get(id: DurableObjectId): DurableObjectStub;
}

interface DurableObjectNamespaceNewUniqueIdOptions {
  jurisdiction?: string;
}

/** Per-instance state passed to a Durable Object constructor. */
declare abstract class DurableObjectState {
  waitUntil(promise: Promise<void>): void;
  readonly id: DurableObjectId | string;
  readonly storage?: DurableObjectStorage;
  blockConcurrencyWhile<T>(callback: () => Promise<T>): Promise<T>;
}

/** Key-value storage attached to a Durable Object instance. */
declare abstract class DurableObjectStorage {
  get<T = unknown>(key: string, options?: DurableObjectStorageOperationsGetOptions): Promise<T | undefined>;
  get<T = unknown>(keys: string[], options?: DurableObjectStorageOperationsGetOptions): Promise<Map<string, T>>;
  list<T = unknown>(options?: DurableObjectStorageOperationsListOptions): Promise<Map<string, T>>;
  put<T>(key: string, value: T, options?: DurableObjectStorageOperationsPutOptions): Promise<void>;
  put<T>(entries: Record<string, T>, options?: DurableObjectStorageOperationsPutOptions): Promise<void>;
  delete(key: string, options?: DurableObjectStorageOperationsPutOptions): Promise<boolean>;
  delete(keys: string[], options?: DurableObjectStorageOperationsPutOptions): Promise<number>;
  deleteAll(options?: DurableObjectStorageOperationsPutOptions): Promise<void>;
  transaction<T>(closure: (txn: DurableObjectTransaction) => Promise<T>): Promise<T>;
}

interface DurableObjectStorageOperationsGetOptions {
  allowConcurrency?: boolean;
  noCache?: boolean;
}

interface DurableObjectStorageOperationsListOptions {
  start?: string;
  end?: string;
  prefix?: string;
  reverse?: boolean;
  limit?: number;
  allowConcurrency?: boolean;
  noCache?: boolean;
}

interface DurableObjectStorageOperationsPutOptions {
  allowConcurrency?: boolean;
  allowUnconfirmed?: boolean;
  noCache?: boolean;
}

/** Client stub used to send requests to a Durable Object instance. */
declare abstract class DurableObjectStub extends Fetcher {
  readonly id:
DurableObjectId;
  readonly name?: string;
}

/** Storage view available inside DurableObjectStorage.transaction(). */
declare abstract class DurableObjectTransaction {
  get<T = unknown>(key: string, options?: DurableObjectStorageOperationsGetOptions): Promise<T>;
  get<T = unknown>(keys: string[], options?: DurableObjectStorageOperationsGetOptions): Promise<Map<string, T>>;
  list<T = unknown>(options?: DurableObjectStorageOperationsListOptions): Promise<Map<string, T>>;
  put<T>(key: string, value: T, options?: DurableObjectStorageOperationsPutOptions): Promise<void>;
  put<T>(entries: Record<string, T>, options?: DurableObjectStorageOperationsPutOptions): Promise<void>;
  delete(key: string, options?: DurableObjectStorageOperationsPutOptions): Promise<boolean>;
  delete(keys: string[], options?: DurableObjectStorageOperationsPutOptions): Promise<number>;
  rollback(): void;
}

/** An HTML element as seen by HTMLRewriter handlers. */
declare abstract class Element {
  tagName: string;
  readonly attributes: IterableIterator<string[]>;
  readonly removed: boolean;
  readonly namespaceURI: string;
  getAttribute(name: string): string | null;
  hasAttribute(name: string): boolean;
  setAttribute(name: string, value: string): Element;
  removeAttribute(name: string): Element;
  before(content: Content, options?: ContentOptions): Element;
  after(content: Content, options?: ContentOptions): Element;
  prepend(content: Content, options?: ContentOptions): Element;
  append(content: Content, options?: ContentOptions): Element;
  replace(content: Content, options?: ContentOptions): Element;
  remove(): Element;
  removeAndKeepContent(): Element;
  setInnerContent(content: Content, options?: ContentOptions): Element;
}

/** DOM-style Event base class. */
declare class Event {
  constructor(type: string, init?: EventInit);
  readonly type: string;
  readonly eventPhase: number;
  readonly composed: boolean;
  readonly bubbles: boolean;
  readonly cancelable: boolean;
  readonly defaultPrevented: boolean;
  readonly returnValue: boolean;
  readonly currentTarget?: EventTarget;
  readonly srcElement?: EventTarget;
  readonly timeStamp: number;
  readonly isTrusted: boolean;
  cancelBubble: boolean;
stopImmediatePropagation(): void;
  preventDefault(): void;
  stopPropagation(): void;
  composedPath(): EventTarget[];
  static readonly NONE: number;
  static readonly CAPTURING_PHASE: number;
  static readonly AT_TARGET: number;
  static readonly BUBBLING_PHASE: number;
}

interface EventInit {
  bubbles?: boolean;
  cancelable?: boolean;
  composed?: boolean;
}

declare type EventListener<EventType extends Event = Event> = (event: EventType) => void;

interface EventListenerObject<EventType extends Event = Event> {
  handleEvent(event: EventType): void;
}

declare type EventListenerOrEventListenerObject<EventType extends Event = Event> =
  EventListener<EventType> | EventListenerObject<EventType>;

/** DOM-style EventTarget, typed by a map from event name to event class. */
declare class EventTarget<EventMap extends Record<string, Event> = Record<string, Event>> {
  constructor();
  addEventListener<Type extends keyof EventMap>(type: Type, handler: EventListenerOrEventListenerObject<EventMap[Type]>, options?: EventTargetAddEventListenerOptions | boolean): void;
  removeEventListener<Type extends keyof EventMap>(type: Type, handler: EventListenerOrEventListenerObject<EventMap[Type]>, options?: EventTargetEventListenerOptions | boolean): void;
  dispatchEvent(event: EventMap[keyof EventMap]): boolean;
}

interface EventTargetAddEventListenerOptions {
  capture?: boolean;
  passive?: boolean;
  once?: boolean;
}

interface EventTargetEventListenerOptions {
  capture?: boolean;
}

/** Context object passed to module-syntax handlers. */
declare abstract class ExecutionContext {
  waitUntil(promise: Promise<void>): void;
  passThroughOnException(): void;
}

/** Shape of a module-syntax worker's default export. */
interface ExportedHandler<Env = unknown> {
  fetch?: ExportedHandlerFetchHandler<Env>;
  scheduled?: ExportedHandlerScheduledHandler<Env>;
}

declare type ExportedHandlerFetchHandler<Env = unknown> = (request: Request, env: Env, ctx: ExecutionContext) => Response | Promise<Response>;

declare type ExportedHandlerScheduledHandler<Env = unknown> = (controller: ScheduledController, env: Env, ctx: ExecutionContext) => void | Promise<void>;

/** Event delivered for each incoming HTTP request (service-worker syntax). */
declare class FetchEvent extends Event {
  constructor(type: string);
readonly request: Request;
  respondWith(promise: Response | Promise<Response>): void;
  passThroughOnException(): void;
  waitUntil(promise: Promise<void>): void;
}

/** Anything that can service a fetch(), e.g. a Durable Object stub. */
declare abstract class Fetcher {
  fetch(requestOrUrl: Request | string, requestInit?: RequestInitializerDict | Request): Promise<Response>;
}

declare class File extends Blob {
  constructor(bits?: BlobBits, name?: string, options?: FileOptions);
  readonly name: string;
  readonly lastModified: number;
}

interface FileOptions {
  type?: string;
  lastModified?: number;
}

/** TransformStream whose total byte length is declared up front. */
declare class FixedLengthStream extends TransformStream {
  constructor(expectedLength: number);
}

declare class FormData {
  constructor();
  append(name: string, value: string): void;
  append(name: string, value: Blob, filename?: string): void;
  delete(name: string): void;
  get(name: string): File | string | null;
  getAll(name: string): (File | string)[];
  has(name: string): boolean;
  set(name: string, value: string): void;
  set(name: string, value: Blob, filename?: string): void;
  // FIX: entries() and [Symbol.iterator]() were declared as
  // IterableIterator<([key, value])[]> — i.e. each iteration step yielding an
  // ARRAY of pairs. Per the FormData spec, and consistent with the Headers and
  // URLSearchParams declarations in this same file, each step yields a single
  // [key, value] tuple.
  entries(): IterableIterator<[key: string, value: File | string]>;
  keys(): IterableIterator<string>;
  values(): IterableIterator<File | string>;
  forEach<This = unknown>(callback: (this: This, key: string, value: File | string, parent: FormData) => void, thisArg?: This): void;
  [Symbol.iterator](): IterableIterator<[key: string, value: File | string]>;
}

/** Streaming HTML rewriter; register handlers, then transform() a Response. */
declare class HTMLRewriter {
  constructor();
  on(selector: string, handlers: HTMLRewriterElementContentHandlers): HTMLRewriter;
  onDocument(handlers: HTMLRewriterDocumentContentHandlers): HTMLRewriter;
  transform(response: Response): Response;
}

interface HTMLRewriterDocumentContentHandlers {
  doctype?(doctype: Doctype): void | Promise<void>;
  comments?(comment: Comment): void | Promise<void>;
  text?(text: Text): void | Promise<void>;
  end?(end: DocumentEnd): void | Promise<void>;
}

interface HTMLRewriterElementContentHandlers {
  element?(element: Element): void | Promise<void>;
  comments?(comment: Comment): void | Promise<void>;
  text?(text:
Text): void | Promise<void>;
}

/** Fetch-standard Headers map (plus non-standard getAll()). */
declare class Headers {
  constructor(init?: HeadersInitializer);
  get(name: string): string | null;
  getAll(name: string): string[];
  has(name: string): boolean;
  set(name: string, value: string): void;
  append(name: string, value: string): void;
  delete(name: string): void;
  forEach<This = unknown>(callback: (this: This, key: string, value: string, parent: Headers) => void, thisArg?: This): void;
  entries(): IterableIterator<[key: string, value: string]>;
  keys(): IterableIterator<string>;
  values(): IterableIterator<string>;
  [Symbol.iterator](): IterableIterator<[key: string, value: string]>;
}

declare type HeadersInitializer = Headers | Record<string, string> | ([key: string, value: string])[];

/**
 * In addition to the properties on the standard Request object,
 * the cf object contains extra information about the request provided
 * by Cloudflare's edge.
 *
 * Note: Currently, settings in the cf object cannot be accessed in the
 * playground.
 */
interface IncomingRequestCfProperties {
  /** (e.g. 395747) */
  asn: number;
  botManagement?: IncomingRequestCfPropertiesBotManagement;
  city?: string;
  clientTcpRtt: number;
  clientTrustScore?: number;
  /**
   * The three-letter airport code of the data center that the request
   * hit. (e.g. "DFW")
   */
  colo: string;
  continent?: string;
  /**
   * The two-letter country code in the request. This is the same value
   * as that provided in the CF-IPCountry header. (e.g. "US")
   */
  country: string;
  httpProtocol: string;
  latitude?: string;
  longitude?: string;
  /** DMA metro code from which the request was issued, e.g. "635" */
  metroCode?: string;
  postalCode?: string;
  /** e.g. "Texas" */
  region?: string;
  /** e.g. "TX" */
  regionCode?: string;
  /** e.g. "weight=256;exclusive=1" */
  requestPriority: string;
  /**
   * e.g.
"America/Chicago" */ timezone?: string; tlsVersion: string; tlsCipher: string; tlsClientAuth: IncomingRequestCfPropertiesTLSClientAuth; } interface IncomingRequestCfPropertiesBotManagement { score: number; staticResource: boolean; verifiedBot: boolean; } interface IncomingRequestCfPropertiesTLSClientAuth { certIssuerDNLegacy: string; certIssuerDN: string; certPresented: "0" | "1"; certSubjectDNLegacy: string; certSubjectDN: string; /** * In format "Dec 22 19:39:00 2018 GMT" */ certNotBefore: string; /** * In format "Dec 22 19:39:00 2018 GMT" */ certNotAfter: string; certSerial: string; certFingerprintSHA1: string; /** * "SUCCESS", "FAILED:reason", "NONE" */ certVerified: string; } /** * Workers KV is a global, low-latency, key-value data store. It supports exceptionally high read volumes with low-latency, * making it possible to build highly dynamic APIs and websites which respond as quickly as a cached static file would. */ declare abstract class KVNamespace { get(key: string, options?: Partial<KVNamespaceGetOptions<undefined>>): Promise<string | null>; get(key: string, type: "text"): Promise<string | null>; get<ExpectedValue = unknown>(key: string, type: "json"): Promise<ExpectedValue | null>; get(key: string, type: "arrayBuffer"): Promise<ArrayBuffer | null>; get(key: string, type: "stream"): Promise<ReadableStream | null>; get(key: string, options: KVNamespaceGetOptions<"text">): Promise<string | null>; get<ExpectedValue = unknown>(key: string, options: KVNamespaceGetOptions<"json">): Promise<ExpectedValue | null>; get(key: string, options: KVNamespaceGetOptions<"arrayBuffer">): Promise<ArrayBuffer | null>; get(key: string, options: KVNamespaceGetOptions<"stream">): Promise<ReadableStream | null>; list<Metadata = unknown>(options?: KVNamespaceListOptions): Promise<KVNamespaceListResult<Metadata>>; /** * Creates a new key-value pair, or updates the value for a particular key. * @param key key to associate with the value. A key cannot be empty, `.` or `..`. 
All other keys are valid. * @param value value to store. The type is inferred. The maximum size of a value is 25MB. * @returns Returns a `Promise` that you should `await` on in order to verify a successful update. * @example * await NAMESPACE.put(key, value) */ put(key: string, value: string | ArrayBuffer | ArrayBufferView | ReadableStream, options?: KVNamespacePutOptions): Promise<void>; getWithMetadata<Metadata = unknown>(key: string, options?: Partial<KVNamespaceGetOptions<undefined>>): Promise<KVNamespaceGetWithMetadataResult<string, Metadata>>; getWithMetadata<Metadata = unknown>(key: string, type: "text"): Promise<KVNamespaceGetWithMetadataResult<string, Metadata>>; getWithMetadata<ExpectedValue = unknown, Metadata = unknown>(key: string, type: "json"): Promise<KVNamespaceGetWithMetadataResult<ExpectedValue, Metadata>>; getWithMetadata<Metadata = unknown>(key: string, type: "arrayBuffer"): Promise<KVNamespaceGetWithMetadataResult<ArrayBuffer, Metadata>>; getWithMetadata<Metadata = unknown>(key: string, type: "stream"): Promise<KVNamespaceGetWithMetadataResult<ReadableStream, Metadata>>; getWithMetadata<Metadata = unknown>(key: string, options: KVNamespaceGetOptions<"text">): Promise<KVNamespaceGetWithMetadataResult<string, Metadata>>; getWithMetadata<ExpectedValue = unknown, Metadata = unknown>(key: string, options: KVNamespaceGetOptions<"json">): Promise<KVNamespaceGetWithMetadataResult<ExpectedValue, Metadata>>; getWithMetadata<Metadata = unknown>(key: string, options: KVNamespaceGetOptions<"arrayBuffer">): Promise<KVNamespaceGetWithMetadataResult<ArrayBuffer, Metadata>>; getWithMetadata<Metadata = unknown>(key: string, options: KVNamespaceGetOptions<"stream">): Promise<KVNamespaceGetWithMetadataResult<ReadableStream, Metadata>>; delete(name: string): Promise<void>; } interface KVNamespaceGetOptions<Type> { type: Type; cacheTtl?: number; } interface KVNamespaceGetWithMetadataResult<Value, Metadata> { value: Value | null; metadata: Metadata | null; } 
interface KVNamespaceListKey<Metadata> {
  name: string;
  expiration?: number;
  metadata?: Metadata;
}

interface KVNamespaceListOptions {
  limit?: number;
  prefix?: string | null;
  cursor?: string | null;
}

interface KVNamespaceListResult<Metadata> {
  keys: KVNamespaceListKey<Metadata>[];
  list_complete: boolean;
  cursor?: string;
}

interface KVNamespacePutOptions {
  expiration?: number;
  expirationTtl?: number;
  metadata?: any | null;
}

/** Event carrying a message received over a WebSocket. */
declare class MessageEvent extends Event {
  constructor(type: string, initializer: MessageEventInitializer);
  readonly data: ArrayBuffer | string;
}

interface MessageEventInitializer {
  data: ArrayBuffer | string;
}

/** WHATWG readable stream. */
declare abstract class ReadableStream {
  readonly locked: boolean;
  cancel(reason?: any): Promise<void>;
  getReader(options?: ReadableStreamGetReaderOptions): ReadableStreamReader;
  pipeThrough(transform: ReadableStreamTransform, options?: ReadableStreamPipeToOptions): ReadableStream;
  pipeTo(destination: WritableStream, options?: ReadableStreamPipeToOptions): Promise<void>;
  tee(): [ReadableStream, ReadableStream];
}

interface ReadableStreamGetReaderOptions {
  mode?: string;
}

interface ReadableStreamPipeToOptions {
  preventClose?: boolean;
  preventAbort?: boolean;
  preventCancel?: boolean;
}

declare abstract class ReadableStreamReader {
  readonly closed: Promise<void>;
  cancel(reason?: any): Promise<void>;
  read(byobBuffer?: ArrayBufferView): Promise<ReadableStreamReaderReadResult>;
  releaseLock(): void;
}

interface ReadableStreamReaderReadResult {
  value?: any;
  done: boolean;
}

interface ReadableStreamTransform {
  writable: WritableStream;
  readable: ReadableStream;
}

/** Fetch-standard Request, extended with Cloudflare's `cf` metadata. */
declare class Request extends Body {
  constructor(input: Request | string, init?: RequestInitializerDict | Request);
  clone(): Request;
  readonly method: string;
  readonly url: string;
  readonly headers: Headers;
  readonly redirect: string;
  readonly fetcher: Fetcher | null;
  readonly signal: AbortSignal;
  readonly cf?: IncomingRequestCfProperties;
}

/**
 * In addition to the properties you can
set in the RequestInit dict * that you pass as an argument to the Request constructor, you can * set certain properties of a `cf` object to control how Cloudflare * features are applied to that new Request. * * Note: Currently, these properties cannot be tested in the * playground. */ interface RequestInitCfProperties { cacheEverything?: boolean; /** * A request's cache key is what determines if two requests are * "the same" for caching purposes. If a request has the same cache key * as some previous request, then we can serve the same cached response for * both. (e.g. 'some-key') * * Only available for Enterprise customers. */ cacheKey?: string; /** * Force response to be cached for a given number of seconds. (e.g. 300) */ cacheTtl?: number; /** * Force response to be cached for a given number of seconds based on the Origin status code. * (e.g. { '200-299': 86400, '404': 1, '500-599': 0 }) */ cacheTtlByStatus?: Record<string, number>; scrapeShield?: boolean; apps?: boolean; image?: RequestInitCfPropertiesImage; minify?: RequestInitCfPropertiesImageMinify; mirage?: boolean; /** * Redirects the request to an alternate origin server. You can use this, * for example, to implement load balancing across several origins. * (e.g.us-east.example.com) * * Note - For security reasons, the hostname set in resolveOverride must * be proxied on the same Cloudflare zone of the incoming request. * Otherwise, the setting is ignored. CNAME hosts are allowed, so to * resolve to a host under a different domain or a DNS only domain first * declare a CNAME record within your own zone’s DNS mapping to the * external hostname, set proxy on Cloudflare, then set resolveOverride * to point to that CNAME record. */ resolveOverride?: string; } interface RequestInitCfPropertiesImage extends BasicImageTransformations { /** * Device Pixel Ratio. Default 1. Multiplier for width/height that makes it * easier to specify higher-DPI sizes in <img srcset>. 
*/ dpr?: number; /** * Quality setting from 1-100 (useful values are in 60-90 range). Lower values * make images look worse, but load faster. The default is 85. It applies only * to JPEG and WebP images. It doesn’t have any effect on PNG. */ quality?: number; /** * Output format to generate. It can be: * - avif: generate images in AVIF format. * - webp: generate images in Google WebP format. Set quality to 100 to get * the WebP-lossless format. * - json: instead of generating an image, outputs information about the * image, in JSON format. The JSON object will contain image size * (before and after resizing), source image’s MIME type, file size, etc. */ format?: "avif" | "webp" | "json"; /** * What EXIF data should be preserved in the output image. Note that EXIF * rotation and embedded color profiles are always applied ("baked in" into * the image), and aren't affected by this option. Note that if the Polish * feature is enabled, all metadata may have been removed already and this * option may have no effect. * - keep: Preserve most of EXIF metadata, including GPS location if there's * any. * - copyright: Only keep the copyright tag, and discard everything else. * This is the default behavior for JPEG files. * - none: Discard all invisible EXIF metadata. Currently WebP and PNG * output formats always discard metadata. */ metadata?: "keep" | "copyright" | "none"; /** * Overlays are drawn in the order they appear in the array (last array * entry is the topmost layer). */ draw?: RequestInitCfPropertiesImageDraw[]; } interface RequestInitCfPropertiesImageDraw extends BasicImageTransformations { /** * Absolute URL of the image file to use for the drawing. It can be any of * the supported file formats. For drawing of watermarks or non-rectangular * overlays we recommend using PNG or WebP images. */ url: string; /** * Floating-point number between 0 (transparent) and 1 (opaque). * For example, opacity: 0.5 makes overlay semitransparent. 
*/ opacity?: number; /** * - If set to true, the overlay image will be tiled to cover the entire * area. This is useful for stock-photo-like watermarks. * - If set to "x", the overlay image will be tiled horizontally only * (form a line). * - If set to "y", the overlay image will be tiled vertically only * (form a line). */ repeat?: true | "x" | "y"; /** * Position of the overlay image relative to a given edge. Each property is * an offset in pixels. 0 aligns exactly to the edge. For example, left: 10 * positions left side of the overlay 10 pixels from the left edge of the * image it's drawn over. bottom: 0 aligns bottom of the overlay with bottom * of the background image. * * Setting both left & right, or both top & bottom is an error. * * If no position is specified, the image will be centered. */ top?: number; left?: number; bottom?: number; right?: number; } interface RequestInitCfPropertiesImageMinify { javascript?: boolean; css?: boolean; html?: boolean; } interface RequestInitializerDict { method?: string; headers?: HeadersInitializer; body?: BodyInitializer | null; redirect?: string; fetcher?: Fetcher | null; /** * cf is a union of these two types because there are multiple * scenarios in which it might be one or the other. * * IncomingRequestCfProperties is required to allow * new Request(someUrl, event.request) * * RequestInitCfProperties is required to allow * new Request(event.request, {cf: { ... } }) * fetch(someUrl, {cf: { ... 
} }) */
  cf?: IncomingRequestCfProperties | RequestInitCfProperties;
  signal?: AbortSignal | null;
}

/** Fetch-standard Response, extended with `webSocket` and `cf`. */
declare class Response extends Body {
  constructor(bodyInit?: BodyInitializer | null, maybeInit?: ResponseInitializerDict | Response);
  static redirect(url: string, status?: number): Response;
  clone(): Response;
  readonly status: number;
  readonly statusText: string;
  readonly headers: Headers;
  readonly ok: boolean;
  readonly redirected: boolean;
  readonly url: string;
  readonly webSocket: WebSocket | null;
  readonly cf?: Object;
}

interface ResponseInitializerDict {
  status?: number;
  statusText?: string;
  headers?: HeadersInitializer;
  cf?: Object;
  webSocket?: WebSocket | null;
  encodeBody?: string;
}

/** Controller handed to module-syntax scheduled() handlers. */
declare abstract class ScheduledController {
  readonly scheduledTime: number;
  readonly cron: string;
  noRetry(): void;
}

/** Event delivered for cron triggers (service-worker syntax). */
declare class ScheduledEvent extends Event {
  constructor(type: string);
  readonly scheduledTime: number;
  readonly cron: string;
  noRetry(): void;
  waitUntil(promise: Promise<void>): void;
}

/** The Workers global scope: timers, fetch, crypto, caches and constructors. */
declare class ServiceWorkerGlobalScope extends WorkerGlobalScope {
  constructor();
  static readonly DOMException: typeof DOMException;
  static readonly WorkerGlobalScope: typeof WorkerGlobalScope;
  btoa(data: string): string;
  atob(data: string): string;
  setTimeout<Args extends any[]>(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number;
  clearTimeout(timeoutId: number | null): void;
  setInterval<Args extends any[]>(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number;
  clearInterval(timeoutId: number | null): void;
  queueMicrotask(task: Function): void;
  fetch(request: Request | string, requestInitr?: RequestInitializerDict | Request): Promise<Response>;
  readonly self: ServiceWorkerGlobalScope;
  readonly crypto: Crypto;
  readonly caches: CacheStorage;
  static readonly Event: typeof Event;
  static readonly FetchEvent: typeof FetchEvent;
  static readonly ScheduledEvent: typeof ScheduledEvent;
  static readonly MessageEvent: typeof MessageEvent;
  static
readonly CloseEvent: typeof CloseEvent;
  static readonly ReadableStream: typeof ReadableStream;
  static readonly WritableStream: typeof WritableStream;
  static readonly TransformStream: typeof TransformStream;
  static readonly Headers: typeof Headers;
  static readonly Body: typeof Body;
  static readonly Request: typeof Request;
  static readonly Response: typeof Response;
  static readonly WebSocket: typeof WebSocket;
  static readonly WebSocketPair: typeof WebSocketPair;
  static readonly AbortController: typeof AbortController;
  static readonly AbortSignal: typeof AbortSignal;
  static readonly TextDecoder: typeof TextDecoder;
  static readonly TextEncoder: typeof TextEncoder;
  static readonly URL: typeof URL;
  static readonly URLSearchParams: typeof URLSearchParams;
  static readonly Blob: typeof Blob;
  static readonly File: typeof File;
  static readonly FormData: typeof FormData;
  static readonly Crypto: typeof Crypto;
  static readonly SubtleCrypto: typeof SubtleCrypto;
  static readonly CryptoKey: typeof CryptoKey;
  static readonly CacheStorage: typeof CacheStorage;
  static readonly Cache: typeof Cache;
  static readonly FixedLengthStream: typeof FixedLengthStream;
  static readonly HTMLRewriter: typeof HTMLRewriter;
  readonly console: Console;
}

interface StreamQueuingStrategy {
  highWaterMark: number;
  size(arg1: any): number;
}

/** Web Crypto SubtleCrypto surface. */
declare abstract class SubtleCrypto {
  encrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, plainText: ArrayBuffer): Promise<ArrayBuffer>;
  decrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, cipherText: ArrayBuffer): Promise<ArrayBuffer>;
  sign(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, data: ArrayBuffer): Promise<ArrayBuffer>;
  verify(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, signature: ArrayBuffer, data: ArrayBuffer): Promise<boolean>;
  digest(algorithm: string | SubtleCryptoHashAlgorithm, data: ArrayBuffer): Promise<ArrayBuffer>;
  generateKey(algorithm: string |
SubtleCryptoGenerateKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise<CryptoKey | CryptoKeyPair>;
  deriveKey(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, derivedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise<CryptoKey>;
  deriveBits(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, length: number | null): Promise<ArrayBuffer>;
  importKey(format: string, keyData: ArrayBuffer | SubtleCryptoJsonWebKey, algorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise<CryptoKey>;
  exportKey(format: string, key: CryptoKey): Promise<ArrayBuffer | SubtleCryptoJsonWebKey>;
  wrapKey(format: string, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: string | SubtleCryptoEncryptAlgorithm): Promise<ArrayBuffer>;
  unwrapKey(format: string, wrappedKey: ArrayBuffer, unwrappingKey: CryptoKey, unwrapAlgorithm: string | SubtleCryptoEncryptAlgorithm, unwrappedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise<CryptoKey>;
}

interface SubtleCryptoDeriveKeyAlgorithm {
  name: string;
  salt?: ArrayBuffer;
  iterations?: number;
  hash?: string | SubtleCryptoHashAlgorithm;
  public?: CryptoKey;
  info?: ArrayBuffer;
}

interface SubtleCryptoEncryptAlgorithm {
  name: string;
  iv?: ArrayBuffer;
  additionalData?: ArrayBuffer;
  tagLength?: number;
  counter?: ArrayBuffer;
  length?: number;
  label?: ArrayBuffer;
}

interface SubtleCryptoGenerateKeyAlgorithm {
  name: string;
  hash?: string | SubtleCryptoHashAlgorithm;
  modulusLength?: number;
  publicExponent?: ArrayBuffer;
  length?: number;
  namedCurve?: string;
}

interface SubtleCryptoHashAlgorithm {
  name: string;
}

interface SubtleCryptoImportKeyAlgorithm {
  name: string;
  hash?: string | SubtleCryptoHashAlgorithm;
  length?: number;
  namedCurve?: string;
  compressed?: boolean;
}

/** JSON Web Key fields. */
interface SubtleCryptoJsonWebKey {
  kty: string;
  use?: string;
  key_ops?: string[];
alg?: string;
  ext?: boolean;
  crv?: string;
  x?: string;
  y?: string;
  d?: string;
  n?: string;
  e?: string;
  p?: string;
  q?: string;
  dp?: string;
  dq?: string;
  qi?: string;
  oth?: SubtleCryptoJsonWebKeyRsaOtherPrimesInfo[];
  k?: string;
}

interface SubtleCryptoJsonWebKeyRsaOtherPrimesInfo {
  r?: string;
  d?: string;
  t?: string;
}

interface SubtleCryptoSignAlgorithm {
  name: string;
  hash?: string | SubtleCryptoHashAlgorithm;
  dataLength?: number;
  saltLength?: number;
}

/** A text chunk as seen by HTMLRewriter handlers. */
declare abstract class Text {
  readonly text: string;
  readonly lastInTextNode: boolean;
  readonly removed: boolean;
  before(content: Content, options?: ContentOptions): Text;
  after(content: Content, options?: ContentOptions): Text;
  replace(content: Content, options?: ContentOptions): Text;
  remove(): Text;
}

/** TextDecoder limited to UTF-8 labels. */
declare class TextDecoder {
  constructor(label?: "utf-8" | "utf8" | "unicode-1-1-utf-8", options?: TextDecoderConstructorOptions);
  decode(input?: ArrayBuffer, options?: TextDecoderDecodeOptions): string;
  readonly encoding: string;
  readonly fatal: boolean;
  readonly ignoreBOM: boolean;
}

interface TextDecoderConstructorOptions {
  fatal: boolean;
  ignoreBOM: boolean;
}

interface TextDecoderDecodeOptions {
  stream: boolean;
}

declare class TextEncoder {
  constructor();
  encode(input?: string): Uint8Array;
  encodeInto(input: string, buffer: Uint8Array): TextEncoderEncodeIntoResult;
  readonly encoding: string;
}

interface TextEncoderEncodeIntoResult {
  read: number;
  written: number;
}

declare class TransformStream {
  constructor();
  readonly readable: ReadableStream;
  readonly writable: WritableStream;
}

/** WHATWG URL. */
declare class URL {
  constructor(url: string, base?: string);
  href: string;
  readonly origin: string;
  protocol: string;
  username: string;
  password: string;
  host: string;
  hostname: string;
  port: string;
  pathname: string;
  search: string;
  readonly searchParams: URLSearchParams;
  hash: string;
  toString(): string;
  toJSON(): string;
}

declare class URLSearchParams {
  constructor(init?: URLSearchParamsInitializer);
  append(name: string, value:
string): void; delete(name: string): void; get(name: string): string | null; getAll(name: string): string[]; has(name: string): boolean; set(name: string, value: string): void; sort(): void; entries(): IterableIterator<[key: string, value: string]>; keys(): IterableIterator<string>; values(): IterableIterator<string>; forEach<This = unknown>(callback: (this: This, key: string, value: string, parent: URLSearchParams) => void, thisArg?: This): void; [Symbol.iterator](): IterableIterator<[key: string, value: string]>; toString(): string; } declare type URLSearchParamsInitializer = URLSearchParams | string | Record<string, string> | ([key: string, value: string])[]; declare abstract class WebSocket extends EventTarget<WebSocketEventMap> { accept(): void; send(message: ArrayBuffer | string): void; close(code?: number, reason?: string): void; } declare type WebSocketEventMap = { close: CloseEvent; message: MessageEvent; }; declare const WebSocketPair: { new(): { 0: WebSocket; 1: WebSocket; }; }; declare class WorkerGlobalScope extends EventTarget<WorkerGlobalScopeEventMap> { constructor(); static readonly EventTarget: typeof EventTarget; } declare type WorkerGlobalScopeEventMap = { fetch: FetchEvent; scheduled: ScheduledEvent; }; declare abstract class WritableStream { readonly locked: boolean; abort(reason: any): Promise<void>; getWriter(): WritableStreamWriter; } declare abstract class WritableStreamWriter { readonly closed: Promise<void>; readonly desiredSize: number | null; abort(reason: any): Promise<void>; close(): Promise<void>; write(chunk: any): Promise<void>; releaseLock(): void; } declare function addEventListener<Type extends keyof WorkerGlobalScopeEventMap>(type: Type, handler: EventListenerOrEventListenerObject<WorkerGlobalScopeEventMap[Type]>, options?: EventTargetAddEventListenerOptions | boolean): void; declare function atob(data: string): string; declare function btoa(data: string): string; declare const caches: CacheStorage; declare function 
clearInterval(timeoutId: number | null): void; declare function clearTimeout(timeoutId: number | null): void; declare const console: Console; declare const crypto: Crypto; declare function dispatchEvent(event: WorkerGlobalScopeEventMap[keyof WorkerGlobalScopeEventMap]): boolean; declare function fetch(request: Request | string, requestInitr?: RequestInitializerDict | Request): Promise<Response>; declare function queueMicrotask(task: Function): void; declare function removeEventListener<Type extends keyof WorkerGlobalScopeEventMap>(type: Type, handler: EventListenerOrEventListenerObject<WorkerGlobalScopeEventMap[Type]>, options?: EventTargetEventListenerOptions | boolean): void; declare const self: ServiceWorkerGlobalScope; declare function setInterval<Args extends any[]>(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; declare function setTimeout<Args extends any[]>(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number;
the_stack
/*
 * @copyright
 * Copyright © Microsoft Open Technologies, Inc.
 *
 * All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
 * OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
 * ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A
 * PARTICULAR PURPOSE, MERCHANTABILITY OR NON-INFRINGEMENT.
 *
 * See the Apache License, Version 2.0 for the specific language
 * governing permissions and limitations under the License.
 */
'use strict';
/* Directive tells jshint that suite and test are globals defined by mocha */
/* global suite */
/* global test */

import * as assert from "assert";
import * as nock from "nock";
import * as querystring from "querystring";
import * as url from "url";

import * as adal from "../lib/adal";

const util = require('./util/util');
// cp bundles the shared test fixtures (client id, resource, endpoints, …).
const cp: any = util.commonParameters;
const AuthenticationContext = adal.AuthenticationContext;
const MemoryCache = adal.MemoryCache;

// Tests for the OAuth2 device-code grant (acquireTokenWithDeviceCode and its
// cancellation API). All HTTP traffic is intercepted with nock; no network.
suite('device-code', function () {
    setup(function () {
        // Reset shared logger and the library's static token cache so tests
        // cannot leak state into each other.
        util.resetLogging();
        util.clearStaticCache();
    });

    // Arranges a single nock interceptor that answers the device_code token
    // request with `httpCode`/`returnDoc`. Returns the interceptor so tests
    // can call .done() to assert it was hit.
    function setupExpectedTokenRequestResponse(httpCode: number, returnDoc: object, authorityEndpoint?: string) {
        var authEndpoint = util.getNockAuthorityHost(authorityEndpoint);

        var queryParameters: any = {};
        queryParameters['grant_type'] = 'device_code';
        queryParameters['client_id'] = cp.clientId;
        queryParameters['resource'] = cp.resource;
        queryParameters['code'] = cp.deviceCode;
        var query = querystring.stringify(queryParameters);

        var tokenRequest = nock(authEndpoint)
            .filteringRequestBody(function (body) { return util.filterQueryString(query, body); })
            .post(cp.tokenUrlPath, query)
            .reply(httpCode, returnDoc);

        util.matchStandardRequestHeaders(tokenRequest);
        return tokenRequest;
    }

    // Token is returned on the very first poll.
    test('happy-path-successOnFirstRequest', function (done) {
        var response = util.createResponse();
        var tokenRequest = setupExpectedTokenRequestResponse(200, response.wireResponse);

        var userCodeInfo: any = { deviceCode: cp.deviceCode, interval: 1, expiresIn: 1 };
        var context = new AuthenticationContext(cp.authUrl);
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo as any, function (err) {
            assert(!err, 'Receive unexpected error');
            tokenRequest.done();
            done(err);
        });
    });

    // Same as above but the first poll answers 400 authorization_pending, so
    // the client must retry; the second poll returns the token.
    function setupExpectedTokenRequestResponseWithAuthPending(returnDoc: object) {
        var authEndpoint = util.getNockAuthorityHost();

        var queryParameter: any = {};
        queryParameter['grant_type'] = 'device_code';
        queryParameter['client_id'] = cp.clientId;
        queryParameter['resource'] = cp.resource;
        queryParameter['code'] = cp.deviceCode;
        var query = querystring.stringify(queryParameter);

        var authPendingResponse = { error: 'authorization_pending' };

        var tokenRequest = nock(authEndpoint)
            .filteringRequestBody(function (body) { return util.filterQueryString(query, body); })
            .post(cp.tokenUrlPath, query)
            .reply(400, authPendingResponse)
            .post(cp.tokenUrlPath, query)
            .reply(200, returnDoc);

        util.matchStandardRequestHeaders(tokenRequest);
        return tokenRequest;
    }

    // Polling retries through authorization_pending and eventually succeeds.
    test('happy-path-pendingOnFirstRequest', function (done) {
        var response = util.createResponse();
        var tokenRequest = setupExpectedTokenRequestResponseWithAuthPending(response.wireResponse);

        var userCodeInfo: any = { deviceCode: cp.deviceCode, interval: 1, expiresIn: 200 };
        var context = new AuthenticationContext(cp.authUrl);
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err, tokenResponse) {
            if (!err) {
                assert(util.isMatchTokenResponse(response.cachedResponse, tokenResponse), 'The response did not match what was expected');
                tokenRequest.done();
            }
            done(err);
        });
    });

    // Cancelling an in-flight device-code poll surfaces
    // 'Polling_Request_Cancelled' to the acquire callback.
    test('happy-path-cancelRequest', function (done) {
        nock.cleanAll();
        var response = util.createResponse();
        setupExpectedTokenRequestResponseWithAuthPending(response.wireResponse);

        var userCodeInfo: any = { deviceCode: cp.deviceCode, interval: 1, expiresIn: 200 };
        var context = new AuthenticationContext(cp.authUrl);
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err) {
            assert(err, 'Did not receive expected error');
            assert(err.message === 'Polling_Request_Cancelled');
            done();
        });

        context.cancelRequestToGetTokenWithDeviceCode(userCodeInfo, function (err) {
            assert(!err, 'Receive unexpected error.')
        });
    });

    // Argument validation of acquireTokenWithDeviceCode: each required
    // userCodeInfo field missing/invalid in turn, null userCodeInfo, and a
    // missing callback (which throws synchronously).
    test('bad-argument', function (done) {
        nock.cleanAll();
        var context = new AuthenticationContext(cp.authUrl);

        let userCodeInfo: any = { interval: 5, expiresIn: 1000 };
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err) {
            assert(err, 'Did not receive expected argument error');
            assert(err.message === 'The userCodeInfo is missing device_code');
        });

        userCodeInfo = { deviceCode: 'test_device_code', expiresIn: 1000 };
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err) {
            assert(err, 'Did not receive expected argument error');
            assert(err.message === 'The userCodeInfo is missing interval');
        });

        userCodeInfo = { deviceCode: 'test_device_code', interval: 5 };
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err) {
            assert(err, 'Did not receive expected argument error');
            assert(err.message === 'The userCodeInfo is missing expires_in');
        });

        // test if usercodeInfo is null
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, null as any, function (err) {
            assert(err, 'Did not receive expected argument error');
            assert(err.message === 'The userCodeInfo parameter is required');
        });

        userCodeInfo = { deviceCode: 'test_device_code', interval: 5, expiresIn: 1000 };
        try {
            // Missing callback throws synchronously rather than erroring back.
            context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, null as any);
        } catch (e) {
            assert(e, 'Did not receive expected error. ');
            assert(e.message === 'acquireToken requires a function callback parameter.', 'Unexpected error message returned.');
        }

        userCodeInfo = { deviceCode: 'test_device_code', interval: 0, expiresIn: 1000 };
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err) {
            assert(err, 'Did not receive expected error.');
            assert(err.message === 'invalid refresh interval');
        });

        done();
    });

    // Argument validation of cancelRequestToGetTokenWithDeviceCode, plus
    // cancelling when no acquire request is outstanding.
    test('bad-argument-cancel-request', function (done) {
        let context = new AuthenticationContext(cp.authUrl);

        let userCodeInfo: any = { interval: 5, expiresIn: 1000 };
        context.cancelRequestToGetTokenWithDeviceCode(userCodeInfo, function (err) {
            assert(err, 'Did not receive expected argument error');
            assert(err.message === 'The userCodeInfo is missing device_code');
        });

        // test if usercodeInfo is null
        context.cancelRequestToGetTokenWithDeviceCode(null as any, function (err) {
            assert(err, 'Did not receive expected argument error');
            assert(err.message === 'The userCodeInfo parameter is required');
        });

        userCodeInfo = { deviceCode: 'test_device_code', interval: 5, expiresIn: 1000 };
        try {
            context.cancelRequestToGetTokenWithDeviceCode(userCodeInfo, null as any);
        } catch (e) {
            assert(e, 'Did not receive expected error. ');
            assert(e.message === 'acquireToken requires a function callback parameter.', 'Unexpected error message returned.');
        }

        userCodeInfo = { deviceCode: cp.deviceCode, interval: 1, expiresIn: 200 };
        context.cancelRequestToGetTokenWithDeviceCode(userCodeInfo, function (err) {
            assert(err, 'Did not receive expected error. ');
            assert(err.message === 'No acquireTokenWithDeviceCodeRequest existed to be cancelled', 'Unexpected error message returned.');
        })

        done();
    });

    // A multi-resource refresh token (MRRT) obtained via device code for one
    // tenant is used to silently acquire a token against another tenant; both
    // entries must land in the shared MemoryCache with their own tenantIds.
    test('cross-tenant-refresh-token', function (done) {
        var memCache = new MemoryCache();
        var response = util.createResponse({ mrrt: true });
        setupExpectedTokenRequestResponse(200, response.wireResponse);

        var userCodeInfo: any = { deviceCode: cp.deviceCode, interval: 1, expiresIn: 1 };
        var context = new AuthenticationContext(cp.authUrl, false, memCache);
        context.acquireTokenWithDeviceCode(cp.resource, cp.clientId, userCodeInfo, function (err, tokenResponse) {
            assert(!err, 'Receive unexpected error');

            var someOtherAuthority = url.parse(cp.evoEndpoint + '/' + 'anotherTenant');
            var responseOptions = { refreshedRefresh: true, mrrt: true };
            var response = util.createResponse(responseOptions);
            var wireResponse = response.wireResponse;
            // Hand-crafted unsigned JWT whose tid claim differs from the
            // original tenant, so the cache records a distinct tenantId.
            wireResponse.id_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC9jY2ViYTE0Yy02YTAwLTQ5YWMtYjgwNi04NGRlNTJiZjFkNDIvIiwiaWF0IjpudWxsLCJleHAiOm51bGwsImF1ZCI6ImU5NThjMDlhLWFjMzctNDkwMC1iNGQ3LWZiM2VlYWY3MzM4ZCIsInN1YiI6IjRnVHY0RXRvWVctRFRvdzBiRG5KZDFBQTRzZkNoQmJqZXJtcXQ2UV9aYTQiLCJ0aWQiOiJkM2I3ODEzZC0zYTAzLTQyZmEtODk2My1iOTBhNzQ1NTIyYTUiLCJvaWQiOiJhNDQzMjA0YS1hYmM5LTRjYjgtYWRjMS1jMGRmYzEyMzAwYWEiLCJ1cG4iOiJycmFuZGFsbEBycmFuZGFsbGFhZDEub25taWNyb3NvZnQuY29tIiwidW5pcXVlX25hbWUiOiJycmFuZGFsbEBycmFuZGFsbGFhZDEub25taWNyb3NvZnQuY29tIiwiZmFtaWx5X25hbWUiOiJSYW5kYWxsIiwiZ2l2ZW5fbmFtZSI6IlJpY2gifQ.r-XHRqqtxI_7IEmwciFTBJpzwetz4wrM2Is_Z8-O7lw";

            //need to change tokenUrlPath for the different tenant token request, and make sure get it changed back to not affect other tests
            var tokenUrlPath = cp.tokenUrlPath;
            cp.tokenUrlPath = someOtherAuthority.pathname + cp.tokenPath + cp.extraQP;
            util.setupExpectedRefreshTokenRequestResponse(200, wireResponse, someOtherAuthority, response.resource);
            cp.tokenUrlPath = tokenUrlPath;

            var conextForAnotherAuthority = new AuthenticationContext(someOtherAuthority as any, false, memCache);
            conextForAnotherAuthority.acquireToken(response.resource, (tokenResponse as adal.TokenResponse).userId as string, response.clientId, function (error) {
                assert(!error, 'Receive unexpected error');

                assert((memCache as any)._entries.length === 2, 'There should two cache entries in the cache');
                memCache.find({ userId: (tokenResponse as adal.TokenResponse).userId, _clientId: response.clientId, _authority: cp.evoEndpoint + '/' + cp.tenant }, function (err, entry) {
                    assert(!err, 'Unexpected error received');
                    assert(entry.length === 1, 'no result returned for given tenant.');
                    assert(entry[0].tenantId === 'cceba14c-6a00-49ac-b806-84de52bf1d42');
                });
                memCache.find({ userId: (tokenResponse as adal.TokenResponse).userId, _clientId: response.clientId, _authority: url.format(someOtherAuthority) }, function (err, entry) {
                    assert(!err, 'unexpected error received');
                    assert(entry.length === 1, 'no result returned for given tenant.');
                    assert(entry[0].tenantId === 'd3b7813d-3a03-42fa-8963-b90a745522a5');
                });
                done(err);
            });
        });
    });
});
the_stack
import { MethodNames, ReturnTypeOrNever } from "@adpt/utils";
import ld from "lodash";
import { serializeDom } from "../dom_serialize";
import { Handle, HandleInstanceType } from "../handle";
import {
    AdaptElement,
    ElementPredicate,
    isApplyStyle,
    isMountedElement,
} from "../jsx";
import { useAsync } from "./use_async";

/**
 * Call an instance method on the Element that `hand` refers to.
 *
 * @remarks
 * This hook is the primary way for a function component to call an
 * instance method on another component element. A hook is used in order to delay
 * execution of the method until the DOM is completely built. The reason this
 * delayed execution is needed is because during the DOM build process, the
 * element that `hand` refers to may not have been built yet, or `hand` may
 * change to point to a different element later in the build process.
 * By waiting until the build is complete, this avoids element build order
 * issues and ensures that handle references are no longer changing.
 *
 * Because execution of the methods is delayed, `useMethod` will always return
 * the `initial` value on the initial build of a component. After every
 * DOM build is complete, the method will be invoked during the state update
 * phase and the return value stored in the component's state. This state
 * update (or any state update) will cause the DOM to build again. Upon
 * rebuild, the value stored from the last method invocation in the
 * component's state will be returned and a new invocation will be queued.
 *
 * If the value returned by the called method continues to change, this will
 * cause the DOM to continue to be rebuilt again.
 *
 * As this is a hook, it **must not** be called conditionally by a component.
 * In cases where a handle is not always present or the method should not be
 * called, call `useMethod` with `null` for `hand`.
 *
 * @param hand - The handle for the element upon which to call the method
 * `method`. `hand` may also be `null`, in which case, `initial` is always
 * the return value and the other arguments are ignored.
 *
 * @param initial - The initial value that `useMethod` will return before
 * execution of the method has occurred. This value will **always** be returned
 * on the first build of the component, when no component state is present.
 *
 * @param method - Name of the instance method to call.
 *
 * @param args - Variable arguments to be passed to the method call.
 *
 * @privateremarks
 * This overload is used when no explicit type parameters are defined and
 * only two arguments are passed.
 * @beta
 */
export function useMethod<
    H extends Handle,
    Instance = HandleInstanceType<H>,
    MethodName extends MethodNames<Instance> = MethodNames<Instance>,
    Ret = ReturnTypeOrNever<Instance[MethodName]>
>(hand: H | null, method: MethodName): Ret | undefined;
/**
 * {@inheritdoc useMethod}
 * @privateremarks
 * This overload is used when no explicit type parameters are defined and
 * three or more arguments are passed.
 * @beta
 */
export function useMethod<
    Initial,
    H extends Handle,
    Instance = HandleInstanceType<H>,
    MethodName extends MethodNames<Instance> = MethodNames<Instance>,
    Ret = ReturnTypeOrNever<Instance[MethodName]>
>(hand: H | null, initial: Initial, method: MethodName, ...args: any[]): Ret | Initial;
/**
 * {@inheritdoc useMethod}
 * @privateremarks
 * This overload is used when an explicit type parameter is passed, along
 * with two function arguments.
 * @beta
 */
export function useMethod<
    OverrideReturn,
    H extends Handle = Handle,
    Instance = HandleInstanceType<H>,
    MethodName extends MethodNames<Instance> = MethodNames<Instance>,
>(hand: Handle | null, method: MethodName): OverrideReturn | undefined;
/**
 * {@inheritdoc useMethod}
 * @privateremarks
 * This overload is used when an explicit type parameter is passed, along
 * with three or more function arguments.
 * @beta
 */
export function useMethod<OverrideReturn>(hand: Handle | null, initial: OverrideReturn, method: string, ...args: any[]): OverrideReturn;

// Function implementation
export function useMethod<
    Initial,
    H extends Handle,
    Instance = HandleInstanceType<H>,
    MethodName extends MethodNames<Instance> = MethodNames<Instance>,
    Ret = ReturnTypeOrNever<Instance[MethodName]>
>(hand: H | null, initialOrMethod: Initial | MethodName, method?: MethodName, ...args: any[]) {
    // Disambiguate the 2-arg form (hand, method) from the 3+-arg form
    // (hand, initial, method, ...args): if `method` is absent, the second
    // positional argument is the method name and there is no initial value.
    const mName = method || initialOrMethod as MethodName;
    const initial = method ? initialOrMethod as Initial : undefined;
    return useAsync<Ret | typeof initial>(async () => {
        if (hand == null) return initial;
        return callInstanceMethod<Ret | typeof initial>(hand, initial, mName, ...args);
    }, initial);
}

/**
 * Predicate that matches mounted Elements whose instance has a function
 * property `name`, optionally skipping one specific element (`skip`).
 */
export function hasInstanceMethod(name: string, skip?: AdaptElement | null): ElementPredicate {
    return (el) => {
        if (el === skip) return false;
        if (!isMountedElement(el)) throw new Error(`Element is not an ElementImpl`);
        const inst = el.instance;
        return ld.isFunction(inst[name]);
    };
}

/**
 * Predicate that matches mounted Elements that were not replaced by a
 * style-sheet rule (i.e. have no successor, or a successor that is not an
 * ApplyStyle).
 */
export function notReplacedByStyle(): ElementPredicate {
    return (el) => {
        if (!isMountedElement(el)) throw new Error(`Element is not an ElementImpl`);
        const succ = el.buildData.successor;
        if (succ == null) return true;
        if (!isApplyStyle(succ)) return true;
        return false;
    };
}

// Shared core of the callInstance* helpers: fetch `methodName` from the
// instance selected by `options.pred` and invoke it with `args`. If no
// matching element/field is found, getInstanceValue yields `() => def`, so
// the call degenerates to returning the default.
function _callInstanceMethod<T = any>(hand: Handle, def: T, methodName: string, args: any[], options: GetInstanceValueOptions = {}): T {
    const method = getInstanceValue<(...args: any[]) => T>(hand, () => def, methodName, options);
    if (!ld.isFunction(method)) {
        const mountedOrig = hand.associated ? hand.mountedOrig : null;
        throw new Error(`${methodName} exists but is not a function on handle instance:\n` +
            ((mountedOrig != null) ? serializeDom(mountedOrig) : `mountedOrig is ${mountedOrig}`));
    }
    return method(...args);
}

/**
 * Search for the first built Element in the handle chain of `hand` and
 * immediately execute the instance method `methodName` on that Element's
 * instance.
 *
 * @remarks
 * If an Element is found that satisfies the search, but method `methodName`
 * does not exist on the Element's instance, an error is thrown.
 *
 * The exact check that is currently used when searching the handle chain is
 * for mounted Elements that satisfy the predicate {@link notReplacedByStyle}.
 * In practice, this only selects Elements that are both mounted and built.
 *
 * @returns The return value of the called instance method if `hand` is
 * associated and there is an Element in the handle chain that has not been
 * replaced by a style sheet rule. Otherwise, returns the default value `def`.
 * @beta
 */
export function callInstanceMethod<T = any>(hand: Handle, def: T, methodName: string, ...args: any[]): T {
    return _callInstanceMethod(hand, def, methodName, args);
}

/**
 * Search for the first built Element instance in the Handle chain of `hand` that
 * implements method `methodName` and immediately execute it.
 *
 * @remarks
 * The exact check that is currently used when searching the handle chain is
 * mounted Elements that have an instance method `methodName`. Because only
 * built Elements have an Element instance, this only selects Elements that
 * are mounted and built and will not select Elements that have been replaced
 * by a style sheet rule.
 *
 * @returns The return value of the called instance method if `hand` is
 * associated and there is an Element in the handle chain that has method
 * `methodName`. Otherwise, returns the default value `def`.
 * @beta
 */
export function callFirstInstanceWithMethod<T = any>(hand: Handle, def: T, methodName: string, ...args: any[]): T {
    // Walk until we find an instance that has the requested method.
    const options = { pred: hasInstanceMethod(methodName) };
    return _callInstanceMethod(hand, def, methodName, args, options);
}

/**
 * Starting with the successor of `hand`, search for a built Element instance in
 * the handle chain that implements method `methodName` and immediately
 * execute it.
 *
 * @remarks
 * If `hand` is not associated with an Element, an error is thrown.
 *
 * The exact check that is currently used when searching the handle chain is
 * mounted Elements that have an instance method `methodName`. Because only
 * built Elements have an Element instance, this only selects Elements that
 * are mounted and built and will not select Elements that have been replaced
 * by a style sheet rule.
 *
 * @returns The return value of the called instance method if `hand` is
 * associated and there is an Element in the handle chain (other than `hand`)
 * that has method `methodName`. Otherwise, returns the default value `def`.
 * @beta
 */
export function callNextInstanceWithMethod<T = any>(hand: Handle, def: T, methodName: string, ...args: any[]): T {
    if (!hand.associated) {
        // tslint:disable-next-line: max-line-length
        throw new Error(`Cannot find next instance when calling ${methodName}: handle is not associated with any element`);
    }
    // Skip hand.mountedOrig and start with its successor. Walk until we
    // find an instance that has the requested method.
    const options = { pred: hasInstanceMethod(methodName, hand.mountedOrig) };
    return _callInstanceMethod(hand, def, methodName, args, options);
}

/**
 * Starting with the successor of `hand`, search for a built Element instance in
 * the handle chain that implements method `methodName` and immediately
 * execute it.
 *
 * @remarks
 * See {@link callNextInstanceWithMethod} for details.
 * @deprecated
 * Renamed to {@link callNextInstanceWithMethod}.
 */
export const callNextInstanceMethod = callNextInstanceWithMethod;

// Defaults for getInstanceValue: only consider elements not replaced by a
// style rule, and return `def` (do not throw) when nothing matches.
export const defaultGetInstanceValueOptions: GetInstanceValueOptions = { pred: notReplacedByStyle(), throwOnNoElem: false };

export interface GetInstanceValueOptions {
    // Which elements in the handle chain are eligible.
    pred?: ElementPredicate;
    // Throw instead of returning the default when no element matches.
    throwOnNoElem?: boolean;
}

/**
 * Get the value of a field on an element instance.
 *
 * @remarks
 * Function-valued fields are returned bound to the instance so they can be
 * invoked later without losing `this`. Depending on `options.throwOnNoElem`,
 * an unassociated handle or missing mounted element either yields `def` or
 * throws; a mounted element whose instance lacks `field` always throws.
 *
 * @beta
 */
export function getInstanceValue<T = any>(hand: Handle, def: T, field: string, optionsIn?: GetInstanceValueOptions): T {
    const options = { ...defaultGetInstanceValueOptions, ...optionsIn };
    const pred = options.pred;
    if (!hand.associated) {
        if (!options.throwOnNoElem) return def;
        throw new Error(`Cannot get instance field ${field}: Handle is not associated with element`);
    }
    const elem = hand.nextMounted(pred);
    if (!elem) {
        if (!options.throwOnNoElem) return def;
        throw new Error(`Cannot get instance field ${field}: Handle does not point to mounted element`);
    }
    if (!elem.instance) {
        throw new Error(`Internal Error: Element is mounted but instance is ${elem.instance}`);
    }
    if (!(field in elem.instance)) {
        throw new Error(`${field} does not exist on handle instance:\n` + serializeDom(elem));
    }
    const val = elem.instance[field];
    if (ld.isFunction(val)) {
        // Bind so callers can invoke the method detached from the instance.
        return val.bind(elem.instance);
    }
    return val;
}

/**
 * Get the value of field from the instance referenced by handled instance.
 *
 * @remarks
 * On first invocation, or if the handle is not associated with an element, or the field is not found,
 * the value of `initial` will be returned. After the element referenced by handle has been instantiated,
 * this hook will fetch the actual value of `field`, cause a rebuild, and then return that value
 * on the next call of the hook.
 *
 * @beta
 */
export function useInstanceValue<T>(hand: Handle, initial: T, field: string) {
    return useAsync<T>(async () => getInstanceValue(hand, initial, field), initial);
}
the_stack
import type { Container } from "./Container";
import type { IParticles } from "../Options/Interfaces/Particles/IParticles";
import { ParticlesOptions } from "../Options/Classes/Particles/ParticlesOptions";
import { Shape } from "../Options/Classes/Particles/Shape/Shape";
import {
    AnimationStatus,
    DestroyMode,
    OutMode,
    OutModeAlt,
    RotateDirection,
    ShapeType,
    StartValueType,
} from "../Enums";
import type { RecursivePartial } from "../Types";
import {
    alterHsl,
    clamp,
    colorToRgb,
    deepExtend,
    getDistance,
    getHslFromAnimation,
    getParticleBaseVelocity,
    getParticleDirectionAngle,
    getRangeMax,
    getRangeMin,
    getRangeValue,
    getValue,
    isInArray,
    itemFromArray,
    Plugins,
    randomInRange,
    setRangeValue,
} from "../Utils";
import type { Stroke } from "../Options/Classes/Particles/Stroke";
import { Vector } from "./Particle/Vector";
import type {
    IBubbleParticleData,
    ICoordinates,
    ICoordinates3d,
    IDelta,
    IHsl,
    IParticle,
    IParticleGradientAnimation,
    IParticleHslAnimation,
    IParticleLife,
    IParticleNumericValueAnimation,
    IParticleSpin,
    IParticleTiltValueAnimation,
    IParticleValueAnimation,
    IRgb,
    IShapeValues,
    IParticleRetinaProps,
} from "./Interfaces";
import { Vector3d } from "./Particle/Vector3d";
import { IShape } from "../Options/Interfaces/Particles/Shape/IShape";
import { IParticleRoll } from "./Interfaces/IParticleRoll";
import { IParticleWobble } from "./Interfaces/IParticleWobble";

/**
 * Nudges a spawn coordinate away from a canvas edge when the relevant out
 * mode is a bounce mode, so the particle is not created partially outside
 * the bounce area (within `2 * radius` of either edge on that axis).
 */
const fixOutMode = (data: {
    outMode: OutMode | keyof typeof OutMode | OutModeAlt;
    checkModes: (OutMode | keyof typeof OutMode | OutModeAlt)[];
    coord: number;
    maxCoord: number;
    setCb: (value: number) => void;
    radius: number;
}) => {
    // FIX: the guard was `isInArray(...) || isInArray(...)` with identical
    // arguments on both sides — a duplicated call. A single check is
    // equivalent (presumably the second call was meant for a different
    // mode list; there is none in this signature).
    if (isInArray(data.outMode, data.checkModes)) {
        if (data.coord > data.maxCoord - data.radius * 2) {
            // Too close to the far edge: pull back inside by one radius.
            data.setCb(-data.radius);
        } else if (data.coord < data.radius * 2) {
            // Too close to the near edge: push inside by one radius.
            data.setCb(data.radius);
        }
    }
};

/**
 * The single particle object
 * @category Core
 */
export class Particle implements IParticle {
    destroyed;
    lastPathTime;
misplaced; spawning; splitCount; unbreakable; readonly pathDelay; readonly sides; readonly options; readonly life: IParticleLife; roll?: IParticleRoll; wobble?: IParticleWobble; backColor?: IHsl; close: boolean; fill: boolean; randomIndexData?: number; gradient?: IParticleGradientAnimation; rotate?: IParticleValueAnimation<number>; tilt?: IParticleTiltValueAnimation; color?: IParticleHslAnimation; opacity?: IParticleNumericValueAnimation; strokeWidth?: number; stroke?: Stroke; strokeColor?: IParticleHslAnimation; readonly moveDecay: number; readonly direction: number; readonly position: Vector3d; readonly offset: Vector; readonly shadowColor: IRgb | undefined; readonly size: IParticleNumericValueAnimation; readonly velocity: Vector; readonly shape: ShapeType | string; readonly spin?: IParticleSpin; readonly initialPosition: Vector; readonly initialVelocity: Vector; readonly shapeData?: IShapeValues; readonly bubble: IBubbleParticleData; readonly zIndexFactor: number; readonly retina: IParticleRetinaProps; constructor( readonly id: number, readonly container: Container, position?: ICoordinates, overrideOptions?: RecursivePartial<IParticles>, readonly group?: string ) { this.fill = true; this.close = true; this.lastPathTime = 0; this.destroyed = false; this.unbreakable = false; this.splitCount = 0; this.misplaced = false; this.retina = { maxDistance: {}, }; const pxRatio = container.retina.pixelRatio; const mainOptions = container.actualOptions; const particlesOptions = new ParticlesOptions(); particlesOptions.load(mainOptions.particles); const shapeType = particlesOptions.shape.type; const reduceDuplicates = particlesOptions.reduceDuplicates; this.shape = shapeType instanceof Array ? itemFromArray(shapeType, this.id, reduceDuplicates) : shapeType; if (overrideOptions?.shape) { if (overrideOptions.shape.type) { const overrideShapeType = overrideOptions.shape.type; this.shape = overrideShapeType instanceof Array ? 
itemFromArray(overrideShapeType, this.id, reduceDuplicates) : overrideShapeType; } const shapeOptions = new Shape(); shapeOptions.load(overrideOptions.shape); if (this.shape) { this.shapeData = this.loadShapeData(shapeOptions, reduceDuplicates); } } else { this.shapeData = this.loadShapeData(particlesOptions.shape, reduceDuplicates); } if (overrideOptions !== undefined) { particlesOptions.load(overrideOptions); } if (this.shapeData?.particles !== undefined) { particlesOptions.load(this.shapeData?.particles); } this.fill = this.shapeData?.fill ?? this.fill; this.close = this.shapeData?.close ?? this.close; this.options = particlesOptions; this.pathDelay = getValue(this.options.move.path.delay) * 1000; const zIndexValue = getRangeValue(this.options.zIndex.value); container.retina.initParticle(this); /* size */ const sizeOptions = this.options.size, sizeRange = sizeOptions.value; this.size = { enable: sizeOptions.animation.enable, value: getValue(sizeOptions) * container.retina.pixelRatio, max: getRangeMax(sizeRange) * pxRatio, min: getRangeMin(sizeRange) * pxRatio, loops: 0, maxLoops: sizeOptions.animation.count, }; const sizeAnimation = sizeOptions.animation; if (sizeAnimation.enable) { this.size.status = AnimationStatus.increasing; switch (sizeAnimation.startValue) { case StartValueType.min: this.size.value = this.size.min; this.size.status = AnimationStatus.increasing; break; case StartValueType.random: this.size.value = randomInRange(this.size) * pxRatio; this.size.status = Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing; break; case StartValueType.max: default: this.size.value = this.size.max; this.size.status = AnimationStatus.decreasing; break; } this.size.velocity = ((this.retina.sizeAnimationSpeed ?? 
container.retina.sizeAnimationSpeed) / 100) * container.retina.reduceFactor; if (!sizeAnimation.sync) { this.size.velocity *= Math.random(); } } this.direction = getParticleDirectionAngle(this.options.move.direction); this.bubble = { inRange: false, }; /* animation - velocity for speed */ this.initialVelocity = this.calculateVelocity(); this.velocity = this.initialVelocity.copy(); this.moveDecay = 1 - getRangeValue(this.options.move.decay); /* position */ this.position = this.calcPosition(container, position, clamp(zIndexValue, 0, container.zLayers)); this.initialPosition = this.position.copy(); /* parallax */ this.offset = Vector.origin; const particles = container.particles; particles.needsSort = particles.needsSort || particles.lastZIndex < this.position.z; particles.lastZIndex = this.position.z; // Scale z-index factor this.zIndexFactor = this.position.z / container.zLayers; this.sides = 24; let drawer = container.drawers.get(this.shape); if (!drawer) { drawer = Plugins.getShapeDrawer(this.shape); if (drawer) { container.drawers.set(this.shape, drawer); } } if (drawer?.loadShape) { drawer?.loadShape(this); } const sideCountFunc = drawer?.getSidesCount; if (sideCountFunc) { this.sides = sideCountFunc(this); } this.life = this.loadLife(); this.spawning = this.life.delay > 0; if (this.options.move.spin.enable) { const spinPos = this.options.move.spin.position ?? { x: 50, y: 50 }; const spinCenter = { x: (spinPos.x / 100) * container.canvas.size.width, y: (spinPos.y / 100) * container.canvas.size.height, }; const pos = this.getPosition(); const distance = getDistance(pos, spinCenter); this.spin = { center: spinCenter, direction: this.velocity.x >= 0 ? RotateDirection.clockwise : RotateDirection.counterClockwise, angle: this.velocity.angle, radius: distance, acceleration: this.retina.spinAcceleration ?? 
getRangeValue(this.options.move.spin.acceleration), }; } this.shadowColor = colorToRgb(this.options.shadow.color); for (const updater of container.particles.updaters) { if (updater.init) { updater.init(this); } } if (drawer && drawer.particleInit) { drawer.particleInit(container, this); } for (const [, plugin] of container.plugins) { if (plugin.particleCreated) { plugin.particleCreated(this); } } } isVisible(): boolean { return !this.destroyed && !this.spawning && this.isInsideCanvas(); } isInsideCanvas(): boolean { const radius = this.getRadius(); const canvasSize = this.container.canvas.size; return ( this.position.x >= -radius && this.position.y >= -radius && this.position.y <= canvasSize.height + radius && this.position.x <= canvasSize.width + radius ); } draw(delta: IDelta): void { const container = this.container; for (const [, plugin] of container.plugins) { container.canvas.drawParticlePlugin(plugin, this, delta); } container.canvas.drawParticle(this, delta); } getPosition(): ICoordinates3d { return { x: this.position.x + this.offset.x, y: this.position.y + this.offset.y, z: this.position.z, }; } getRadius(): number { return this.bubble.radius ?? this.size.value; } getMass(): number { return (this.getRadius() ** 2 * Math.PI) / 2; } getFillColor(): IHsl | undefined { const color = this.bubble.color ?? getHslFromAnimation(this.color); if (color && this.roll && (this.backColor || this.roll.alter)) { const rolled = Math.floor((this.roll?.angle ?? 0) / (Math.PI / 2)) % 2; if (rolled) { if (this.backColor) { return this.backColor; } if (this.roll.alter) { return alterHsl(color, this.roll.alter.type, this.roll.alter.value); } } } return color; } getStrokeColor(): IHsl | undefined { return this.bubble.color ?? getHslFromAnimation(this.strokeColor) ?? 
this.getFillColor();
    }

    /**
     * Marks the particle as destroyed, notifies plugins and, unless
     * `override` is set, applies the configured destroy mode (split).
     *
     * @param override - when true, plugin notification still happens but the
     * destroy-mode handling (splitting) is skipped.
     */
    destroy(override?: boolean): void {
        this.destroyed = true;
        this.bubble.inRange = false;
        if (this.unbreakable) {
            // Unbreakable particles are flagged but receive no further handling.
            return;
        }
        // FIX: the two assignments above were duplicated here verbatim
        // (`this.destroyed = true; this.bubble.inRange = false;`); the second
        // pair was a dead no-op and has been removed.
        for (const [, plugin] of this.container.plugins) {
            if (plugin.particleDestroyed) {
                plugin.particleDestroyed(this, override);
            }
        }
        if (override) {
            return;
        }
        const destroyOptions = this.options.destroy;
        if (destroyOptions.mode === DestroyMode.split) {
            this.split();
        }
    }

    /**
     * This method is used when the particle has lost a life and needs some value resets
     */
    reset(): void {
        if (this.opacity) {
            this.opacity.loops = 0;
        }
        this.size.loops = 0;
    }

    /**
     * Spawns the configured number of child particles, bounded by the
     * split count limit (`splitOptions.count`).
     */
    private split(): void {
        const splitOptions = this.options.destroy.split;
        // A negative count means "unlimited splits"; otherwise stop once the
        // particle has already split `count` times.
        if (splitOptions.count >= 0 && this.splitCount++ > splitOptions.count) {
            return;
        }
        const rate = getRangeValue(splitOptions.rate.value);
        for (let i = 0; i < rate; i++) {
            this.container.particles.addSplitParticle(this);
        }
    }

    /**
     * Computes the particle spawn position: plugins get first say, otherwise
     * a random (or caller-provided) canvas coordinate, adjusted for bounce
     * out modes and retried on overlap.
     */
    private calcPosition(
        container: Container,
        position: ICoordinates | undefined,
        zIndex: number,
        tryCount = 0
    ): Vector3d {
        for (const [, plugin] of container.plugins) {
            const pluginPos =
                plugin.particlePosition !== undefined ? plugin.particlePosition(position, this) : undefined;
            if (pluginPos !== undefined) {
                return Vector3d.create(pluginPos.x, pluginPos.y, zIndex);
            }
        }
        const canvasSize = container.canvas.size;
        const pos = Vector3d.create(
            position?.x ?? Math.random() * canvasSize.width,
            position?.y ??
Math.random() * canvasSize.height,
            zIndex
        );
        const radius = this.getRadius();
        /* check position - into the canvas */
        const outModes = this.options.move.outModes,
            fixHorizontal = (outMode: OutMode | keyof typeof OutMode | OutModeAlt) => {
                fixOutMode({
                    outMode,
                    checkModes: [OutMode.bounce, OutMode.bounceHorizontal],
                    coord: pos.x,
                    maxCoord: container.canvas.size.width,
                    setCb: (value: number) => (pos.x += value),
                    radius,
                });
            },
            fixVertical = (outMode: OutMode | keyof typeof OutMode | OutModeAlt) => {
                fixOutMode({
                    outMode,
                    checkModes: [OutMode.bounce, OutMode.bounceVertical],
                    coord: pos.y,
                    maxCoord: container.canvas.size.height,
                    setCb: (value: number) => (pos.y += value),
                    radius,
                });
            };
        // Each edge may have its own out mode; fall back to the default one.
        fixHorizontal(outModes.left ?? outModes.default);
        fixHorizontal(outModes.right ?? outModes.default);
        fixVertical(outModes.top ?? outModes.default);
        fixVertical(outModes.bottom ?? outModes.default);
        if (this.checkOverlap(pos, tryCount)) {
            // Overlapping an existing particle: retry with a fresh random position.
            return this.calcPosition(container, undefined, zIndex, tryCount + 1);
        }
        return pos;
    }

    /**
     * Returns true when `pos` overlaps an existing particle and overlap is
     * not allowed. Throws once the configured retry budget is exhausted.
     */
    private checkOverlap(pos: ICoordinates, tryCount = 0): boolean {
        const collisionsOptions = this.options.collisions;
        const radius = this.getRadius();
        if (!collisionsOptions.enable) {
            // No collisions at all: overlap is irrelevant.
            return false;
        }
        const overlapOptions = collisionsOptions.overlap;
        if (overlapOptions.enable) {
            // Overlap explicitly allowed: never reject a position.
            return false;
        }
        const retries = overlapOptions.retries;
        // A negative retries value means "retry forever".
        if (retries >= 0 && tryCount > retries) {
            throw new Error("Particle is overlapping and can't be placed");
        }
        let overlaps = false;
        for (const particle of this.container.particles.array) {
            // Circle-vs-circle test: centers closer than the radii sum.
            if (getDistance(pos, particle.position) < radius + particle.getRadius()) {
                overlaps = true;
                break;
            }
        }
        return overlaps;
    }

    /**
     * Builds the initial velocity vector from the move direction, applying
     * the configured angular spread (unless `straight`) and an optional
     * random speed factor.
     */
    private calculateVelocity(): Vector {
        const baseVelocity = getParticleBaseVelocity(this.direction);
        const res = baseVelocity.copy();
        const moveOptions = this.options.move;
        // Convert the configured spread and offset from degrees to radians.
        const rad = (Math.PI / 180) * moveOptions.angle.value;
        const radOffset = (Math.PI / 180) * moveOptions.angle.offset;
        const range = {
            left: radOffset - rad / 2,
            right: radOffset + rad / 2,
        };
        if (!moveOptions.straight) {
            // Jitter the direction uniformly within [left, right].
            res.angle += randomInRange(setRangeValue(range.left, range.right));
        }
        if (moveOptions.random && typeof moveOptions.speed === "number") {
            res.length *= Math.random();
        }
        return res;
    }

    /**
     * Resolves the shape-specific option values for this particle's shape,
     * picking one entry when the options are an array.
     * Returns undefined when no data is configured for the shape.
     */
    private loadShapeData(shapeOptions: IShape, reduceDuplicates: boolean): IShapeValues | undefined {
        const shapeData = shapeOptions.options[this.shape];
        if (shapeData) {
            // Deep-copy so per-particle mutation can't leak into shared options.
            return deepExtend(
                {},
                shapeData instanceof Array ? itemFromArray(shapeData, this.id, reduceDuplicates) : shapeData
            ) as IShapeValues;
        }
    }

    /**
     * Computes the particle's life settings (spawn delay, duration, count)
     * in milliseconds, scaled by the retina reduce factor.
     * Non-positive duration/count are normalized to -1, i.e. unlimited.
     */
    private loadLife(): IParticleLife {
        const container = this.container;
        const particlesOptions = this.options;
        const lifeOptions = particlesOptions.life;
        const life = {
            delay: container.retina.reduceFactor
                ? ((getRangeValue(lifeOptions.delay.value) * (lifeOptions.delay.sync ? 1 : Math.random())) /
                      container.retina.reduceFactor) *
                  1000
                : 0,
            delayTime: 0,
            duration: container.retina.reduceFactor
                ? ((getRangeValue(lifeOptions.duration.value) * (lifeOptions.duration.sync ? 1 : Math.random())) /
                      container.retina.reduceFactor) *
                  1000
                : 0,
            time: 0,
            count: particlesOptions.life.count,
        };
        if (life.duration <= 0) {
            life.duration = -1;
        }
        if (life.count <= 0) {
            life.count = -1;
        }
        return life;
    }
}
the_stack
import { some } from 'lodash'; import React, { useEffect } from 'react'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; import { EuiButtonEmpty, EuiDragDropContext, euiDragDropReorder, EuiDroppable, EuiFlexGroup, EuiFlexItem, EuiFormErrorText, EuiIconTip, EuiPanel, EuiSpacer, EuiTitle } from '@elastic/eui'; import { IAggConfigs } from '../../../../src/plugins/data/public'; import { VisOptionsProps } from '../../../../src/plugins/vis_default_editor/public'; import { NumberInputOption, SelectOption } from '../../../../src/plugins/charts/public'; import { SwitchOption } from './switch'; import { TextInputOption } from './text_input'; import { totalAggregations, AggTypes } from './utils'; import { ComputedColumn, ComputedColumnEditor } from './computed_column'; export interface EnhancedTableVisParams { type: 'table'; // Computed Columns computedColumns: ComputedColumn[]; fieldColumns?: any[]; // Enhanced Settings linesComputedFilter: string; rowsComputedCss: string; hiddenColumns: string; computedColsPerSplitCol: boolean; hideExportLinks: boolean; csvExportWithTotal: boolean; csvFullExport: boolean; stripedRows: boolean; addRowNumberColumn: boolean; csvEncoding: string; // Basic Settings perPage: number | ''; showPartialRows: boolean; showMetricsAtAllLevels: boolean; sort: { columnIndex: number | null; direction: string | null; }; showTotal: boolean; totalFunc: AggTypes; totalLabel: string; // Filter Bar showFilterBar: boolean; filterCaseSensitive: boolean; filterBarHideable: boolean; filterAsYouType: boolean; filterTermsSeparately: boolean; filterHighlightResults: boolean; filterBarWidth: string; } function addComputedColumn(computedColumns, setComputedColumns) { const newComputedColumn = { label: 'Value squared', formula: 'col0 * col0', computeTotalUsingFormula: false, format: 'number', pattern: '0,0', datePattern: 'MMMM Do YYYY, HH:mm:ss.SSS', alignment: 'left', applyAlignmentOnTitle: true, applyAlignmentOnTotal: true, 
applyTemplate: false, applyTemplateOnTotal: true, template: '{{value}}', cellComputedCss: '', customColumnPosition: '', enabled: true, brandNew: true }; setComputedColumns(computedColumns.concat(newComputedColumn)); } function onDragEnd(source, destination, computedColumns, setComputedColumns) { if (source && destination) { const newComputedColumns = euiDragDropReorder(computedColumns, source.index, destination.index); setComputedColumns(newComputedColumns); } } function hasSplitColsBucket(aggs: IAggConfigs) { return some(aggs.aggs, function(agg) { return agg.schema === 'splitcols' && agg.enabled; }); }; function EnhancedTableOptions({ aggs, stateParams, setValidity, setValue, }: VisOptionsProps<EnhancedTableVisParams>) { const isPerPageValid = stateParams.perPage === '' || stateParams.perPage > 0; const computedColumnsError = undefined; const setComputedColumns = (newComputedColumns) => setValue('computedColumns', newComputedColumns); useEffect(() => { setValidity(isPerPageValid); }, [isPerPageValid, setValidity]); return ( <div className="enhanced-table-vis-params"> {/* COMPUTED COLUMNS SECTION */} <EuiDragDropContext onDragEnd={ ({source, destination}) => onDragEnd(source, destination, stateParams.computedColumns, setComputedColumns) }> <EuiPanel paddingSize="s"> <EuiTitle size="xs"> <h3> <FormattedMessage id="visTypeEnhancedTable.params.computedColumnsSection" defaultMessage="Computed Columns" /> </h3> </EuiTitle> <EuiSpacer size="s" /> {computedColumnsError && ( <> <EuiFormErrorText>{computedColumnsError}</EuiFormErrorText> <EuiSpacer size="s" /> </> )} <EuiDroppable droppableId="enhanced_table_computed_columns"> <> {stateParams.computedColumns.map( (computedColumn, index) => ( <ComputedColumnEditor key={index} computedColumns={stateParams.computedColumns} computedColumn={computedColumn} index={index} setComputedColumns={setComputedColumns} setValidity={setValidity} /> ))} </> </EuiDroppable> <EuiFlexGroup justifyContent="center" responsive={false}> 
<EuiFlexItem grow={false}> <EuiButtonEmpty size="xs" iconType="plusInCircleFilled" onClick={ () => addComputedColumn(stateParams.computedColumns, setComputedColumns)} > <FormattedMessage id="visTypeEnhancedTable.params.computedColumns.addComputedColumnLabel" defaultMessage="Add computed column" /> </EuiButtonEmpty> </EuiFlexItem> </EuiFlexGroup> </EuiPanel> </EuiDragDropContext> {/* /COMPUTED COLUMNS SECTION */} <EuiSpacer size="m" /> {/* ENHANCED SETTINGS SECTION */} <EuiPanel paddingSize="s"> <EuiTitle size="xs"> <h3> <FormattedMessage id="visTypeEnhancedTable.params.enhancedSettingsSection" defaultMessage="Enhanced Settings" /> </h3> </EuiTitle> <EuiSpacer size="m" /> <TextInputOption label={ <> <FormattedMessage id="visTypeEnhancedTable.params.linesComputedFilter" defaultMessage="Rows computed filter" /> &nbsp;( <a href="https://github.com/fbaligand/kibana-enhanced-table/blob/master/README.md#computed-settings-documentation" target="_blank">documentation</a> )&nbsp; <EuiIconTip content="Example: when 'col0 &gt; 10', only table rows having first column value greater than 10 are displayed" position="right" /> </> } placeholder="col0 > 10" paramName="linesComputedFilter" value={stateParams.linesComputedFilter} setValue={setValue} /> <TextInputOption label={ <> <FormattedMessage id="visTypeEnhancedTable.params.rowsComputedCss" defaultMessage="Rows computed CSS" /> &nbsp;( <a href="https://github.com/fbaligand/kibana-enhanced-table/blob/master/README.md#computed-settings-documentation" target="_blank">documentation</a> )&nbsp; <EuiIconTip content="This option lets to define dynamically table row CSS (like background-color CSS property), based on its column values" position="right" /> </> } placeholder="col1 < 0 ? 
&quot;background-color: red&quot; : &quot;&quot;" paramName="rowsComputedCss" value={stateParams.rowsComputedCss} setValue={setValue} /> <TextInputOption label={ <> <FormattedMessage id="visTypeEnhancedTable.params.hiddenColumns" defaultMessage="Hidden columns" /> &nbsp; <EuiIconTip content="Reference a column by its index (1,2,3), by its label (Example Column) or both (1,2,column_3). Write the column label as is (no surrounding quotes) and separate them using a comma. It is recommended to reference a column by its label." position="right" /> </> } placeholder="0,1,Col2 Label,..." paramName="hiddenColumns" value={stateParams.hiddenColumns} setValue={setValue} /> { hasSplitColsBucket(aggs) && <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.computedColsPerSplitCol', { defaultMessage: 'Computed/Hidden cols per split col', })} icontip={i18n.translate('visTypeEnhancedTable.params.computedColsPerSplitColIconTip', { defaultMessage: 'Example: when enabled, if there is one \'Split cols\' bucket that implies two columns (term1 and term2), one Count metric, and one computed column configured, then in the result table, there will be a computed column for term1 and another computed column for term2 (each displayed after count column)', })} paramName="computedColsPerSplitCol" value={stateParams.computedColsPerSplitCol} setValue={setValue} /> } <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.hideExportLinks', { defaultMessage: 'Hide CSV export links', })} paramName="hideExportLinks" value={stateParams.hideExportLinks} setValue={setValue} /> <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.csvExportWithTotal', { defaultMessage: 'CSV export with total row', })} paramName="csvExportWithTotal" value={stateParams.csvExportWithTotal} setValue={setValue} disabled={!stateParams.showTotal} /> { stateParams.fieldColumns && <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.csvFullExport', { defaultMessage: 'Full CSV export', 
})} icontip={i18n.translate('visTypeEnhancedTable.params.csvFullExportTooltip', { defaultMessage: 'If enabled, CSV export will download all data stored in Elasticsearch (not only displayed documents in table). This option is not compatible with \'Computed columns\', \'Rows computed filter\', \'Hidden columns\', \'CSV export with total row\' and \'Add row number column\' options.', })} paramName="csvFullExport" value={stateParams.csvFullExport} setValue={setValue} disabled={stateParams.computedColumns.length > 0 || !!stateParams.linesComputedFilter || !!stateParams.hiddenColumns || stateParams.csvExportWithTotal || stateParams.addRowNumberColumn} /> } <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.stripedRows', { defaultMessage: 'Striped rows', })} paramName="stripedRows" value={stateParams.stripedRows} setValue={setValue} /> <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.addRowNumberColumn', { defaultMessage: 'Add row number column', })} paramName="addRowNumberColumn" value={stateParams.addRowNumberColumn} setValue={setValue} /> <TextInputOption label={ <> <FormattedMessage id="visTypeEnhancedTable.params.csvEncoding" defaultMessage="CSV export encoding" /> &nbsp;( <a href="https://github.com/ashtuchkin/iconv-lite/wiki/Supported-Encodings" target="_blank">supported encodings</a> )&nbsp; <EuiIconTip content="Define here the content encoding you wish for CSV exports. Default value is 'utf-8'." 
position="right" /> </> } paramName="csvEncoding" value={stateParams.csvEncoding} setValue={setValue} /> </EuiPanel> {/* /ENHANCED SETTINGS SECTION */} <EuiSpacer size="s" /> {/* BASIC SETTINGS SECTION */} <EuiPanel paddingSize="s"> <EuiTitle size="xs"> <h3> <FormattedMessage id="visTypeEnhancedTable.params.basicSettingsSection" defaultMessage="Basic Settings" /> </h3> </EuiTitle> <EuiSpacer size="m" /> <NumberInputOption label={ <> <FormattedMessage id="visTypeTable.params.perPageLabel" defaultMessage="Max rows per page" />{' '} <EuiIconTip content="Leaving this field empty means it will use number of buckets from the response." position="right" /> </> } isInvalid={!isPerPageValid} min={1} paramName="perPage" value={stateParams.perPage} setValue={setValue} /> { !stateParams.fieldColumns && <SwitchOption label={i18n.translate('visTypeTable.params.showMetricsLabel', { defaultMessage: 'Show metrics for every bucket/level', })} paramName="showMetricsAtAllLevels" value={stateParams.showMetricsAtAllLevels} setValue={setValue} data-test-subj="showMetricsAtAllLevels" /> } { !stateParams.fieldColumns && <SwitchOption label={i18n.translate('visTypeTable.params.showPartialRowsLabel', { defaultMessage: 'Show partial rows', })} icontip={i18n.translate('visTypeTable.params.showPartialRowsTip', { defaultMessage: 'Show rows that have partial data. 
This will still calculate metrics for every bucket/level, even if they are not displayed.', })} paramName="showPartialRows" value={stateParams.showPartialRows} setValue={setValue} data-test-subj="showPartialRows" /> } <SwitchOption label={i18n.translate('visTypeTable.params.showTotalLabel', { defaultMessage: 'Show total', })} paramName="showTotal" value={stateParams.showTotal} setValue={setValue} /> <SelectOption label={i18n.translate('visTypeTable.params.totalFunctionLabel', { defaultMessage: 'Total function', })} disabled={!stateParams.showTotal} options={totalAggregations} paramName="totalFunc" value={stateParams.totalFunc} setValue={setValue} /> <TextInputOption label={i18n.translate('visTypeEnhancedTable.params.totalLabel', { defaultMessage: 'Total label', })} disabled={!stateParams.showTotal} paramName="totalLabel" value={stateParams.totalLabel} setValue={setValue} /> </EuiPanel> {/* /BASIC SETTINGS */} <EuiSpacer size="s" /> {/* FILTER BAR SECTION */} <EuiPanel paddingSize="s"> <EuiTitle size="xs"> <h3> <FormattedMessage id="visTypeEnhancedTable.params.filterBarSection" defaultMessage="Filter Bar" /> </h3> </EuiTitle> <EuiSpacer size="m" /> <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.showFilterBar', { defaultMessage: 'Show filter bar', })} paramName="showFilterBar" value={stateParams.showFilterBar} setValue={setValue} /> { stateParams.showFilterBar && ( <> <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.filterCaseSensitive', { defaultMessage: 'Case sensitive filter', })} paramName="filterCaseSensitive" value={stateParams.filterCaseSensitive} setValue={setValue} /> <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.filterBarHideable', { defaultMessage: 'Filter bar hideable', })} paramName="filterBarHideable" value={stateParams.filterBarHideable} setValue={setValue} /> <SwitchOption label={i18n.translate('visTypeEnhancedTable.params.filterAsYouType', { defaultMessage: 'Filter as you type', })} 
paramName="filterAsYouType"
            value={stateParams.filterAsYouType}
            setValue={setValue}
          />

          <SwitchOption
            label={i18n.translate('visTypeEnhancedTable.params.filterTermsSeparately', {
              defaultMessage: 'Filter each term separately',
            })}
            icontip={i18n.translate('visTypeEnhancedTable.params.filterTermsSeparatelyTooltip', {
              defaultMessage: 'Example with filter set to \'term1 term2\': when this option is enabled, rows with one column containing \'term1\' and another column containing \'term2\' will be displayed. If disabled, only rows with one column containing \'term1 term2\' will be displayed.',
            })}
            paramName="filterTermsSeparately"
            value={stateParams.filterTermsSeparately}
            setValue={setValue}
          />

          <SwitchOption
            label={i18n.translate('visTypeEnhancedTable.params.filterHighlightResults', {
              defaultMessage: 'Highlight results',
            })}
            paramName="filterHighlightResults"
            value={stateParams.filterHighlightResults}
            setValue={setValue}
          />

          {/* FIX: this label read 'Total label' — a copy-paste of the earlier
              total-label option — while the i18n id and paramName are both
              filterBarWidth. Use the correct label text. */}
          <TextInputOption
            label={i18n.translate('visTypeEnhancedTable.params.filterBarWidth', {
              defaultMessage: 'Filter bar width',
            })}
            paramName="filterBarWidth"
            value={stateParams.filterBarWidth}
            setValue={setValue}
          />
        </>
      )}
      </EuiPanel>
      {/* /FILTER BAR SECTION */}

    </div>
  );
}

// default export required for React.Lazy
// eslint-disable-next-line import/no-default-export
export { EnhancedTableOptions as default };
the_stack
import { BulkFileMetadata, CreateSessionOptions } from '@extraterm/extraterm-extension-api'; import * as Electron from 'electron'; const ipc = Electron.ipcRenderer; import {BulkFileIdentifier} from '../main_process/bulk_file_handling/BulkFileStorage'; import * as Messages from '../WindowMessages'; import { Logger, getLogger } from "extraterm-logging"; import { ThemeType } from '../theme/Theme'; import { LogicalKeybindingsName, CustomKeybindingsSet } from '../keybindings/KeybindingsTypes'; import { ClipboardType } from '../WindowMessages'; import * as SharedMap from "../shared_map/SharedMap"; const _log = getLogger("WebIPC"); const DEBUG = false; const DEBUG_INCOMING = false; const DEBUG_OUTGOING = false; /** * Start IPC. */ export function start(): void { ipc.on(Messages.CHANNEL_NAME, handleAsyncIpc); } type ResolveFunc = (msg: Messages.Message) => void; // This queue is used to route incoming IPC messages to expectant promise objects. let promiseQueue: {promiseResolve: ResolveFunc, messageType: Messages.MessageType}[] = []; export type Handler = (msg: Messages.Message) => void; const defaultHandlers: { messageType: Messages.MessageType, handler: Handler }[] = []; /** * Register a default handler for a message type * * @param type the message type this handler should be used for. * @param handler the handler itself. 
*/ export function registerDefaultHandler(type: Messages.MessageType, handler: Handler): void { defaultHandlers.push( { messageType: type, handler: handler} ); } function handleAsyncIpc(senderInfo: any, detail: any): void { const msg: Messages.Message = detail; if (DEBUG_INCOMING) { _log.debug(`incoming: ${Messages.MessageType[msg.type]} => `, msg); } const matchingPromises = promiseQueue.filter( p => p.messageType === msg.type ); const nonMatchingPromises = promiseQueue.filter( p => p.messageType !== msg.type ); promiseQueue = nonMatchingPromises; matchingPromises.forEach( tup => { tup.promiseResolve(msg); }); if (matchingPromises.length === 0) { // Fall back on the default handlers. defaultHandlers.filter( tup => tup.messageType === msg.type ).forEach( tup => { tup.handler(msg); }); } } function request(msg: Messages.Message, replyType: Messages.MessageType): Promise<Messages.Message> { if (DEBUG_OUTGOING) { _log.debug("request: ${Messages.MessageType[msg.type]} => ", msg); } ipc.send(Messages.CHANNEL_NAME, msg); return new Promise<Messages.Message>( (resolve, cancel) => { promiseQueue.push( { promiseResolve: resolve, messageType: replyType } ); }); } export function requestThemeList(): Promise<Messages.ThemeListMessage> { const msg: Messages.ThemeListRequestMessage = {type: Messages.MessageType.THEME_LIST_REQUEST}; return <Promise<Messages.ThemeListMessage>> request(msg, Messages.MessageType.THEME_LIST); } export function requestThemeContents(themeType: ThemeType): Promise<Messages.ThemeContentsMessage> { if (DEBUG) { _log.debug("requestThemeContents(): ", themeType); } const msg: Messages.ThemeContentsRequestMessage = { type: Messages.MessageType.THEME_CONTENTS_REQUEST, themeType }; return <Promise<Messages.ThemeContentsMessage>> request(msg, Messages.MessageType.THEME_CONTENTS); } export function rescanThemes(): void { const msg: Messages.ThemeRescan = {type: Messages.MessageType.THEME_RESCAN }; ipc.send(Messages.CHANNEL_NAME, msg); } export function 
requestPtyCreate(sessionUuid: string, sessionOptions: CreateSessionOptions): Promise<Messages.CreatedPtyMessage> { const msg: Messages.CreatePtyRequestMessage = { type: Messages.MessageType.PTY_CREATE, sessionUuid, sessionOptions }; return <Promise<Messages.CreatedPtyMessage>> request(msg, Messages.MessageType.PTY_CREATED); } export function ptyInput(id: number, data: string): void { const msg: Messages.PtyInput = {type: Messages.MessageType.PTY_INPUT, id: id, data: data }; ipc.send(Messages.CHANNEL_NAME, msg); } export function ptyOutputBufferSize(id: number, size: number): void { const msg: Messages.PtyOutputBufferSize = {type: Messages.MessageType.PTY_OUTPUT_BUFFER_SIZE, id, size }; ipc.send(Messages.CHANNEL_NAME, msg); } export function ptyResize(id: number, columns: number, rows: number): void { const msg: Messages.PtyResize = {type: Messages.MessageType.PTY_RESIZE, id: id, columns: columns, rows: rows }; ipc.send(Messages.CHANNEL_NAME, msg); } export function ptyClose(id: number): void { const msg: Messages.PtyCloseRequest = {type: Messages.MessageType.PTY_CLOSE_REQUEST, id: id }; ipc.send(Messages.CHANNEL_NAME, msg); } export async function ptyGetWorkingDirectory(id: number): Promise<string> { const msg: Messages.PtyGetWorkingDirectoryRequest = { type: Messages.MessageType.PTY_GET_WORKING_DIRECTORY_REQUEST, id }; const response = <Messages.PtyGetWorkingDirectory> await request(msg, Messages.MessageType.PTY_GET_WORKING_DIRECTORY); return response.workingDirectory; } export function devToolsRequest(open: boolean): void { const msg: Messages.DevToolsRequestMessage = { type: Messages.MessageType.DEV_TOOLS_REQUEST, open: open }; ipc.send(Messages.CHANNEL_NAME, msg); } export function clipboardWrite(text: string): void { const msg: Messages.ClipboardWriteMessage = { type: Messages.MessageType.CLIPBOARD_WRITE, text: text }; ipc.send(Messages.CHANNEL_NAME, msg); } export function clipboardReadRequest(clipboardType=ClipboardType.DEFAULT): void { const msg: 
Messages.ClipboardReadRequestMessage = { type: Messages.MessageType.CLIPBOARD_READ_REQUEST, clipboardType }; ipc.send(Messages.CHANNEL_NAME, msg); } export function windowCloseRequest(): void { const msg: Messages.WindowCloseRequestMessage = { type: Messages.MessageType.WINDOW_CLOSE_REQUEST }; ipc.send(Messages.CHANNEL_NAME, msg); } export function requestNewTag(): Promise<Messages.NewTagMessage> { const msg: Messages.NewTagRequestMessage = {type: Messages.MessageType.NEW_TAG_REQUEST, async: true}; return <Promise<Messages.NewTagMessage>> request(msg, Messages.MessageType.NEW_TAG); } export function requestNewTagSync(): string { const msg: Messages.NewTagRequestMessage = {type: Messages.MessageType.NEW_TAG_REQUEST, async: false}; const event = <any> ipc.sendSync(Messages.CHANNEL_NAME, msg); const newTagMessage = <Messages.NewTagMessage> event; return newTagMessage.tag; } export function windowMinimizeRequest(): void { const msg: Messages.WindowMinimizeRequestMessage = { type: Messages.MessageType.WINDOW_MINIMIZE_REQUEST }; ipc.send(Messages.CHANNEL_NAME, msg); } export function windowMaximizeRequest(): void { const msg: Messages.WindowMaximizeRequestMessage = { type: Messages.MessageType.WINDOW_MAXIMIZE_REQUEST }; ipc.send(Messages.CHANNEL_NAME, msg); } export function windowShowRequst(): Promise<Messages.WindowShowResponseMessage> { const msg: Messages.WindowShowRequestMessage = { type: Messages.MessageType.WINDOW_SHOW_REQUEST }; return <Promise<Messages.WindowShowResponseMessage>> request(msg, Messages.MessageType.WINDOW_SHOW_RESPONSE); } export function windowReady(): void { const msg: Messages.WindowReadyMessage = { type: Messages.MessageType.WINDOW_READY }; ipc.send(Messages.CHANNEL_NAME, msg); } export function createBulkFileSync(metadata: BulkFileMetadata, size: number): {identifier: BulkFileIdentifier, url: string} { const msg: Messages.BulkFileCreateMessage = {type: Messages.MessageType.BULK_FILE_CREATE, metadata, size}; const event = <any> 
ipc.sendSync(Messages.CHANNEL_NAME, msg); const createdBulkFileMessage = <Messages.BulkFileCreatedResponseMessage> event; return {identifier: createdBulkFileMessage.identifier, url: createdBulkFileMessage.url}; } export function writeBulkFile(identifier: BulkFileIdentifier, data: Buffer): void { const msg: Messages.BulkFileWriteMessage = {type: Messages.MessageType.BULK_FILE_WRITE, identifier, data}; ipc.send(Messages.CHANNEL_NAME, msg); } export function closeBulkFile(identifier: BulkFileIdentifier, success: boolean): void { const msg: Messages.BulkFileCloseMessage = {type: Messages.MessageType.BULK_FILE_CLOSE, identifier, success}; ipc.send(Messages.CHANNEL_NAME, msg); } export function refBulkFile(identifier: BulkFileIdentifier): void { const msg: Messages.BulkFileRefMessage = {type: Messages.MessageType.BULK_FILE_REF, identifier}; ipc.send(Messages.CHANNEL_NAME, msg); } export function derefBulkFile(identifier: BulkFileIdentifier): void { const msg: Messages.BulkFileDerefMessage = {type: Messages.MessageType.BULK_FILE_DEREF, identifier}; ipc.send(Messages.CHANNEL_NAME, msg); } export function keybindingsRequestRead(name: LogicalKeybindingsName): Promise<Messages.KeybindingsReadMessage> { const msg: Messages.KeybindingsReadRequestMessage = { type: Messages.MessageType.KEYBINDINGS_READ_REQUEST, name }; return <Promise<Messages.KeybindingsReadMessage>> request(msg, Messages.MessageType.KEYBINDINGS_READ); } export function customKeybindingsSetUpdate(customKeybindingsSet: CustomKeybindingsSet): void { const msg: Messages.KeybindingsUpdateMessage = { type: Messages.MessageType.KEYBINDINGS_UPDATE, customKeybindingsSet }; ipc.send(Messages.CHANNEL_NAME, msg); } export function enableGlobalKeybindings(enabled: boolean): void { const msg: Messages.GlobalKeybindingsEnableMessage = {type: Messages.MessageType.GLOBAL_KEYBINDINGS_ENABLE, enabled}; ipc.send(Messages.CHANNEL_NAME, msg); } export function requestTerminalTheme(id: string): Promise<Messages.TerminalThemeMessage> 
{ const msg: Messages.TerminalThemeRequestMessage = { type: Messages.MessageType.TERMINAL_THEME_REQUEST, id }; return <Promise<Messages.TerminalThemeMessage>> request(msg, Messages.MessageType.TERMINAL_THEME); } export function requestQuitApplication(): void { const msg: Messages.QuitApplicationRequestMessage = {type: Messages.MessageType.QUIT_APPLICATION_REQUEST}; ipc.send(Messages.CHANNEL_NAME, msg); } export function newWindow(): void { const msg: Messages.NewWindowMessage = { type: Messages.MessageType.NEW_WINDOW }; ipc.send(Messages.CHANNEL_NAME, msg); } export function commandResponse(uuid: string, result: any, exception: Error): void { const msg: Messages.ExecuteCommandResponseMessage = { type: Messages.MessageType.EXECUTE_COMMAND_RESPONSE, uuid, result, exception: exception?.message }; ipc.send(Messages.CHANNEL_NAME, msg); } export function sendSharedMapEvent(ev: SharedMap.ChangeEvent): void { const msg: Messages.SharedMapEventMessage = { type: Messages.MessageType.SHARED_MAP_EVENT, event: ev }; ipc.send(Messages.CHANNEL_NAME, msg); } export function requestSharedMapDump(): Promise<Messages.SharedMapDumpMessage> { const msg: Messages.SharedMapDumpRequestMessage = { type: Messages.MessageType.SHARED_MAP_DUMP_REQUEST }; return <Promise<Messages.SharedMapDumpMessage>> request(msg, Messages.MessageType.SHARED_MAP_DUMP); }
the_stack
* Defines how to handle the text when it exceeds the element bounds * Wrap - Wraps the text to next line, when it exceeds its bounds * Ellipsis - It truncates the overflown text and represents the clipping with an ellipsis * Clip - It clips the overflow text */ export type TextOverflow = /** Wrap - Wraps the text to next line, when it exceeds its bounds */ 'Wrap' | /** Ellipsis - It truncates the overflown text and represents the clipping with an ellipsis */ 'Ellipsis' | /** Clip - It clips the overflow text */ 'Clip'; /** * Defines how to decorate the text * Overline - Decorates the text with a line above the text * Underline - Decorates the text with an underline * LineThrough - Decorates the text by striking it with a line * None - Text will not have any specific decoration */ export type TextDecoration = /** Overline - Decorates the text with a line above the text */ 'Overline' | /** Underline - Decorates the text with an underline */ 'Underline' | /** LineThrough - Decorates the text by striking it with a line */ 'LineThrough' | /** None - Text will not have any specific decoration */ 'None'; /** * Defines how the text has to be aligned * Left - Aligns the text at the left of the text bounds * Right - Aligns the text at the right of the text bounds * Center - Aligns the text at the center of the text bounds * Justify - Aligns the text in a justified manner */ export type TextAlign = /** Left - Aligns the text at the left of the text bounds */ 'Left' | /** Right - Aligns the text at the right of the text bounds */ 'Right' | /** Center - Aligns the text at the center of the text bounds */ 'Center' | /** Justify - Aligns the text in a justified manner */ 'Justify'; /** * Defines how to wrap the text when it exceeds the element bounds * WrapWithOverflow - Wraps the text so that no word is broken * Wrap - Wraps the text and breaks the word, if necessary * NoWrap - Text will no be wrapped */ export type TextWrap = /** WrapWithOverflow - Wraps the text so that no 
word is broken */ 'WrapWithOverflow' | /** Wrap - Wraps the text and breaks the word, if necessary */ 'Wrap' | /** NoWrap - Text will no be wrapped */ 'NoWrap'; /** * Defines how the diagram elements have to be aligned with respect to its immediate parent * * Stretch - Stretches the diagram element throughout its immediate parent * * Top - Aligns the diagram element at the top of its immediate parent * * Bottom - Aligns the diagram element at the bottom of its immediate parent * * Center - Aligns the diagram element at the center of its immediate parent * * Auto - Aligns the diagram element based on the characteristics of its immediate parent */ export type VerticalAlignment = /** * Stretch - Stretches the diagram element throughout its immediate parent */ 'Stretch' | /** * Top - Aligns the diagram element at the top of its immediate parent */ 'Top' | /** * Bottom - Aligns the diagram element at the bottom of its immediate parent */ 'Bottom' | /** * Center - Aligns the diagram element at the center of its immediate parent */ 'Center' | /** * Auto - Aligns the diagram element based on the characteristics of its immediate parent */ 'Auto'; /** * Defines how the diagram elements have to be aligned with respect to its immediate parent * * Stretch - Stretches the diagram element throughout its immediate parent * * Left - Aligns the diagram element at the left of its immediate parent * * Right - Aligns the diagram element at the right of its immediate parent * * Center - Aligns the diagram element at the center of its immediate parent * * Auto - Aligns the diagram element based on the characteristics of its immediate parent */ export type HorizontalAlignment = /** * Stretch - Stretches the diagram element throughout its immediate parent */ 'Stretch' | /** * Left - Aligns the diagram element at the left of its immediate parent */ 'Left' | /** * Right - Aligns the diagram element at the right of its immediate parent */ 'Right' | /** * Center - Aligns the diagram element at 
the center of its immediate parent */ 'Center' | /** * Auto - Aligns the diagram element based on the characteristics of its immediate parent */ 'Auto'; /** * Defines the reference with respect to which the diagram elements have to be aligned * Point - Diagram elements will be aligned with respect to a point * Object - Diagram elements will be aligned with respect to its immediate parent */ export type RelativeMode = /** Point - Diagram elements will be aligned with respect to a point */ 'Point' | /** Object - Diagram elements will be aligned with respect to its immediate parent */ 'Object'; /** * Defines the type of the gradient * Linear - Sets the type of the gradient as Linear * Radial - Sets the type of the gradient as Radial */ export type GradientType = /** None - Sets the type of the gradient as None */ 'None' | /** Linear - Sets the type of the gradient as Linear */ 'Linear' | /** Radial - Sets the type of the gradient as Radial */ 'Radial'; /** * Defines the unit mode * Absolute - Sets the unit mode type as Absolute * Fraction - Sets the unit mode type as Fraction */ export type UnitMode = /** Absolute - Sets the unit mode type as Absolute */ 'Absolute' | /** Fraction - Sets the unit mode type as Fraction */ 'Fraction'; /** * Defines the container/canvas transform * Self - Sets the transform type as Self * Parent - Sets the transform type as Parent */ export enum RotateTransform { /** Self - Sets the transform type as Self */ Self = 1, /** Parent - Sets the transform type as Parent */ Parent = 2 } /** Enables/Disables The element actions * None - Diables all element actions are none * ElementIsPort - Enable element action is port * ElementIsGroup - Enable element action as Group * @private */ export enum ElementAction { /** Disables all element actions are none */ None = 0, /** Enable the element action is Port */ ElementIsPort = 1 << 1, /** Enable the element action as Group */ ElementIsGroup = 1 << 2, } /** * Defines how the annotations have to be 
aligned with respect to its immediate parent * Center - Aligns the annotation at the center of a connector segment * Before - Aligns the annotation before a connector segment * After - Aligns the annotation after a connector segment */ export type AnnotationAlignment = /** * Center - Aligns the annotation at the center of a connector segment */ 'Center' | /** * Before - Aligns the annotation before a connector segment */ 'Before' | /** * After - Aligns the annotation after a connector segment */ 'After'; /** * Defines the type of the annotation * Shape - Sets the annotation type as Shape * Path - Sets the annotation type as Path */ export type AnnotationTypes = /** * Shape - Sets the annotation type as Shape */ 'Shape' | /** * Path - Sets the annotation type as Path */ 'Path'; /** * Defines the decorator shape of the connector * None - Sets the decorator shape as None * Arrow - Sets the decorator shape as Arrow * Diamond - Sets the decorator shape as Diamond * Butt - Sets the decorator shape as Butt * Path - Sets the decorator shape as Path * OpenArrow - Sets the decorator shape as OpenArrow * Circle - Sets the decorator shape as Circle * Square - Sets the decorator shape as Square * Fletch - Sets the decorator shape as Fletch * OpenFetch - Sets the decorator shape as OpenFetch * IndentedArrow - Sets the decorator shape as Indented Arrow * OutdentedArrow - Sets the decorator shape as Outdented Arrow * DoubleArrow - Sets the decorator shape as DoubleArrow */ export type DecoratorShapes = /** None - Sets the decorator shape as None */ 'None' | /** Arrow - Sets the decorator shape as Arrow */ 'Arrow' | /** Diamond - Sets the decorator shape as Diamond */ 'Diamond' | /** Butt - Sets the decorator shape as Butt */ 'Butt' | /** OpenArrow - Sets the decorator shape as OpenArrow */ 'OpenArrow' | /** Circle - Sets the decorator shape as Circle */ 'Circle' | /** Square - Sets the decorator shape as Square */ 'Square' | /** Fletch - Sets the decorator shape as Fletch */ 
'Fletch' | /** OpenFetch - Sets the decorator shape as OpenFetch */ 'OpenFetch' | /** IndentedArrow - Sets the decorator shape as Indented Arrow */ 'IndentedArrow' | /** OutdentedArrow - Sets the decorator shape as Outdented Arrow */ 'OutdentedArrow' | /** DoubleArrow - Sets the decorator shape as DoubleArrow */ 'DoubleArrow' | /** Custom - Sets the decorator shape as Custom */ 'Custom'; /** * Enables/Disables shape of the uml classifier shapes * * Package - Indicates the scope is public. * * Class - Indicates the scope is protected. * * Interface - Indicates the scope is private. * * Enumeration - Indicates the scope is package. * * CollapsedPackage - Indicates the scope is public. * * Inheritance - Indicates the scope is protected. * * Association - Indicates the scope is private. * * Aggregation - Indicates the scope is package. * * Composition - Indicates the scope is public. * * Realization - Indicates the scope is protected. * * DirectedAssociation - Indicates the scope is private. * * Dependency - Indicates the scope is package. */ export type ClassifierShape = 'Class' | 'Interface' | 'Enumeration' | 'Inheritance' | 'Association' | 'Aggregation' | 'Composition' | 'Realization' | 'Dependency'; /** * Defines the direction the uml connectors * * Default - Indicates the direction is Default. * * Directional - Indicates the direction is single Directional. * * BiDirectional - Indicates the direction is BiDirectional. */ export type AssociationFlow = 'Default' | 'Directional' | 'BiDirectional'; /** * Define the Multiplicity of uml connector shapes * * OneToOne - Indicates the connector multiplicity is OneToOne. * * OneToMany - Indicates the connector multiplicity is OneToMany. * * ManyToOne - Indicates the connector multiplicity is ManyToOne. * * ManyToOne - Indicates the connector multiplicity is ManyToOne. 
*/ export type Multiplicity = 'OneToOne' | 'OneToMany' | 'ManyToOne' | 'ManyToOne'; /** * Defines the segment type of the connector * Straight - Sets the segment type as Straight */ export type Segments = /** Straight - Sets the segment type as Straight */ 'Straight'; /** * Defines the constraints to enable/disable certain features of connector. * * None - Interaction of the connectors cannot be done. * * Select - Selects the connector. * * Delete - Delete the connector. * * Drag - Drag the connector. * * DragSourceEnd - Drag the source end of the connector. * * DragTargetEnd - Drag the target end of the connector. * * DragSegmentThump - Drag the segment thumb of the connector. * * AllowDrop - Allow to drop a node. * * Bridging - Creates bridge on intersection of two connectors. * * BridgeObstacle - * * InheritBridging - Creates bridge on intersection of two connectors. * * PointerEvents - Sets the pointer events. * * Tooltip - Displays a tooltip for the connectors. * * InheritToolTip - Displays a tooltip for the connectors. * * Interaction - Features of the connector used for interaction. * * ReadOnly - Enables ReadOnly * * Default - Default features of the connector. * @aspNumberEnum * @IgnoreSingular */ export enum ConnectorConstraints { /** Disable all connector Constraints. */ None = 1 << 0, /** Enables connector to be selected. */ Select = 1 << 1, /** Enables connector to be Deleted. */ Delete = 1 << 2, /** Enables connector to be Dragged. */ Drag = 1 << 3, /** Enables connectors source end to be selected. */ DragSourceEnd = 1 << 4, /** Enables connectors target end to be selected. */ DragTargetEnd = 1 << 5, /** Enables control point and end point of every segment in a connector for editing. */ DragSegmentThumb = 1 << 6, /** Enables AllowDrop constraints to the connector. */ AllowDrop = 1 << 7, /** Enables bridging to the connector. */ Bridging = 1 << 8, /** Enables or Disables Bridge Obstacles with overlapping of connectors. 
*/ BridgeObstacle = 1 << 9, /** Enables bridging to the connector. */ InheritBridging = 1 << 10, /** Used to set the pointer events. */ PointerEvents = 1 << 11, /** Enables or disables tool tip for the connectors */ Tooltip = 1 << 12, /** Enables or disables tool tip for the connectors */ InheritTooltip = 1 << 13, /** Enables Interaction. */ Interaction = 1 << 1 | 1 << 3 | 1 << 4 | 1 << 5 | 1 << 6 | 1 << 12, /** Enables ReadOnly */ ReadOnly = 1 << 14, /** Enables all constraints. */ Default = 1 << 1 | 1 << 2 | 1 << 3 | 1 << 4 | 1 << 5 | 1 << 9 | 1 << 10 | 1 << 11 | 1 << 13 } /** * Defines the objects direction * Left - Sets the direction type as Left * Right - Sets the direction type as Right * Top - Sets the direction type as Top * Bottom - Sets the direction type as Bottom */ export type Direction = /** Left - Sets the direction type as Left */ 'Left' | /** Right - Sets the direction type as Right */ 'Right' | /** Top - Sets the direction type as Top */ 'Top' | /** Bottom - Sets the direction type as Bottom */ 'Bottom'; /** * Defines the orientation of the layout * * TopToBottom - Renders the layout from top to bottom * * BottomToTop - Renders the layout from bottom to top * * LeftToRight - Renders the layout from left to right * * RightToLeft - Renders the layout from right to left */ export type LayoutOrientation = /** * TopToBottom - Renders the layout from top to bottom */ 'TopToBottom' | /** * BottomToTop - Renders the layout from bottom to top */ 'BottomToTop' | /** * LeftToRight - Renders the layout from left to right */ 'LeftToRight' | /** * RightToLeft - Renders the layout from right to left */ 'RightToLeft'; /** * Detect the status of Crud operation performed in the diagram */ export type Status = 'None' | 'New' | 'Update'; /** Enables/Disables the handles of the selector * Rotate - Enable Rotate Thumb * ConnectorSource - Enable Connector source point * ConnectorTarget - Enable Connector target point * ResizeNorthEast - Enable ResizeNorthEast Resize * 
ResizeEast - Enable ResizeEast Resize * ResizeSouthEast - Enable ResizeSouthEast Resize * ResizeSouth - Enable ResizeSouth Resize * ResizeSouthWest - Enable ResizeSouthWest Resize * ResizeWest - Enable ResizeWest Resize * ResizeNorthWest - Enable ResizeNorthWest Resize * ResizeNorth - Enable ResizeNorth Resize * Default - Enables all constraints * @private */ export enum ThumbsConstraints { /** Enable Rotate Thumb */ Rotate = 1 << 1, /** Enable Connector source point */ ConnectorSource = 1 << 2, /** Enable Connector target point */ ConnectorTarget = 1 << 3, /** Enable ResizeNorthEast Resize */ ResizeNorthEast = 1 << 4, /** Enable ResizeEast Resize */ ResizeEast = 1 << 5, /** Enable ResizeSouthEast Resize */ ResizeSouthEast = 1 << 6, /** Enable ResizeSouth Resize */ ResizeSouth = 1 << 7, /** Enable ResizeSouthWest Resize */ ResizeSouthWest = 1 << 8, /** Enable ResizeWest Resize */ ResizeWest = 1 << 9, /** Enable ResizeNorthWest Resize */ ResizeNorthWest = 1 << 10, /** Enable ResizeNorth Resize */ ResizeNorth = 1 << 11, /** Enables all constraints */ Default = 1 << 1 | 1 << 2 | 1 << 3 | 1 << 4 | 1 << 5 | 1 << 6 | 1 << 7 | 1 << 8 | 1 << 9 | 1 << 10 | 1 << 11, } /** * Defines the visibility of the selector handles * None - Hides all the selector elements * ConnectorSourceThumb - Shows/hides the source thumb of the connector * ConnectorTargetThumb - Shows/hides the target thumb of the connector * ResizeSouthEast - Shows/hides the bottom right resize handle of the selector * ResizeSouthWest - Shows/hides the bottom left resize handle of the selector * ResizeNorthEast - Shows/hides the top right resize handle of the selector * ResizeNorthWest - Shows/hides the top left resize handle of the selector * ResizeEast - Shows/hides the middle right resize handle of the selector * ResizeWest - Shows/hides the middle left resize handle of the selector * ResizeSouth - Shows/hides the bottom center resize handle of the selector * ResizeNorth - Shows/hides the top center resize 
handle of the selector * Rotate - Shows/hides the rotate handle of the selector * UserHandles - Shows/hides the user handles of the selector * Resize - Shows/hides all resize handles of the selector * @aspNumberEnum * @IgnoreSingular */ export enum SelectorConstraints { /** Hides all the selector elements */ None = 1 << 0, /** Shows/hides the source thumb of the connector */ ConnectorSourceThumb = 1 << 1, /** Shows/hides the target thumb of the connector */ ConnectorTargetThumb = 1 << 2, /** Shows/hides the bottom right resize handle of the selector */ ResizeSouthEast = 1 << 3, /** Shows/hides the bottom left resize handle of the selector */ ResizeSouthWest = 1 << 4, /** Shows/hides the top right resize handle of the selector */ ResizeNorthEast = 1 << 5, /** Shows/hides the top left resize handle of the selector */ ResizeNorthWest = 1 << 6, /** Shows/hides the middle right resize handle of the selector */ ResizeEast = 1 << 7, /** Shows/hides the middle left resize handle of the selector */ ResizeWest = 1 << 8, /** Shows/hides the bottom center resize handle of the selector */ ResizeSouth = 1 << 9, /** Shows/hides the top center resize handle of the selector */ ResizeNorth = 1 << 10, /** Shows/hides the rotate handle of the selector */ Rotate = 1 << 11, /** Shows/hides the user handles of the selector */ UserHandle = 1 << 12, /** Shows/hides the default tooltip of nodes and connectors */ ToolTip = 1 << 13, /** Shows/hides all resize handles of the selector */ ResizeAll = ResizeSouthEast | ResizeSouthWest | ResizeNorthEast | ResizeNorthWest | ResizeEast | ResizeWest | ResizeSouth | ResizeNorth | ConnectorSourceThumb | ConnectorTargetThumb, /** Shows all handles of the selector */ All = ResizeAll | UserHandle | Rotate | ToolTip } /** * Defines how to handle the empty space and empty lines of a text * PreserveAll - Preserves all empty spaces and empty lines * CollapseSpace - Collapses the consequent spaces into one * CollapseAll - Collapses all consequent empty spaces 
and empty lines */ export type WhiteSpace = /** PreserveAll - Preserves all empty spaces and empty lines */ 'PreserveAll' | /** CollapseSpace - Collapses the consequent spaces into one */ 'CollapseSpace' | /** CollapseAll - Collapses all consequent empty spaces and empty lines */ 'CollapseAll'; /** @private */ export enum NoOfSegments { Zero, One, Two, Three, Four, Five } /** * None - Alignment value will be set as none * XMinYMin - smallest X value of the view port and smallest Y value of the view port * XMidYMin - midpoint X value of the view port and smallest Y value of the view port * XMaxYMin - maximum X value of the view port and smallest Y value of the view port * XMinYMid - smallest X value of the view port and midpoint Y value of the view port * XMidYMid - midpoint X value of the view port and midpoint Y value of the view port * XMaxYMid - maximum X value of the view port and midpoint Y value of the view port * XMinYMax - smallest X value of the view port and maximum Y value of the view port * XMidYMax - midpoint X value of the view port and maximum Y value of the view port * XMaxYMax - maximum X value of the view port and maximum Y value of the view port */ //collection of alignment values for the image export type ImageAlignment = /** None - Alignment value will be set as none */ 'None' | /** XMinYMin - smallest X value of the view port and smallest Y value of the view port */ 'XMinYMin' | /** XMidYMin - midpoint X value of the view port and smallest Y value of the view port */ 'XMidYMin' | /** XMaxYMin - maximum X value of the view port and smallest Y value of the view port */ 'XMaxYMin' | /** XMinYMid - smallest X value of the view port and midpoint Y value of the view port */ 'XMinYMid' | /** XMidYMid - midpoint X value of the view port and midpoint Y value of the view port */ 'XMidYMid' | /** XMaxYMid - maximum X value of the view port and midpoint Y value of the view port */ 'XMaxYMid' | /** XMinYMax - smallest X value of the view port and maximum Y 
value of the view port */ 'XMinYMax' | /** XMidYMax - midpoint X value of the view port and maximum Y value of the view port */ 'XMidYMax' | /** XMaxYMax - maximum X value of the view port and maximum Y value of the view port */ 'XMaxYMax'; /** * Defines the diagrams stretch * None - Sets the stretch type for diagram as None * Stretch - Sets the stretch type for diagram as Stretch * Meet - Sets the stretch type for diagram as Meet * Slice - Sets the stretch type for diagram as Slice */ export type Stretch = /** None - Sets the stretch type for diagram as None */ 'None' | /** Stretch - Sets the stretch type for diagram as Stretch */ 'Stretch' | /** Meet - Sets the stretch type for diagram as Meet */ 'Meet' | /** Slice - Sets the stretch type for diagram as Slice */ 'Slice'; /** * None - Scale value will be set as None for the image * Meet - Scale value Meet will be set for the image * Slice - Scale value Slice will be set for the image */ // collections of scale values for the image export type Scale = /** None - Scale value will be set as None for the image */ 'None' | /** Meet - Scale value Meet will be set for the image */ 'Meet' | /** Slice - Scale value Slice will be set for the image */ 'Slice';
the_stack
// L0 ("level 0" / mocked) test suite for the JenkinsDownloadArtifacts task.
// Each `it` case executes a scenario script (L0*.js) through
// azure-pipelines-task-lib's MockTestRunner and asserts on the captured
// stdout and the recorded task result flags.
import assert = require('assert');
import path = require('path');
import os = require('os');
import process = require('process');
import fs = require('fs');
import * as ttm from 'azure-pipelines-task-lib/mock-test';

describe('JenkinsDownloadArtifacts L0 Suite', function () {
    // Mock runs can be slow; allow an override via TASK_TEST_TIMEOUT.
    this.timeout(parseInt(process.env.TASK_TEST_TIMEOUT) || 20000);

    before((done) => {
        // Fake "ID1" Jenkins service connection consumed by the scenario scripts.
        process.env['ENDPOINT_AUTH_ID1'] = '{\"scheme\":\"UsernamePassword\", \"parameters\": {\"username\": \"uname\", \"password\": \"pword\"}}';
        process.env['ENDPOINT_AUTH_PARAMETER_ID1_USERNAME'] = 'uname';
        process.env['ENDPOINT_AUTH_PARAMETER_ID1_PASSWORD'] = 'pword';
        process.env['ENDPOINT_URL_ID1'] = 'bogusURL';
        done();
    });

    /* tslint:disable:no-empty */
    after(function () {
    });
    /* tslint:enable:no-empty */

    // ----- required-input validation -----

    it('run JenkinsDownloadArtifacts with no server endpoint', (done) => {
        const tp: string = path.join(__dirname, 'L0NoServerEndpoint.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdOutContained('Input required: serverEndpoint'));
            assert(tr.failed, 'task should have failed');
            done();
            // Leftover template assertions from the gradle task this suite was copied from:
            //assert(tr.ran(gradleWrapper + ' build'), 'it should have run gradlew build');
            //assert(tr.invokedToolCount === 1, 'should have only run gradle 1 time');
            //assert(tr.stderr.length === 0, 'should not have written to stderr');
            //assert(tr.succeeded, 'task should have succeeded');
            //assert(tr.stdout.indexOf('GRADLE_OPTS is now set to -Xmx2048m') > 0);
        } catch (err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('run JenkinsDownloadArtifacts with no save to', (done) => {
        const tp: string = path.join(__dirname, 'L0NoSaveTo.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdOutContained('Input required: saveTo'), 'should have written to stderr');
            assert(tr.failed, 'task should have failed');
            done();
        } catch (err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('run JenkinsDownloadArtifacts with no job name', (done) => {
        const tp: string = path.join(__dirname, 'L0NoJobName.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdOutContained('Input required: jobName'), 'should have written to stderr');
            assert(tr.failed, 'task should have failed');
            done();
        } catch (err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    // ----- commit/work-item download scenarios -----
    // The asserted query strings are Jenkins JSON API `tree=` filters;
    // presumably `{2,4}` is the Jenkins range selector over builds — TODO confirm
    // against the Jenkins remote-access API docs.

    // NOTE(review): this test and the next assert the exact same output markers;
    // the "legacy" scenario is only distinguished by its scenario script.
    it('Should download commits from legacy project build', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldDownloadCommitsFromLegacyProjectBuild.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf("GettingCommitsFromSingleBuild") !== -1, "Failed to fetch commits from single build");
            assert(tr.stdout.indexOf('20/api/json?tree=number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]') !== -1, "API parameter to fetch commits have changed");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should download commits from single build', (done) => {
        const tp: string = path.join(__dirname, 'L0DownloadCommitsFromSingleBuild.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf("GettingCommitsFromSingleBuild") !== -1, "Failed to fetch commits from single build");
            assert(tr.stdout.indexOf('20/api/json?tree=number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]') !== -1, "API parameter to fetch commits have changed");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    // ----- commit URL translation (SCM remote URL -> browsable web URL) -----

    it('Validate github commit url', (done) => {
        const tp: string = path.join(__dirname, 'L0ValidateGitHubCommitUrl.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('Translated url git@github.com:user/TestRepo.git/commit/3cbfc14e3f482a25e5122323f3273b89677d9875 to https://github.com/user/TestRepo/commit/3cbfc14e3f482a25e5122323f3273b89677d9875') !== -1, tr.stdout);
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Validate gitlab commit url', (done) => {
        const tp: string = path.join(__dirname, 'L0ValidateGitLabCommitUrl.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('Translated url git@gitlab.com:admin/projectk.git/commit/3cbfc14e3f482a25e5122323f3273b89677d9875 to https://gitlab.com/admin/projectk/commit/3cbfc14e3f482a25e5122323f3273b89677d9875') !== -1, tr.stdout);
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Validate bitbucket commit url', (done) => {
        const tp: string = path.join(__dirname, 'L0ValidateBitBucketCommitUrl.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('Translated url http://bitbucket.org/commits/3cbfc14e3f482a25e5122323f3273b89677d9875 after fixing the query path based on the provider') !== -1, tr.stdout);
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Validate http commit url', (done) => {
        const tp: string = path.join(__dirname, 'L0ValidateHttpCommitUrl.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            // An already-browsable https URL should pass through unchanged.
            assert(tr.stdout.indexOf('Translated url https://github.com/user/TestRepo/commit/3cbfc14e3f482a25e5122323f3273b89677d9875 to https://github.com/user/TestRepo/commit/3cbfc14e3f482a25e5122323f3273b89677d9875') !== -1, tr.stdout);
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Validate invalid commit url', (done) => {
        const tp: string = path.join(__dirname, 'L0ValidateInvalidCommitUrl.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            // Untranslatable schemes end up with an empty target ("to" with nothing after).
            assert(tr.stdout.indexOf('Translated url ssh://user@server/project.git/commit/3cbfc14e3f482a25e5122323f3273b89677d9875 to') !== -1, tr.stdout);
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should download commits from build range', (done) => {
        const tp: string = path.join(__dirname, 'L0DownloadCommitsFromBuildRange.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('FoundBuildIndex') !== -1, "Failed to find the build index");
            assert(tr.stdout.indexOf('api/json?tree=builds[number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]]{2,4}') !== -1 , "API parameter to fetch commits range have changed");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should download rollback commits', (done) => {
        const tp: string = path.join(__dirname, 'L0RollbackCommitsShouldBeDownloaded.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('FoundBuildIndex') !== -1, "Failed to find the build index");
            assert(tr.stdout.indexOf('api/json?tree=builds[number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]]{2,4}') !== -1 , "API parameter to fetch commits range have changed");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('No commits should be downloaded if both the jobId is same', (done) => {
        const tp: string = path.join(__dirname, 'L0NoCommitsShouldBeDownloaded.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('FoundBuildIndex') === -1, "Should not try to find the build range");
            assert(tr.stdout.indexOf('changeSet[kind,items[commitId,date,msg,author[fullName]]]') === -1 , "Should not call jenkins api to fetch commits");
            assert(tr.stdout.indexOf('JenkinsNoCommitsToFetch') !== -1, "No commits should be downloaded");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    // ----- propagated artifacts (Azure Storage) -----

    it('run JenkinsDownloadArtifacts for propagated artifacts with Artifact Provider not as Azure Storage', (done) => {
        const tp: string = path.join(__dirname, 'L0UnkownArtifactProvider.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try{
            tr.run();
            assert(tr.stdOutContained('loc_mock_ArtifactProviderNotSupported'), tr.stderr);
            assert(tr.failed, 'task should have failed');
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('run JenkinsDownloadArtifacts for propagated artifacts with no azure server endpoint', (done) => {
        const tp: string = path.join(__dirname, 'L0NoAzureEndpointFailure.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try{
            tr.run();
            assert(tr.stdOutContained('Input required: ConnectedServiceNameARM'));
            assert(tr.failed, 'task should have failed');
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('run JenkinsDownloadArtifacts for propagated artifacts should run successfully', (done) => {
        const tp: string = path.join(__dirname, 'L0DownloadArtifactsFromAzureStorage.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try{
            tr.run();
            assert(tr.stdout.indexOf('loc_mock_ArtifactSuccessfullyDownloaded') !== -1, tr.stdout);
            // NOTE(review): 'succedded' typo lives in the failure message only.
            assert(tr.succeeded, 'task should have succedded.');
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    // ----- job type / build id resolution -----

    it('Job type should be fetched even if its mentioned in the task input', (done) => {
        const tp: string = path.join(__dirname, 'L0JobTypeShouldAlwaysBeFetched.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('Trying to get job type') !== -1, "Should try to find the job type");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should fail if invalid buildId mentioned for MultiBranch job type', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldFailIfInvalidBuildIdMentionedForMultiBranch.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('InvalidBuildId') !== -1, tr.stdout);
            assert(tr.failed, 'task should have failed');
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should fail if invalid buildId mentioned for Freestyle job type', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldFailIfInvalidBuildIdMentionedForFreeStyleJob.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.stdout.indexOf('InvalidBuildId') !== -1, tr.stdout);
            assert(tr.failed, 'task should have failed');
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should find jobId and branchName if its multibranch pipeline project', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldCorrectlyDetectMultiBranchPipelineProject.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedMessage: string = "Found Jenkins job details jobName:multibranchproject, jobType:org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject, buildId:20, IsMultiBranchPipeline:true, MultiBranchPipelineName:mybranch";
            assert(tr.stdout.indexOf(expectedMessage) !== -1, "Should correctly find the jobId and branchName if its multibranch project");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should find jobId and branchName if its freestyle pipeline project', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldCorrectlyDetectFreeStyleProject.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedMessage: string = "Found Jenkins job details jobName:myfreestyleproject, jobType:hudson.model.FreeStyleProject, buildId:10, IsMultiBranchPipeline:false, MultiBranchPipelineName:undefined";
            assert(tr.stdout.indexOf(expectedMessage) !== -1, "Should correctly find the jobId if its freestyle project");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should fetch the LastSuccesful build correctly when its Freestyle project', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldCorrectlyDetectLatestBuildForFreeStyleProject.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedMessage: string = "Found Jenkins job details jobName:myfreestyleproject, jobType:hudson.model.FreeStyleProject, buildId:100, IsMultiBranchPipeline:false, MultiBranchPipelineName:undefined";
            assert(tr.stdout.indexOf(expectedMessage) !== -1, "Should correctly find the Latest jobId if its freestyle project");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should fetch the LastSuccesful build correctly when its MultiBranch Pipeline project', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldCorrectlyDetectLatestBuildForMultiBranchPipelineProject.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedMessage: string = "Found Jenkins job details jobName:mymultibranchproject, jobType:org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject, buildId:200, IsMultiBranchPipeline:true, MultiBranchPipelineName:branch1";
            assert(tr.stdout.indexOf(expectedMessage) !== -1, "Should correctly find the Latest jobId if its multibranch project");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should have the correct url when downloading commits from multibranch pipeline project', (done) => {
        const tp: string = path.join(__dirname, 'L0FindingBuildRangeShouldHaveCorrectUrlIfItsMultiBranchPipelineProject.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedFindingBuildIndexApi: string = "http://url/job/testmultibranchproject//job/master/api/json?tree=allBuilds[number]";
            assert(tr.stdout.indexOf(expectedFindingBuildIndexApi) !== -1, "Should correctly find the build range when its multibranch project");
            let expectedDownloadCommitsApi: string = "http://url/job/testmultibranchproject//job/master/api/json?tree=builds[number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]]{2,4}";
            assert(tr.stdout.indexOf(expectedDownloadCommitsApi) !== -1 , "API to download multibranch pipeline job's commits is not correct");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should throw if the start and end builds are from different branch in multibranch pipeline project', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldThrowIfBuildsAreFromDifferentBranchInMultiBranchProject.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            assert(tr.succeeded, 'task should not have failed'); // We don't fail the task if the downloading commits failed
            assert(tr.stdout.indexOf('CommitsAndWorkItemsDownloadFailed') !== -1, "Download Commits should have failed")
            let buildIndexApi: string = "http://url/job/testmultibranchproject//job/master/api/json?tree=allBuilds[number]";
            assert(tr.stdout.indexOf(buildIndexApi) === -1, "Should not try to find the build range");
            let downloadCommitsApi: string = "tree=builds[number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]]{2,4}";
            assert(tr.stdout.indexOf(downloadCommitsApi) === -1 , "Should not try to download the commits");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should have the correct url if the job is under folder', (done) => {
        const tp: string = path.join(__dirname, 'L0FolderJobShouldHaveCorrectUrl.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedMessage: string = "Found Jenkins job details jobName:folder1/folder2/testmultibranchproject, jobType:org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject, buildId:20, IsMultiBranchPipeline:true, MultiBranchPipelineName:master";
            assert(tr.stdout.indexOf(expectedMessage) != -1, "Should correctly find the Latest job is inside a folder");
            let buildIndexApi: string = "http://url/job/folder1/job/folder2/job/testmultibranchproject//job/master/20/api/json?tree=number,result,actions[remoteUrls],changeSet[kind,items[commitId,date,msg,author[fullName]]]";
            assert(tr.stdout.indexOf(buildIndexApi) != -1, "Url for folder job should be correct");
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });

    it('Should retry if JenkinsClient encounters an error', (done) => {
        const tp: string = path.join(__dirname, 'L0ShouldRetryCorrectlyWhenErrorHappens.js');
        const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
        try {
            tr.run();
            let expectedMessage: string = "RetryingOperation DownloadJsonContent 1";
            assert(tr.stdout.indexOf(expectedMessage) != -1, tr.stdout);
            done();
        } catch(err) { console.log(tr.stdout); console.log(tr.stderr); console.log(err); done(err); }
    });
});
the_stack
// Visual-regression suite for BPMN element overlays.
// `page` and `expect(...).toMatchImageSnapshot` are globals provided by
// jest-playwright-preset / the project's jest setup — grounded only by the
// first import; confirm against jest.config if in doubt.
import 'jest-playwright-preset';
import { join } from 'path';
import { ImageSnapshotConfigurator, ImageSnapshotThresholdConfig, MultiBrowserImageSnapshotThresholds } from './helpers/visu/image-snapshot-config';
import { PageTester } from './helpers/visu/bpmn-page-utils';
import { clickOnButton, getContainerCenter, mousePanning, mouseZoom, Point } from './helpers/test-utils';
import { overlayEdgePositionValues, overlayShapePositionValues } from '../helpers/overlays';
import { OverlayEdgePosition, OverlayPosition, OverlayShapePosition } from '../../src/component/registry';
import { ensureIsArray } from '../../src/component/helpers/array-utils';

// Per-diagram, per-browser, per-OS image diff thresholds. The inline
// percentages record the worst observed diff that motivated each value.
class ImageSnapshotThresholds extends MultiBrowserImageSnapshotThresholds {
  constructor() {
    super({ chromium: 0.000005, firefox: 0.0004, webkit: 0 });
  }

  getChromiumThresholds(): Map<string, ImageSnapshotThresholdConfig> {
    // if no dedicated information, set minimal threshold to make test pass on Github Workflow
    // linux threshold are set for Ubuntu
    return new Map<string, ImageSnapshotThresholdConfig>([
      [
        // also use for navigation tests
        'overlays.start.flow.task.gateway',
        {
          macos: 0.001, // max 0.09371109158465839%
          windows: 0.0015, // max 0.11569306287013695%
        },
      ],
      [
        'overlays.edges.associations.complex.paths',
        {
          linux: 0.0012, // 0.11107953745526089%
          macos: 0.0011, // max 0.10450139836080119%
          windows: 0.001, // max 0.09070811014064706%
        },
      ],
      [
        'overlays.edges.message.flows.complex.paths',
        {
          macos: 0.002, // 0.19008048545454678%
          windows: 0.0002, // 0.017926897177755752%
        },
      ],
      [
        'overlays.edges.sequence.flows.complex.paths',
        {
          linux: 0.0008, // max 0.07684414701126795%
          macos: 0.0007, // max 0.06361357947514133%
          windows: 0.0006, // max 0.05408166283820126%
        },
      ],
    ]);
  }

  getFirefoxThresholds(): Map<string, ImageSnapshotThresholdConfig> {
    return new Map<string, ImageSnapshotThresholdConfig>([
      [
        // also use for navigation tests
        'overlays.start.flow.task.gateway',
        {
          linux: 0.0044, // max 0.43536497668036356%
          macos: 0.0071, // max 0.7027949859673144%
          windows: 0.0027, // max 0.26051371171855736%
        },
      ],
      [
        'overlays.edges.associations.complex.paths',
        {
          linux: 0.0012, // max 0.11544442258832888%
          macos: 0.0029, // max 0.2883299813273288%
          windows: 0.0038, // max 0.37867717015809266%
        },
      ],
      [
        'overlays.edges.message.flows.complex.paths',
        {
          linux: 0.0032, // 0.29508961424412616%
          macos: 0.004, // 0.36434716534193834%
          windows: 0.004, // 0.37268987984115926%
        },
      ],
      [
        'overlays.edges.sequence.flows.complex.paths',
        {
          linux: 0.0014, // max 0.13950493094400107%
          macos: 0.0027, // max 0.26624249108074816%
          windows: 0.0026, // max 0.25710970853788373%
        },
      ],
    ]);
  }

  protected getWebkitThresholds(): Map<string, ImageSnapshotThresholdConfig> {
    return new Map<string, ImageSnapshotThresholdConfig>([
      // also use for navigation tests
      [
        'overlays.start.flow.task.gateway',
        {
          macos: 0.0059, // max 0.5852809894618671%
        },
      ],
      [
        'overlays.edges.associations.complex.paths',
        {
          macos: 0.0035, // max 0.3442305874630902%
        },
      ],
      [
        'overlays.edges.message.flows.complex.paths',
        {
          macos: 0.0028, // max 0.2624477963090066%
        },
      ],
      [
        'overlays.edges.sequence.flows.complex.paths',
        {
          macos: 0.0011, // max 0.10016873792552117%
        },
      ],
    ]);
  }
}

// Drives the demo page UI: selects each element by id, then clicks one
// button per requested overlay position.
async function addOverlays(bpmnElementIds: string | string[], positions: OverlayPosition | OverlayPosition[]): Promise<void> {
  positions = ensureIsArray<OverlayPosition>(positions);
  for (const bpmnElementId of ensureIsArray<string>(bpmnElementIds)) {
    await page.fill('#bpmn-id-input', bpmnElementId);
    for (const position of positions) {
      await clickOnButton(position);
    }
  }
}

// Adds one styled overlay per element; `style` doubles as the button id.
async function addStylingOverlay(bpmnElementIds: string[], style: string): Promise<void> {
  for (const bpmnElementId of bpmnElementIds) {
    await page.fill('#bpmn-id-input', bpmnElementId);
    await clickOnButton(style);
  }
}

// Clears every overlay of the given element via the page's 'clear' button.
async function removeAllOverlays(bpmnElementId: string): Promise<void> {
  await page.fill('#bpmn-id-input', bpmnElementId);
  await clickOnButton('clear');
}

const imageSnapshotConfigurator = new ImageSnapshotConfigurator(new ImageSnapshotThresholds(), 'overlays');
// to have mouse pointer visible during headless test - add 'showMousePointer: true' as parameter
const pageTester = new PageTester({ pageFileName: 'overlays', expectedPageTitle: 'BPMN Visualization - Overlays' });

describe('BPMN Shapes with overlays', () => {
  const bpmnDiagramName = 'overlays.start.flow.task.gateway';

  function getShapeDir(dir: string): string {
    return join(dir, `on.shape`);
  }

  it.each(overlayShapePositionValues)(`add overlay on StartEvent, Gateway and Task on %s`, async (position: OverlayShapePosition) => {
    await pageTester.loadBPMNDiagramInRefreshedPage(bpmnDiagramName);
    await addOverlays(['StartEvent_1', 'Activity_1', 'Gateway_1'], position);

    const image = await page.screenshot({ fullPage: true });
    const config = imageSnapshotConfigurator.getConfig(bpmnDiagramName);
    expect(image).toMatchImageSnapshot({
      ...config,
      customSnapshotIdentifier: `add.overlay.on.position.${position}`,
      customSnapshotsDir: getShapeDir(config.customSnapshotsDir),
      customDiffDir: getShapeDir(config.customDiffDir),
    });
  });

  it(`remove all overlays of Shape`, async () => {
    await pageTester.loadBPMNDiagramInRefreshedPage(bpmnDiagramName);
    await addOverlays('Activity_1', ['top-left', 'bottom-left', 'middle-right']);
    await removeAllOverlays('Activity_1');

    const image = await page.screenshot({ fullPage: true });
    const config = imageSnapshotConfigurator.getConfig(bpmnDiagramName);
    expect(image).toMatchImageSnapshot({
      ...config,
      customSnapshotIdentifier: 'remove.all.overlays.of.shape',
      customSnapshotsDir: getShapeDir(config.customSnapshotsDir),
      customDiffDir: getShapeDir(config.customDiffDir),
    });
  });
});

describe('BPMN Edges with overlays', () => {
  describe.each([
    ['overlays.edges.associations.complex.paths', 'association', ['Association_1opueuo', 'Association_0n43f9f', 'Association_01t0kyz']],
    [
      'overlays.edges.message.flows.complex.paths',
      'message',
      [
        // incoming and outgoing flows of the 2 pools starting from the right
        'Flow_0skfnol',
        'Flow_0ssridu',
        'Flow_0s4cl7e',
        'Flow_0zz7yh1',
        // flows in the middle of the diagram
        'Flow_0vsaa9d',
        'Flow_17olevz',
        'Flow_0qhtw2k',
        // flows on the right
        'Flow_0mmisr0',
        'Flow_1l8ze06',
      ],
    ],
    ['overlays.edges.sequence.flows.complex.paths', 'sequence', ['Flow_039xs1c', 'Flow_0m2ldux', 'Flow_1r3oti3', 'Flow_1byeukq']],
  ])('diagram %s', (bpmnDiagramName: string, edgeKind: string, bpmnElementIds: string[]) => {
    function getEdgeDir(dir: string): string {
      return join(dir, `on.edge`);
    }

    function getEdgePositionDir(dir: string, position: OverlayEdgePosition): string {
      return join(getEdgeDir(dir), `on-position-${position}`);
    }

    it.each(overlayEdgePositionValues)(`add overlay on ${edgeKind} flow on %s`, async (position: OverlayEdgePosition) => {
      await pageTester.loadBPMNDiagramInRefreshedPage(bpmnDiagramName);
      await addOverlays(bpmnElementIds, position);

      const image = await page.screenshot({ fullPage: true });
      const config = imageSnapshotConfigurator.getConfig(bpmnDiagramName);
      expect(image).toMatchImageSnapshot({
        ...config,
        customSnapshotIdentifier: `add.overlay.on.${edgeKind}.flow`,
        customSnapshotsDir: getEdgePositionDir(config.customSnapshotsDir, position),
        customDiffDir: getEdgePositionDir(config.customDiffDir, position),
      });
    });

    it(`remove all overlays of ${edgeKind} flow`, async () => {
      await pageTester.loadBPMNDiagramInRefreshedPage(bpmnDiagramName);
      // NOTE(review): shift() mutates the shared bpmnElementIds fixture —
      // subsequent tests in this describe see one fewer id; confirm intended.
      const id = bpmnElementIds.shift();
      await addOverlays(id, ['start', 'middle', 'end']);
      await removeAllOverlays(id);

      const image = await page.screenshot({ fullPage: true });
      const config = imageSnapshotConfigurator.getConfig(bpmnDiagramName);
      expect(image).toMatchImageSnapshot({
        ...config,
        customSnapshotIdentifier: `remove.all.overlays.of.${edgeKind}.flow`,
        customSnapshotsDir: getEdgeDir(config.customSnapshotsDir),
        customDiffDir: getEdgeDir(config.customDiffDir),
      });
    });
  });
});

describe('Overlay navigation', () => {
  const bpmnDiagramName = 'overlays.start.flow.task.gateway';
  let containerCenter: Point;

  beforeEach(async () => {
    const bpmnContainerElementHandle = await pageTester.loadBPMNDiagramInRefreshedPage(bpmnDiagramName);
    containerCenter = await getContainerCenter(bpmnContainerElementHandle);

    await addOverlays('StartEvent_1', 'bottom-center');
    await addOverlays('Activity_1', 'middle-right');
    await addOverlays('Gateway_1', 'top-right');
    await addOverlays('Flow_1', 'start');
  });

  it('panning', async () => {
    await mousePanning({ originPoint: containerCenter, destinationPoint: { x: containerCenter.x + 150, y: containerCenter.y + 40 } });

    const image = await page.screenshot({ fullPage: true });
    const config = imageSnapshotConfigurator.getConfig(bpmnDiagramName);
    expect(image).toMatchImageSnapshot({
      ...config,
      customSnapshotIdentifier: 'panning',
    });
  });

  it(`zoom out`, async () => {
    await mouseZoom(1, { x: containerCenter.x + 200, y: containerCenter.y }, 100);

    const image = await page.screenshot({ fullPage: true });
    const config = imageSnapshotConfigurator.getConfig(bpmnDiagramName);
    expect(image).toMatchImageSnapshot({
      ...config,
      customSnapshotIdentifier: 'zoom.out',
    });
  });
});

describe('Overlay style', () => {
  const bpmnDiagramName = 'overlays.start.flow.task.gateway';
  const snapshotPath = 'with.custom.style';

  // Configure thresholds by types of overlay styles - we use the same bpmn diagram in all tests
  class OverlayStylesImageSnapshotThresholds extends MultiBrowserImageSnapshotThresholds {
    constructor() {
      // don't set defaults as we defined thresholds for all style variants
      super({ chromium: 0, firefox: 0, webkit: 0 });
    }

    getChromiumThresholds(): Map<string, ImageSnapshotThresholdConfig> {
      // if no dedicated information, set minimal threshold to make test pass on Github Workflow
      // linux threshold are set for Ubuntu
      return new Map<string, ImageSnapshotThresholdConfig>([
        [
          'fill',
          {
            linux: 0.000005, // 0.00041653196235502676%
            macos: 0.0002, // 0.015144311713777281%
            windows: 0.0003, // 0.021176489211183203%
          },
        ],
        [
          'font',
          {
            linux: 0.00001, // 0.0003248438377401186%
            macos: 0.006, // 0.5500536579274629%
            windows: 0.0033, // 0.3214844457131605%
          },
        ],
        [
          'stroke',
          {
            linux: 0.000005, // 0.00041653196235502676%
            macos: 0.0018, // 0.1787779478926499%
            windows: 0.0022, // 0.21848079010937665%
          },
        ],
      ]);
    }

    getFirefoxThresholds(): Map<string, ImageSnapshotThresholdConfig> {
      return new Map<string, ImageSnapshotThresholdConfig>([
        [
          'fill',
          {
            linux: 0.0016, // 0.15729572870969433
            macos: 0.0038, // 0.3723534417182983%
            windows: 0.0036, // 0.03575426016920735%
          },
        ],
        [
          // TODO very large thresholds on Firefox linux/macOS for font overlay styles
          'font',
          {
            linux: 0.0125, // 1.2469257287752389%
            macos: 0.0256, // 2.550083023327932%
            windows: 0.0088, // 0.8742520092084982%
          },
        ],
        [
          'stroke',
          {
            linux: 0.0013, // 0.1259742349527526%
            macos: 0.0036, // 0.35056620525392157%
            windows: 0.0024, // 0.23796610634385656%
          },
        ],
      ]);
    }

    protected getWebkitThresholds(): Map<string, ImageSnapshotThresholdConfig> {
      return new Map<string, ImageSnapshotThresholdConfig>([
        [
          'fill',
          {
            macos: 0.0017, // 0.16625642718750555%
          },
        ],
        [
          'font',
          {
            macos: 0.007, // 0.6973031829030196%
          },
        ],
        [
          'stroke',
          {
            macos: 0.0033, // 0.3243565433802331%
          },
        ],
      ]);
    }
  }

  // Shadows the module-level configurator on purpose: snapshots here are
  // keyed by style name, not by diagram name.
  const imageSnapshotConfigurator = new ImageSnapshotConfigurator(new OverlayStylesImageSnapshotThresholds(), 'overlays');

  it.each(['fill', 'font', 'stroke'])(`add overlay with custom %s`, async (style: string) => {
    await pageTester.loadBPMNDiagramInRefreshedPage(bpmnDiagramName);
    await addStylingOverlay(['StartEvent_1', 'Activity_1', 'Gateway_1', 'Flow_1'], style);

    const image = await page.screenshot({ fullPage: true });
    const config = imageSnapshotConfigurator.getConfig(style);
    expect(image).toMatchImageSnapshot({
      ...config,
      customSnapshotIdentifier: `add.overlay.with.custom.${style}`,
      customSnapshotsDir: join(config.customSnapshotsDir, snapshotPath),
      customDiffDir: join(config.customDiffDir, snapshotPath),
    });
  });
});
the_stack
// //https://github.com/vadimg/js_bintrees // //Copyright (C) 2011 by Vadim Graboys // //Permission is hereby granted, free of charge, to any person obtaining a copy //of this software and associated documentation files (the "Software"), to deal //in the Software without restriction, including without limitation the rights //to use, copy, modify, merge, publish, distribute, sublicense, and/or sell //copies of the Software, and to permit persons to whom the Software is //furnished to do so, subject to the following conditions: // //The above copyright notice and this permission notice shall be included in //all copies or substantial portions of the Software. // //THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR //IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, //FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE //AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER //LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, //OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN //THE SOFTWARE. 
export class TreeBase { _root; size; _comparator; // removes all nodes from the tree clear() { this._root = null; this.size = 0; }; // returns node data if found, null otherwise find(data) { var res = this._root; while (res !== null) { var c = this._comparator(data, res.data); if (c === 0) { return res.data; } else { res = res.get_child(c > 0); } } return null; }; // returns iterator to node if found, null otherwise findIter = function (data) { var res = this._root; var iter = this.iterator(); while (res !== null) { var c = this._comparator(data, res.data); if (c === 0) { iter._cursor = res; return iter; } else { iter._ancestors.push(res); res = res.get_child(c > 0); } } return null; }; // Returns an interator to the tree node immediately before (or at) the element lowerBound(data) { return this._bound(data, this._comparator); }; // Returns an interator to the tree node immediately after (or at) the element upperBound(data) { var cmp = this._comparator; function reverse_cmp(a, b) { return cmp(b, a); } return this._bound(data, reverse_cmp); }; // returns null if tree is empty min() { var res = this._root; if (res === null) { return null; } while (res.left !== null) { res = res.left; } return res.data; }; // returns null if tree is empty max() { var res = this._root; if (res === null) { return null; } while (res.right !== null) { res = res.right; } return res.data; }; // returns a null iterator // call next() or prev() to point to an element iterator(): Iterator { return new Iterator(this); }; // calls cb on each node's data, in order each(cb) { var it = this.iterator(), data; while ((data = it.next()) !== null) { cb(data); } }; // calls cb on each node's data, in reverse order reach(cb) { var it = this.iterator(), data; while ((data = it.prev()) !== null) { cb(data); } }; // used for lowerBound and upperBound _bound(data, cmp) { var cur = this._root; var iter = this.iterator(); while (cur !== null) { var c = this._comparator(data, cur.data); if (c === 0) { 
iter._cursor = cur; return iter; } iter._ancestors.push(cur); cur = cur.get_child(c > 0); } for (var i = iter._ancestors.length - 1; i >= 0; --i) { cur = iter._ancestors[i]; if (cmp(data, cur.data) > 0) { iter._cursor = cur; iter._ancestors.length = i; return iter; } } iter._ancestors.length = 0; return iter; }; } export class Iterator { _tree; _ancestors; _cursor; constructor(tree) { this._tree = tree; this._ancestors = []; this._cursor = null; } data() { return this._cursor !== null ? this._cursor.data : null; }; // if null-iterator, returns first node // otherwise, returns next node next() { if (this._cursor === null) { var root = this._tree._root; if (root !== null) { this._minNode(root); } } else { if (this._cursor.right === null) { // no greater node in subtree, go up to parent // if coming from a right child, continue up the stack var save; do { save = this._cursor; if (this._ancestors.length) { this._cursor = this._ancestors.pop(); } else { this._cursor = null; break; } } while (this._cursor.right === save); } else { // get the next node from the subtree this._ancestors.push(this._cursor); this._minNode(this._cursor.right); } } return this._cursor !== null ? this._cursor.data : null; }; // if null-iterator, returns last node // otherwise, returns previous node prev() { if (this._cursor === null) { var root = this._tree._root; if (root !== null) { this._maxNode(root); } } else { if (this._cursor.left === null) { var save; do { save = this._cursor; if (this._ancestors.length) { this._cursor = this._ancestors.pop(); } else { this._cursor = null; break; } } while (this._cursor.left === save); } else { this._ancestors.push(this._cursor); this._maxNode(this._cursor.left); } } return this._cursor !== null ? 
this._cursor.data : null; }; _minNode(start) { while (start.left !== null) { this._ancestors.push(start); start = start.left; } this._cursor = start; }; _maxNode(start) { while (start.right !== null) { this._ancestors.push(start); start = start.right; } this._cursor = start; }; } class Node { data; left; right; red; constructor(data) { this.data = data; this.left = null; this.right = null; this.red = true; } get_child(dir) { return dir ? this.right : this.left; }; set_child(dir, val) { if (dir) { this.right = val; } else { this.left = val; } }; } export class RBTree<T> extends TreeBase { _root; _comparator; size; constructor(comparator: (a: T, b: T) => number) { super(); this._root = null; this._comparator = comparator; this.size = 0; } // returns true if inserted, false if duplicate insert(data) { var ret = false; if (this._root === null) { // empty tree this._root = new Node(data); ret = true; this.size++; } else { var head = new Node(undefined); // fake tree root var dir = false; var last = false; // setup var gp = null; // grandparent var ggp = head; // grand-grand-parent var p = null; // parent var node = this._root; ggp.right = this._root; // search down while (true) { if (node === null) { // insert new node at the bottom node = new Node(data); p.set_child(dir, node); ret = true; this.size++; } else if (RBTree.is_red(node.left) && RBTree.is_red(node.right)) { // color flip node.red = true; node.left.red = false; node.right.red = false; } // fix red violation if (RBTree.is_red(node) && RBTree.is_red(p)) { var dir2 = ggp.right === gp; if (node === p.get_child(last)) { ggp.set_child(dir2, RBTree.single_rotate(gp, !last)); } else { ggp.set_child(dir2, RBTree.double_rotate(gp, !last)); } } var cmp = this._comparator(node.data, data); // stop if found if (cmp === 0) { break; } last = dir; dir = cmp < 0; // update helpers if (gp !== null) { ggp = gp; } gp = p; p = node; node = node.get_child(dir); } // update root this._root = head.right; } // make root black 
this._root.red = false; return ret; }; // returns true if removed, false if not found remove(data) { if (this._root === null) { return false; } var head = new Node(undefined); // fake tree root var node = head; node.right = this._root; var p = null; // parent var gp = null; // grand parent var found = null; // found item var dir = true; while (node.get_child(dir) !== null) { var last = dir; // update helpers gp = p; p = node; node = node.get_child(dir); var cmp = this._comparator(data, node.data); dir = cmp > 0; // save found node if (cmp === 0) { found = node; } // push the red node down if (!RBTree.is_red(node) && !RBTree.is_red(node.get_child(dir))) { if (RBTree.is_red(node.get_child(!dir))) { var sr = RBTree.single_rotate(node, dir); p.set_child(last, sr); p = sr; } else if (!RBTree.is_red(node.get_child(!dir))) { var sibling = p.get_child(!last); if (sibling !== null) { if (!RBTree.is_red(sibling.get_child(!last)) && !RBTree.is_red(sibling.get_child(last))) { // color flip p.red = false; sibling.red = true; node.red = true; } else { var dir2 = gp.right === p; if (RBTree.is_red(sibling.get_child(last))) { gp.set_child(dir2, RBTree.double_rotate(p, last)); } else if (RBTree.is_red(sibling.get_child(!last))) { gp.set_child(dir2, RBTree.single_rotate(p, last)); } // ensure correct coloring var gpc = gp.get_child(dir2); gpc.red = true; node.red = true; gpc.left.red = false; gpc.right.red = false; } } } } } // replace and remove if found if (found !== null) { found.data = node.data; p.set_child(p.right === node, node.get_child(node.left === null)); this.size--; } // update root and make it black this._root = head.right; if (this._root !== null) { this._root.red = false; } return found !== null; }; static is_red(node) { return node !== null && node.red; } static single_rotate(root, dir) { var save = root.get_child(!dir); root.set_child(!dir, save.get_child(dir)); save.set_child(dir, root); root.red = true; save.red = false; return save; } static double_rotate(root, 
dir) { root.set_child(!dir, RBTree.single_rotate(root.get_child(!dir), !dir)); return RBTree.single_rotate(root, dir); } }
the_stack
import { JupyterFrontEnd } from '@jupyterlab/application';
import { MainAreaWidget } from '@jupyterlab/apputils';
import { nullTranslator, TranslationBundle } from '@jupyterlab/translation';
import { LabIcon, copyIcon } from '@jupyterlab/ui-components';
import { Menu } from '@lumino/widgets';
import type * as CodeMirror from 'codemirror';
import type * as lsProtocol from 'vscode-languageserver-protocol';

import diagnosticsSvg from '../../../style/icons/diagnostics.svg';
import { CodeDiagnostics as LSPDiagnosticsSettings } from '../../_diagnostics';
import { LSPConnection } from '../../connection';
import { PositionConverter } from '../../converter';
import { CodeMirrorIntegration } from '../../editor_integration/codemirror';
import { FeatureSettings } from '../../feature';
import { DiagnosticSeverity, DiagnosticTag } from '../../lsp';
import { IEditorPosition, IVirtualPosition } from '../../positioning';
import { DefaultMap, uris_equal } from '../../utils';
import { CodeMirrorVirtualEditor } from '../../virtual/codemirror_editor';
import { VirtualDocument } from '../../virtual/document';
import { jumpToIcon } from '../jump_to';

import {
  DIAGNOSTICS_LISTING_CLASS,
  DiagnosticsDatabase,
  DiagnosticsListing,
  IDiagnosticsRow,
  IEditorDiagnostic
} from './listing';

export const diagnosticsIcon = new LabIcon({
  name: 'lsp:diagnostics',
  svgstr: diagnosticsSvg
});

// Command IDs registered with the JupyterLab command registry in
// DiagnosticsPanel.register() and referenced from context-menu items.
const CMD_COLUMN_VISIBILITY = 'lsp-set-column-visibility';
const CMD_JUMP_TO_DIAGNOSTIC = 'lsp-jump-to-diagnostic';
const CMD_COPY_DIAGNOSTIC = 'lsp-copy-diagnostic';
const CMD_IGNORE_DIAGNOSTIC_CODE = 'lsp-ignore-diagnostic-code';
const CMD_IGNORE_DIAGNOSTIC_MSG = 'lsp-ignore-diagnostic-message';

/**
 * Escape pattern to form a base of a regular expression.
 * The snippet comes from https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping
 * and is in the Public Domain (CC0):
 * > Any copyright is dedicated to the Public Domain.
 * > http://creativecommons.org/publicdomain/zero/1.0/
 */
function escapeRegExp(string: string) {
  return string.replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&');
}

/**
 * Lazily-created main-area widget hosting the diagnostics listing, plus the
 * context-menu commands operating on it (column visibility, ignore-by-code,
 * ignore-by-message, jump-to-location, copy-message).
 *
 * A single module-level instance (`diagnostics_panel` below) is shared by all
 * DiagnosticsCM features; the active feature re-points the panel's model via
 * switchDiagnosticsPanelSource().
 */
class DiagnosticsPanel {
  // widget/content are created on demand by the `widget` getter
  private _content: DiagnosticsListing | null = null;
  private _widget: MainAreaWidget<DiagnosticsListing> | null = null;
  // the feature currently feeding the panel; set in switchDiagnosticsPanelSource()
  feature: DiagnosticsCM;
  is_registered = false;
  trans: TranslationBundle;

  constructor(trans: TranslationBundle) {
    this.trans = trans;
  }

  /**
   * The panel widget; re-created when missing, or when its model was nulled
   * out (which DiagnosticsCM.remove() does on feature teardown).
   */
  get widget() {
    if (this._widget == null || this._widget.content.model == null) {
      if (this._widget && !this._widget.isDisposed) {
        this._widget.dispose();
      }
      this._widget = this.initWidget();
    }
    return this._widget;
  }

  get content() {
    return this.widget.content;
  }

  /** Builds a fresh MainAreaWidget around a new DiagnosticsListing. */
  protected initWidget() {
    this._content = new DiagnosticsListing(
      new DiagnosticsListing.Model(this.trans)
    );
    this._content.model.diagnostics = new DiagnosticsDatabase();
    this._content.addClass('lsp-diagnostics-panel-content');
    const widget = new MainAreaWidget({ content: this._content });
    widget.id = 'lsp-diagnostics-panel';
    widget.title.label = this.trans.__('Diagnostics Panel');
    widget.title.closable = true;
    widget.title.icon = diagnosticsIcon;
    return widget;
  }

  update() {
    // if not attached, do not bother to update
    if (!this.widget.isAttached) {
      return;
    }
    this.widget.content.update();
  }

  /**
   * Registers the panel's commands and context-menu items with the
   * application. Intended to run once (see `is_registered`).
   */
  register(app: JupyterFrontEnd) {
    const widget = this.widget;

    // resolve a listing column by its id
    let get_column = (id: string) => {
      // TODO: a hashmap in the panel itself?
      for (let column of widget.content.columns) {
        if (column.id === id) {
          return column;
        }
      }
      return undefined;
    };

    /** Columns Menu **/
    let columns_menu = new Menu({ commands: app.commands });
    columns_menu.title.label = this.trans.__('Panel columns');
    app.commands.addCommand(CMD_COLUMN_VISIBILITY, {
      execute: args => {
        let column = get_column(args['id'] as string)!;
        column.is_visible = !column.is_visible;
        widget.update();
      },
      label: args => this.trans.__(args['id'] as string),
      isToggled: args => {
        let column = get_column(args['id'] as string);
        return column ? column.is_visible : false;
      }
    });
    // one menu entry (sharing the command) per listing column
    for (let column of widget.content.columns) {
      columns_menu.addItem({
        command: CMD_COLUMN_VISIBILITY,
        args: { id: column.id }
      });
    }
    app.contextMenu.addItem({
      selector: '.' + DIAGNOSTICS_LISTING_CLASS + ' th',
      submenu: columns_menu,
      type: 'submenu'
    });

    /** Diagnostics Menu **/
    let ignore_diagnostics_menu = new Menu({ commands: app.commands });
    ignore_diagnostics_menu.title.label = this.trans.__(
      'Ignore diagnostics like this'
    );

    // find the listing row under the context-menu click, if any
    let get_row = (): IDiagnosticsRow | undefined => {
      let tr = app.contextMenuHitTest(
        node => node.tagName.toLowerCase() == 'tr'
      );
      if (!tr) {
        return;
      }
      return this.widget.content.get_diagnostic(tr.dataset.key!);
    };

    ignore_diagnostics_menu.addItem({ command: CMD_IGNORE_DIAGNOSTIC_CODE });
    ignore_diagnostics_menu.addItem({ command: CMD_IGNORE_DIAGNOSTIC_MSG });

    // NOTE(review): the "LPS:" prefix in the warnings below looks like a typo
    // for "LSP:" — runtime strings are left untouched here; confirm upstream.
    app.commands.addCommand(CMD_IGNORE_DIAGNOSTIC_CODE, {
      execute: () => {
        const row = get_row();
        if (!row) {
          console.warn(
            'LPS: diagnostics row not found for ignore code execute()'
          );
          return;
        }
        const diagnostic = row.data.diagnostic;
        // append this diagnostic's code to the persisted ignore list
        let current = this.content.model.settings.composite.ignoreCodes;
        this.content.model.settings.set('ignoreCodes', [
          ...current,
          diagnostic.code
        ]);
        this.feature.refreshDiagnostics();
      },
      isVisible: () => {
        const row = get_row();
        if (!row) {
          return false;
        }
        const diagnostic = row.data.diagnostic;
        return !!diagnostic.code;
      },
      label: () => {
        const row = get_row();
        if (!row) {
          return '';
        }
        const diagnostic = row.data.diagnostic;
        return this.trans.__(
          'Ignore diagnostics with "%1" code',
          diagnostic.code
        );
      }
    });

    app.commands.addCommand(CMD_IGNORE_DIAGNOSTIC_MSG, {
      execute: () => {
        const row = get_row();
        if (!row) {
          console.warn(
            'LPS: diagnostics row not found for ignore message execute()'
          );
          return;
        }
        const diagnostic = row.data.diagnostic;
        // the message is stored as an escaped regular expression pattern
        let current =
          this.content.model.settings.composite.ignoreMessagesPatterns;
        this.content.model.settings.set('ignoreMessagesPatterns', [
          ...current,
          escapeRegExp(diagnostic.message)
        ]);
        this.feature.refreshDiagnostics();
      },
      isVisible: () => {
        const row = get_row();
        if (!row) {
          return false;
        }
        const diagnostic = row.data.diagnostic;
        return !!diagnostic.message;
      },
      label: () => {
        const row = get_row();
        if (!row) {
          return '';
        }
        const diagnostic = row.data.diagnostic;
        return this.trans.__(
          'Ignore diagnostics with "%1" message',
          diagnostic.message
        );
      }
    });

    app.commands.addCommand(CMD_JUMP_TO_DIAGNOSTIC, {
      execute: () => {
        const row = get_row();
        if (!row) {
          console.warn('LPS: diagnostics row not found for jump execute()');
          return;
        }
        this.widget.content.jump_to(row);
      },
      label: this.trans.__('Jump to location'),
      icon: jumpToIcon
    });

    app.commands.addCommand(CMD_COPY_DIAGNOSTIC, {
      execute: () => {
        const row = get_row();
        if (!row) {
          console.warn('LPS: diagnostics row not found for copy execute()');
          return;
        }
        const message = row.data.diagnostic.message;
        // clipboard API may be blocked by the browser; fall back to a prompt
        navigator.clipboard
          .writeText(message)
          .then(() => {
            this.content.model.status_message.set(
              this.trans.__('Successfully copied "%1" to clipboard', message)
            );
          })
          .catch(() => {
            console.warn(
              'Could not copy with clipboard.writeText interface, falling back'
            );
            window.prompt(
              this.trans.__(
                'Your browser protects clipboard from write operations; please copy the message manually'
              ),
              message
            );
          });
      },
      label: this.trans.__("Copy diagnostics' message"),
      icon: copyIcon
    });

    app.contextMenu.addItem({
      selector: '.' + DIAGNOSTICS_LISTING_CLASS + ' tbody tr',
      command: CMD_COPY_DIAGNOSTIC
    });
    app.contextMenu.addItem({
      selector: '.' + DIAGNOSTICS_LISTING_CLASS + ' tbody tr',
      command: CMD_JUMP_TO_DIAGNOSTIC
    });
    app.contextMenu.addItem({
      selector: '.' + DIAGNOSTICS_LISTING_CLASS + ' tbody tr',
      submenu: ignore_diagnostics_menu,
      type: 'submenu'
    });

    this.is_registered = true;
  }
}

// Single shared panel instance, re-targeted by whichever feature has focus.
export const diagnostics_panel = new DiagnosticsPanel(
  nullTranslator.load('jupyterlab_lsp')
);
// One diagnostics database per virtual editor; WeakMap so disposed editors
// do not keep their databases alive.
export const diagnostics_databases = new WeakMap<
  CodeMirrorVirtualEditor,
  DiagnosticsDatabase
>();

/**
 * CodeMirror integration for LSP diagnostics: receives
 * `textDocument/publishDiagnostics` notifications, marks the reported ranges
 * in the editors, and feeds the shared diagnostics panel.
 */
export class DiagnosticsCM extends CodeMirrorIntegration {
  // last publishDiagnostics payload, replayed by refreshDiagnostics()
  private last_response: lsProtocol.PublishDiagnosticsParams;

  get settings() {
    return super.settings as FeatureSettings<LSPDiagnosticsSettings>;
  }

  /** Connects all signal handlers; counterpart of remove(). */
  register(): void {
    // this.connection_handlers.set('diagnostic', this.handleDiagnostic);
    // TODO: unregister
    this.connection.serverNotifications[
      'textDocument/publishDiagnostics'
    ].connect(this.handleDiagnostic);
    // re-point the shared panel when an editor in this adapter gains focus
    this.wrapper_handlers.set('focusin', this.switchDiagnosticsPanelSource);
    this.unique_editor_ids = new DefaultMap(() => this.unique_editor_ids.size);
    this.settings.changed.connect(this.refreshDiagnostics, this);
    this.adapter.adapterConnected.connect(() =>
      this.switchDiagnosticsPanelSource()
    );
    this.virtual_document.foreign_document_closed.connect(
      (document, context) => {
        this.clearDocumentDiagnostics(context.foreign_document);
      }
    );
    super.register();
  }

  /** Drops all stored diagnostics for the given virtual document. */
  clearDocumentDiagnostics(document: VirtualDocument) {
    this.diagnostics_db.set(document, []);
  }

  // editor -> small stable integer, used in diagnostic marker hashes
  private unique_editor_ids: DefaultMap<CodeMirror.Editor, number>;
  // marker hash -> active CodeMirror text marker (see setDiagnostics)
  private marked_diagnostics: Map<string, CodeMirror.TextMarker> = new Map();

  /**
   * Allows access to the most recent diagnostics in context of the editor.
   *
   * One can use VirtualEditorForNotebook.find_cell_by_editor() to find
   * the corresponding cell in notebook.
   * Can be used to implement a Panel showing diagnostics list.
   *
   * Maps virtual_document.uri to IEditorDiagnostic[].
   */
  public get diagnostics_db(): DiagnosticsDatabase {
    // Note that virtual_editor can change at runtime (kernel restart)
    if (!diagnostics_databases.has(this.virtual_editor)) {
      diagnostics_databases.set(this.virtual_editor, new DiagnosticsDatabase());
    }
    return diagnostics_databases.get(this.virtual_editor)!;
  }

  /** Points the shared diagnostics panel at this feature's editor/model. */
  switchDiagnosticsPanelSource = () => {
    diagnostics_panel.trans = this.adapter.trans;
    if (
      diagnostics_panel.content.model.virtual_editor === this.virtual_editor &&
      diagnostics_panel.content.model.diagnostics == this.diagnostics_db
    ) {
      return;
    }
    diagnostics_panel.content.model.diagnostics = this.diagnostics_db;
    diagnostics_panel.content.model.virtual_editor = this.virtual_editor;
    diagnostics_panel.content.model.adapter = this.adapter;
    diagnostics_panel.content.model.settings = this.settings;
    diagnostics_panel.content.model.status_message = this.status_message;
    diagnostics_panel.feature = this;
    diagnostics_panel.update();
  };

  /** Groups diagnostics by their (stringified) range so one marker can cover all of them. */
  protected collapseOverlappingDiagnostics(
    diagnostics: lsProtocol.Diagnostic[]
  ): Map<lsProtocol.Range, lsProtocol.Diagnostic[]> {
    // because Range is not a primitive type, the equality of the objects having
    // the same parameters won't be compared (thus considered equal) in Map.
    // instead, a intermediate step of mapping through a stringified representation of Range is needed:
    // an alternative would be using nested [start line][start character][end line][end character] structure,
    // which would increase the code complexity, but reduce memory use and may be slightly faster.
    type RangeID = string;
    const range_id_to_range = new Map<RangeID, lsProtocol.Range>();
    const range_id_to_diagnostics = new Map<RangeID, lsProtocol.Diagnostic[]>();

    function get_range_id(range: lsProtocol.Range): RangeID {
      return (
        range.start.line +
        ',' +
        range.start.character +
        ',' +
        range.end.line +
        ',' +
        range.end.character
      );
    }

    diagnostics.forEach((diagnostic: lsProtocol.Diagnostic) => {
      let range = diagnostic.range;
      let range_id = get_range_id(range);
      range_id_to_range.set(range_id, range);
      if (range_id_to_diagnostics.has(range_id)) {
        let ranges_list = range_id_to_diagnostics.get(range_id)!;
        ranges_list.push(diagnostic);
      } else {
        range_id_to_diagnostics.set(range_id, [diagnostic]);
      }
    });

    let map = new Map<lsProtocol.Range, lsProtocol.Diagnostic[]>();

    range_id_to_diagnostics.forEach(
      (range_diagnostics: lsProtocol.Diagnostic[], range_id: RangeID) => {
        let range = range_id_to_range.get(range_id)!;
        map.set(range, range_diagnostics);
      }
    );

    return map;
  }

  /** Severity assumed for diagnostics that arrive without one (user setting). */
  get defaultSeverity(): lsProtocol.DiagnosticSeverity {
    return DiagnosticSeverity[this.settings.composite.defaultSeverity];
  }

  /** Drops diagnostics matched by the user's ignoreCodes / ignoreMessagesPatterns settings. */
  private filterDiagnostics(
    diagnostics: lsProtocol.Diagnostic[]
  ): lsProtocol.Diagnostic[] {
    const ignoredDiagnosticsCodes = new Set(
      this.settings.composite.ignoreCodes
    );
    const ignoredMessagesRegExp =
      this.settings.composite.ignoreMessagesPatterns.map(
        pattern => new RegExp(pattern)
      );

    return diagnostics.filter(diagnostic => {
      let code = diagnostic.code;
      if (
        typeof code !== 'undefined' &&
        // pygls servers return code null if value is missing (rather than undefined)
        // which is a departure from the LSP specs: https://microsoft.github.io/language-server-protocol/specification#diagnostic
        // there is an open issue: https://github.com/openlawlibrary/pygls/issues/124
        // and PR: https://github.com/openlawlibrary/pygls/pull/132
        // this also affects hover tooltips.
        code !== null &&
        ignoredDiagnosticsCodes.has(code.toString())
      ) {
        return false;
      }
      let message = diagnostic.message;
      if (
        message &&
        ignoredMessagesRegExp.some(pattern => pattern.test(message))
      ) {
        return false;
      }
      return true;
    });
  }

  /**
   * Applies a publishDiagnostics payload: translates each range from LSP to
   * editor coordinates, creates/retains CodeMirror text markers, and stores
   * the results in diagnostics_db for the panel.
   */
  setDiagnostics(response: lsProtocol.PublishDiagnosticsParams) {
    let diagnostics_list: IEditorDiagnostic[] = [];
    // Note: no deep equal for Sets or Maps in JS
    const markers_to_retain: Set<string> = new Set();

    // add new markers, keep track of the added ones

    // TODO: test case for severity class always being set, even if diagnostic has no severity

    let diagnostics_by_range = this.collapseOverlappingDiagnostics(
      this.filterDiagnostics(response.diagnostics)
    );

    diagnostics_by_range.forEach(
      (diagnostics: lsProtocol.Diagnostic[], range: lsProtocol.Range) => {
        const start = PositionConverter.lsp_to_cm(
          range.start
        ) as IVirtualPosition;
        const end = PositionConverter.lsp_to_cm(range.end) as IVirtualPosition;
        const last_line_number =
          this.virtual_document.last_virtual_line -
          this.virtual_document.blank_lines_between_cells;
        // skip diagnostics that point past the end of the virtual document
        if (start.line > last_line_number) {
          this.console.log(
            `Out of range diagnostic (${start.line} line > ${last_line_number}) was skipped `,
            diagnostics
          );
          return;
        } else {
          let last_line = this.virtual_document.last_line;
          if (start.line == last_line_number && start.ch > last_line.length) {
            this.console.log(
              `Out of range diagnostic (${start.ch} character > ${last_line.length} at line ${last_line_number}) was skipped `,
              diagnostics
            );
            return;
          }
        }

        let document: VirtualDocument;
        try {
          // assuming that we got a response for this document
          let start_in_root =
            this.transform_virtual_position_to_root_position(start);
          document =
            this.virtual_editor.document_at_root_position(start_in_root);
        } catch (e) {
          this.console.warn(
            `Could not place inspections from ${response.uri}`,
            ` inspections: `,
            diagnostics,
            'error: ',
            e
          );
          return;
        }

        // This may happen if the response came delayed
        // and the user already changed the document so
        // that now this regions is in another virtual document!
        if (this.virtual_document !== document) {
          this.console.log(
            `Ignoring inspections from ${response.uri}`,
            ` (this region is covered by a another virtual document: ${document.uri})`,
            ` inspections: `,
            diagnostics
          );
          return;
        }

        if (
          document.virtual_lines
            .get(start.line)!
            .skip_inspect.indexOf(document.id_path) !== -1
        ) {
          this.console.log(
            'Ignoring inspections silenced for this document:',
            diagnostics
          );
          return;
        }

        let ce_editor = document.get_editor_at_virtual_line(start);
        let cm_editor =
          this.virtual_editor.ce_editor_to_cm_editor.get(ce_editor)!;

        let start_in_editor = document.transform_virtual_to_editor(start);
        let end_in_editor: IEditorPosition | null;

        if (start_in_editor === null) {
          this.console.warn(
            'Start in editor could not be be determined for',
            diagnostics
          );
          return;
        }

        // some servers return strange positions for ends
        try {
          end_in_editor = document.transform_virtual_to_editor(end);
        } catch (err) {
          this.console.warn('Malformed range for diagnostic', end);
          // fall back to a one-character range starting at `start`
          end_in_editor = { ...start_in_editor, ch: start_in_editor.ch + 1 };
        }

        if (end_in_editor === null) {
          this.console.warn(
            'End in editor could not be be determined for',
            diagnostics
          );
          return;
        }

        let range_in_editor = {
          start: start_in_editor,
          end: end_in_editor
        };
        // what a pity there is no hash in the standard library...
        // we could use this: https://stackoverflow.com/a/7616484 though it may not be worth it:
        // the stringified diagnostic objects are only about 100-200 JS characters anyway,
        // depending on the message length; this could be reduced using some structure-aware
        // stringifier; such a stringifier could also prevent the possibility of having a false
        // negative due to a different ordering of keys
        // obviously, the hash would prevent recovery of info from the key.
        let diagnostic_hash = JSON.stringify({
          // diagnostics without ranges
          diagnostics: diagnostics.map(diagnostic => [
            diagnostic.severity,
            diagnostic.message,
            diagnostic.code,
            diagnostic.source,
            diagnostic.relatedInformation
          ]),
          // the apparent marker position will change in the notebook with every line change for each marker
          // after the (inserted/removed) line - but such markers should not be invalidated,
          // i.e. the invalidation should be performed in the cell space, not in the notebook coordinate space,
          // thus we transform the coordinates and keep the cell id in the hash
          range: range_in_editor,
          editor: this.unique_editor_ids.get(cm_editor)
        });
        for (let diagnostic of diagnostics) {
          diagnostics_list.push({
            diagnostic,
            editor: cm_editor,
            range: range_in_editor
          });
        }

        markers_to_retain.add(diagnostic_hash);

        if (!this.marked_diagnostics.has(diagnostic_hash)) {
          // one marker per range: style it by the most severe diagnostic
          // (lowest LSP severity code = most severe)
          const highestSeverityCode = diagnostics
            .map(diagnostic => diagnostic.severity || this.defaultSeverity)
            .sort()[0];

          const severity = DiagnosticSeverity[highestSeverityCode];

          const classNames = [
            'cm-lsp-diagnostic',
            'cm-lsp-diagnostic-' + severity
          ];

          const tags: lsProtocol.DiagnosticTag[] = [];
          for (let diagnostic of diagnostics) {
            if (diagnostic.tags) {
              tags.push(...diagnostic.tags);
            }
          }
          for (const tag of new Set(tags)) {
            classNames.push('cm-lsp-diagnostic-tag-' + DiagnosticTag[tag]);
          }
          let options: CodeMirror.TextMarkerOptions = {
            title: diagnostics
              .map(d => d.message + (d.source ? ' (' + d.source + ')' : ''))
              .join('\n'),
            className: classNames.join(' ')
          };
          let marker;
          try {
            marker = cm_editor
              .getDoc()
              .markText(start_in_editor, end_in_editor, options);
          } catch (e) {
            this.console.warn(
              'Marking inspection (diagnostic text) failed:',
              diagnostics,
              e
            );
            return;
          }
          this.marked_diagnostics.set(diagnostic_hash, marker);
        }
      }
    );

    // remove the markers which were not included in the new message
    this.removeUnusedDiagnosticMarkers(markers_to_retain);

    this.diagnostics_db.set(this.virtual_document, diagnostics_list);
  }

  /** Handler for textDocument/publishDiagnostics notifications from the server. */
  public handleDiagnostic = (
    connection: LSPConnection,
    response: lsProtocol.PublishDiagnosticsParams
  ) => {
    // use optional chaining operator because the diagnostics message may come late (after the document was disposed)
    if (!uris_equal(response.uri, this.virtual_document?.document_info?.uri)) {
      return;
    }

    if (this.virtual_document.last_virtual_line === 0) {
      return;
    }

    /* TODO: gutters */
    try {
      this.last_response = response;
      this.setDiagnostics(response);
      diagnostics_panel.update();
    } catch (e) {
      this.console.warn(e);
    }
  };

  /** Re-applies the last server response, e.g. after the ignore settings changed. */
  public refreshDiagnostics() {
    if (this.last_response) {
      this.setDiagnostics(this.last_response);
    }
    diagnostics_panel.update();
  }

  /** Clears every marker whose hash is not in `to_retain`. */
  protected removeUnusedDiagnosticMarkers(to_retain: Set<string>) {
    this.marked_diagnostics.forEach(
      (marker: CodeMirror.TextMarker, diagnostic_hash: string) => {
        if (!to_retain.has(diagnostic_hash)) {
          this.marked_diagnostics.delete(diagnostic_hash);
          marker.clear();
        }
      }
    );
  }

  /** Tears down markers, databases and panel bindings; counterpart of register(). */
  remove(): void {
    this.settings.changed.disconnect(this.refreshDiagnostics, this);
    // remove all markers
    this.removeUnusedDiagnosticMarkers(new Set());
    this.diagnostics_db.clear();
    diagnostics_databases.delete(this.virtual_editor);
    this.unique_editor_ids.clear();

    // nulling the model makes the panel's `widget` getter rebuild it next time
    if (
      diagnostics_panel.content.model.virtual_editor === this.virtual_editor
    ) {
      diagnostics_panel.content.model.virtual_editor = null;
      diagnostics_panel.content.model.diagnostics = null;
      diagnostics_panel.content.model.adapter = null;
    }

    diagnostics_panel.update();
    super.remove();
  }
}
the_stack
/*
 * Unit tests for the PMML marshaller's Scorecard support.
 *
 * Each test round-trips a fixture XML document (from TestData_ScoreCards)
 * through XML2PMML and asserts the resulting object model field-by-field:
 * DataDictionary, model attributes, MiningSchema, Output, Characteristics,
 * simple/compound/nested predicates and ComplexPartialScore expressions.
 * Several tests also serialize back with PMML2XML and assert the round-trip
 * is lossless.
 */
import * as XMLJS from "xml-js";
import {
  Attribute,
  Characteristic,
  Characteristics,
  CompoundPredicate,
  DataDictionary,
  False,
  FieldName,
  MiningField,
  Model,
  Output,
  OutputField,
  PMML,
  PMML2XML,
  Predicate,
  Scorecard,
  SimplePredicate,
  True,
  XML2PMML,
} from "@kogito-tooling/pmml-editor-marshaller";
import {
  SCORE_CARD_BASIC_COMPLEX_PARTIAL_SCORE,
  SCORE_CARD_COMPOUND_PREDICATE,
  SCORE_CARD_NESTED_COMPLEX_PARTIAL_SCORE,
  SCORE_CARD_NESTED_COMPOUND_PREDICATE,
  SCORE_CARD_PROTOTYPES,
  SCORE_CARD_SIMPLE_PREDICATE,
  SCORE_CARD_SIMPLE_PREDICATE_SINGLE,
} from "./TestData_ScoreCards";

describe("Scorecard tests", () => {
  // Callback used by assertCompoundPredicate to check each child predicate.
  type PredicateAssertion = (predicate: Predicate) => void;

  test("Scorecard::DataDictionary", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);

    expect(pmml).not.toBeNull();

    const dataDictionary: DataDictionary = pmml.DataDictionary;
    expect(dataDictionary.DataField.length).toBe(3);
    expect(dataDictionary.DataField[0].name).toBe("input1");
    expect(dataDictionary.DataField[0].dataType).toBe("double");
    expect(dataDictionary.DataField[0].optype).toBe("continuous");
    expect(dataDictionary.DataField[1].name).toBe("input2");
    expect(dataDictionary.DataField[1].dataType).toBe("double");
    expect(dataDictionary.DataField[1].optype).toBe("continuous");
    expect(dataDictionary.DataField[2].name).toBe("score");
    expect(dataDictionary.DataField[2].dataType).toBe("double");
    expect(dataDictionary.DataField[2].optype).toBe("continuous");
  });

  test("Scorecard::Models", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);

    expect(pmml).not.toBeNull();
    expect(pmml.models).not.toBeUndefined();

    const models: Model[] = pmml.models ?? [];
    expect(models.length).toBe(1);

    const model: Model = models[0];
    expect(model).toBeInstanceOf(Scorecard);

    const scorecard: Scorecard = model as Scorecard;
    expect(scorecard.modelName).toBe("SimpleScorecard");
    expect(scorecard.functionName).toBe("regression");
    expect(scorecard.useReasonCodes).toBeTruthy();
    expect(scorecard.reasonCodeAlgorithm).toBe("pointsBelow");
    expect(scorecard.initialScore).toBe(5);
    expect(scorecard.baselineMethod).toBe("other");
    expect(scorecard.baselineScore).toBe(6);
  });

  test("Scorecard::Models::No modelName", () => {
    // A Scorecard element without a modelName attribute must unmarshal to undefined.
    const xml: string = `<PMML xmlns="http://www.dmg.org/PMML-4_4" version="4.4"> <Header/> <DataDictionary/> <Scorecard/> </PMML>`;
    const pmml: PMML = XML2PMML(xml);

    expect(pmml).not.toBeNull();
    expect(pmml.models).not.toBeUndefined();

    const models: Model[] = pmml.models ?? [];
    expect(models.length).toBe(1);

    const model: Model = models[0];
    expect(model).toBeInstanceOf(Scorecard);

    const scorecard: Scorecard = model as Scorecard;
    expect(scorecard.modelName).toBeUndefined();
  });

  test("Scorecard::MiningSchema", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(scorecard.MiningSchema.MiningField.length).toBe(3);

    const miningFields: MiningField[] = scorecard.MiningSchema.MiningField;
    expect(miningFields[0].name).toBe("input1");
    expect(miningFields[0].usageType).toBe("active");
    expect(miningFields[0].invalidValueTreatment).toBe("asMissing");
    expect(miningFields[1].name).toBe("input2");
    expect(miningFields[1].usageType).toBe("active");
    expect(miningFields[1].invalidValueTreatment).toBe("asMissing");
    expect(miningFields[2].name).toBe("score");
    expect(miningFields[2].usageType).toBe("target");
    expect(miningFields[2].invalidValueTreatment).toBeUndefined();
  });

  test("Scorecard::Output", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(scorecard.Output?.OutputField.length).toBe(3);

    const outputFields: OutputField[] = scorecard.Output?.OutputField as OutputField[];
    expect(outputFields[0].name).toBe("Score");
    expect(outputFields[0].feature).toBe("predictedValue");
    expect(outputFields[0].dataType).toBe("double");
    expect(outputFields[0].optype).toBe("continuous");
    expect(outputFields[1].name).toBe("Reason Code 1");
    expect(outputFields[1].rank).toBe(1);
    expect(outputFields[1].feature).toBe("reasonCode");
    expect(outputFields[1].dataType).toBe("string");
    expect(outputFields[1].optype).toBe("categorical");
    expect(outputFields[2].name).toBe("Reason Code 2");
    expect(outputFields[2].rank).toBe(2);
    expect(outputFields[2].feature).toBe("reasonCode");
    expect(outputFields[2].dataType).toBe("string");
    expect(outputFields[2].optype).toBe("categorical");
  });

  test("Scorecard::Characteristics", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(scorecard.Characteristics.Characteristic.length).toBe(2);

    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    expect(characteristics[0].name).toBe("input1Score");
    expect(characteristics[0].baselineScore).toBe(4);
    expect(characteristics[0].reasonCode).toBe("Input1ReasonCode");
    expect(characteristics[1].name).toBe("input2Score");
    expect(characteristics[1].baselineScore).toBe(8);
    expect(characteristics[1].reasonCode).toBe("Input2ReasonCode");
  });

  test("Scorecard::Characteristics:Attributes", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;
    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];

    expect(characteristics[0].Attribute?.length).toBe(2);
    expect(characteristics[0].Attribute[0].partialScore).toBe(-12);
    expect(characteristics[0].Attribute[1].partialScore).toBe(50);
    expect(characteristics[1].Attribute?.length).toBe(2);
    expect(characteristics[1].Attribute[0].partialScore).toBe(-8);
    expect(characteristics[1].Attribute[1].partialScore).toBe(32);
  });

  test("Scorecard::Characteristics:Attributes::SimplePredicate", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;
    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    const characteristic0Attributes: Attribute[] = characteristics[0].Attribute as Attribute[];
    const characteristic1Attributes: Attribute[] = characteristics[1].Attribute as Attribute[];

    assertSimplePredicate(characteristic0Attributes[0].predicate, {
      field: "input1" as FieldName,
      operator: "lessOrEqual",
      value: "10",
    });
    assertSimplePredicate(characteristic0Attributes[1].predicate, {
      field: "input1" as FieldName,
      operator: "greaterThan",
      value: "10",
    });
    assertSimplePredicate(characteristic1Attributes[0].predicate, {
      field: "input2" as FieldName,
      operator: "lessOrEqual",
      value: "-5",
    });
    assertSimplePredicate(characteristic1Attributes[1].predicate, {
      field: "input2" as FieldName,
      operator: "greaterThan",
      value: "-5",
    });
  });

  test("Scorecard::Characteristics:Attributes::SimplePredicate::Single", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_SIMPLE_PREDICATE_SINGLE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;
    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    const characteristic0Attributes: Attribute[] = characteristics[0].Attribute as Attribute[];

    assertSimplePredicate(characteristic0Attributes[0].predicate, {
      field: "input1" as FieldName,
      operator: "lessOrEqual",
      value: "10",
    });

    //Check round-trip
    const xml: string = PMML2XML(pmml);
    expect(xml).not.toBeNull();

    const pmml2: PMML = XML2PMML(xml);
    expect(pmml).toEqual(pmml2);
  });

  test("Scorecard::Characteristics:Attributes::CompoundPredicate", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_COMPOUND_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(pmml.DataDictionary.DataField.length).toBe(5);
    expect(scorecard.MiningSchema.MiningField.length).toBe(5);
    expect(scorecard.Output?.OutputField.length).toBe(4);
    expect(scorecard.Characteristics.Characteristic.length).toBe(3);

    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    const characteristic0: Characteristic = characteristics[0];
    const characteristic1: Characteristic = characteristics[1];
    const characteristic2: Characteristic = characteristics[2];

    //Characteristics
    expect(characteristic0.name).toBe("characteristic1Score");
    expect(characteristic0.baselineScore).toBe(-5.5);
    expect(characteristic0.reasonCode).toBe("characteristic1ReasonCode");
    expect(characteristic1.name).toBe("characteristic2Score");
    expect(characteristic1.baselineScore).toBe(11);
    expect(characteristic1.reasonCode).toBe("characteristic2ReasonCode");
    expect(characteristic2.name).toBe("characteristic3Score");
    expect(characteristic2.baselineScore).toBe(25);
    expect(characteristic2.reasonCode).toBe("characteristic3ReasonCode");

    const characteristic0Attributes: Attribute[] = characteristics[0].Attribute as Attribute[];
    const characteristic1Attributes: Attribute[] = characteristics[1].Attribute as Attribute[];
    const characteristic2Attributes: Attribute[] = characteristics[2].Attribute as Attribute[];

    //Characteristic 0, Attributes
    expect(characteristic0Attributes.length).toBe(3);
    expect(characteristic0Attributes[2].predicate).toBeInstanceOf(True);

    //Characteristic 0, Attribute 0
    assertCompoundPredicate(characteristic0Attributes[0].predicate, { booleanOperator: "and" }, [
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input1" as FieldName,
          operator: "lessOrEqual",
          value: "-5",
        });
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input2" as FieldName,
          operator: "lessOrEqual",
          value: "-5",
        });
      },
    ]);
    //Characteristic 0, Attribute 1
    assertCompoundPredicate(characteristic0Attributes[1].predicate, { booleanOperator: "and" }, [
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input1" as FieldName,
          operator: "greaterThan",
          value: "-5",
        });
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input2" as FieldName,
          operator: "greaterThan",
          value: "-5",
        });
      },
    ]);

    //Characteristic 1, Attributes
    expect(characteristic1Attributes.length).toBe(4);
    expect(characteristic1Attributes[2].predicate).toBeInstanceOf(False);
    expect(characteristic1Attributes[3].predicate).toBeInstanceOf(True);

    //Characteristic 1, Attribute 0
    assertCompoundPredicate(characteristic1Attributes[0].predicate, { booleanOperator: "or" }, [
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input3" as FieldName,
          operator: "equal",
          value: "classA",
        });
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input4" as FieldName,
          operator: "equal",
          value: "classA",
        });
      },
    ]);
    //Characteristic 1, Attribute 1
    assertCompoundPredicate(characteristic1Attributes[1].predicate, { booleanOperator: "or" }, [
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input3" as FieldName,
          operator: "equal",
          value: "classB",
        });
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input4" as FieldName,
          operator: "equal",
          value: "classB",
        });
      },
    ]);

    //Characteristic 2, Attributes
    expect(characteristic2Attributes.length).toBe(2);
    expect(characteristic2Attributes[1].predicate).toBeInstanceOf(True);

    //Characteristic 2, Attribute 0
    assertCompoundPredicate(characteristic2Attributes[0].predicate, { booleanOperator: "xor" }, [
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input3" as FieldName,
          operator: "equal",
          value: "classA",
        });
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input4" as FieldName,
          operator: "equal",
          value: "classA",
        });
      },
    ]);
  });

  test("Scorecard::Characteristics:Attributes::NestedCompoundPredicate", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_NESTED_COMPOUND_PREDICATE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(pmml.DataDictionary.DataField.length).toBe(3);
    expect(scorecard.MiningSchema.MiningField.length).toBe(3);
    expect(scorecard.Output?.OutputField.length).toBe(3);
    expect(scorecard.Characteristics.Characteristic.length).toBe(2);

    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    const characteristic0: Characteristic = characteristics[0];
    const characteristic1: Characteristic = characteristics[1];

    //Characteristics
    expect(characteristic0.name).toBe("characteristic1Score");
    expect(characteristic0.baselineScore).toBe(21.8);
    expect(characteristic0.reasonCode).toBe("characteristic1ReasonCode");
    expect(characteristic1.name).toBe("characteristic2Score");
    expect(characteristic1.baselineScore).toBe(11);
    expect(characteristic1.reasonCode).toBe("characteristic2ReasonCode");

    const characteristic0Attributes: Attribute[] = characteristics[0].Attribute as Attribute[];
    const characteristic1Attributes: Attribute[] = characteristics[1].Attribute as Attribute[];

    //Characteristic 0, Attributes
    expect(characteristic0Attributes.length).toBe(2);
    expect(characteristic0Attributes[1].predicate).toBeInstanceOf(True);

    //Characteristic 0, Attribute 0
    assertCompoundPredicate(characteristic0Attributes[0].predicate, { booleanOperator: "and" }, [
      (predicate: Predicate) => {
        assertCompoundPredicate(predicate, { booleanOperator: "and" }, [
          (cp: Predicate) => {
            expect(cp).toBeInstanceOf(True);
          },
          (cp: Predicate) => {
            assertSimplePredicate(cp, {
              field: "input1" as FieldName,
              operator: "greaterThan",
              value: "-15",
            });
          },
          (cp: Predicate) => {
            assertSimplePredicate(cp, {
              field: "input1" as FieldName,
              operator: "lessOrEqual",
              value: "25.4",
            });
          },
        ]);
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input2" as FieldName,
          operator: "notEqual",
          value: "classA",
        });
      },
    ]);

    //Characteristic 1, Attributes
    expect(characteristic1Attributes.length).toBe(3);
    expect(characteristic1Attributes[2].predicate).toBeInstanceOf(True);

    //Characteristic 1, Attribute 0
    assertCompoundPredicate(characteristic1Attributes[0].predicate, { booleanOperator: "or" }, [
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input1" as FieldName,
          operator: "lessOrEqual",
          value: "-20",
        });
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input2" as FieldName,
          operator: "equal",
          value: "classA",
        });
      },
    ]);
    //Characteristic 1, Attribute 1
    assertCompoundPredicate(characteristic1Attributes[1].predicate, { booleanOperator: "or" }, [
      (predicate: Predicate) => {
        assertCompoundPredicate(predicate, { booleanOperator: "and" }, [
          (cp: Predicate) => {
            assertCompoundPredicate(cp, { booleanOperator: "and" }, [
              (cp2: Predicate) => {
                assertSimplePredicate(cp2, {
                  field: "input1" as FieldName,
                  operator: "greaterOrEqual",
                  value: "5",
                });
              },
              (cp2: Predicate) => {
                assertSimplePredicate(cp2, {
                  field: "input1" as FieldName,
                  operator: "lessThan",
                  value: "12",
                });
              },
            ]);
          },
          (cp: Predicate) => {
            assertSimplePredicate(cp, {
              field: "input2" as FieldName,
              operator: "equal",
              value: "classB",
            });
          },
        ]);
      },
      (predicate: Predicate) => {
        assertSimplePredicate(predicate, {
          field: "input2" as FieldName,
          operator: "equal",
          value: "classC",
        });
      },
    ]);
  });

  test("Scorecard::BasicComplexPartialScore", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_BASIC_COMPLEX_PARTIAL_SCORE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(scorecard.Characteristics.Characteristic.length).toBe(2);

    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    const characteristic0: Characteristic = characteristics[0];
    const characteristic1: Characteristic = characteristics[1];

    //Characteristics
    expect(characteristic0.name).toBe("characteristic1Score");
    expect(characteristic0.baselineScore).toBe(20);
    expect(characteristic0.reasonCode).toBe("characteristic1ReasonCode");
    expect(characteristic1.name).toBe("characteristic2Score");
    expect(characteristic1.baselineScore).toBe(5);
    expect(characteristic1.reasonCode).toBe("characteristic2ReasonCode");

    const characteristic0Attributes: Attribute[] = characteristics[0].Attribute as Attribute[];
    const characteristic1Attributes: Attribute[] = characteristics[1].Attribute as Attribute[];

    //Characteristic 0, Attributes
    expect(characteristic0Attributes.length).toBe(2);
    expect(characteristic0Attributes[0].predicate).toBeInstanceOf(SimplePredicate);
    expect(characteristic0Attributes[1].predicate).toBeInstanceOf(True);

    //Characteristic 0, Attributes' ComplexPartialScores
    expect(characteristic0Attributes[0].ComplexPartialScore).not.toBeUndefined();
    expect(characteristic0Attributes[1].ComplexPartialScore).toBeUndefined();

    // ComplexPartialScore is stored as an xml-js JSON structure; serialize it
    // back to XML so the whole expression tree can be compared in one assertion.
    const c0a0cps: string = XMLJS.json2xml(JSON.stringify(characteristic0Attributes[0].ComplexPartialScore));
    expect(c0a0cps).toBe(`<Apply function="+"><FieldRef field="input1"/><FieldRef field="input2"/></Apply>`);

    //Characteristic 1, Attributes
    expect(characteristic1Attributes.length).toBe(2);
    expect(characteristic1Attributes[0].predicate).toBeInstanceOf(SimplePredicate);
    expect(characteristic1Attributes[1].predicate).toBeInstanceOf(True);

    //Characteristic 1, Attributes' ComplexPartialScores
    expect(characteristic1Attributes[0].ComplexPartialScore).not.toBeUndefined();
    expect(characteristic1Attributes[1].ComplexPartialScore).toBeUndefined();

    const c1a0cps: string = XMLJS.json2xml(JSON.stringify(characteristic1Attributes[0].ComplexPartialScore));
    expect(c1a0cps).toBe(`<Apply function="*"><FieldRef field="input1"/><FieldRef field="input2"/></Apply>`);

    //Check round-trip
    const xml: string = PMML2XML(pmml);
    expect(xml).not.toBeNull();

    const pmml2: PMML = XML2PMML(xml);
    expect(pmml).toEqual(pmml2);
  });

  test("Scorecard::NestedComplexPartialScore", () => {
    const pmml: PMML = XML2PMML(SCORE_CARD_NESTED_COMPLEX_PARTIAL_SCORE);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;

    expect(scorecard.Characteristics.Characteristic.length).toBe(2);

    const characteristics: Characteristic[] = scorecard.Characteristics?.Characteristic as Characteristic[];
    const characteristic0: Characteristic = characteristics[0];
    const characteristic1: Characteristic = characteristics[1];

    //Characteristics
    expect(characteristic0.name).toBe("characteristic1Score");
    expect(characteristic0.baselineScore).toBe(20);
    expect(characteristic0.reasonCode).toBe("characteristic1ReasonCode");
    expect(characteristic1.name).toBe("characteristic2Score");
    expect(characteristic1.baselineScore).toBe(5);
    expect(characteristic1.reasonCode).toBe("characteristic2ReasonCode");

    const characteristic0Attributes: Attribute[] = characteristics[0].Attribute as Attribute[];
    const characteristic1Attributes: Attribute[] = characteristics[1].Attribute as Attribute[];

    //Characteristic 0, Attributes
    expect(characteristic0Attributes.length).toBe(2);
    expect(characteristic0Attributes[0].predicate).toBeInstanceOf(SimplePredicate);
    expect(characteristic0Attributes[1].predicate).toBeInstanceOf(True);

    //Characteristic 0, Attributes' ComplexPartialScores
    expect(characteristic0Attributes[0].ComplexPartialScore).not.toBeUndefined();
    expect(characteristic0Attributes[1].ComplexPartialScore).toBeUndefined();

    const c0a0cps: string = XMLJS.json2xml(JSON.stringify(characteristic0Attributes[0].ComplexPartialScore));
    expect(c0a0cps).toBe(
      `<Apply function="-"><Apply function="+"><FieldRef field="input1"/><FieldRef field="input2"/></Apply><Constant>5</Constant></Apply>`
    );

    //Characteristic 1, Attributes
    expect(characteristic1Attributes.length).toBe(2);
    expect(characteristic1Attributes[0].predicate).toBeInstanceOf(SimplePredicate);
    expect(characteristic1Attributes[1].predicate).toBeInstanceOf(True);

    //Characteristic 1, Attributes' ComplexPartialScores
    expect(characteristic1Attributes[0].ComplexPartialScore).not.toBeUndefined();
    expect(characteristic1Attributes[1].ComplexPartialScore).toBeUndefined();

    const c1a0cps: string = XMLJS.json2xml(JSON.stringify(characteristic1Attributes[0].ComplexPartialScore));
    expect(c1a0cps).toBe(
      `<Apply function="*"><Constant>2</Constant><Apply function="*"><FieldRef field="input1"/><Apply function="/"><FieldRef field="input2"/><Constant>2</Constant></Apply></Apply></Apply>`
    );

    //Check round-trip
    const xml: string = PMML2XML(pmml);
    expect(xml).not.toBeNull();

    const pmml2: PMML = XML2PMML(xml);
    expect(pmml).toEqual(pmml2);
  });

  test("Scorecard::prototype::preservation", () => {
    // Verifies which unmarshalled nodes keep class prototypes (Scorecard) and
    // which are plain objects/arrays, including elements pushed after parsing.
    const pmml: PMML = XML2PMML(SCORE_CARD_PROTOTYPES);
    const models: Model[] = pmml.models ?? [];
    const scorecard: Scorecard = models[0] as Scorecard;
    const miningSchema = scorecard.MiningSchema;
    const miningFields: MiningField[] = scorecard.MiningSchema.MiningField;
    const output: Output = scorecard.Output as Output;
    const outputFields: OutputField[] = output.OutputField;
    const characteristics: Characteristics = scorecard.Characteristics;
    const characteristicFields: Characteristic[] = characteristics.Characteristic;

    expect(Object.getPrototypeOf(scorecard)).toBe(Scorecard.prototype);

    expect(Object.getPrototypeOf(miningSchema)).toBe(Object.prototype);
    expect(Object.getPrototypeOf(miningFields)).toBe(Array.prototype);
    expect(Object.getPrototypeOf(miningFields[0])).toBe(Object.prototype);
    miningFields.push({ name: "mf" as FieldName });
    expect(Object.getPrototypeOf(miningFields[1])).toBe(Object.prototype);

    expect(Object.getPrototypeOf(output)).toBe(Object.prototype);
    expect(Object.getPrototypeOf(outputFields)).toBe(Array.prototype);
    expect(Object.getPrototypeOf(outputFields[0])).toBe(Object.prototype);
    outputFields.push({ name: "mf" as FieldName, dataType: "string" });
    expect(Object.getPrototypeOf(outputFields[1])).toBe(Object.prototype);

    expect(Object.getPrototypeOf(characteristics)).toBe(Object.prototype);
    expect(Object.getPrototypeOf(characteristicFields)).toBe(Array.prototype);
    expect(Object.getPrototypeOf(characteristicFields[0])).toBe(Object.prototype);
    characteristicFields.push({ name: "test", Attribute: [] });
    expect(Object.getPrototypeOf(characteristicFields[1])).toBe(Object.prototype);
  });

  test("Scorecard::Models::isScorable::true", () => {
    const scorecard: Scorecard = makeScorecardWithIsScorable(true);
    expect(scorecard.isScorable).toBeTruthy();
  });

  test("Scorecard::Models::isScorable::false", () => {
    const scorecard: Scorecard = makeScorecardWithIsScorable(false);
    expect(scorecard.isScorable).toBeFalsy();
  });

  // Builds a minimal PMML document containing a single Scorecard with the given
  // isScorable attribute and returns the unmarshalled Scorecard.
  function makeScorecardWithIsScorable(isScorable: boolean): Scorecard {
    const xml: string = `<PMML xmlns="http://www.dmg.org/PMML-4_4" version="4.4"> <Header/> <DataDictionary/> <Scorecard isScorable="${isScorable}"/> </PMML>`;
    const pmml: PMML = XML2PMML(xml);

    expect(pmml).not.toBeNull();
    expect(pmml.models).not.toBeUndefined();

    const models: Model[] = pmml.models ?? [];
    expect(models.length).toBe(1);

    const model: Model = models[0];
    expect(model).toBeInstanceOf(Scorecard);

    return model as Scorecard;
  }

  test("Scorecard::Models::useReasonCodes::true", () => {
    const scorecard: Scorecard = makeScorecardWithUseReasonCodes(true);
    expect(scorecard.useReasonCodes).toBeTruthy();
  });

  test("Scorecard::Models::useReasonCodes::false", () => {
    const scorecard: Scorecard = makeScorecardWithUseReasonCodes(false);
    expect(scorecard.useReasonCodes).toBeFalsy();
  });

  // Builds a minimal PMML document containing a single Scorecard with the given
  // useReasonCodes attribute and returns the unmarshalled Scorecard.
  function makeScorecardWithUseReasonCodes(useReasonCodes: boolean): Scorecard {
    const xml: string = `<PMML xmlns="http://www.dmg.org/PMML-4_4" version="4.4"> <Header/> <DataDictionary/> <Scorecard useReasonCodes="${useReasonCodes}"/> </PMML>`;
    const pmml: PMML = XML2PMML(xml);

    expect(pmml).not.toBeNull();
    expect(pmml.models).not.toBeUndefined();

    const models: Model[] = pmml.models ?? [];
    expect(models.length).toBe(1);

    const model: Model = models[0];
    expect(model).toBeInstanceOf(Scorecard);

    return model as Scorecard;
  }

  // Asserts that actualPredicate is a SimplePredicate matching the expected
  // field, operator and value.
  function assertSimplePredicate(actualPredicate: Predicate | undefined, expectedPredicate: SimplePredicate): void {
    expect(actualPredicate).toBeInstanceOf(SimplePredicate);
    const actualSimplePredicate: SimplePredicate = actualPredicate as SimplePredicate;
    expect(actualSimplePredicate.field).toEqual(expectedPredicate.field);
    expect(actualSimplePredicate.operator).toEqual(expectedPredicate.operator);
    expect(actualSimplePredicate.value).toEqual(expectedPredicate.value);
  }

  // Asserts that actualPredicate is a CompoundPredicate with the expected
  // boolean operator, then applies one assertion callback per child predicate
  // (the number of assertions must equal the number of children).
  function assertCompoundPredicate(
    actualPredicate: Predicate | undefined,
    expectedPredicate: CompoundPredicate,
    assertions: PredicateAssertion[]
  ): void {
    expect(actualPredicate).toBeInstanceOf(CompoundPredicate);
    const actualCompoundPredicate: CompoundPredicate = actualPredicate as CompoundPredicate;
    const actualCompoundPredicatePredicates: Predicate[] = actualCompoundPredicate.predicates as Predicate[];
    expect(actualCompoundPredicate.booleanOperator).toEqual(expectedPredicate.booleanOperator);
    expect(actualCompoundPredicate.predicates?.length).toBe(assertions.length);
    for (let i: number = 0; i < assertions.length; i++) {
      assertions[i](actualCompoundPredicatePredicates[i]);
    }
  }
});
the_stack
/*
 * CodeMirror glue for Emmet abbreviation tracking.
 *
 * A single module-level CMEditorProxy/AbbreviationTrackingController pair is
 * shared across all editors; `proxy.run()` temporarily points the proxy at a
 * specific editor for the duration of a callback. The proxy renders the
 * abbreviation marker, the optional "forced" marker widget and the expanded
 * preview pane; the controller (from @emmetio/action-utils) owns the tracking
 * state machine.
 */
import { UserConfig } from 'emmet';
import { AbbreviationTrackingController, EditorProxy, AbbreviationTracker, AbbreviationTrackerType, JSX_PREFIX, StopTrackingParams, StartTrackingParams } from '@emmetio/action-utils';
import { getInternalState, getCaret, toRange, errorSnippet, replaceWithSnippet } from './lib/utils';
import { isSupported, isJSX, isCSS, isHTML, isXML, syntaxFromPos, docSyntax, syntaxInfo, enabledForSyntax, getSyntaxType } from './lib/syntax';
import { getOptions, extract, expand } from './lib/emmet';
import getOutputOptions from './lib/output';
import getEmmetConfig from './lib/config';

/** Completion entry shape consumed by CodeMirror's show-hint addon. */
export interface CompletionItem {
    text: string;
    displayText: string;
    hint(): void;
    from: CodeMirror.Position;
    to: CodeMirror.Position;
}

/** Class name for Emmet abbreviation marker in editor */
const markClass = 'emmet-abbreviation';

/** Class name for Emmet abbreviation preview in editor */
const previewClass = 'emmet-abbreviation-preview';

/**
 * EditorProxy implementation backed by a CodeMirror editor. `cm` is swapped
 * per-call via `run()`, so this instance is shared between editors.
 */
class CMEditorProxy implements EditorProxy {
    public cm: CodeMirror.Editor;
    // Text marker highlighting the currently tracked abbreviation, if any.
    public marker: CodeMirror.TextMarker | null = null;
    // Nested CodeMirror instance used to render the expanded preview.
    public preview: CodeMirror.Editor | null = null;
    // Widget element shown when the abbreviation was entered in "forced" mode.
    public forcedMarker: HTMLElement | null = null;

    get id() {
        return getInternalState(this.cm).id;
    }

    /** Returns document text, optionally sliced to the [from, to) char range. */
    substr(from?: number, to?: number) {
        const value = this.cm.getValue();
        if (from === undefined && to === undefined) {
            return value;
        }
        return value.slice(from || 0, to);
    }

    /** Replaces the [from, to) character range with `value`. */
    replace(value: string, from: number, to: number) {
        this.cm.replaceRange(value, this.cm.posFromIndex(from), this.cm.posFromIndex(to));
    }

    syntax() {
        return docSyntax(this.cm);
    }

    size() {
        return this.cm.getValue().length;
    }

    config(pos: number): UserConfig {
        return getOptions(this.cm, pos);
    }

    outputOptions(pos: number, inline?: boolean) {
        return getOutputOptions(this.cm, pos, inline);
    }

    /** Derives the output options used when rendering the preview pane. */
    previewConfig(config: UserConfig) {
        return {
            ...config,
            options: {
                ...config.options,
                'output.field': previewField,
                'output.indent': ' ',
                'output.baseIndent': ''
            }
        };
    }

    allowTracking(pos: number) {
        // Delegates to the module-level allowTracking() helper below.
        return allowTracking(this.cm, pos);
    }

    /** Marks the tracker's range in the editor and adds the forced-mode widget if needed. */
    mark(tracker: AbbreviationTracker): void {
        const { cm } = this;
        this.disposeMarker();
        const [from, to] = toRange(cm, tracker.range);
        this.marker = cm.markText(from, to, {
            inclusiveLeft: true,
            inclusiveRight: true,
            clearWhenEmpty: false,
            className: markClass
        });

        if (tracker.forced && !this.forcedMarker) {
            this.forcedMarker = document.createElement('div');
            this.forcedMarker.className = `${markClass}-marker`;
            cm.addWidget(from, this.forcedMarker, false);
        }
    }

    /** Removes the abbreviation marker and hides the preview pane. */
    unmark(): void {
        this.disposeMarker();
        this.hidePreview();
    }

    /**
     * Shows (or updates) the expansion preview for the tracked abbreviation.
     * Error trackers render an error snippet; simple non-forced abbreviations
     * display no preview at all.
     */
    showPreview(tracker: AbbreviationTracker) {
        const { cm } = this;
        const config = getEmmetConfig(cm);
        // Check if we should display preview
        if (!enabledForSyntax(config.preview, syntaxInfo(cm, tracker.range[0]))) {
            return;
        }

        let content: string | undefined;
        let isError = false;

        if (tracker.type === AbbreviationTrackerType.Error) {
            content = errorSnippet(tracker.error);
            isError = true;
        } else if (tracker.forced || !tracker.simple) {
            content = tracker.preview;
        }

        if (content) {
            if (!this.preview) {
                // Lazily create the preview widget and its embedded editor.
                const previewElem = document.createElement('div');
                previewElem.className = previewClass;

                const pos = cm.posFromIndex(tracker.range[0]);
                if (config.attachPreview) {
                    config.attachPreview(cm, previewElem, pos);
                } else {
                    cm.addWidget(pos, previewElem, false);
                }

                // @ts-ignore CodeMirror typings don't expose the editor constructor as newable
                this.preview = new this.cm.constructor(previewElem, {
                    mode: cm.getOption('mode'),
                    readOnly: 'nocursor',
                    lineNumbers: false
                }) as CodeMirror.Editor;

                const errElement = document.createElement('div');
                errElement.className = `${previewClass}-error`;
                previewElem.appendChild(errElement);
            }

            const wrapper = this.preview.getWrapperElement().parentElement!;
            wrapper.classList.toggle('has-error', isError);
            if (isError) {
                wrapper.querySelector(`.${previewClass}-error`)!.innerHTML = content;
            } else {
                this.preview.setValue(content);
            }
        } else {
            this.hidePreview();
        }
    }

    /** Removes the preview widget from the DOM, if present. */
    hidePreview() {
        if (this.preview) {
            this.preview.getWrapperElement().parentElement!.remove();
            this.preview = null;
        }
    }

    /**
     * Check if given syntax is a CSS dialect (including SCSS, LESS etc)
     */
    isCSS(syntax: string): boolean {
        return isCSS(syntax);
    }

    syntaxType(syntax: string) {
        return getSyntaxType(syntax);
    }

    /**
     * Check if given syntax is a HTML dialect. HTML dialects also support embedded
     * stylesheets in `<style>` tag or `style=""` attribute
     */
    isHTML(syntax: string): boolean {
        return isHTML(syntax);
    }

    /**
     * Check if given syntax is a XML dialect. Unlike HTML, XML dialects don't
     * support embedded stylesheets
     */
    isXML(syntax: string): boolean {
        return isXML(syntax);
    }

    /**
     * Check if given syntax is a JSX dialect
     */
    isJSX(syntax: string) {
        return isJSX(syntax);
    }

    /**
     * Runs given callback in context of given editor
     */
    run<R>(editor: CodeMirror.Editor, callback: () => R): R {
        // Swap `cm` for the duration of the callback, then restore it so nested
        // run() calls against other editors keep working.
        const { cm } = this;
        this.cm = editor;
        const result = callback();
        this.cm = cm;
        return result;
    }

    /** Clears the abbreviation text marker and the forced-mode widget. */
    private disposeMarker() {
        if (this.marker) {
            this.marker.clear();
            this.marker = null;
        }

        if (this.forcedMarker) {
            this.forcedMarker.remove();
            this.forcedMarker = null;
        }
    }
}

// Output-field resolver for previews: renders plain placeholders, no tab stops.
function previewField(index: number, placeholder: string) {
    return placeholder;
}

// Single shared proxy/controller pair for all editors (see module header).
const proxy = new CMEditorProxy();
const controller = new AbbreviationTrackingController<CMEditorProxy>();

/**
 * Attaches abbreviation tracking to the given editor.
 * Returns a dispose function that detaches all listeners and tracker state.
 */
export default function initAbbreviationTracker(editor: CodeMirror.Editor) {
    const onChange = (ed: CodeMirror.Editor) => {
        proxy.run(ed, () => {
            controller.handleChange(proxy, getCaret(ed));
        });
    };
    const onSelectionChange = (ed: CodeMirror.Editor) => {
        proxy.run(ed, () => {
            const caret = getCaret(ed);
            if (!isEnabled(ed, caret)) {
                return;
            }
            const tracker = controller.handleSelectionChange(proxy, caret);
            if (tracker) {
                // Only show the preview while the caret is inside the tracked range.
                if (contains(tracker, caret)) {
                    proxy.showPreview(tracker);
                } else {
                    proxy.hidePreview();
                }
            }
        });
    };

    editor.on('change', onChange);
    editor.on('focus', onSelectionChange);
    editor.on('cursorActivity', onSelectionChange);

    return () => {
        proxy.run(editor, () => controller.disposeEditor(proxy));
        editor.off('change', onChange);
        editor.off('focus', onSelectionChange);
        editor.off('cursorActivity', onSelectionChange);
    };
}

/**
 * Runs given function in context of abbreviation tracker
 */
export function runInTrackerContext<R>(editor: CodeMirror.Editor, callback: (controller: AbbreviationTrackingController<CMEditorProxy>, proxy: CMEditorProxy) => R): R {
    return proxy.run(editor, () => callback(controller, proxy));
}

/**
 * Check if abbreviation tracking is allowed in editor at given location
 */
export function allowTracking(editor: CodeMirror.Editor, pos: number): boolean {
    if (isEnabled(editor, pos)) {
        const syntax = syntaxFromPos(editor, pos);
        return syntax ? isSupported(syntax) || isJSX(syntax) : false;
    }

    return false;
}

/**
 * Check if Emmet auto-complete is enabled
 */
export function isEnabled(editor: CodeMirror.Editor, pos: number): boolean {
    const config = getEmmetConfig(editor);
    return enabledForSyntax(config.mark, syntaxInfo(editor, pos));
}

/**
 * If allowed, tries to extract abbreviation from given completion context
 * @param forceValid Enforces tracker to be valid, e.g. do not track abbreviation
 * if it's not valid
 */
export function extractTracker(editor: CodeMirror.Editor, pos: number, forceValid?: boolean): AbbreviationTracker | undefined {
    return proxy.run(editor, () => {
        const syntax = proxy.syntax();
        // In JSX, abbreviations must be prefixed (JSX_PREFIX) to be tracked.
        const prefix = proxy.isJSX(syntax) ? JSX_PREFIX : '';
        const config = controller.getActivationContext(proxy, pos);
        const abbr = extract(proxy.substr(), pos, getSyntaxType(config?.syntax), { prefix });
        if (abbr) {
            const tracker = controller.startTracking(proxy, abbr.start, abbr.end, { offset: prefix.length, config });
            if (tracker) {
                if (tracker.type === AbbreviationTrackerType.Error && forceValid) {
                    // Caller wants valid trackers only: discard the erroneous one.
                    controller.stopTracking(proxy, { force: true });
                    return;
                }
                proxy.showPreview(tracker);
            }
            return tracker;
        }
    });
}

/**
 * Returns abbreviation tracker for given editor, if any
 */
export function getTracker(editor: CodeMirror.Editor): AbbreviationTracker | undefined {
    return proxy.run(editor, () => controller.getTracker(proxy));
}

/**
 * Start abbreviation tracking in given editor for given range
 */
export function startTracking(editor: CodeMirror.Editor, start: number, pos: number, params?: Partial<StartTrackingParams>) {
    return proxy.run(editor, () => {
        const tracker = controller.startTracking(proxy, start, pos, params);
        if (tracker) {
            proxy.showPreview(tracker);
        }
        return tracker;
    });
}

/**
 * Stops abbreviation tracking in given editor
 */
export function stopTracking(editor: CodeMirror.Editor, params?: Partial<StopTrackingParams>) {
    return proxy.run(editor, () => controller.stopTracking(proxy, params));
}

/**
 * Returns completion item, suitable for auto-hint CodeMirror module,
 * with tracked abbreviation for it
 */
export function getCompletion(editor: CodeMirror.Editor, pos: number): CompletionItem | undefined {
    const tracker = getTracker(editor) || extractTracker(editor, pos);
    if (tracker && contains(tracker, pos) && tracker.type === AbbreviationTrackerType.Abbreviation) {
        const { abbreviation, preview } = tracker;
        return {
            text: abbreviation,
            displayText: preview,
            // Accepting the hint expands the abbreviation in place of the tracked range.
            hint: () => {
                stopTracking(editor);
                const snippet = expand(editor, abbreviation, tracker.config);
                replaceWithSnippet(editor, tracker.range, snippet);
            },
            from: editor.posFromIndex(tracker.range[0]),
            to: editor.posFromIndex(tracker.range[1]),
        } as CompletionItem;
    }
}

/**
 * Restore tracker on undo, if possible
 */
export function restoreOnUndo(editor: CodeMirror.Editor, pos: number, abbr: string) {
    proxy.run(editor, () => {
        const lastTracker = controller.getStoredTracker(proxy);
        if (lastTracker) {
            // Only restore when the undone text matches the stored abbreviation
            // at the same document position.
            const shouldRestore = lastTracker.type === AbbreviationTrackerType.Abbreviation
                && abbr === lastTracker.abbreviation
                && lastTracker.range[0] === pos;
            if (shouldRestore) {
                controller.restoreTracker(proxy, pos);
            }
        }
    })
}

/**
 * Check if tracker range contains given position
 */
export function contains(tracker: AbbreviationTracker, pos: number): boolean {
    return pos >= tracker.range[0] && pos <= tracker.range[1];
}
the_stack
import { assert, createPromise } from '@secret-agent/commons/utils';
import type { ILocationStatus, ILocationTrigger, IPipelineStatus, } from '@secret-agent/interfaces/Location';
import { LocationStatus, LocationTrigger, PipelineStatus } from '@secret-agent/interfaces/Location';
import INavigation, { LoadStatus, NavigationReason } from '@secret-agent/interfaces/INavigation';
import type ICommandMeta from '@secret-agent/interfaces/ICommandMeta';
import type IWaitForOptions from '@secret-agent/interfaces/IWaitForOptions';
import type IResolvablePromise from '@secret-agent/interfaces/IResolvablePromise';
import { CanceledPromiseError } from '@secret-agent/commons/interfaces/IPendingWaitEvent';
import * as moment from 'moment';
import type { IBoundLog } from '@secret-agent/interfaces/ILog';
import type FrameNavigations from './FrameNavigations';

/**
 * Observes a frame's navigation lifecycle and lets callers wait for
 * location changes (change/reload) or pipeline load statuses.
 *
 * At most one status-triggered wait is pending at a time: starting a new
 * one cancels the previous (see createStatusTriggeredPromise).
 */
export default class FrameNavigationsObserver {
  private readonly navigations: FrameNavigations;

  // this is the default "starting" point for a wait-for location change if a previous command id is not specified
  private defaultWaitForLocationCommandId = 0;
  // timer used by waitForPageLoaded to declare painting stable after a grace period
  private waitingForLoadTimeout: NodeJS.Timeout;
  // resolvable for the top navigation's resource id (set by waitForNavigationResourceId)
  private resourceIdResolvable: IResolvablePromise<number>;
  // the single pending status-triggered wait, if any
  private statusTriggerResolvable: IResolvablePromise<void>;
  // status the pending wait is looking for (location trigger or pipeline status)
  private statusTrigger: ILocationStatus;
  // command id from which the pending location-trigger wait starts matching history
  private statusTriggerStartCommandId: number;
  private logger: IBoundLog;

  constructor(navigations: FrameNavigations) {
    this.navigations = navigations;
    this.logger = navigations.logger.createChild(module);
    // re-evaluate pending waits whenever the frame reports a status change
    navigations.on('status-change', this.onLoadStatusChange.bind(this));
  }

  // this function will find the "starting command" to look for waitForLocation(change/reload)
  public willRunCommand(newCommand: ICommandMeta, previousCommands: ICommandMeta[]) {
    let last: ICommandMeta;
    for (const command of previousCommands) {
      // if this is a goto, set this to the "waitForLocation(change/reload)" command marker
      if (command.name === 'goto') this.defaultWaitForLocationCommandId = command.id;
      // find the last "waitFor" command that is not followed by another waitFor
      if (last?.name.startsWith('waitFor') && !command.name.startsWith('waitFor')) {
        this.defaultWaitForLocationCommandId = command.id;
      }
      last = command;
    }
    // handle cases like waitForLocation two times in a row
    if (
      newCommand.name === 'waitForLocation' &&
      last &&
      last.name.startsWith('waitFor') &&
      last.name !== 'waitForMillis'
    ) {
      this.defaultWaitForLocationCommandId = newCommand.id;
    }
  }

  /**
   * Wait for a location trigger (change/reload). Resolves immediately if the
   * navigation history already satisfies the trigger since the reference
   * command id; otherwise registers a pending wait.
   */
  public waitForLocation(status: ILocationTrigger, options: IWaitForOptions = {}): Promise<void> {
    assert(LocationTrigger[status], `Invalid location status: ${status}`);
    // determine if this location trigger has already been satisfied
    const sinceCommandId = Number.isInteger(options.sinceCommandId) ? options.sinceCommandId : this.defaultWaitForLocationCommandId;
    if (this.hasLocationTrigger(status, sinceCommandId)) {
      return Promise.resolve();
    }
    // otherwise set pending
    return this.createStatusTriggeredPromise(status, options.timeoutMs, sinceCommandId);
  }

  /**
   * Wait for the top navigation to reach the given pipeline status.
   *
   * NOTE(review): the early-exit branches use a bare `return;` (returning
   * undefined, not a Promise) despite the declared Promise<void> return type.
   * Callers that `await` the result are unaffected at runtime, but this would
   * be flagged under strict type checking — TODO confirm intended.
   */
  public waitForLoad(status: IPipelineStatus, options: IWaitForOptions = {}): Promise<void> {
    assert(PipelineStatus[status], `Invalid load status: ${status}`);
    if (options.sinceCommandId) {
      throw new Error('Not implemented');
    }
    const top = this.navigations.top;
    if (top) {
      // already reached the requested state
      if (top.stateChanges.has(status as LoadStatus)) {
        return;
      }
      // Load implies DomContentLoaded has already happened
      if (status === LocationStatus.DomContentLoaded && top.stateChanges.has(LoadStatus.Load)) {
        return;
      }
      if (status === LocationStatus.PaintingStable && this.getPaintStableStatus().isStable) {
        return;
      }
    }
    const promise = this.createStatusTriggeredPromise(status, options.timeoutMs);
    // re-check immediately in case a state change was recorded before we registered
    if (top) this.onLoadStatusChange();
    return promise;
  }

  /** Convenience wait for the first HTTP response of the top navigation. */
  public waitForReady(): Promise<void> {
    return this.waitForLoad(LocationStatus.HttpResponded);
  }

  /**
   * Wait for the top navigation's resource id to resolve; rethrows the
   * navigation error if one was recorded.
   */
  public async waitForNavigationResourceId(): Promise<number> {
    const top = this.navigations.top;
    this.resourceIdResolvable = top?.resourceId;
    const resourceId = await this.resourceIdResolvable?.promise;
    if (top?.navigationError) {
      throw top.navigationError;
    }
    return resourceId;
  }

  /**
   * Cancels any outstanding waits (resource id and status trigger), rejecting
   * their promises with a CanceledPromiseError carrying the given message.
   */
  public cancelWaiting(cancelMessage: string): void {
    clearTimeout(this.waitingForLoadTimeout);
    for (const promise of [this.resourceIdResolvable, this.statusTriggerResolvable]) {
      if (!promise || promise.isResolved) continue;
      const canceled = new CanceledPromiseError(cancelMessage);
      // append the original wait's stack so the rejection points back to the waiter
      canceled.stack += `\n${'------LOCATION'.padEnd(50, '-')}\n${promise.stack}`;
      promise.reject(canceled);
    }
  }

  /**
   * Reports whether painting is considered stable for the top navigation, and
   * if not (yet), how many milliseconds remain before it will be assumed stable.
   */
  public getPaintStableStatus(): { isStable: boolean; timeUntilReadyMs?: number } {
    const top = this.navigations.top;
    if (!top) return { isStable: false };
    // need to wait for both load + painting stable, or wait 3 seconds after either one
    const loadDate = top.stateChanges.get(LoadStatus.Load);
    const contentPaintedDate = top.stateChanges.get(LoadStatus.ContentPaint);
    if (contentPaintedDate) return { isStable: true };
    if (!loadDate && !contentPaintedDate) return { isStable: false };
    // NOTE: LargestContentfulPaint, which currently drives PaintingStable will NOT trigger if the page
    // doesn't have any "contentful" items that are eligible (image, headers, divs, paragraphs that fill the page)
    // have contentPaintedDate date, but no load
    const timeUntilReadyMs = moment().diff(contentPaintedDate ?? loadDate, 'milliseconds');
    return {
      isStable: timeUntilReadyMs >= 3e3,
      timeUntilReadyMs: Math.min(3e3, 3e3 - timeUntilReadyMs),
    };
  }

  /**
   * Event handler for frame status changes: checks whether the currently
   * pending status trigger (if any) is now satisfied and resolves it.
   */
  private onLoadStatusChange(): void {
    // location triggers are satisfied from navigation history, not pipeline states
    if (
      this.statusTrigger === LocationTrigger.change ||
      this.statusTrigger === LocationTrigger.reload
    ) {
      if (this.hasLocationTrigger(this.statusTrigger, this.statusTriggerStartCommandId)) {
        this.resolvePendingStatus(this.statusTrigger);
      }
      return;
    }
    const loadTrigger = PipelineStatus[this.statusTrigger];
    if (!this.statusTriggerResolvable || this.statusTriggerResolvable.isResolved || !loadTrigger) return;
    if (this.statusTrigger === LocationStatus.PaintingStable) {
      this.waitForPageLoaded();
      return;
    }
    // otherwise just look for state changes > the trigger
    for (const state of this.navigations.top.stateChanges.keys()) {
      // don't resolve states for redirected
      if (state === LocationStatus.HttpRedirected) continue;
      // NOTE(review): comparison below assumes PipelineStatus values are
      // numerically ordered by progression — TODO confirm against the enum.
      let pipelineStatus = PipelineStatus[state as IPipelineStatus];
      if (state === LoadStatus.Load) {
        pipelineStatus = PipelineStatus.AllContentLoaded;
      }
      if (pipelineStatus >= loadTrigger) {
        this.resolvePendingStatus(state);
        return;
      }
    }
  }

  /**
   * Resolves a pending PaintingStable wait either immediately (already stable)
   * or after the remaining grace period elapses.
   */
  private waitForPageLoaded(): void {
    clearTimeout(this.waitingForLoadTimeout);
    const { isStable, timeUntilReadyMs } = this.getPaintStableStatus();
    if (isStable) this.resolvePendingStatus('PaintingStable + Load');
    if (!isStable && timeUntilReadyMs) {
      const loadDate = this.navigations.top.stateChanges.get(LoadStatus.Load);
      const contentPaintDate = this.navigations.top.stateChanges.get(LoadStatus.ContentPaint);
      // unref() so this timer does not keep the process alive
      this.waitingForLoadTimeout = setTimeout(
        () => this.resolvePendingStatus(
          `TimeElapsed. Loaded="${loadDate}", ContentPaint="${contentPaintDate}"`,
        ),
        timeUntilReadyMs,
      ).unref();
    }
  }

  /**
   * Resolves the pending status promise (if still unresolved) and clears all
   * trigger bookkeeping so a new wait can be registered.
   */
  private resolvePendingStatus(resolvedWithStatus: string): void {
    if (this.statusTriggerResolvable && !this.statusTriggerResolvable?.isResolved) {
      this.logger.info(`Resolving pending "${this.statusTrigger}" with trigger`, {
        resolvedWithStatus,
        waitingForStatus: this.statusTrigger,
        url: this.navigations.currentUrl,
      });
      clearTimeout(this.waitingForLoadTimeout);
      this.statusTriggerResolvable.resolve();
      this.statusTriggerResolvable = null;
      this.statusTrigger = null;
      this.statusTriggerStartCommandId = null;
    }
  }

  /**
   * Scans navigation history (entries since `sinceCommandId`) for evidence
   * that the given location trigger (change/reload) has occurred.
   */
  private hasLocationTrigger(trigger: ILocationTrigger, sinceCommandId: number) {
    let previousLoadedNavigation: INavigation;
    for (const history of this.navigations.history) {
      const isMatch = history.startCommandId >= sinceCommandId;
      if (isMatch) {
        let isLocationChange = false;
        if (trigger === LocationTrigger.reload) {
          isLocationChange = FrameNavigationsObserver.isNavigationReload(history.navigationReason);
          // same final url with a different loader counts as a reload too
          if (
            !isLocationChange &&
            !history.stateChanges.has(LoadStatus.HttpRedirected) &&
            previousLoadedNavigation &&
            previousLoadedNavigation.finalUrl === history.finalUrl
          ) {
            isLocationChange = previousLoadedNavigation.loaderId !== history.loaderId;
          }
        }
        // if there was a previously loaded url, use this change
        if (
          trigger === LocationTrigger.change &&
          previousLoadedNavigation &&
          previousLoadedNavigation.finalUrl !== history.finalUrl
        ) {
          // Don't accept adding a slash as a page change
          const isInPageUrlAdjust = history.navigationReason === 'inPage' && history.finalUrl.replace(previousLoadedNavigation.finalUrl, '').length <= 1;
          if (!isInPageUrlAdjust) isLocationChange = true;
        }
        if (isLocationChange) {
          this.logger.info(`Resolving waitForLocation(${trigger}) with navigation history`, {
            historyEntry: history,
            status: trigger,
            sinceCommandId,
          });
          return true;
        }
      }
      // remember the most recent entry that actually loaded (and wasn't redirected)
      if (
        (history.stateChanges.has(LoadStatus.HttpResponded) || history.stateChanges.has(LoadStatus.DomContentLoaded)) &&
        !history.stateChanges.has(LoadStatus.HttpRedirected)
      ) {
        previousLoadedNavigation = history;
      }
    }
    return false;
  }

  /**
   * Registers the single pending wait, cancelling any previous one.
   * Defaults to a 60s timeout when none is supplied.
   */
  private createStatusTriggeredPromise(
    status: ILocationStatus,
    timeoutMs: number,
    sinceCommandId?: number,
  ): Promise<void> {
    if (this.statusTriggerResolvable) this.cancelWaiting('New location trigger created');
    this.statusTrigger = status;
    this.statusTriggerStartCommandId = sinceCommandId;
    this.statusTriggerResolvable = createPromise<void>(timeoutMs ?? 60e3);
    return this.statusTriggerResolvable.promise;
  }

  /** True when the navigation reason indicates a reload (header/meta refresh or explicit reload). */
  private static isNavigationReload(reason: NavigationReason): boolean {
    return reason === 'httpHeaderRefresh' || reason === 'metaTagRefresh' || reason === 'reload';
  }
}
the_stack
'use strict';
// tslint:disable: no-console
import { ExtensionUtil } from '../extension/util/ExtensionUtil';
import * as vscode from 'vscode';
import * as fs from 'fs-extra';
import * as sinon from 'sinon';
import * as path from 'path';
import { SettingConfigurations } from '../extension/configurations';
import { SinonSandbox, SinonStub } from 'sinon';
import { FabricRuntimeUtil, FileConfigurations, FabricEnvironmentRegistryEntry, FabricWalletRegistry, FabricGatewayRegistry, FabricEnvironmentRegistry } from 'ibm-blockchain-platform-common';
import { GlobalState, ExtensionData } from '../extension/util/GlobalState';
import { UserInputUtil } from '../extension/commands/UserInputUtil';
import { LocalMicroEnvironment } from '../extension/fabric/environments/LocalMicroEnvironment';
import { EnvironmentFactory } from '../extension/fabric/environments/EnvironmentFactory';
import { LocalMicroEnvironmentManager } from '../extension/fabric/environments/LocalMicroEnvironmentManager';
import { RepositoryRegistry } from '../extension/registries/RepositoryRegistry';
import { TimerUtil } from '../extension/util/TimerUtil';

/**
 * Test harness helpers for the IBM Blockchain Platform extension tests.
 *
 * Responsibilities: snapshot/restore the user's real VS Code settings and
 * global state, redirect the extension to a temp directory, and set up /
 * start the bundled local Fabric environment.
 */
export class TestUtil {
    // all test artifacts are written under this temp directory
    public static EXTENSION_TEST_DIR: string = path.join(__dirname, '..', '..', 'test', 'tmp');

    /**
     * One-time test setup: stores the user's settings, points the extension at
     * the test directory, stubs interactive prompts, and activates the extension.
     * On subsequent calls (extension already active) it only re-points the
     * registries at the test directory.
     */
    static async setupTests(sandbox?: SinonSandbox): Promise<void> {
        if (!ExtensionUtil.isActive()) {
            // snapshot the user's real settings before overwriting them
            await this.storeAll();
            await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_DIRECTORY, this.EXTENSION_TEST_DIR, vscode.ConfigurationTarget.Global);
            await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_BYPASS_PREREQS, true, vscode.ConfigurationTarget.Global);
            await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_LOCAL_FABRIC, true, vscode.ConfigurationTarget.Global);
            await vscode.workspace.getConfiguration().update(SettingConfigurations.HOME_SHOW_ON_NEXT_ACTIVATION, false, vscode.ConfigurationTarget.Global);
            await vscode.workspace.getConfiguration().update(SettingConfigurations.FEATURE_FLAGS, {}, vscode.ConfigurationTarget.Global);
            await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_ENABLE_CUSTOM_LOCAL_ENVIRONMENT_START_IMAGE, false, vscode.ConfigurationTarget.Global);
            await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_CUSTOM_LOCAL_ENVIRONMENT_START_IMAGE_VALUE, 'ibmcom/ibp-microfab:latest', vscode.ConfigurationTarget.Global);
            // reuse the caller's sandbox if supplied, otherwise create a fresh one
            if (!sandbox) {
                sandbox = sinon.createSandbox();
            } else {
                sandbox.restore();
                sandbox.reset();
            }
            let showConfirmationWarningMessage: SinonStub;
            let createStub: SinonStub;
            // auto-confirm the teardown prompt and suppress environment creation / the "what's new" popup
            showConfirmationWarningMessage = sandbox.stub(UserInputUtil, 'showConfirmationWarningMessage');
            showConfirmationWarningMessage.withArgs(`The local runtime configurations are out of date and must be torn down before updating. Do you want to teardown your local runtimes now?`).resolves(true);
            createStub = sandbox.stub(LocalMicroEnvironment.prototype, 'create').resolves();
            sandbox.stub(vscode.window, 'showInformationMessage').withArgs(`You have successfully updated to version 2 of the IBM Blockchain Platform extension. Lots of changes have happened since version 1, so be sure to check what's new!`).resolves(undefined);
            await this.setupLocalFabric();
            await ExtensionUtil.activateExtension();
            createStub.restore();
            showConfirmationWarningMessage.restore();
        } else {
            await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_DIRECTORY, this.EXTENSION_TEST_DIR, vscode.ConfigurationTarget.Global);
            const _path: string = path.join(this.EXTENSION_TEST_DIR, 'v2');
            FabricWalletRegistry.instance().setRegistryPath(_path);
            FabricGatewayRegistry.instance().setRegistryPath(_path);
            FabricEnvironmentRegistry.instance().setRegistryPath(_path);
            RepositoryRegistry.instance().setRegistryPath(_path);
        }
    }

    /**
     * Copies the pre-generated local Fabric fixture into the test environments
     * directory and rewrites its .config.json to point at the new location.
     */
    static async setupLocalFabric(): Promise<void> {
        await fs.ensureDir(this.EXTENSION_TEST_DIR);
        // Ensure the environments directory exists first
        const environmentDir: string = path.join(this.EXTENSION_TEST_DIR, 'v2', FileConfigurations.FABRIC_ENVIRONMENTS, FabricRuntimeUtil.LOCAL_FABRIC);
        await fs.ensureDir(environmentDir);
        // Copy the generated '1 Org Local Fabric' directory into the environments directory.
        const localFabricDir: string = path.join(this.EXTENSION_TEST_DIR, '..', '..', '..', '..', 'test', 'data', FabricRuntimeUtil.LOCAL_FABRIC);
        await fs.copy(localFabricDir, environmentDir);
        const envReg: FabricEnvironmentRegistryEntry = await fs.readJSON(path.join(environmentDir, '.config.json'));
        envReg.environmentDirectory = environmentDir;
        await fs.writeJSON(path.join(environmentDir, '.config.json'), envReg);
    }

    /**
     * Sets up the local Fabric fixture, registers the runtime on port 8080 and
     * starts it if it is not already running (then waits for it to be ready).
     */
    static async startLocalFabric(): Promise<void> {
        await this.setupLocalFabric();
        const settings: any = {};
        settings[FabricRuntimeUtil.LOCAL_FABRIC] = 8080;
        await vscode.workspace.getConfiguration().update(SettingConfigurations.FABRIC_RUNTIME, settings, vscode.ConfigurationTarget.Global);
        FabricWalletRegistry.instance().setRegistryPath(path.join(this.EXTENSION_TEST_DIR, 'v2'));
        FabricGatewayRegistry.instance().setRegistryPath(path.join(this.EXTENSION_TEST_DIR, 'v2'));
        FabricEnvironmentRegistry.instance().setRegistryPath(path.join(this.EXTENSION_TEST_DIR, 'v2'));
        RepositoryRegistry.instance().setRegistryPath(path.join(this.EXTENSION_TEST_DIR, 'v2'));
        const environmentDir: string = path.join(this.EXTENSION_TEST_DIR, 'v2', FileConfigurations.FABRIC_ENVIRONMENTS, FabricRuntimeUtil.LOCAL_FABRIC);
        const envReg: FabricEnvironmentRegistryEntry = await fs.readJSON(path.join(environmentDir, '.config.json'));
        const microManager: LocalMicroEnvironmentManager = LocalMicroEnvironmentManager.instance();
        // re-register the runtime so the manager picks up the fixture's org count
        microManager.removeRuntime(FabricRuntimeUtil.LOCAL_FABRIC);
        await microManager.addRuntime(FabricRuntimeUtil.LOCAL_FABRIC, 8080, envReg.numberOfOrgs);
        const environment: LocalMicroEnvironment = EnvironmentFactory.getEnvironment(envReg) as LocalMicroEnvironment;
        const isRunning: boolean = await environment.isRunning();
        if (!isRunning) {
            await environment.start();
            // give the runtime time to come up before polling readiness
            await TimerUtil.sleep(5000);
        }
        await environment.waitFor();
    }

    /** Snapshot every user setting the tests will overwrite (global state is best-effort). */
    static async storeAll(): Promise<void> {
        await this.storeExtensionDirectoryConfig();
        await this.storeRuntimesConfig();
        await this.storeShowHomeOnStart();
        await this.storeBypassPreReqs();
        await this.storeEnableLocalFabric();
        try {
            await this.storeGlobalState();
        } catch (error) {
            // ignore
        }
    }

    /** Restore every setting snapshotted by storeAll (global state is best-effort). */
    static async restoreAll(): Promise<void> {
        await this.restoreExtensionDirectoryConfig();
        await this.restoreRuntimesConfig();
        await this.restoreShowHomeOnStart();
        await this.restoreBypassPreReqs();
        await this.restoreEnableLocalFabric();
        try {
            await this.restoreGlobalState();
        } catch (error) {
            // ignore
        }
    }

    static async storeExtensionDirectoryConfig(): Promise<void> {
        this.USER_PACKAGE_DIR_CONFIG = await vscode.workspace.getConfiguration().get(SettingConfigurations.EXTENSION_DIRECTORY);
        console.log('Storing user extension directory:', this.USER_PACKAGE_DIR_CONFIG);
    }

    static async restoreExtensionDirectoryConfig(): Promise<void> {
        console.log('Restoring user extension directory to settings:', this.USER_PACKAGE_DIR_CONFIG);
        await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_DIRECTORY, this.USER_PACKAGE_DIR_CONFIG, vscode.ConfigurationTarget.Global);
    }

    static async storeRuntimesConfig(): Promise<void> {
        this.USER_RUNTIMES_CONFIG = await vscode.workspace.getConfiguration().get(SettingConfigurations.FABRIC_RUNTIME);
        console.log('Storing user runtimes:', this.USER_RUNTIMES_CONFIG);
    }

    static async restoreRuntimesConfig(): Promise<void> {
        console.log('Restoring user runtimes config to settings:', this.USER_RUNTIMES_CONFIG);
        await vscode.workspace.getConfiguration().update(SettingConfigurations.FABRIC_RUNTIME, this.USER_RUNTIMES_CONFIG, vscode.ConfigurationTarget.Global);
    }

    static async storeShowHomeOnStart(): Promise<void> {
        this.HOME_STARTUP = await vscode.workspace.getConfiguration().get(SettingConfigurations.HOME_SHOW_ON_STARTUP);
        console.log('Storing home startup:', this.HOME_STARTUP);
    }

    static async restoreShowHomeOnStart(): Promise<void> {
        console.log('Restoring show home on startup config to settings:', this.HOME_STARTUP);
        await vscode.workspace.getConfiguration().update(SettingConfigurations.HOME_SHOW_ON_STARTUP, this.HOME_STARTUP, vscode.ConfigurationTarget.Global);
    }

    static async storeBypassPreReqs(): Promise<void> {
        this.BYPASS_PREREQS = await vscode.workspace.getConfiguration().get(SettingConfigurations.EXTENSION_BYPASS_PREREQS);
        console.log('Storing bypass prereqs:', this.BYPASS_PREREQS);
    }

    static async restoreBypassPreReqs(): Promise<void> {
        console.log('Restoring bypass prereqs to settings:', this.BYPASS_PREREQS);
        await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_BYPASS_PREREQS, this.BYPASS_PREREQS, vscode.ConfigurationTarget.Global);
    }

    static async storeGlobalState(): Promise<void> {
        this.GLOBAL_STATE = GlobalState.get();
    }

    static async restoreGlobalState(): Promise<void> {
        await GlobalState.update(this.GLOBAL_STATE);
    }

    static async storeEnableLocalFabric(): Promise<void> {
        this.ENABLE_LOCAL_FABRIC = await vscode.workspace.getConfiguration().get(SettingConfigurations.EXTENSION_LOCAL_FABRIC);
        console.log('Storing enable Local Fabric to settings:', this.ENABLE_LOCAL_FABRIC);
    }

    static async restoreEnableLocalFabric(): Promise<void> {
        console.log('Restoring enable Local Fabric to settings:', this.ENABLE_LOCAL_FABRIC);
        await vscode.workspace.getConfiguration().update(SettingConfigurations.EXTENSION_LOCAL_FABRIC, this.ENABLE_LOCAL_FABRIC, vscode.ConfigurationTarget.Global);
    }

    /** Removes test files, ignoring "does not exist" errors. */
    static async deleteTestFiles(deletePath: string): Promise<void> {
        try {
            await fs.remove(deletePath);
        } catch (error) {
            if (!error.message.includes('ENOENT: no such file or directory')) {
                throw error;
            }
        }
    }

    static async storeShowHomeOnNextStart(): Promise<void> {
        this.HOME_STARTUP_NEXT = await vscode.workspace.getConfiguration().get(SettingConfigurations.HOME_SHOW_ON_NEXT_ACTIVATION);
        console.log('Storing home startup on next activation:', this.HOME_STARTUP_NEXT);
    }

    static async restoreShowHomeOnNextStart(): Promise<void> {
        console.log('Restoring show home startup on next activation config to settings:', this.HOME_STARTUP_NEXT);
        await vscode.workspace.getConfiguration().update(SettingConfigurations.HOME_SHOW_ON_NEXT_ACTIVATION, this.HOME_STARTUP_NEXT, vscode.ConfigurationTarget.Global);
    }

    // snapshots of the user's settings taken by the store* methods above
    private static USER_PACKAGE_DIR_CONFIG: any;
    private static USER_RUNTIMES_CONFIG: any;
    private static HOME_STARTUP: any;
    private static BYPASS_PREREQS: any;
    private static GLOBAL_STATE: ExtensionData;
    private static ENABLE_LOCAL_FABRIC: boolean;
    private static HOME_STARTUP_NEXT: boolean;
}
the_stack
import * as t from "io-ts"; import { MaybePromise, MaybeArray, Predicate, RegExpType, Dict } from "./utils/util-types"; import { identity, noop, isEmpty, constant, some, keys } from "lodash"; import { checkArray, checkNil, checkFn } from "./utils/guards"; import { assertType } from "./utils/assertions"; import { maybeArray } from "./utils/maybe"; import { matchString } from "./utils/predicate"; /** * Covers all the primitive mutation types used in the MutationNotification objects * that are constructed by GRelDAL */ export enum PrimitiveMutationType { Insert = "INSERT", Update = "UPDATE", Delete = "DELETE", } /** * A notification to be published that conveys information about a mutation that * has happened in the application. * * These mutations may not be GraphQL mutations, and application backend * can choose to use NotificationDispatcher to dispatch events which will * become available through GraphQL subscriptions. * * Must be serializable */ export interface MutationNotification<TEntity extends {} = any> { /** * Type of Mutation. For notifications generated by GRelDAL this will * always be of PrimitiveMutationType */ type: string; /** * Mapping of mapped source name to List of entities affected by this mutation. * * For GRelDAL generated mutation notifications, this collection will * have only a single key-value pair */ entities: Dict<TEntity[]>; /** * Application specific metadata */ metadata?: any; } /** * Represents a notification for a single (usually atomic) operation that changes the state of * a singe datasource */ export interface PrimitiveMutationNotification<TEntity extends {}> extends MutationNotification<TEntity> { type: PrimitiveMutationType; } /** * A function that intercepts a list of notifications and can transform it. * * Only those notifications will be published which have been removed from the interceptor. 
*/ interface NotificationDispatchInterceptor { (notification: Array<MutationNotification<any>>): MaybePromise<Array<MutationNotification<any>>>; } const StringPredicateRT = maybeArray(t.union([t.string, RegExpType, t.Function])); const NotificationDispatchInterceptorConfigRT = t.intersection( [ t.partial({ type: StringPredicateRT, source: StringPredicateRT, }), t.type({ intercept: t.union([t.Function, t.boolean]), }), ], "NotificationDispatchInterceptorConfig", ); /** * Specifies what Notifications are to be intercepted. */ interface NotificationDispatchInterceptorConfig extends t.TypeOf<typeof NotificationDispatchInterceptorConfigRT> { /** * Specifies which notifications are to be intercepted based on type. * * This can be: * * - A string: In which case an exact match is performed against the type property of notification * - A regular expression to match the type property by a pattern * - A function that takes the type property value and returns true/false * - An array of the above, in which case any of the predicates matching will be considered a successful match. * * If both type and source are specified, then incoming notifications must match *both* of them * to be intercepted. */ type?: MaybeArray<string | RegExp | Predicate<string>>; /** * Specifies which notifications are to be intercepted based on source. * * This can be: * * - A string: In which case an exact match is performed against the source property of notification * - A regular expression to match the source property by a pattern * - A function that takes the source property value and returns true/false * - An array of the above, in which case any of the predicates matching will be considered a successful match. * * If both type and source are specified, then incoming notifications must match *both* of them * to be intercepted. */ source?: MaybeArray<string | RegExp | Predicate<string>>; /** * Whether the notifications not matching the aforementioned type/source should be retained * or discarded. 
* * True (retained) by default. * * In a chain of dispatch interceptors, if any of the interceptors choses not to retain * notifications that don't match, they will not be available to other interceptors * down the chain. */ retainRest?: boolean; /** * Interceptor function that receives list of notifications to be published and can choose * to augment, transform or remove them. * * Only the notifications actually returned from the interceptor will be published. * * This can be: * - A function, which will receives list of notifications to be published and can choose * to augment, transform or remove them. * - True, in which case all matched notifications are returned as is * - False, in which case all matched notifications are discarded */ intercept: NotificationDispatchInterceptor | boolean; } interface NormalizedNotificationDispatcherConfig { intercept: NotificationDispatchInterceptor; publish: (notification: MutationNotification<any>) => void; } const NotificationDispatcherConfigRT = t.intersection( [ t.partial({ intercept: maybeArray(t.union([t.Function, NotificationDispatchInterceptorConfigRT])), }), t.type({ publish: t.Function, }), ], "NotificationDispatcherConfig", ); /** * Global notification Dispatcher Configuration */ interface NotificationDispatcherConfig extends t.TypeOf<typeof NotificationDispatcherConfigRT> { /** * Interceptors which can receive and transform, add or remove notifications before * they are published. * * The interceptor can be: * * 1. A configuration of type {@link NotificationDispatchInterceptorConfig} which will specify * which notifications are to be intercepted based on type or source. * * 2. A function that receives an array of notifications and returns transformed list of notifications * Only the notifications that are returned by this interceptor will be published. * * 3. 
An array of either of the above, in which case the interceptors will be composed * as a chain of middlewares, and only the notifications that are returned by the last interceptor * in the chain will be published. */ intercept?: MaybeArray<NotificationDispatchInterceptor | NotificationDispatchInterceptorConfig>; /** * Function used to publish the notifications to an observable channel. * * This can be used for graphql-subscriptions integration: */ publish: (notification: MutationNotification<any>) => void; } const normalizeInterceptor = (i: NotificationDispatchInterceptorConfig | NotificationDispatchInterceptor) => { if (checkFn(i)) return i; const checkSource = checkNil(i.source) ? constant(true) : (sources: string[]) => some(sources, matchString(i.source!)); const checkType = checkNil(i.type) ? constant(true) : matchString(i.type); const intercept = checkFn(i.intercept) ? // When provided as a function, return the interceptor as is i.intercept : i.intercept ? // When provided as true, assume that received notifications are to be // returned as is. 
// // This is primarily helpful in conjunction with retainRest: false to discard // some notifications (identity as NotificationDispatchInterceptor) : // When specified as false, assume that received notifications are to be // discarded constant([]); return async (narr: MutationNotification[]): Promise<MutationNotification[]> => { const retained: MutationNotification[] = []; const consumed: MutationNotification[] = []; for (const n of narr) { if ((checkNil(n.entities) || checkSource(keys(n.entities))) && checkType(n.type)) { consumed.push(n); } else if (i.retainRest !== false) { retained.push(n); } } if (!isEmpty(consumed)) { const intercepted = await intercept(consumed); return intercepted.concat(retained); } return retained; }; }; const normalize = (c: NotificationDispatcherConfig): NormalizedNotificationDispatcherConfig => { let intercept: NotificationDispatchInterceptor; if (checkNil(c.intercept)) intercept = identity; else if (checkArray(c.intercept)) { const steps = c.intercept.map(i => normalizeInterceptor(i)); intercept = async (notifications: MutationNotification<any>[]) => { for (const step of steps) { if (isEmpty(notifications)) return notifications; notifications = await step(notifications); } return notifications; }; } else intercept = normalizeInterceptor(c.intercept); return { ...c, intercept }; }; export const defaultConfig: NormalizedNotificationDispatcherConfig = Object.freeze({ intercept: identity, publish: noop, }); /** * The current configuration of NotificationDispatcher. * * Default configuration does nothing: it has no interceptors and it publishes nowhere. */ export let config = defaultConfig; /** * Configure NotificationDispatcher. * * Note that the NotificationDispatcher is singleton and thus re-invocation of this function is * not recommended. * * If re-invoked the previous configuration will be overriden completely (and NOT merged). So * any previous interceptors will get lost. 
If merging is desired, it should be taken care of either * at the application level or by passing a function to configure which will receive previous/default * configuration. */ export function configure(cfg: NotificationDispatcherConfig) { assertType(NotificationDispatcherConfigRT, cfg, "NotificationDispatcher config"); config = normalize(cfg); } /** * Reset Notification dispatcher configuration to default. * * Primarily useful in tests. */ export function resetConfig() { config = defaultConfig; } /** * Publish notifications to an observable channel after passing them through a chain * of interceptors (Refer {@link NotificationDispatcherConfig}) if configured. */ export async function publish<TEntity>(notifications: MaybeArray<MutationNotification<TEntity>>) { const intercepted = await config.intercept(checkArray(notifications) ? notifications : [notifications]); intercepted.forEach(config.publish); }
the_stack
import {
  OnDestroy, OnInit, OnChanges, EventEmitter, ElementRef,
  Input, Output, SimpleChanges, Directive, NgZone
} from '@angular/core';
import * as Chart from 'chart.js';
import { StoreService } from './store.service';
import { NgChartjsService } from './ng-chartjs.service';
import { getColors, Colors } from './colors';

export type Labels = Array<string | string[] | number | number[] | Date | Date[] | any | any[]>;
export type Orientation = 'oldest' | 'latest';

// Payload emitted by chartClick/chartHover: the raw mouse event plus the
// chart.js "active" elements under the cursor.
export interface NgChartjsEvent {
  event: MouseEvent;
  active: Array<{}>;
}

/* tslint:disable-next-line */
@Directive({ selector: 'canvas[ngChartjs]', exportAs: 'ngChartjs' })
export class NgChartjsDirective implements OnDestroy, OnChanges, OnInit {
  // Chart data points. Should be Array<number[]> for line, bar and radar charts
  // only; otherwise number[].
  // @ts-ignore
  @Input() data: number[] | any[];

  // Equivalent to chart.js data: {datasets: [{...}]}
  // @ts-ignore
  @Input() datasets: Chart.ChartDataSets[];

  // X-axis labels. Required for line, bar and radar charts; used only as
  // (hover) labels for polarArea, pie and doughnut charts.
  @Input() labels: Labels = [];

  // Equivalent to chart.js options
  @Input() options: Chart.ChartOptions = {};

  // Inline plugin instances passed straight to chart.js
  // @ts-ignore
  @Input() inlinePlugins: any[];

  // chartType line, bar, radar, pie, polarArea, doughnut
  // @ts-ignore
  @Input() chartType: Chart.ChartType;

  // Dataset colors; default and/or random colors are used when not specified.
  // @ts-ignore
  @Input() colors: Colors[];

  // Whether the legend is displayed
  // @ts-ignore
  @Input() legend: boolean;

  // Incremental data to append (see addData_)
  // @ts-ignore
  @Input() adding: { labels: Labels[], data: any[][] };

  // @ts-ignore
  @Input() removing: { orientation: Orientation }; // orientation is 'oldest' or 'latest'

  // Options object merged into the live chart's options on change
  // @ts-ignore
  @Input() resetOption: Chart.ChartOptions;

  @Input() noZone = true; // disable angular NgZone

  // @ts-ignore
  @Input() id: string = null; // chart instance id

  // Emitted when any area of the chart is clicked
  @Output() chartClick: EventEmitter<NgChartjsEvent> = new EventEmitter();
  // Emitted when hovering over a label or an active point
  @Output() chartHover: EventEmitter<NgChartjsEvent> = new EventEmitter();

  // get Chartjs object
  // @ts-ignore
  chart: Chart;

  // @ts-ignore
  private ctx: CanvasRenderingContext2D;
  private initFlag = false;       // set once ngOnInit has run; gates ngOnChanges
  private hasChanges = false;     // batches several input changes into one chart.update()
  private element: ElementRef;

  public constructor(
    element: ElementRef,
    private ngChartjsService: NgChartjsService,
    private storeService: StoreService,
    private zone: NgZone) {
    this.element = element; // the host canvas element this directive sits on
  }

  ngOnInit(): void {
    this.ctx = this.element.nativeElement.getContext('2d'); // obtain the 2d context of the canvas
    this.initFlag = true; // mark the directive as initialized
    if (this.data || this.datasets) { // build the chart as soon as either data or datasets is present
      if (this.noZone) {
        // run chart.js outside Angular's zone to avoid change-detection churn
        this.zone.runOutsideAngular(() => { this.refresh(); });
      } else {
        this.refresh();
      }
    }
  }

  ngOnChanges(changes: SimpleChanges): void {
    // TODO: refresh when plugins change; expose an explicit refresh trigger
    if (this.initFlag) {
      // Check if the changes are in the data or datasets
      if (changes.hasOwnProperty('data') || changes.hasOwnProperty('datasets')) {
        if (changes.data) {
          this.updateChartData(changes.data.currentValue);
        } else {
          this.updateChartData(changes.datasets.currentValue);
        }
        this.hasChanges = true;
      }
      if (changes.hasOwnProperty('labels')) {
        this.chart.data.labels = changes.labels.currentValue;
        this.hasChanges = true;
      }
      if (changes.hasOwnProperty('legend')) {
        if (changes.legend.currentValue !== changes.legend.previousValue) {
          // @ts-ignore
          this.chart.options.legend.display = changes.legend.currentValue;
          this.hasChanges = true;
        }
      }
      if (changes.hasOwnProperty('adding')) {
        // append labels/data points to the live chart
        this.addData_(changes.adding.currentValue.labels, changes.adding.currentValue.data);
        this.hasChanges = true;
      }
      if (changes.hasOwnProperty('removing')) {
        if (changes.removing.currentValue.orientation === 'oldest' || changes.removing.currentValue.orientation === 'latest') {
          this.removeData_(changes.removing.currentValue.orientation);
          this.hasChanges = true;
        }
      }
      if (changes.hasOwnProperty('chartType')) {
        // chart type cannot be changed in place: destroy and rebuild
        this.refresh();
      }
      if (changes.hasOwnProperty('resetOption')) {
        Object.assign(this.chart.options, changes.resetOption.currentValue);
        this.hasChanges = true;
      }
      if (this.hasChanges) {
        // flush all accumulated changes with a single repaint
        this.chart.update();
        this.hasChanges = false;
      }
      // change chart id
      if (changes.hasOwnProperty('id')) {
        this.removeChart(changes.id.previousValue);
        this.addChart(changes.id.currentValue);
      }
    }
  }

  ngOnDestroy(): void {
    if (this.chart) {
      this.chart.destroy();
      // @ts-ignore
      this.chart = void 0;
      this.removeChart(this.id); // deregister from the shared store as well
    }
  }

  // update chartjs
  update(): void {
    this.chart.update();
  }

  // Dynamic add data
  addData(labels: Labels[], data: any[][]): void {
    this.addData_(labels, data);
    this.update();
  }

  // Dynamic remove data, orientation is 'oldest' or 'latest'
  removeData(orientation: Orientation): void {
    this.removeData_(orientation);
    this.update();
  }

  // Destroys any existing chart instance and builds a fresh one.
  // NOTE(review): calling ngOnDestroy() directly here is unconventional but is
  // how this directive recycles the chart; it destroys + deregisters before
  // recreating.
  private refresh(): void {
    this.ngOnDestroy();
    this.chart = this.getChartBuilder(this.ctx/*, data, this.options*/);
    this.addChart(this.id);
  }

  // Deregister the chart instance from the store, keyed either by the host
  // element's DOM id (takes precedence) or by the `id` input.
  private removeChart(id: string): void {
    if (this.element.nativeElement.hasAttribute('id')) {
      this.storeService.removeChart(this.element.nativeElement.id);
      return;
    }
    if (id !== null && id !== undefined) {
      this.storeService.removeChart(id); // delete chart instance.
    }
  }

  // Register the chart instance in the store (same key precedence as removeChart).
  private addChart(id: string): void {
    if (this.element.nativeElement.hasAttribute('id')) {
      this.storeService.addChart(this.element.nativeElement.id, this.chart);
      return;
    }
    if (id !== null && id !== undefined) {
      this.storeService.addChart(id, this.chart);
    }
  }

  // Replace the live chart's dataset values in place; accepts either an array
  // of {data,label} objects or a flat data array for the first dataset.
  private updateChartData(newDataValues: number[] | any[]): void {
    if (Array.isArray(newDataValues[0].data)) {
      // @ts-ignore
      this.chart.data.datasets.forEach((dataset: Chart.ChartDataSets, i: number) => {
        dataset.data = newDataValues[i].data;
        if (newDataValues[i].label) {
          dataset.label = newDataValues[i].label;
        }
      });
    } else {
      // @ts-ignore
      this.chart.data.datasets[0].data = newDataValues;
    }
    // update colors
    // @ts-ignore
    this.chart.data.datasets = this.updateColors(this.chart.data.datasets);
  }

  // Assembles a new chart.js instance from the current inputs.
  private getChartBuilder(ctx: CanvasRenderingContext2D/*, data:Array<any>, options:any*/): Chart {
    const datasets = this.getDatasets();
    const options: Chart.ChartOptions = Object.assign({}, this.options); // copy options (note: Object.assign is a shallow copy)
    if (this.legend === false) { // set options.legend; TODO: drop this property later and set it directly in options
      options.legend = { display: false };
    }
    // hook for onHover and onClick events
    options.hover = options.hover || {};
    if (!options.hover.onHover) {
      options.hover.onHover = (event: MouseEvent, active: Array<{}>) => {
        if (active && !active.length) { return; }
        this.chartHover.emit({ event, active });
      };
    }
    if (!options.onClick) {
      options.onClick = (event: MouseEvent, active: Array<{}>) => {
        this.chartClick.emit({ event, active });
      };
    }
    const opts = {
      type: this.chartType,
      data: {
        labels: this.labels,
        datasets: datasets // TODO: rename this property later to avoid a warning
      },
      options: options, // TODO: rename this property later to avoid a warning
      plugins: this.inlinePlugins
    };
    return new Chart(ctx, opts);
  }

  // Build the chart.js datasets array from either `datasets` or the raw `data` input.
  private getDatasets(): Chart.ChartDataSets[] {
    // @ts-ignore
    let datasets: Chart.ChartDataSets[] = void 0;
    // in case if datasets is not provided, but data is present
    if (!this.datasets || !this.datasets.length && (this.data && this.data.length)) {
      if (Array.isArray(this.data[0])) {
        // multiple series: one dataset per inner array
        datasets = (this.data as number[][]).map((data: number[], index: number) => {
          return { data, label: this.labels[index] || `Label ${index}` };
        });
      } else {
        datasets = [{ data: this.data, label: `Label 0` }];
      }
    }
    datasets = this.updateColors(datasets); // update colors
    if (!datasets) {
      throw new Error(`ng-chartjs configuration error, data or datasets field are required to render char ${this.chartType}`);
    }
    return datasets;
  }

  // update dataset colors
  private updateColors(datasets: Chart.ChartDataSets[]): Chart.ChartDataSets[] {
    if (this.datasets && this.datasets.length || (datasets && datasets.length)) {
      // fix elm type, pre type is number
      datasets = (this.datasets || datasets).map((elm: Chart.ChartDataSets, index: number) => {
        const newElm: Chart.ChartDataSets = Object.assign({}, elm);
        if (this.colors && this.colors.length) {
          // caller-supplied colors win
          Object.assign(newElm, this.colors[index]);
        } else {
          // @ts-ignore
          Object.assign(newElm, getColors(this.chartType, index, newElm.data.length));
        }
        return newElm;
      });
    }
    return datasets;
  }

  // Append labels and per-dataset data points to the live chart (no repaint here).
  private addData_(labels: Labels[], data: any[][]): void {
    if (labels.length === 0 || data.length === 0) {
      return;
    }
    // update labels
    // @ts-ignore
    labels.forEach((label) => { this.chart.data.labels.push(label); });

    // @ts-ignore
    this.chart.data.datasets.forEach((dataset, index) => {
      if (data[index]) {
        for (let i = 0; i < data[index].length; i++) {
          // @ts-ignore
          dataset.data.push(data[index][i]);
        }
      } else {
        console.log('The added data does not match the original data');
        return;
      }
    });
  }

  // Drop one data point (and its label) from either end of every dataset.
  private removeData_(orientation: Orientation): void {
    // fix: support to oldest feature
    if (orientation === 'latest') {
      // @ts-ignore
      this.chart.data.labels.pop();
      // @ts-ignore
      this.chart.data.datasets.forEach((dataset: Chart.ChartDataSets) => {
        // @ts-ignore
        dataset.data.pop();
      });
    } else if (orientation === 'oldest') {
      // @ts-ignore
      this.chart.data.labels.shift();
      // @ts-ignore
      this.chart.data.datasets.forEach((dataset: Chart.ChartDataSets) => {
        // @ts-ignore
        dataset.data.shift();
      });
    }
  }
}
the_stack
import _ from 'lodash';
import moment from 'moment';
import semver from 'semver';

const debug = require('debug')('restapi');
const debugCacheOptimization = require('debug')('oss-cache-optimization');
const debugShowStandardBehavior = false;
const debugOutputUnregisteredEntityApis = true;

import { IShouldServeCache, IntelligentEngine, ApiContext, IApiContextCacheValues, IApiContextRedisKeys, ApiContextType, IRestResponse, IRestMetadata } from './core';
import { getEntityDefinitions, GitHubResponseType, ResponseBodyType } from './endpointEntities';
import appPackage from '../../package.json';
import { IGetAuthorizationHeader, IAuthorizationHeaderValue } from '../../interfaces';

const appVersion = appPackage.version;

// Cache lifetime knobs (minutes)
const longtermMetadataMinutes = 60 * 24 * 14; // assumed to be a long time
const longtermResponseMinutes = 60 * 24 * 7; // a week, sliding
const acceleratedExpirationMinutes = 10; // quick cleanup

const entityData = getEntityDefinitions();
const emptySet = new Set<string>();

// Minimal metadata persisted alongside cached responses (etag + app version).
interface IReducedGitHubMetadata {
  etag: string;
  av: string; // app (package.json) version that wrote the cache entry
  link?: any;
  updated?: any;
}

interface IGitHubLink {
  link: string;
}

// GitHub-specific specialization of the caching REST engine: resolves octokit
// library methods, performs the authenticated call, and decides when cached
// responses/metadata may be served.
export class IntelligentGitHubEngine extends IntelligentEngine {

  // Resolve a "group.method" (or bare "method") name against the octokit-style
  // library instance; throws when the group or method does not exist.
  public static findLibraryMethod(libraryInstance, apiName) {
    const instance = libraryInstance;
    const combined = apiName;
    const i = combined.indexOf('.');
    let apiGroup = null;
    let apiMethodName = combined;
    if (i >= 0) {
      apiGroup = combined.substr(0, i);
      apiMethodName = combined.substr(i + 1);
    }
    const group = apiGroup ? instance[apiGroup] : instance;
    if (!group) {
      throw new Error(`The GitHub REST API library does not support the API group of type "${apiGroup}".`);
    }
    const method = group[apiMethodName];
    if (!method) {
      throw new Error(`The GitHub REST API library does not support the API "${apiMethodName}" within the API group of type "${apiGroup}".`);
    }
    return method;
  }

  // Execute the underlying REST call for the context: resolves the
  // authorization header (static string or async token provider), attaches
  // If-None-Match when an etag is cached, and invokes the resolved API method.
  async callApi(apiContext: GitHubApiContext, optionalMessage?: string): Promise<IRestResponse> {
    const token = apiContext.token;
    // CONSIDER: rename apiContext.token *to* something like apiContext.authorization
    if (typeof (token) === 'string' && (!(token as string).startsWith('token ') && !(token as string).startsWith('bearer '))) {
      if (optionalMessage) {
        debug(optionalMessage);
      }
      const warning = `API context api=${apiContext.api} does not have a token that starts with 'token [REDACTED]' or 'bearer [REDACTED], investigate this breakpoint`;
      throw new Error(warning);
    }
    let authorizationHeaderValue = typeof (token) === 'string' ? token as string : null;
    if (!authorizationHeaderValue) {
      if (typeof (token) === 'function') {
        // token is an async provider; it may return a bare string or a
        // { value, ... } header object (modern GitHub App flow)
        const response = await token();
        if (typeof (response) === 'string') {
          // happens when it isn't a more modern GitHub app response
          authorizationHeaderValue = response;
        } else {
          const value = response['value'];
          if (!value) {
            throw new Error('No value');
          }
          authorizationHeaderValue = value;
          apiContext.tokenSource = response;
        }
      }
    }
    if (optionalMessage) {
      let apiTypeSuffix = apiContext.tokenSource && apiContext.tokenSource.purpose ? ' [' + apiContext.tokenSource.purpose + ']' : '';
      if (!apiTypeSuffix && apiContext.tokenSource && apiContext.tokenSource.source) {
        apiTypeSuffix = ` [token source=${apiContext.tokenSource.source}]`;
      }
      debug(`${optionalMessage}${apiTypeSuffix}`);
    }
    const headers = {
      Authorization: authorizationHeaderValue,
    };
    if (apiContext.options.headers) {
      apiContext.headers = apiContext.options.headers;
      Object.assign(headers, apiContext.headers);
    }
    if (apiContext.etag) {
      // conditional request: a 304 means the cached copy is still valid
      headers['If-None-Match'] = apiContext.etag;
    }
    ++apiContext.cost.github.restApiCalls;
    const args = [];
    const apiMethod = apiContext.apiMethod;
    if (apiContext.fakeLink) {
      args.push(apiContext.fakeLink, headers);
    } else {
      const argOptions = Object.assign({}, apiContext.options);
      if (argOptions.octokitRequest) {
        args.push(argOptions.octokitRequest);
        delete argOptions.octokitRequest;
      }
      if (argOptions.additionalDifferentiationParameters) {
        // these exist only to differentiate cache keys; never sent to GitHub
        delete argOptions.additionalDifferentiationParameters;
      }
      argOptions.headers = headers;
      args.push(argOptions);
    }
    const thisArgument = apiMethod.thisInstance || null;
    const response = await apiMethod.apply(thisArgument, args);
    return response;
  }

  // Discard cached metadata written by an older, schema-incompatible package
  // version; otherwise prime the context with the cached etag.
  processMetadataBeforeCall(apiContext: ApiContext, metadata: IRestMetadata) {
    if (metadata && metadata.av && apiContext.libraryContext.breakingChangeGitHubPackageVersion && !semver.gte(metadata.av, apiContext.libraryContext.breakingChangeGitHubPackageVersion)) {
      console.warn(`${apiContext.redisKey.metadata} was using ${metadata.av}, which is < to ${apiContext.libraryContext.breakingChangeGitHubPackageVersion}. This is a schema break, discarding cache.`);
      metadata = undefined;
    } else if (metadata && !metadata.av) {
      // Old version of metadata, no package version, which is required for all GitHub REST API metadata now
      metadata = undefined;
    }
    if (metadata && metadata.etag) {
      apiContext.etag = metadata.etag;
      apiContext.metadata = metadata;
    }
    return metadata;
  }

  withResponseUpdateMetadata(apiContext: ApiContext, response: IRestResponse) {
    return response;
  }

  // Strip unneeded fields from headers and entity bodies before caching, using
  // the registered entity definitions; unknown APIs are left untouched (logged).
  optionalStripResponse(apiContext: ApiContext, response: IRestResponse): IRestResponse {
    const clonedResponse = Object.assign({}, response);
    if (response.headers) {
      let clonedHeaders = StripGitHubEntity(GitHubResponseType.Headers, response.headers, 'response.headers');
      if (clonedHeaders) {
        clonedResponse.headers = clonedHeaders;
        if (debugShowStandardBehavior) {
          debugCacheOptimization('using stripped headers');
        }
      }
    }
    if (response.data) {
      let apiCall = apiContext.api as string;
      if ((apiContext as GitHubApiContext).pageAwareTypeInformation) {
        const pageAwareTypeInformation = (apiContext as GitHubApiContext).pageAwareTypeInformation;
        if (pageAwareTypeInformation && pageAwareTypeInformation.methodName) {
          apiCall = pageAwareTypeInformation.methodName;
        }
      }
      const knownEntityType = entityData.apiToEntityType.get(apiCall);
      const knownResponseBodyType = entityData.apiToEntityResponseType.get(apiCall);
      if (!knownEntityType) {
        if (debugOutputUnregisteredEntityApis) {
          debugCacheOptimization(apiCall);
          debugCacheOptimization(JSON.stringify(response.data, undefined, 2));
        }
        debugCacheOptimization(`Cache Optimization WARNING: the API call ${apiCall} has no known entity response type, so data will not be optimized for caching`);
      } else if (Array.isArray(response.data) && knownResponseBodyType !== ResponseBodyType.Array) {
        // registration mismatch: got an array but API is not registered as one
        if (debugOutputUnregisteredEntityApis) {
          debugCacheOptimization(apiCall);
          debugCacheOptimization(JSON.stringify(response.data, undefined, 2));
        }
        debugCacheOptimization(`Cache Optimization WARNING: the API call ${apiCall} is not registered to return an array, but it did.. NO optimization being performed.`);
      } else if (knownResponseBodyType === ResponseBodyType.Array && Array.isArray(response.data)) {
        // strip each entity in the array; track own-props to detect extras
        let arrayClone = [];
        const remainingKeys = new Set(Object.getOwnPropertyNames(response.data));
        remainingKeys.delete('length');
        for (let i = 0; i < response.data.length; i++) {
          const entity = response.data[i];
          const entityClone = StripGitHubEntity(knownEntityType, entity, 'response.data[' + i + ']');
          arrayClone.push(entityClone ? entityClone : entity);
          remainingKeys.delete(i.toString());
        }
        if (remainingKeys.size) {
          const names = Array.from(remainingKeys.keys()).join(', ');
          throw new Error(`This entity simplification function assumes that there are no additional keys appended to the response data array. The following keys remain: ${names}`);
        }
        if (arrayClone.length) {
          clonedResponse.data = arrayClone;
          if (debugShowStandardBehavior) {
            debugCacheOptimization(`using reduced response array body for ${arrayClone.length} entities`);
          }
        }
      } else if (knownResponseBodyType === ResponseBodyType.Array) {
        // registration mismatch: registered as array but response was not one
        if (debugOutputUnregisteredEntityApis) {
          debugCacheOptimization(apiCall);
          debugCacheOptimization(JSON.stringify(response.data, undefined, 2));
        }
        debugCacheOptimization(`Cache Optimization WARNING: the API call ${apiCall} is registered to return an array, but it did not.. NO optimization being performed.`);
      } else {
        const strippedBody = StripGitHubEntity(knownEntityType, response.data, 'response.data');
        if (strippedBody) {
          clonedResponse.data = strippedBody;
          if (debugShowStandardBehavior) {
            debugCacheOptimization(`reduced response body for entity ${knownEntityType} used`);
          }
        } else {
          if (debugShowStandardBehavior) {
            debugCacheOptimization(`nothing could be reduced from the response.data for ${knownEntityType}`);
          }
        }
      }
    }
    return clonedResponse;
  }

  // Reduce response headers to the minimal metadata worth caching (etag,
  // app version, link, last-modified-derived 'updated' timestamp).
  reduceMetadataToCacheFromResponse(apiContext: ApiContext, response: IRestResponse): any {
    const headers = response ? response.headers : null;
    if (headers?.etag) {
      let reduced: IReducedGitHubMetadata = {
        etag: headers.etag,
        av: appVersion,
      };
      if (headers.link) {
        reduced.link = headers.link;
      }
      // Updated for 2021: parse last-modified to use as a more accurate 'changed' value
      const lastModifiedTime = headers?.['last-modified'];
      let updated = lastModifiedTime ? new Date(lastModifiedTime) : null;
      if (!updated) {
        // fall back to the call time when no last-modified header exists
        const calledTime = apiContext.calledTime ? apiContext.calledTime : new Date();
        updated = calledTime;
      }
      reduced.updated = updated.toISOString();
      return reduced;
    }
    return headers;
  }

  // A 304 (not modified) response means the cache is authoritative; any other
  // non-2xx status is treated as an error.
  withResponseShouldCacheBeServed(apiContext: ApiContext, response: IRestResponse): boolean | IShouldServeCache {
    if (response === undefined) {
      throw new Error('The response was undefined and unable to process.');
    }
    if (!response.headers) {
      throw new Error('As of Octokit 15.8.0, responses must have headers on the response');
    }
    const headers = response.headers;
    let retryAfter = headers['retry-after'];
    if (retryAfter) {
      debug(`Retry-After header was present: ${retryAfter}`);
    }
    const rateLimitRemaining = headers['x-ratelimit-remaining'];
    if (rateLimitRemaining) {
      apiContext.cost.github.remainingApiTokens = rateLimitRemaining;
    }
    const { status } = response;
    let cacheOk = false;
    const displayInfo = apiContext.redisKey ? apiContext.redisKey.root : '';
    if (status === 304 || response.notModified) {
      let appPurposeSuffix = apiContext.tokenSource && apiContext.tokenSource.purpose ? ` [${apiContext.tokenSource.purpose}]` : '';
      if (apiContext.tokenSource && !apiContext.tokenSource.purpose && apiContext.tokenSource.source) {
        appPurposeSuffix = ` [token source=${apiContext.tokenSource.source}]`;
      }
      debug(`304: ${displayInfo} ${appPurposeSuffix}`);
      ++apiContext.cost.github.cacheHits;
      cacheOk = true;
    } else if (status !== undefined && (status < 200 || status >= 300)) {
      // The underlying library I believe actually processes these conditions as errors anyway
      throw new Error(`Response code of ${status} is not currently supported in this system.`);
    }
    return cacheOk;
  }

  getResponseMetadata(apiContext: ApiContext, response: IRestResponse): IRestMetadata {
    const md: IRestMetadata = {
      headers: response.headers,
      status: response.status,
    };
    return md;
  }

  // Decide, based on cached metadata age and any in-flight refresh marker,
  // whether cached data may be served and whether a background refresh is due.
  withMetadataShouldCacheBeServed(apiContext: ApiContext, metadata: IRestMetadata): boolean | IShouldServeCache {
    // result can be falsy OR an object; { cache: true, refresh: true }
    // cache: whether to use the cache, if available
    // refresh: whether to refresh in the background for a newer value
    let shouldServeCache: IShouldServeCache | boolean = false;
    const maxAgeSeconds = apiContext.maxAgeSeconds;
    const updatedIso = metadata ? metadata.updated : null;
    const refreshingIso = metadata ? metadata.refreshing : null;
    if (metadata && !updatedIso) {
      debug(`${apiContext.redisKey.metadata} entity without updated date found`);
    }
    if (apiContext.generatedRefreshId) {
      debug(`${apiContext.redisKey.metadata} this is technically a refresh operation right now behind the scenes`);
    }
    if (maxAgeSeconds && updatedIso) {
      const updated = moment(updatedIso);
      const calledTime = apiContext.calledTime;
      if (updated.add(maxAgeSeconds, 'seconds').isAfter(calledTime)) {
        // still fresh: serve cache, no refresh
        shouldServeCache = true;
        shouldServeCache = {
          cache: true,
          remaining: 'expires ' + moment(updatedIso).add(maxAgeSeconds, 'seconds').fromNow(),
        };
        // debug('cache OK to serve as last updated was ' + updated);
      } else if (apiContext.backgroundRefresh) {
        let shouldRefresh = true;
        debug(apiContext.redisKey.metadata + ' need to go live as last updated ' + updated.format() + ' and our max seconds value is ' + maxAgeSeconds);
        if (refreshingIso) {
          // Another worker claimed a refresh; allow it a time window sized by
          // the configured delay plus ~1.25s per cached page before retrying.
          let secondsToAllowForRefresh = 2 + (apiContext.delayBeforeRefreshMilliseconds / 1000);
          if (Array.isArray(metadata.pages)) {
            secondsToAllowForRefresh += (metadata.pages.length * 1.25);
          }
          secondsToAllowForRefresh = Math.round(secondsToAllowForRefresh);
          const refreshWindow = moment(refreshingIso).add(secondsToAllowForRefresh, 'seconds');
          if (moment().utc().isAfter(refreshWindow)) {
            debug(`Another worker\'s refresh did not complete. Refreshing in this instance. ${apiContext.redisKey.metadata}`);
          } else {
            shouldRefresh = false;
            debug(`A refresh is already being processed by another worker. Allowing a window of ${secondsToAllowForRefresh}s before retry. ${apiContext.redisKey.metadata}`);
          }
        }
        shouldServeCache = {
          cache: true,
          refresh: shouldRefresh,
        };
      }
    } else {
      if (!metadata) {
        debug(`NO_METADATA: ${apiContext.redisKey.metadata} [empty]`);
      } else {
        debug(`NO_CHANGE: ${apiContext.redisKey.metadata} ${metadata.etag ? '[etag: ' + metadata.etag + ']' : ''}`);
      }
    }
    return shouldServeCache;
  }
}

// Per-call context for a GitHub REST request: carries the resolved library
// method, redis cache keys, cache lifetimes, and the authorization token.
export class GitHubApiContext extends ApiContext {
  private _apiMethod: any;
  private _redisKeys: IApiContextRedisKeys;
  private _cacheValues: IApiContextCacheValues;
  private _token: string | IGetAuthorizationHeader | IAuthorizationHeaderValue;

  public fakeLink?: IGitHubLink;
  public headers?: any;
  public pageAwareTypeInformation?: any; // used for extended cache options

  constructor(api: any, options: any) {
    super(api, options);
    const root = IntelligentEngine.redisKeyForApi(this.apiTypePrefix, api, options);
    this._redisKeys = {
      root: root,
      metadata: root ? root + IntelligentEngine.redisKeyAspectSuffix('headers') : IntelligentEngine.redisKeyForApi(this.apiTypePrefix, api, options, 'headers'),
    };
    this._cacheValues = {
      longtermMetadata: longtermMetadataMinutes,
      longtermResponse: longtermResponseMinutes,
      acceleratedExpiration: acceleratedExpirationMinutes,
    };
  }

  get token(): string | IGetAuthorizationHeader | IAuthorizationHeaderValue {
    return this._token;
  }

  get apiMethod(): any {
    return this._apiMethod;
  }

  get apiTypePrefix(): string {
    return 'github#';
  }

  get redisKey(): IApiContextRedisKeys {
    return this._redisKeys;
  }

  get cacheValues(): IApiContextCacheValues {
    return this._cacheValues;
  }

  get contextType(): ApiContextType {
    return ApiContextType.GitHubRestApi;
  }

  // Resolve and bind the concrete library method for this context's API name.
  attachToApiImplementation(implementationLibrary: any) {
    if (this._apiMethod) {
      // NOTE: this restriction was not in place in the original implementation
      // and is probably not needed
      throw new Error('API has already been attached to');
    }
    const method = IntelligentGitHubEngine.findLibraryMethod(implementationLibrary, this.api);
    method['thisInstance'] = implementationLibrary;
    // // HACK, is there a better way?
    this._apiMethod = method;
  }

  setLibraryContext(libraryContext: any) {
    this.libraryContext = libraryContext;
  }

  // Accepts a header-value pair, a raw string, or a token provider function.
  overrideToken(token: string | IGetAuthorizationHeader | IAuthorizationHeaderValue) {
    if (token && token['value']) {
      const asPair = token as IAuthorizationHeaderValue;
      this._token = asPair.value;
      this.tokenSource = asPair;
    } else if (typeof (token) === 'string') {
      this._token = token as string;
    } else {
      this._token = token;
    }
  }

  overrideApiMethod(method: any) {
    this._apiMethod = method;
  }
}

// Bind the context to the library implementation exactly once.
function prepareApiContextForGithub(apiContext: GitHubApiContext, github: any): GitHubApiContext {
  if (!apiContext.apiMethod) {
    apiContext.attachToApiImplementation(github);
  }
  return apiContext;
}

// Create a GitHubApiContext fully wired to the library and library context.
export function createFullContext(api: any, options: any, github: any, libraryContext: any): GitHubApiContext {
  const apiContext = prepareApiContextForGithub(createApiContextForGithub(api, options), github);
  apiContext.setLibraryContext(libraryContext);
  return apiContext;
}

function createApiContextForGithub(api: any, options: any): GitHubApiContext {
  const apiContext = new GitHubApiContext(api, options);
  return apiContext;
}

// Produce a reduced clone of a GitHub entity, keeping registered fields,
// deleting registered drop fields, and recursing into registered sub-objects.
// Returns null/undefined when nothing needed stripping (caller keeps original).
export function StripGitHubEntity(entityType: GitHubResponseType, incomingEntity: any, keyOrName: string): any | null {
  let entityClone = null;
  if (!incomingEntity || typeof (incomingEntity) !== 'object') {
    return; // no change
  }
  const keepers = entityData.entityPropertiesToKeep.get(entityType) || emptySet;
  const droppers = entityData.entityPropertiesToDrop.get(entityType) || emptySet;
  const objects = entityData.entityPropertiesSubsets.get(entityType);
  const entityKeys = Object.getOwnPropertyNames(incomingEntity);
  for (let j = 0; j < entityKeys.length; j++) {
    const fieldName = entityKeys[j];
    const fieldObjectType = objects ? objects.get(fieldName) : null;
    if (keepers.has(fieldName)) {
      // Safe known field to keep
    } else if (droppers.has(fieldName)) {
      // clone lazily: only pay for a copy once a change is actually needed
      if (!entityClone) {
        entityClone = Object.assign({}, incomingEntity);
        if (debugShowStandardBehavior) {
          debugCacheOptimization(`stripping from response ${keyOrName} of type ${entityType}: (clone)`);
        }
      }
      delete entityClone[fieldName];
      if (debugShowStandardBehavior) {
        debugCacheOptimization(`field strip: ${fieldName} from ${keyOrName} entity (${entityType})`);
      }
    } else if (fieldObjectType) {
      // this property itself is a sub-object that might want to get parsed
      if (!entityClone) {
        entityClone = Object.assign({}, incomingEntity);
        if (debugShowStandardBehavior) {
          debugCacheOptimization(`stripping from response ${keyOrName} of type ${entityType}: (clone)`);
        }
      }
      const newSubObject = StripGitHubEntity(fieldObjectType, entityClone[fieldName], `${keyOrName}.${fieldName}`);
      if (newSubObject) {
        entityClone[fieldName] = newSubObject;
        if (debugShowStandardBehavior) {
          debugCacheOptimization(`replacing ${keyOrName}.${fieldName} sub-entity with a subset object (${entityType})`);
        }
      } else {
        if (debugShowStandardBehavior) {
          debugCacheOptimization(`no subset required for sub-entity ${keyOrName}.${fieldName} (${entityType})`);
        }
      }
    } else {
      debugCacheOptimization(`*NOT* stripping ${keyOrName}.${fieldName} (type ${entityType}) (not a registered field)`);
    }
  }
  return entityClone;
}
the_stack
// ---------------------------------------------------------------------------
// JSON Schema -> GraphQL translation layer built on graphql-compose.
// visitJSONSchema() walks the schema; every node is replaced by an
// { input, output } pair of composers that later visits consume, until the
// root node (title '_schema') yields the fully-populated SchemaComposer.
// ---------------------------------------------------------------------------
import { AnyTypeComposer, camelCase, EnumTypeComposerValueConfigDefinition, GraphQLJSON, InputTypeComposerFieldConfigAsObjectDefinition, InputTypeComposerFieldConfigMap, isSomeInputTypeComposer, ObjectTypeComposer, ObjectTypeComposerFieldConfig, ObjectTypeComposerFieldConfigMap, ObjectTypeComposerFieldConfigMapDefinition, ScalarTypeComposer, SchemaComposer, } from 'graphql-compose'; import { GraphQLBoolean, GraphQLFloat, GraphQLInt, GraphQLScalarType, GraphQLString, isNonNullType, Kind, } from 'graphql'; import { GraphQLBigInt, GraphQLDateTime, GraphQLEmailAddress, GraphQLIPv4, GraphQLIPv6, GraphQLTime, GraphQLURL, GraphQLVoid, RegularExpression, } from 'graphql-scalars'; import { sanitizeNameForGraphQL } from '@graphql-mesh/utils'; import { Logger } from '@graphql-mesh/types'; import Ajv, { ValidateFunction } from 'ajv'; import addFormats from 'ajv-formats'; import { inspect } from '@graphql-tools/utils'; import { visitJSONSchema, JSONSchema } from 'json-machete';
// Composer pair produced per visited node; `input` is optional because some
// nodes (e.g. the '_schema' root) only contribute an output composer.
interface TypeComposers { input?: AnyTypeComposer<any>; output: AnyTypeComposer<any> | SchemaComposer; }
// String formats whose validation is delegated to Ajv-backed scalars below.
const JSONSchemaStringFormats = [ 'date', 'hostname', 'regex', 'json-pointer', 'relative-json-pointer', 'uri-reference', 'uri-template', ];
// Builds one GraphQLScalarType per listed format; each scalar validates values
// through an Ajv-compiled { type: 'string', format } schema on both sides.
const JSONSchemaStringFormatScalarMapFactory = (ajv: Ajv) => new Map<string, GraphQLScalarType>( JSONSchemaStringFormats.map(format => { const schema = { type: 'string', format, }; const validate = ajv.compile(schema); const coerceString = (value: string) => { if (validate(value)) { return value; } throw new Error(`Expected ${format} but got: ${value}`); }; const scalar = new GraphQLScalarType({ name: camelCase(format), description: `Represents ${format} values`, serialize: coerceString, parseValue: coerceString, parseLiteral: ast => { if (ast.kind === Kind.STRING) { return coerceString(ast.value); } throw new Error(`Expected string in ${format} format but got: ${(ast as any).value}`); }, extensions: { codegenScalarType: 'string', }, }); return [format, scalar]; }) );
// SDL registered on demand for the @oneOf directive placed on generated
// union-like input objects.
const ONE_OF_DEFINITION = /* GraphQL */ ` directive @oneOf on INPUT_OBJECT | FIELD_DEFINITION `;
// Entry point: converts a dereferenced JSON Schema into graphql-compose
// composers. Deferred Query/Mutation/Subscription argument wiring is parked in
// `futureTasks` and flushed when the '_schema' root node is reached.
// NOTE(review): subSchema is mutated in place in a few branches (type arrays,
// object promotion) — the visitor appears to rely on that; confirm before refactoring.
export function getComposerFromJSONSchema( schema: JSONSchema, logger: Logger, generateInterfaceFromSharedFields = false ): Promise<TypeComposers> { const schemaComposer = new SchemaComposer(); const ajv = new Ajv({ strict: false, }); addFormats(ajv); const formatScalarMap = JSONSchemaStringFormatScalarMapFactory(ajv); const futureTasks = new Set<VoidFunction>(); return visitJSONSchema(schema, function mutateFn(subSchema, { path }): any { const getTypeComposer = (): any => { if (typeof subSchema === 'boolean') { const typeComposer = schemaComposer.getAnyTC(GraphQLJSON); return subSchema ? { input: typeComposer, output: typeComposer, } : undefined; } const getValidTypeName = (isInput: boolean) => { const sanitizedName = sanitizeNameForGraphQL(isInput ? subSchema.title + '_Input' : subSchema.title); if (schemaComposer.has(sanitizedName)) { let i = 2; while (schemaComposer.has(sanitizedName + i)) { i++; } return sanitizedName + i; } return sanitizedName; }; const validate = (data: any) => ajv.validate( { $ref: '#/definitions/schema' + path, definitions: { schema, }, }, data ); const getGenericJSONScalar = (isInput: boolean, description?: string) => { const coerceValue = (value: any) => { if (!validate(value)) { throw new Error(`${inspect(value)} is not valid!`); } return value; }; return schemaComposer.createScalarTC({ name: getValidTypeName(isInput), description: subSchema.description || description, serialize: coerceValue, parseValue: coerceValue, parseLiteral: (...args) => { const value = GraphQLJSON.parseLiteral(...args); return coerceValue(value); }, }); }; const getUnionTypeComposers = (typeComposersList: any[]) => {
// Filter null types
typeComposersList = typeComposersList.filter( typeComposers => typeComposers.input.getTypeName() !== 'Void' || typeComposers.output.getTypeName() !== 'Void' ); if (typeComposersList.length === 1) { return typeComposersList[0]; } const unionInputFields: Record<string, any> = {}; const outputTypeComposers: ObjectTypeComposer<any>[] = []; let ableToUseGraphQLUnionType = true; typeComposersList.forEach(typeComposers => { const { input, output } = typeComposers; if (isSomeInputTypeComposer(output)) { ableToUseGraphQLUnionType = false; } else { outputTypeComposers.push(output); } unionInputFields[input.getTypeName()] = { type: input, }; }); const input = schemaComposer.createInputTC({ name: getValidTypeName(true), description: subSchema.description, fields: unionInputFields, }); if (!schemaComposer.hasDirective('oneOf')) { schemaComposer.addTypeDefs(ONE_OF_DEFINITION); } input.setDirectives([ { name: 'oneOf', args: {}, }, ]); let output: AnyTypeComposer<any>; if (ableToUseGraphQLUnionType) { const resolveType = (data: any) => { if (data.__typename) { return data.__typename; } else if (data.resourceType) { return data.resourceType; } const errors = new Map<string, string>(); for (const outputTypeComposer of outputTypeComposers) { const validateFn = outputTypeComposer.getExtension('validate') as ValidateFunction; if (validateFn) { const isValid = validateFn(data); const typeName = outputTypeComposer.getTypeName(); if (isValid) { return typeName; } errors.set(typeName, inspect(ajv.errors)); } } throw new AggregateError( errors.values(), `Received data doesn't met the union; \n Data: ${inspect(data)} \n Errors:\n${[...errors.entries()].map( ([typeName, error]) => ` - ${typeName}: \n ${error}\n` )}` ); }; let sharedFields: Record<string, ObjectTypeComposerFieldConfig<any, any, any>>; if (generateInterfaceFromSharedFields) { for (const typeComposer of outputTypeComposers) { const fieldMap = typeComposer.getFields(); if (!sharedFields) { sharedFields = { ...fieldMap }; } else { for (const potentialSharedFieldName in sharedFields) { if ( !( potentialSharedFieldName in fieldMap && fieldMap[potentialSharedFieldName].type.getTypeName() === sharedFields[potentialSharedFieldName].type.getTypeName() ) ) { sharedFields[potentialSharedFieldName] = undefined; delete sharedFields[potentialSharedFieldName]; } } } } } if (sharedFields && Object.keys(sharedFields).length > 0) { output = schemaComposer.createInterfaceTC({ name: getValidTypeName(false), description: subSchema.description, fields: sharedFields, resolveType, }); for (const typeComposer of outputTypeComposers) { typeComposer.addInterface(output);
// GraphQL removes implementations
schemaComposer.addSchemaMustHaveType(typeComposer); } } else {
// If no shared fields found
output = schemaComposer.createUnionTC({ name: getValidTypeName(false), description: subSchema.description, types: outputTypeComposers, resolveType, }); } } else { output = getGenericJSONScalar(false); } return { input, output, }; }; if (subSchema.pattern) { const scalarType = new RegularExpression(getValidTypeName(false), new RegExp(subSchema.pattern), { description: subSchema.description, }); const typeComposer = schemaComposer.getAnyTC(scalarType); return { input: typeComposer, output: typeComposer, }; } if (subSchema.const) { const tsTypeName = JSON.stringify(subSchema.const); const scalarTypeName = getValidTypeName(false); const scalarType = new RegularExpression(scalarTypeName, new RegExp(subSchema.const), { description: subSchema.description || `A field whose value is ${tsTypeName}`, errorMessage: (_r, v: string) => `Expected ${tsTypeName} but got ${JSON.stringify(v)}`, }); scalarType.extensions = { codegenScalarType: tsTypeName, }; const typeComposer = schemaComposer.createScalarTC(scalarType); return { input: typeComposer, output: typeComposer, }; } if (subSchema.enum) { const values: Record<string, EnumTypeComposerValueConfigDefinition> = {}; for (const value of subSchema.enum) { const enumKey = sanitizeNameForGraphQL(value.toString()); values[enumKey] = { value, }; } const typeComposer = schemaComposer.createEnumTC({ name: getValidTypeName(false), values, }); return { input: typeComposer, output: typeComposer, }; } if (subSchema.oneOf && !subSchema.properties) { return getUnionTypeComposers(subSchema.oneOf); } if (subSchema.allOf && !subSchema.properties) { const inputFieldMap: InputTypeComposerFieldConfigMap = {}; const fieldMap: ObjectTypeComposerFieldConfigMap<any, any> = {}; let ableToUseGraphQLInputObjectType = true; let ableToUseGraphQLObjectType = true; for (const maybeTypeComposers of subSchema.allOf as any) { const { input: inputTypeComposer, output: outputTypeComposer } = maybeTypeComposers; if (inputTypeComposer instanceof ScalarTypeComposer) { ableToUseGraphQLInputObjectType = false; } else { const inputTypeElemFieldMap = inputTypeComposer.getFields(); for (const fieldName in inputTypeElemFieldMap) { const field = inputTypeElemFieldMap[fieldName]; inputFieldMap[fieldName] = field; } } if (outputTypeComposer instanceof ScalarTypeComposer) { ableToUseGraphQLObjectType = false; } else { const typeElemFieldMap = outputTypeComposer.getFields(); for (const fieldName in typeElemFieldMap) { const field = typeElemFieldMap[fieldName]; fieldMap[fieldName] = field; } } } let inputTypeComposer, outputTypeComposer; if (ableToUseGraphQLObjectType) { outputTypeComposer = schemaComposer.createObjectTC({ name: getValidTypeName(false), description: subSchema.description, fields: fieldMap, }); } else { outputTypeComposer = getGenericJSONScalar(false); } if (ableToUseGraphQLInputObjectType) { inputTypeComposer = schemaComposer.createInputTC({ name: getValidTypeName(true), description: subSchema.description, fields: inputFieldMap, }); } else { inputTypeComposer = ableToUseGraphQLObjectType ? getGenericJSONScalar(true) : outputTypeComposer; } return { input: inputTypeComposer, output: outputTypeComposer, }; } if (subSchema.anyOf && !subSchema.properties) {
// It should not have `required` because it is `anyOf` not `allOf`
const inputFieldMap: InputTypeComposerFieldConfigMap = {}; const fieldMap: ObjectTypeComposerFieldConfigMap<any, any> = {}; let ableToUseGraphQLInputObjectType = true; let ableToUseGraphQLObjectType = true; for (const typeComposers of subSchema.anyOf as any) { const { input: inputTypeComposer, output: outputTypeComposer } = typeComposers; if (inputTypeComposer instanceof ScalarTypeComposer) { ableToUseGraphQLInputObjectType = false; } else { const inputTypeElemFieldMap = inputTypeComposer.getFields(); for (const fieldName in inputTypeElemFieldMap) { const field = inputTypeElemFieldMap[fieldName]; inputFieldMap[fieldName] = isNonNullType(field.type.getType()) ? { ...field, type: () => field.type.ofType, } : field; } } if (outputTypeComposer instanceof ScalarTypeComposer) { ableToUseGraphQLObjectType = false; } else { const typeElemFieldMap = outputTypeComposer.getFields(); for (const fieldName in typeElemFieldMap) { const field = typeElemFieldMap[fieldName]; fieldMap[fieldName] = isNonNullType(field.type.getType()) ? { ...field, type: () => field.type.ofType, } : field; } } } let inputTypeComposer, outputTypeComposer; if (ableToUseGraphQLObjectType) { outputTypeComposer = schemaComposer.createObjectTC({ name: getValidTypeName(false), description: subSchema.description, fields: fieldMap, }); } else { outputTypeComposer = getGenericJSONScalar(false); } if (ableToUseGraphQLInputObjectType) { inputTypeComposer = schemaComposer.createInputTC({ name: getValidTypeName(true), description: subSchema.description, fields: inputFieldMap, }); } else { inputTypeComposer = ableToUseGraphQLObjectType ? getGenericJSONScalar(true) : outputTypeComposer; } return { input: inputTypeComposer, output: outputTypeComposer, }; } if (Array.isArray(subSchema.type)) { const validTypes = subSchema.type.filter((typeName: string) => typeName !== 'null'); if (validTypes.length === 1) { subSchema.type = validTypes[0];
// continue with the single type
} else { const typeComposer = getGenericJSONScalar(false); return { input: typeComposer, output: typeComposer, }; } } switch (subSchema.type) { case 'boolean': { const typeComposer = schemaComposer.getAnyTC(GraphQLBoolean); return { input: typeComposer, output: typeComposer, }; } case 'null': { const typeComposer = schemaComposer.getAnyTC(GraphQLVoid); return { input: typeComposer, output: typeComposer, }; } case 'integer': { if (subSchema.format === 'int64') { const typeComposer = schemaComposer.getAnyTC(GraphQLBigInt); return { input: typeComposer, output: typeComposer, }; } const typeComposer = schemaComposer.getAnyTC(GraphQLInt); return { input: typeComposer, output: typeComposer, }; } case 'number': { const typeComposer = schemaComposer.getAnyTC(GraphQLFloat); return { input: typeComposer, output: typeComposer, }; } case 'string': if (subSchema.minLength || subSchema.maxLength) { const typeComposerName = getValidTypeName(false); const coerceString = (v: any) => { if (v != null) { const vStr = v.toString(); if (typeof subSchema.minLength !== 'undefined' && vStr.length < subSchema.minLength) { throw new Error(`${typeComposerName} cannot be less than ${subSchema.minLength} but given ${vStr}`); } if (typeof subSchema.maxLength !== 'undefined' && vStr.length > subSchema.maxLength) { throw new Error(`${typeComposerName} cannot be more than ${subSchema.maxLength} but given ${vStr}`); } return vStr; } }; const typeComposer = schemaComposer.createScalarTC({ name: typeComposerName, description: subSchema.description, serialize: coerceString, parseValue: coerceString, parseLiteral: ast => { if ('value' in ast) { return coerceString(ast.value); } return null; }, }); return { input: typeComposer, output: typeComposer, }; } switch (subSchema.format) { case 'date-time': { const typeComposer = schemaComposer.getAnyTC(GraphQLDateTime); return { input: typeComposer, output: typeComposer, }; } case 'time': { const typeComposer = schemaComposer.getAnyTC(GraphQLTime); return { input: typeComposer, output: typeComposer, }; } case 'email': { const typeComposer = schemaComposer.getAnyTC(GraphQLEmailAddress); return { input: typeComposer, output: typeComposer, }; } case 'ipv4': { const typeComposer = schemaComposer.getAnyTC(GraphQLIPv4); return { input: typeComposer, output: typeComposer, }; } case 'ipv6': { const typeComposer = schemaComposer.getAnyTC(GraphQLIPv6); return { input: typeComposer, output: typeComposer, }; } case 'uri': { const typeComposer = schemaComposer.getAnyTC(GraphQLURL); return { input: typeComposer, output: typeComposer, }; } case 'date': case 'idn-email': case 'hostname': case 'regex': case 'json-pointer': case 'relative-json-pointer': case 'uri-reference': case 'iri': case 'iri-reference': case 'uri-template': {
// Trust ajv
const typeComposer = schemaComposer.getAnyTC(formatScalarMap.get(subSchema.format)); return { input: typeComposer, output: typeComposer, }; } default: { const typeComposer = schemaComposer.getAnyTC(GraphQLString); return { input: typeComposer, output: typeComposer, }; } } case 'array': if ( typeof subSchema.items === 'object' && !Array.isArray(subSchema.items) && Object.keys(subSchema.items).length > 0 ) { const typeComposers = subSchema.items; return { input: typeComposers.input.getTypePlural(), output: typeComposers.output.getTypePlural(), }; } if (subSchema.contains) {
// Scalars cannot be in union type
const typeComposer = getGenericJSONScalar(false).getTypePlural(); return { input: typeComposer, output: typeComposer, }; } if (typeof subSchema.items === 'object' && Array.isArray(subSchema.items)) { const existingItems = [...(subSchema.items as any)]; /* TODO if (subSchema.additionalItems) { existingItems.push(subSchema.additionalItems); } */ const { input: inputTypeComposer, output: outputTypeComposer } = getUnionTypeComposers(existingItems); return { input: inputTypeComposer.getTypePlural(), output: outputTypeComposer.getTypePlural(), }; }
// If it doesn't have any clue
{ const typeComposer = getGenericJSONScalar(false).getTypePlural(); return { input: typeComposer, output: typeComposer, }; } case 'object': const fieldMap: ObjectTypeComposerFieldConfigMapDefinition<any, any> = {}; let inputFieldMap: Record<string, InputTypeComposerFieldConfigAsObjectDefinition & { type: any }> = {}; if (subSchema.properties) { subSchema.type = 'object'; for (const propertyName in subSchema.properties) {
// TODO: needs to be fixed
if (propertyName === 'additionalProperties') { continue; } const typeComposers = subSchema.properties[propertyName]; const fieldName = sanitizeNameForGraphQL(propertyName); fieldMap[fieldName] = { type: () => subSchema.required?.includes(propertyName) ? typeComposers.output.getTypeNonNull() : typeComposers.output,
// Make sure you get the right property
resolve: root => root[propertyName], }; inputFieldMap[fieldName] = { type: () => subSchema.required?.includes(propertyName) ? typeComposers.input?.getTypeNonNull() : typeComposers.input,
// Let execution logic know what is the expected propertyName
extensions: { propertyName, }, }; } } if (subSchema.additionalProperties) { fieldMap.additionalProperties = { type: GraphQLJSON, resolve: (root: any) => root, }; inputFieldMap = {}; } if (subSchema.title === '_schema') { futureTasks.forEach(futureTask => futureTask()); return { output: schemaComposer, }; } if (subSchema.title === 'Query') { const typeComposer = schemaComposer.Query; typeComposer.addFields(fieldMap); return { output: typeComposer, }; } if (subSchema.title === 'Mutation') { const typeComposer = schemaComposer.Mutation; typeComposer.addFields(fieldMap); return { output: typeComposer, }; } if (subSchema.title === 'Subscription') { const typeComposer = schemaComposer.Subscription; typeComposer.addFields(fieldMap); return { output: typeComposer, }; } if (subSchema.title === 'QueryInput') { const typeComposer = schemaComposer.Query; for (const fieldName in inputFieldMap) { futureTasks.add(() => typeComposer.addFieldArgs(fieldName, { input: { type: () => inputFieldMap[fieldName].type(), }, }) ); } return { output: typeComposer, }; } if (subSchema.title === 'MutationInput') { const typeComposer = schemaComposer.Mutation; for (const fieldName in inputFieldMap) { futureTasks.add(() => typeComposer.addFieldArgs(fieldName, { input: { type: () => inputFieldMap[fieldName].type(), }, }) ); } return { output: typeComposer, }; } if (subSchema.title === 'SubscriptionInput') { const typeComposer = schemaComposer.Subscription; for (const fieldName in inputFieldMap) { futureTasks.add(() => typeComposer.addFieldArgs(fieldName, { input: { type: () => inputFieldMap[fieldName].type(), }, }) ); } return { output: typeComposer, }; } const output = Object.keys(fieldMap).length === 0 ? getGenericJSONScalar(false) : schemaComposer.createObjectTC({ name: getValidTypeName(false), description: subSchema.description, fields: fieldMap, extensions: { validate, }, }); const input = Object.keys(inputFieldMap).length === 0 ? getGenericJSONScalar(true) : schemaComposer.createInputTC({ name: getValidTypeName(true), description: subSchema.description, fields: inputFieldMap, }); return { input, output, }; } logger.warn(`GraphQL Type cannot be created for this JSON Schema definition; subSchema: ${inspect(subSchema)} path: ${inspect(path)}`); const typeComposer = schemaComposer.getAnyTC(GraphQLJSON); return { input: typeComposer, output: typeComposer, }; }; const result = getTypeComposer(); return result; }); }
the_stack
// VitePress site configuration for the Chinese (zh-CN) translation of the
// Vue.js documentation. Labels/strings below are runtime data — do not edit.
import fs from 'fs'
import path from 'path'
import { defineConfigWithTheme } from 'vitepress'
import type { Config as ThemeConfig } from '@vue/theme'
import baseConfig from '@vue/theme/config'
import { headerPlugin } from './headerMdPlugin'

// Top navigation bar entries.
const nav = [ { text: '文档', activeMatch: `^/(guide|style-guide|cookbook|examples)/`, items: [ { text: '指南', link: '/guide/introduction' }, { text: '教程', link: '/tutorial/' }, { text: '范例', link: '/examples/' }, { text: '快速开始', link: '/guide/quick-start' },
// { text: '风格指南', link: '/style-guide/' },
{ text: 'Vue 2 Docs', link: 'https://v2.vuejs.org' }, { text: '从 Vue 2 迁移', link: 'https://v3-migration.vuejs.org/' } ] }, { text: 'API', activeMatch: `^/api/`, link: '/api/' }, { text: 'Playground', link: 'https://sfc.vuejs.org' }, { text: '生态系统', activeMatch: `^/ecosystem/`, items: [ { text: '资源', items: [ { text: '合作伙伴', link: '/partners/' }, { text: '主题', link: '/ecosystem/themes' }, { text: '工作', link: 'https://vuejobs.com/?ref=vuejs' }, { text: 'T-Shirt 商店', link: 'https://vue.threadless.com/' } ] }, { text: '核心库', items: [ { text: 'Vue Router', link: 'https://router.vuejs.org/' }, { text: 'Pinia', link: 'https://pinia.vuejs.org/' } ] }, { text: '视频课程', items: [ { text: 'Vue Mastery', link: 'https://www.vuemastery.com/courses/' }, { text: 'Vue School', link: 'https://vueschool.io/?friend=vuejs&utm_source=Vuejs.org&utm_medium=Link&utm_content=Navbar%20Dropdown' } ] }, { text: '帮助', items: [ { text: 'Discord 聊天室', link: 'https://discord.com/invite/HBherRA' }, { text: 'GitHub 论坛', link: 'https://github.com/vuejs/core/discussions' }, { text: 'DEV Community', link: 'https://dev.to/t/vue' } ] }, { text: 'News', items: [ { text: '博客', link: 'https://blog.vuejs.org/' }, { text: 'Twitter', link: 'https://twitter.com/vuejs' }, { text: '新闻', link: 'https://news.vuejs.org/' }, { text: '活动', link: 'https://events.vuejs.org/' } ] } ] }, { text: '语言', items: [ { text: '简体中文', link: 'https://staging-cn.vuejs.org/' }, { text: 'English', link: 'https://staging.vuejs.org/' } ] }, { text: '关于', activeMatch: `^/about/`, items: [ { text: 'FAQ', link: '/about/faq' }, { text: '团队', link: '/about/team' }, { text: '版本发布', link: '/about/releases' }, { text: '社区指南', link: '/about/community-guide' }, { text: '行为准则', link: '/about/coc' }, { text: '纪录片', link: 'https://www.youtube.com/watch?v=OrxmtDw4pVI' } ] }, { text: '赞助者', link: '/sponsor/' }, { text: 'Partners', link: '/partners/', activeMatch: `^/partners/` } ]

// Sidebar trees, keyed by path prefix; exported for reuse by the theme.
export const sidebar = { '/guide/': [ { text: '开始', items: [ { text: '简介', link: '/guide/introduction' }, { text: '快速开始', link: '/guide/quick-start' } ] }, { text: '基础', items: [ { text: '创建一个应用', link: '/guide/essentials/application' }, { text: '模板语法', link: '/guide/essentials/template-syntax' }, { text: '响应式基础', link: '/guide/essentials/reactivity-fundamentals' }, { text: '计算属性', link: '/guide/essentials/computed' }, { text: '类与样式绑定', link: '/guide/essentials/class-and-style' }, { text: '条件渲染', link: '/guide/essentials/conditional' }, { text: '列表渲染', link: '/guide/essentials/list' }, { text: '事件处理', link: '/guide/essentials/event-handling' }, { text: '表单输入绑定', link: '/guide/essentials/forms' }, { text: '生命周期', link: '/guide/essentials/lifecycle' }, { text: '侦听器', link: '/guide/essentials/watchers' }, { text: '模板 ref', link: '/guide/essentials/template-refs' }, { text: '组件基础', link: '/guide/essentials/component-basics' } ] }, { text: '深入组件', items: [ { text: '注册', link: '/guide/components/registration' }, { text: 'Prop', link: '/guide/components/props' }, { text: '事件', link: '/guide/components/events' }, { text: '透传 Attribute', link: '/guide/components/attrs' }, { text: '插槽', link: '/guide/components/slots' }, { text: '依赖注入', link: '/guide/components/provide-inject' }, { text: '异步组件', link: '/guide/components/async' } ] }, { text: '可重用性', items: [ { text: '组合式函数', link: '/guide/reusability/composables' }, { text: '自定义指令', link: '/guide/reusability/custom-directives' }, { text: '插件', link: '/guide/reusability/plugins' } ] }, { text: '内置组件', items: [ { text: 'Transition', link: '/guide/built-ins/transition' }, { text: 'TransitionGroup', link: '/guide/built-ins/transition-group' }, { text: 'KeepAlive', link: '/guide/built-ins/keep-alive' }, { text: 'Teleport', link: '/guide/built-ins/teleport' }, { text: 'Suspense', link: '/guide/built-ins/suspense' } ] }, { text: '升级规模', items: [ { text: '单文件组件', link: '/guide/scaling-up/sfc' }, { text: '工具链', link: '/guide/scaling-up/tooling' }, { text: '路由', link: '/guide/scaling-up/routing' }, { text: '状态管理', link: '/guide/scaling-up/state-management' }, { text: '测试', link: '/guide/scaling-up/testing' }, { text: '服务端渲染(SSR)', link: '/guide/scaling-up/ssr' } ] }, { text: '最佳实践', items: [ { text: '生产部署', link: '/guide/best-practices/production-deployment' }, { text: '性能', link: '/guide/best-practices/performance' }, { text: '无障碍访问', link: '/guide/best-practices/accessibility' }, { text: '安全', link: '/guide/best-practices/security' } ] }, { text: 'TypeScript', items: [ { text: '总览', link: '/guide/typescript/overview' }, { text: 'TS 与组合式 API', link: '/guide/typescript/composition-api' }, { text: 'TS 与选项式 API', link: '/guide/typescript/options-api' } ] }, { text: '进阶主题', items: [ { text: '使用 Vue 的多种方式', link: '/guide/extras/ways-of-using-vue' }, { text: '组合式 API FAQ', link: '/guide/extras/composition-api-faq' }, { text: '深入响应式系统', link: '/guide/extras/reactivity-in-depth' }, { text: '渲染机制', link: '/guide/extras/rendering-mechanism' }, { text: '渲染函数 & JSX', link: '/guide/extras/render-function' }, { text: 'Vue 与 Web Components', link: '/guide/extras/web-components' }, { text: '动画技巧', link: '/guide/extras/animation' }, { text: '响应性语法糖', link: '/guide/extras/reactivity-transform' }
// {
// text: '为 Vue 构建一个库',
// link: '/guide/extras/building-a-library'
// },
// { text: 'Custom Renderers', link: '/guide/extras/custom-renderer' },
// {
// text: 'Vue for React 开发者',
// link: '/guide/extras/vue-for-react-devs'
// }
] } ],
'/api/': [ { text: '全局 API', items: [ { text: '引用', link: '/api/application' }, { text: '概要', link: '/api/general' } ] }, { text: '组合式 API', items: [ { text: 'setup()', link: '/api/composition-api-setup' }, { text: '响应式: 核心', link: '/api/reactivity-core' }, { text: '响应式: 工具', link: '/api/reactivity-utilities' }, { text: '响应式: 进阶', link: '/api/reactivity-advanced' }, { text: '生命周期钩子', link: '/api/composition-api-lifecycle' }, { text: '依赖注入', link: '/api/composition-api-dependency-injection' } ] }, { text: '选项式 API', items: [ { text: '状态选项', link: '/api/options-state' }, { text: '渲染选项', link: '/api/options-rendering' }, { text: '生命周期选项', link: '/api/options-lifecycle' }, { text: '组合选项', link: '/api/options-composition' }, { text: '其他杂项', link: '/api/options-misc' }, { text: '组件实例', link: '/api/component-instance' } ] }, { text: '内置内容', items: [ { text: '指令', link: '/api/built-in-directives' }, { text: '组件', link: '/api/built-in-components' }, { text: '特殊元素', link: '/api/built-in-special-elements' }, { text: '特殊 Attribute', link: '/api/built-in-special-attributes' } ] }, { text: '单文件组件', items: [ { text: '语法定义', link: '/api/sfc-spec' }, { text: '<script setup>', link: '/api/sfc-script-setup' }, { text: 'CSS 功能', link: '/api/sfc-css-features' } ] }, { text: '进阶 API', items: [ { text: '渲染函数', link: '/api/render-function' }, { text: '服务端渲染', link: '/api/ssr' }, { text: 'TypeScript 工具类', link: '/api/utility-types' }, { text: '自定义渲染', link: '/api/custom-renderer' } ] } ], '/examples/': [ { text: '基础', items: [ { text: '你好,世界', link: '/examples/#hello-world' }, { text: '处理用户输入', link: '/examples/#handling-input' }, { text: 'Attribute 绑定', link: '/examples/#attribute-bindings' }, { text: '条件与循环', link: '/examples/#conditionals-and-loops' }, { text: '表单绑定', link: '/examples/#form-bindings' }, { text: '简单组件', link: '/examples/#simple-component' } ] }, { text: '实战', items: [ { text: 'Markdown 编辑器', link: '/examples/#markdown' }, { text: '获取数据', link: '/examples/#fetching-data' }, { text: '带有排序和过滤器的网格', link: '/examples/#grid' }, { text: '树状视图', link: '/examples/#tree' }, { text: 'SVG 图像', link: '/examples/#svg' }, { text: '带过渡动效的模态框', link: '/examples/#modal' }, { text: '带过渡动效的列表', link: '/examples/#list-transition' }, { text: 'TodoMVC', link: '/examples/#todomvc' } ] }, {
// https://eugenkiss.github.io/7guis/
text: '7 GUIs', items: [ { text: '计数器', link: '/examples/#counter' }, { text: '温度转换器', link: '/examples/#temperature-converter' }, { text: '机票预订', link: '/examples/#flight-booker' }, { text: '计时器', link: '/examples/#timer' }, { text: 'CRUD', link: '/examples/#crud' }, { text: '画圆', link: '/examples/#circle-drawer' }, { text: '单元格', link: '/examples/#cells' } ] } ], '/style-guide/': [ { text: 'Style Guide', items: [ { text: 'Overview', link: '/style-guide/' }, { text: 'A - Essential', link: '/style-guide/rules-essential' }, { text: 'B - Strongly Recommended', link: '/style-guide/rules-strongly-recommended' }, { text: 'C - Recommended', link: '/style-guide/rules-recommended' }, { text: 'D - Use with Caution', link: '/style-guide/rules-use-with-caution' } ] } ] }

// Main site config: extends the shared @vue/theme base config with zh-CN
// metadata, head tags, Algolia search, markdown and Vite build options.
export default defineConfigWithTheme<ThemeConfig>({ extends: baseConfig, lang: 'zh-CN', title: 'Vue.js', description: 'Vue.js - 渐进式的 JavaScript 框架', srcDir: 'src', srcExclude: ['tutorial/**/description.md'], scrollOffset: 'header', head: [ ['meta', { name: 'twitter:site', content: '@vuejs' }], ['meta', { name: 'twitter:card', content: 'summary' }], [ 'meta', { name: 'twitter:image', content: 'https://vuejs.org/images/logo.png' } ], [ 'link', { rel: 'preconnect', href: 'https://sponsors.vuejs.org' } ], [ 'script', {}, fs.readFileSync( path.resolve(__dirname, './inlined-scripts/restorePreference.js'), 'utf-8' ) ], [ 'script', { src: 'https://cdn.usefathom.com/script.js', 'data-site': 'XNOLWPLB', 'data-spa': 'auto', defer: '' } ] ], themeConfig: { nav, sidebar, algolia: { indexName: 'vuejs', appId: 'ML0LEBN7FQ', apiKey: 'f49cbd92a74532cc55cfbffa5e5a7d01', searchParameters: { facetFilters: ['version:v3'] } }, carbonAds: { code: 'CEBDT27Y', placement: 'vuejsorg' }, socialLinks: [ { icon: 'languages', link: '/translations/' }, { icon: 'github', link: 'https://github.com/vuejs/' }, { icon: 'twitter', link: 'https://twitter.com/vuejs' }, { icon: 'discord', link: 'https://discord.com/invite/HBherRA' } ], editLink: { repo: 'vuejs-translations/docs-zh-cn', text: '在 GitHub 上编辑此页' }, footer: { license: { text: 'MIT License', link: 'https://opensource.org/licenses/MIT' }, copyright: `Copyright © 2014-${new Date().getFullYear()} Evan You` } }, markdown: { config(md) { md.use(headerPlugin) } }, vite: { define: { __VUE_OPTIONS_API__: false }, optimizeDeps: { include: ['gsap', 'dynamics.js'], exclude: ['@vue/repl'] },
// @ts-ignore
ssr: { external: ['@vue/repl'] }, server: { host: true, fs: {
// for when developing with locally linked theme
allow: ['../..'] } }, build: { minify: 'terser', chunkSizeWarningLimit: Infinity }, json: { stringify: true } }, vue: { reactivityTransform: true } })
the_stack
import { HttpClient, HttpResponse, HttpEvent } from '@angular/common/http';
import { Inject, Injectable, Optional } from '@angular/core';
import { ReposAPIClientInterface } from './repos-api-client.interface';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';
import { USE_DOMAIN, USE_HTTP_OPTIONS, ReposAPIClient } from './repos-api-client.service';
import { DefaultHttpOptions, HttpOptions } from '../../types';
import * as models from '../../models';
import * as guards from '../../guards';

/**
 * Generated "guarded" variant of ReposAPIClient: every method delegates to the
 * base client and, where a response body exists, taps the stream to check the
 * payload against a runtime type guard from `guards`, logging (not throwing)
 * on any mismatch between runtime data and the declared model types.
 */
@Injectable()
export class GuardedReposAPIClient extends ReposAPIClient implements ReposAPIClientInterface {

  constructor(
    readonly httpClient: HttpClient,
    // Optional DI tokens: override the API domain and default HTTP options.
    @Optional() @Inject(USE_DOMAIN) domain?: string,
    @Optional() @Inject(USE_HTTP_OPTIONS) options?: DefaultHttpOptions,
  ) {
    super(httpClient, domain, options);
  }

  /**
   * Delete a Repository.
   * Deleting a repository requires admin access. If OAuth is used, the delete_repo
   * scope is required.
   *
   * Response generated for [ 204 ] HTTP response code.
   */
  deleteReposOwnerRepo(
    args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'body',
  ): Observable<void>;
  deleteReposOwnerRepo(
    args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'response',
  ): Observable<HttpResponse<void>>;
  deleteReposOwnerRepo(
    args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'events',
  ): Observable<HttpEvent<void>>;
  // Implementation signature: 204 response has no body, so there is no type
  // guard to apply — this is a plain pass-through to the base client.
  deleteReposOwnerRepo(
    args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe: any = 'body',
  ): Observable<void | HttpResponse<void> | HttpEvent<void>> {
    return super.deleteReposOwnerRepo(args, requestHttpOptions, observe);
  }

  /**
   * Get repository.
   * Response generated for [ 200 ] HTTP response code.
*/ getReposOwnerRepo( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Repo>; getReposOwnerRepo( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Repo>>; getReposOwnerRepo( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Repo>>; getReposOwnerRepo( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Repo | HttpResponse<models.Repo> | HttpEvent<models.Repo>> { return super.getReposOwnerRepo(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepo(res) || console.error(`TypeGuard for response 'models.Repo' caught inconsistency.`, res))); } /** * Edit repository. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepo( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Repo>; patchReposOwnerRepo( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Repo>>; patchReposOwnerRepo( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Repo>>; patchReposOwnerRepo( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Repo | HttpResponse<models.Repo> | HttpEvent<models.Repo>> { return super.patchReposOwnerRepo(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepo(res) || console.error(`TypeGuard for response 'models.Repo' caught inconsistency.`, res))); } /** * List assignees. * This call lists all the available assignees (owner + collaborators) to which * issues may be assigned. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoAssignees( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Assignees>; getReposOwnerRepoAssignees( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Assignees>>; getReposOwnerRepoAssignees( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Assignees>>; getReposOwnerRepoAssignees( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Assignees | HttpResponse<models.Assignees> | HttpEvent<models.Assignees>> { return super.getReposOwnerRepoAssignees(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isAssignees(res) || console.error(`TypeGuard for response 'models.Assignees' caught inconsistency.`, res))); } /** * Check assignee. * You may also check to see if a particular user is an assignee for a repository. * * Response generated for [ 204 ] HTTP response code. 
*/ getReposOwnerRepoAssigneesAssignee( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesAssigneeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; getReposOwnerRepoAssigneesAssignee( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesAssigneeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; getReposOwnerRepoAssigneesAssignee( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesAssigneeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; getReposOwnerRepoAssigneesAssignee( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoAssigneesAssigneeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.getReposOwnerRepoAssigneesAssignee(args, requestHttpOptions, observe); } /** * Get list of branches * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoBranches( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Branches>; getReposOwnerRepoBranches( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Branches>>; getReposOwnerRepoBranches( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Branches>>; getReposOwnerRepoBranches( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Branches | HttpResponse<models.Branches> | HttpEvent<models.Branches>> { return super.getReposOwnerRepoBranches(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isBranches(res) || console.error(`TypeGuard for response 'models.Branches' caught inconsistency.`, res))); } /** * Get Branch * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoBranchesBranch( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesBranchParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Branch>; getReposOwnerRepoBranchesBranch( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesBranchParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Branch>>; getReposOwnerRepoBranchesBranch( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesBranchParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Branch>>; getReposOwnerRepoBranchesBranch( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoBranchesBranchParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Branch | HttpResponse<models.Branch> | HttpEvent<models.Branch>> { return super.getReposOwnerRepoBranchesBranch(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isBranch(res) || console.error(`TypeGuard for response 'models.Branch' caught inconsistency.`, res))); } /** * List. * When authenticating as an organization owner of an organization-owned * repository, all organization owners are included in the list of * collaborators. Otherwise, only users with access to the repository are * returned in the collaborators list. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCollaborators( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Users>; getReposOwnerRepoCollaborators( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Users>>; getReposOwnerRepoCollaborators( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Users>>; getReposOwnerRepoCollaborators( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Users | HttpResponse<models.Users> | HttpEvent<models.Users>> { return super.getReposOwnerRepoCollaborators(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isUsers(res) || console.error(`TypeGuard for response 'models.Users' caught inconsistency.`, res))); } /** * Remove collaborator. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoCollaboratorsUser(args, requestHttpOptions, observe); } /** * Check if user is a collaborator * Response generated for [ 204 ] HTTP response code. 
*/ getReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; getReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; getReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; getReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.getReposOwnerRepoCollaboratorsUser(args, requestHttpOptions, observe); } /** * Add collaborator. * Response generated for [ 204 ] HTTP response code. 
*/ putReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; putReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; putReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; putReposOwnerRepoCollaboratorsUser( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoCollaboratorsUserParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.putReposOwnerRepoCollaboratorsUser(args, requestHttpOptions, observe); } /** * List commit comments for a repository. * Comments are ordered by ascending ID. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.RepoComments>; getReposOwnerRepoComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.RepoComments>>; getReposOwnerRepoComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.RepoComments>>; getReposOwnerRepoComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.RepoComments | HttpResponse<models.RepoComments> | HttpEvent<models.RepoComments>> { return super.getReposOwnerRepoComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepoComments(res) || console.error(`TypeGuard for response 'models.RepoComments' caught inconsistency.`, res))); } /** * Delete a commit comment * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoCommentsCommentId(args, requestHttpOptions, observe); } /** * Get a single commit comment. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CommitComments>; getReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CommitComments>>; getReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CommitComments>>; getReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CommitComments | HttpResponse<models.CommitComments> | HttpEvent<models.CommitComments>> { return super.getReposOwnerRepoCommentsCommentId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommitComments(res) || console.error(`TypeGuard for response 'models.CommitComments' caught inconsistency.`, res))); } /** * Update a commit comment. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CommitComments>; patchReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CommitComments>>; patchReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CommitComments>>; patchReposOwnerRepoCommentsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoCommentsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CommitComments | HttpResponse<models.CommitComments> | HttpEvent<models.CommitComments>> { return super.patchReposOwnerRepoCommentsCommentId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommitComments(res) || console.error(`TypeGuard for response 'models.CommitComments' caught inconsistency.`, res))); } /** * List commits on a repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Commits>; getReposOwnerRepoCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Commits>>; getReposOwnerRepoCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Commits>>; getReposOwnerRepoCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Commits | HttpResponse<models.Commits> | HttpEvent<models.Commits>> { return super.getReposOwnerRepoCommits(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommits(res) || console.error(`TypeGuard for response 'models.Commits' caught inconsistency.`, res))); } /** * Get the combined Status for a specific Ref * The Combined status endpoint is currently available for developers to preview. During the preview period, the API may change without advance notice. Please see the blog post for full details. * To access this endpoint during the preview period, you must provide a custom media type in the Accept header: * application/vnd.github.she-hulk-preview+json * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCommitsRefStatus( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsRefStatusParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.RefStatus>; getReposOwnerRepoCommitsRefStatus( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsRefStatusParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.RefStatus>>; getReposOwnerRepoCommitsRefStatus( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsRefStatusParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.RefStatus>>; getReposOwnerRepoCommitsRefStatus( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsRefStatusParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.RefStatus | HttpResponse<models.RefStatus> | HttpEvent<models.RefStatus>> { return super.getReposOwnerRepoCommitsRefStatus(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRefStatus(res) || console.error(`TypeGuard for response 'models.RefStatus' caught inconsistency.`, res))); } /** * Get a single commit. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Commit>; getReposOwnerRepoCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Commit>>; getReposOwnerRepoCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Commit>>; getReposOwnerRepoCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Commit | HttpResponse<models.Commit> | HttpEvent<models.Commit>> { return super.getReposOwnerRepoCommitsShaCode(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommit(res) || console.error(`TypeGuard for response 'models.Commit' caught inconsistency.`, res))); } /** * List comments for a single commitList comments for a single commit. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.RepoComments>; getReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.RepoComments>>; getReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.RepoComments>>; getReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.RepoComments | HttpResponse<models.RepoComments> | HttpEvent<models.RepoComments>> { return super.getReposOwnerRepoCommitsShaCodeComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepoComments(res) || console.error(`TypeGuard for response 'models.RepoComments' caught inconsistency.`, res))); } /** * Create a commit comment. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CommitComments>; postReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CommitComments>>; postReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CommitComments>>; postReposOwnerRepoCommitsShaCodeComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoCommitsShaCodeCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CommitComments | HttpResponse<models.CommitComments> | HttpEvent<models.CommitComments>> { return super.postReposOwnerRepoCommitsShaCodeComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommitComments(res) || console.error(`TypeGuard for response 'models.CommitComments' caught inconsistency.`, res))); } /** * Compare two commits * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoCompareBaseIdHeadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCompareBaseIdHeadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CompareCommits>; getReposOwnerRepoCompareBaseIdHeadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCompareBaseIdHeadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CompareCommits>>; getReposOwnerRepoCompareBaseIdHeadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCompareBaseIdHeadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CompareCommits>>; getReposOwnerRepoCompareBaseIdHeadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoCompareBaseIdHeadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CompareCommits | HttpResponse<models.CompareCommits> | HttpEvent<models.CompareCommits>> { return super.getReposOwnerRepoCompareBaseIdHeadId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCompareCommits(res) || console.error(`TypeGuard for response 'models.CompareCommits' caught inconsistency.`, res))); } /** * Delete a file. * This method deletes a file in a repository. * * Response generated for [ 200 ] HTTP response code. 
*/ deleteReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.DeleteFile>; deleteReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.DeleteFile>>; deleteReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.DeleteFile>>; deleteReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.DeleteFile | HttpResponse<models.DeleteFile> | HttpEvent<models.DeleteFile>> { return super.deleteReposOwnerRepoContentsPath(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isDeleteFile(res) || console.error(`TypeGuard for response 'models.DeleteFile' caught inconsistency.`, res))); } /** * Get contents. * This method returns the contents of a file or directory in a repository. * Files and symlinks support a custom media type for getting the raw content. * Directories and submodules do not support custom media types. * Note: This API supports files up to 1 megabyte in size. * Here can be many outcomes. For details see "http://developer.github.com/v3/repos/contents/" * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.ContentsPath>; getReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.ContentsPath>>; getReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.ContentsPath>>; getReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.ContentsPath | HttpResponse<models.ContentsPath> | HttpEvent<models.ContentsPath>> { return super.getReposOwnerRepoContentsPath(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isContentsPath(res) || console.error(`TypeGuard for response 'models.ContentsPath' caught inconsistency.`, res))); } /** * Create a file. * Response generated for [ 200 ] HTTP response code. 
*/ putReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CreateFile>; putReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CreateFile>>; putReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CreateFile>>; putReposOwnerRepoContentsPath( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoContentsPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CreateFile | HttpResponse<models.CreateFile> | HttpEvent<models.CreateFile>> { return super.putReposOwnerRepoContentsPath(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCreateFile(res) || console.error(`TypeGuard for response 'models.CreateFile' caught inconsistency.`, res))); } /** * Get list of contributors. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Contributors>; getReposOwnerRepoContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Contributors>>; getReposOwnerRepoContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Contributors>>; getReposOwnerRepoContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Contributors | HttpResponse<models.Contributors> | HttpEvent<models.Contributors>> { return super.getReposOwnerRepoContributors(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isContributors(res) || console.error(`TypeGuard for response 'models.Contributors' caught inconsistency.`, res))); } /** * Users with pull access can view deployments for a repository * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.RepoDeployments>; getReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.RepoDeployments>>; getReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.RepoDeployments>>; getReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.RepoDeployments | HttpResponse<models.RepoDeployments> | HttpEvent<models.RepoDeployments>> { return super.getReposOwnerRepoDeployments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepoDeployments(res) || console.error(`TypeGuard for response 'models.RepoDeployments' caught inconsistency.`, res))); } /** * Users with push access can create a deployment for a given ref * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.DeploymentResp>; postReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.DeploymentResp>>; postReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.DeploymentResp>>; postReposOwnerRepoDeployments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.DeploymentResp | HttpResponse<models.DeploymentResp> | HttpEvent<models.DeploymentResp>> { return super.postReposOwnerRepoDeployments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isDeploymentResp(res) || console.error(`TypeGuard for response 'models.DeploymentResp' caught inconsistency.`, res))); } /** * Users with pull access can view deployment statuses for a deployment * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.DeploymentStatuses>; getReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.DeploymentStatuses>>; getReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.DeploymentStatuses>>; getReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.DeploymentStatuses | HttpResponse<models.DeploymentStatuses> | HttpEvent<models.DeploymentStatuses>> { return super.getReposOwnerRepoDeploymentsIdStatuses(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isDeploymentStatuses(res) || console.error(`TypeGuard for response 'models.DeploymentStatuses' caught inconsistency.`, res))); } /** * Create a Deployment Status * Users with push access can create deployment statuses for a given deployment: * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; postReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; postReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; postReposOwnerRepoDeploymentsIdStatuses( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoDeploymentsIdStatusesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.postReposOwnerRepoDeploymentsIdStatuses(args, requestHttpOptions, observe); } /** * Deprecated. List downloads for a repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoDownloads( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Downloads>; getReposOwnerRepoDownloads( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Downloads>>; getReposOwnerRepoDownloads( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Downloads>>; getReposOwnerRepoDownloads( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Downloads | HttpResponse<models.Downloads> | HttpEvent<models.Downloads>> { return super.getReposOwnerRepoDownloads(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isDownloads(res) || console.error(`TypeGuard for response 'models.Downloads' caught inconsistency.`, res))); } /** * Deprecated. Delete a download. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoDownloadsDownloadId(args, requestHttpOptions, observe); } /** * Deprecated. Get a single download. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Downloads>; getReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Downloads>>; getReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Downloads>>; getReposOwnerRepoDownloadsDownloadId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoDownloadsDownloadIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Downloads | HttpResponse<models.Downloads> | HttpEvent<models.Downloads>> { return super.getReposOwnerRepoDownloadsDownloadId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isDownloads(res) || console.error(`TypeGuard for response 'models.Downloads' caught inconsistency.`, res))); } /** * Get list of repository events. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Events>; getReposOwnerRepoEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Events>>; getReposOwnerRepoEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Events>>; getReposOwnerRepoEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Events | HttpResponse<models.Events> | HttpEvent<models.Events>> { return super.getReposOwnerRepoEvents(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isEvents(res) || console.error(`TypeGuard for response 'models.Events' caught inconsistency.`, res))); } /** * List forks. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Forks>; getReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Forks>>; getReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Forks>>; getReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Forks | HttpResponse<models.Forks> | HttpEvent<models.Forks>> { return super.getReposOwnerRepoForks(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isForks(res) || console.error(`TypeGuard for response 'models.Forks' caught inconsistency.`, res))); } /** * Create a fork. * Forking a Repository happens asynchronously. Therefore, you may have to wai * a short period before accessing the git objects. If this takes longer than 5 * minutes, be sure to contact Support. * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Fork>; postReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Fork>>; postReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Fork>>; postReposOwnerRepoForks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoForksParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Fork | HttpResponse<models.Fork> | HttpEvent<models.Fork>> { return super.postReposOwnerRepoForks(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isFork(res) || console.error(`TypeGuard for response 'models.Fork' caught inconsistency.`, res))); } /** * Create a Blob. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoGitBlobs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitBlobsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Blobs>; postReposOwnerRepoGitBlobs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitBlobsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Blobs>>; postReposOwnerRepoGitBlobs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitBlobsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Blobs>>; postReposOwnerRepoGitBlobs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitBlobsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Blobs | HttpResponse<models.Blobs> | HttpEvent<models.Blobs>> { return super.postReposOwnerRepoGitBlobs(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isBlobs(res) || console.error(`TypeGuard for response 'models.Blobs' caught inconsistency.`, res))); } /** * Get a Blob. * Since blobs can be any arbitrary binary data, the input and responses for * the blob API takes an encoding parameter that can be either utf-8 or * base64. If your data cannot be losslessly sent as a UTF-8 string, you can * base64 encode it. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoGitBlobsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitBlobsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Blob>; getReposOwnerRepoGitBlobsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitBlobsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Blob>>; getReposOwnerRepoGitBlobsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitBlobsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Blob>>; getReposOwnerRepoGitBlobsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitBlobsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Blob | HttpResponse<models.Blob> | HttpEvent<models.Blob>> { return super.getReposOwnerRepoGitBlobsShaCode(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isBlob(res) || console.error(`TypeGuard for response 'models.Blob' caught inconsistency.`, res))); } /** * Create a Commit. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoGitCommits( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.GitCommit>; postReposOwnerRepoGitCommits( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.GitCommit>>; postReposOwnerRepoGitCommits( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.GitCommit>>; postReposOwnerRepoGitCommits( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.GitCommit | HttpResponse<models.GitCommit> | HttpEvent<models.GitCommit>> { return super.postReposOwnerRepoGitCommits(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isGitCommit(res) || console.error(`TypeGuard for response 'models.GitCommit' caught inconsistency.`, res))); } /** * Get a Commit. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoGitCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.RepoCommit>; getReposOwnerRepoGitCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.RepoCommit>>; getReposOwnerRepoGitCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.RepoCommit>>; getReposOwnerRepoGitCommitsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitCommitsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.RepoCommit | HttpResponse<models.RepoCommit> | HttpEvent<models.RepoCommit>> { return super.getReposOwnerRepoGitCommitsShaCode(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepoCommit(res) || console.error(`TypeGuard for response 'models.RepoCommit' caught inconsistency.`, res))); } /** * Get all References * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Refs>; getReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Refs>>; getReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Refs>>; getReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Refs | HttpResponse<models.Refs> | HttpEvent<models.Refs>> { return super.getReposOwnerRepoGitRefs(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRefs(res) || console.error(`TypeGuard for response 'models.Refs' caught inconsistency.`, res))); } /** * Create a Reference * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.HeadBranch>; postReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.HeadBranch>>; postReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.HeadBranch>>; postReposOwnerRepoGitRefs( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitRefsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.HeadBranch | HttpResponse<models.HeadBranch> | HttpEvent<models.HeadBranch>> { return super.postReposOwnerRepoGitRefs(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHeadBranch(res) || console.error(`TypeGuard for response 'models.HeadBranch' caught inconsistency.`, res))); } /** * Delete a Reference * Example: Deleting a branch: DELETE /repos/octocat/Hello-World/git/refs/heads/feature-a * Example: Deleting a tag: DELETE /repos/octocat/Hello-World/git/refs/tags/v1.0 * * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoGitRefsRef(args, requestHttpOptions, observe); } /** * Get a Reference * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.HeadBranch>; getReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.HeadBranch>>; getReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.HeadBranch>>; getReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.HeadBranch | HttpResponse<models.HeadBranch> | HttpEvent<models.HeadBranch>> { return super.getReposOwnerRepoGitRefsRef(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHeadBranch(res) || console.error(`TypeGuard for response 'models.HeadBranch' caught inconsistency.`, res))); } /** * Update a Reference * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.HeadBranch>; patchReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.HeadBranch>>; patchReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.HeadBranch>>; patchReposOwnerRepoGitRefsRef( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoGitRefsRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.HeadBranch | HttpResponse<models.HeadBranch> | HttpEvent<models.HeadBranch>> { return super.patchReposOwnerRepoGitRefsRef(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHeadBranch(res) || console.error(`TypeGuard for response 'models.HeadBranch' caught inconsistency.`, res))); } /** * Create a Tag Object. * Note that creating a tag object does not create the reference that makes a * tag in Git. If you want to create an annotated tag in Git, you have to do * this call to create the tag object, and then create the refs/tags/[tag] * reference. If you want to create a lightweight tag, you only have to create * the tag reference - this call would be unnecessary. * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoGitTags( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Tags>; postReposOwnerRepoGitTags( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Tags>>; postReposOwnerRepoGitTags( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Tags>>; postReposOwnerRepoGitTags( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Tags | HttpResponse<models.Tags> | HttpEvent<models.Tags>> { return super.postReposOwnerRepoGitTags(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isTags(res) || console.error(`TypeGuard for response 'models.Tags' caught inconsistency.`, res))); } /** * Get a Tag. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoGitTagsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTagsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Tag>; getReposOwnerRepoGitTagsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTagsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Tag>>; getReposOwnerRepoGitTagsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTagsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Tag>>; getReposOwnerRepoGitTagsShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTagsShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Tag | HttpResponse<models.Tag> | HttpEvent<models.Tag>> { return super.getReposOwnerRepoGitTagsShaCode(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isTag(res) || console.error(`TypeGuard for response 'models.Tag' caught inconsistency.`, res))); } /** * Create a Tree. * The tree creation API will take nested entries as well. If both a tree and * a nested path modifying that tree are specified, it will overwrite the * contents of that tree with the new path contents and write a new tree out. * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoGitTrees( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTreesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Trees>; postReposOwnerRepoGitTrees( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTreesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Trees>>; postReposOwnerRepoGitTrees( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTreesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Trees>>; postReposOwnerRepoGitTrees( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoGitTreesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Trees | HttpResponse<models.Trees> | HttpEvent<models.Trees>> { return super.postReposOwnerRepoGitTrees(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isTrees(res) || console.error(`TypeGuard for response 'models.Trees' caught inconsistency.`, res))); } /** * Get a Tree. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoGitTreesShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTreesShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Tree>; getReposOwnerRepoGitTreesShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTreesShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Tree>>; getReposOwnerRepoGitTreesShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTreesShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Tree>>; getReposOwnerRepoGitTreesShaCode( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoGitTreesShaCodeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Tree | HttpResponse<models.Tree> | HttpEvent<models.Tree>> { return super.getReposOwnerRepoGitTreesShaCode(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isTree(res) || console.error(`TypeGuard for response 'models.Tree' caught inconsistency.`, res))); } /** * Get list of hooks. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Hook>; getReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Hook>>; getReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Hook>>; getReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Hook | HttpResponse<models.Hook> | HttpEvent<models.Hook>> { return super.getReposOwnerRepoHooks(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHook(res) || console.error(`TypeGuard for response 'models.Hook' caught inconsistency.`, res))); } /** * Create a hook. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Hook>; postReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Hook>>; postReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Hook>>; postReposOwnerRepoHooks( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Hook | HttpResponse<models.Hook> | HttpEvent<models.Hook>> { return super.postReposOwnerRepoHooks(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHook(res) || console.error(`TypeGuard for response 'models.Hook' caught inconsistency.`, res))); } /** * Delete a hook. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoHooksHookId(args, requestHttpOptions, observe); } /** * Get single hook. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Hook>; getReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Hook>>; getReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Hook>>; getReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Hook | HttpResponse<models.Hook> | HttpEvent<models.Hook>> { return super.getReposOwnerRepoHooksHookId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHook(res) || console.error(`TypeGuard for response 'models.Hook' caught inconsistency.`, res))); } /** * Edit a hook. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Hook>; patchReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Hook>>; patchReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Hook>>; patchReposOwnerRepoHooksHookId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoHooksHookIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Hook | HttpResponse<models.Hook> | HttpEvent<models.Hook>> { return super.patchReposOwnerRepoHooksHookId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isHook(res) || console.error(`TypeGuard for response 'models.Hook' caught inconsistency.`, res))); } /** * Test a push hook. * This will trigger the hook with the latest push to the current repository * if the hook is subscribed to push events. If the hook is not subscribed * to push events, the server will respond with 204 but no test POST will * be generated. * Note: Previously /repos/:owner/:repo/hooks/:id/tes * * Response generated for [ 204 ] HTTP response code. 
*/ postReposOwnerRepoHooksHookIdTests( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksHookIdTestsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; postReposOwnerRepoHooksHookIdTests( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksHookIdTestsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; postReposOwnerRepoHooksHookIdTests( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksHookIdTestsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; postReposOwnerRepoHooksHookIdTests( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoHooksHookIdTestsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.postReposOwnerRepoHooksHookIdTests(args, requestHttpOptions, observe); } /** * List issues for a repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Issues>; getReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Issues>>; getReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Issues>>; getReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Issues | HttpResponse<models.Issues> | HttpEvent<models.Issues>> { return super.getReposOwnerRepoIssues(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssues(res) || console.error(`TypeGuard for response 'models.Issues' caught inconsistency.`, res))); } /** * Create an issue. * Any user with pull access to a repository can create an issue. * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Issue>; postReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Issue>>; postReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Issue>>; postReposOwnerRepoIssues( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Issue | HttpResponse<models.Issue> | HttpEvent<models.Issue>> { return super.postReposOwnerRepoIssues(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssue(res) || console.error(`TypeGuard for response 'models.Issue' caught inconsistency.`, res))); } /** * List comments in a repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.IssuesComments>; getReposOwnerRepoIssuesComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.IssuesComments>>; getReposOwnerRepoIssuesComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.IssuesComments>>; getReposOwnerRepoIssuesComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.IssuesComments | HttpResponse<models.IssuesComments> | HttpEvent<models.IssuesComments>> { return super.getReposOwnerRepoIssuesComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssuesComments(res) || console.error(`TypeGuard for response 'models.IssuesComments' caught inconsistency.`, res))); } /** * Delete a comment. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoIssuesCommentId(args, requestHttpOptions, observe); } /** * Get a single comment. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.IssuesComment>; getReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.IssuesComment>>; getReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.IssuesComment>>; getReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.IssuesComment | HttpResponse<models.IssuesComment> | HttpEvent<models.IssuesComment>> { return super.getReposOwnerRepoIssuesCommentId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssuesComment(res) || console.error(`TypeGuard for response 'models.IssuesComment' caught inconsistency.`, res))); } /** * Edit a comment. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.IssuesComment>; patchReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.IssuesComment>>; patchReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.IssuesComment>>; patchReposOwnerRepoIssuesCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.IssuesComment | HttpResponse<models.IssuesComment> | HttpEvent<models.IssuesComment>> { return super.patchReposOwnerRepoIssuesCommentId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssuesComment(res) || console.error(`TypeGuard for response 'models.IssuesComment' caught inconsistency.`, res))); } /** * List issue events for a repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Events>; getReposOwnerRepoIssuesEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Events>>; getReposOwnerRepoIssuesEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Events>>; getReposOwnerRepoIssuesEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Events | HttpResponse<models.Events> | HttpEvent<models.Events>> { return super.getReposOwnerRepoIssuesEvents(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isEvents(res) || console.error(`TypeGuard for response 'models.Events' caught inconsistency.`, res))); } /** * Get a single event. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesEventId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Event>; getReposOwnerRepoIssuesEventId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Event>>; getReposOwnerRepoIssuesEventId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Event>>; getReposOwnerRepoIssuesEventId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesEventIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Event | HttpResponse<models.Event> | HttpEvent<models.Event>> { return super.getReposOwnerRepoIssuesEventId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isEvent(res) || console.error(`TypeGuard for response 'models.Event' caught inconsistency.`, res))); } /** * Get a single issue * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Issue>; getReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Issue>>; getReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Issue>>; getReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Issue | HttpResponse<models.Issue> | HttpEvent<models.Issue>> { return super.getReposOwnerRepoIssuesNumber(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssue(res) || console.error(`TypeGuard for response 'models.Issue' caught inconsistency.`, res))); } /** * Edit an issue. * Issue owners and users with push access can edit an issue. * * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Issue>; patchReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Issue>>; patchReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Issue>>; patchReposOwnerRepoIssuesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoIssuesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Issue | HttpResponse<models.Issue> | HttpEvent<models.Issue>> { return super.patchReposOwnerRepoIssuesNumber(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssue(res) || console.error(`TypeGuard for response 'models.Issue' caught inconsistency.`, res))); } /** * List comments on an issue. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.IssuesComments>; getReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.IssuesComments>>; getReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.IssuesComments>>; getReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.IssuesComments | HttpResponse<models.IssuesComments> | HttpEvent<models.IssuesComments>> { return super.getReposOwnerRepoIssuesNumberComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssuesComments(res) || console.error(`TypeGuard for response 'models.IssuesComments' caught inconsistency.`, res))); } /** * Create a comment. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.IssuesComment>; postReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.IssuesComment>>; postReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.IssuesComment>>; postReposOwnerRepoIssuesNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.IssuesComment | HttpResponse<models.IssuesComment> | HttpEvent<models.IssuesComment>> { return super.postReposOwnerRepoIssuesNumberComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssuesComment(res) || console.error(`TypeGuard for response 'models.IssuesComment' caught inconsistency.`, res))); } /** * List events for an issue. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesNumberEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Events>; getReposOwnerRepoIssuesNumberEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Events>>; getReposOwnerRepoIssuesNumberEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Events>>; getReposOwnerRepoIssuesNumberEvents( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Events | HttpResponse<models.Events> | HttpEvent<models.Events>> { return super.getReposOwnerRepoIssuesNumberEvents(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isEvents(res) || console.error(`TypeGuard for response 'models.Events' caught inconsistency.`, res))); } /** * Remove all labels from an issue. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoIssuesNumberLabels(args, requestHttpOptions, observe); } /** * List labels on an issue. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Labels>; getReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Labels>>; getReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Labels>>; getReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Labels | HttpResponse<models.Labels> | HttpEvent<models.Labels>> { return super.getReposOwnerRepoIssuesNumberLabels(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabels(res) || console.error(`TypeGuard for response 'models.Labels' caught inconsistency.`, res))); } /** * Add labels to an issue. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Label>; postReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Label>>; postReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Label>>; postReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Label | HttpResponse<models.Label> | HttpEvent<models.Label>> { return super.postReposOwnerRepoIssuesNumberLabels(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabel(res) || console.error(`TypeGuard for response 'models.Label' caught inconsistency.`, res))); } /** * Replace all labels for an issue. * Sending an empty array ([]) will remove all Labels from the Issue. * * Response generated for [ 201 ] HTTP response code. 
*/ putReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Label>; putReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Label>>; putReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Label>>; putReposOwnerRepoIssuesNumberLabels( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoIssuesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Label | HttpResponse<models.Label> | HttpEvent<models.Label>> { return super.putReposOwnerRepoIssuesNumberLabels(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabel(res) || console.error(`TypeGuard for response 'models.Label' caught inconsistency.`, res))); } /** * Remove a label from an issue. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoIssuesNumberLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoIssuesNumberLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoIssuesNumberLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoIssuesNumberLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoIssuesNumberLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoIssuesNumberLabelsName(args, requestHttpOptions, observe); } /** * Get list of keys. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Keys>; getReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Keys>>; getReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Keys>>; getReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Keys | HttpResponse<models.Keys> | HttpEvent<models.Keys>> { return super.getReposOwnerRepoKeys(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isKeys(res) || console.error(`TypeGuard for response 'models.Keys' caught inconsistency.`, res))); } /** * Create a key. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.UserKeysKeyId>; postReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.UserKeysKeyId>>; postReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.UserKeysKeyId>>; postReposOwnerRepoKeys( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoKeysParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.UserKeysKeyId | HttpResponse<models.UserKeysKeyId> | HttpEvent<models.UserKeysKeyId>> { return super.postReposOwnerRepoKeys(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isUserKeysKeyId(res) || console.error(`TypeGuard for response 'models.UserKeysKeyId' caught inconsistency.`, res))); } /** * Delete a key. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoKeysKeyId(args, requestHttpOptions, observe); } /** * Get a key * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.UserKeysKeyId>; getReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.UserKeysKeyId>>; getReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.UserKeysKeyId>>; getReposOwnerRepoKeysKeyId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoKeysKeyIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.UserKeysKeyId | HttpResponse<models.UserKeysKeyId> | HttpEvent<models.UserKeysKeyId>> { return super.getReposOwnerRepoKeysKeyId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isUserKeysKeyId(res) || console.error(`TypeGuard for response 'models.UserKeysKeyId' caught inconsistency.`, res))); } /** * List all labels for this repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Labels>; getReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Labels>>; getReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Labels>>; getReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Labels | HttpResponse<models.Labels> | HttpEvent<models.Labels>> { return super.getReposOwnerRepoLabels(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabels(res) || console.error(`TypeGuard for response 'models.Labels' caught inconsistency.`, res))); } /** * Create a label. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Label>; postReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Label>>; postReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Label>>; postReposOwnerRepoLabels( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Label | HttpResponse<models.Label> | HttpEvent<models.Label>> { return super.postReposOwnerRepoLabels(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabel(res) || console.error(`TypeGuard for response 'models.Label' caught inconsistency.`, res))); } /** * Delete a label. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoLabelsName(args, requestHttpOptions, observe); } /** * Get a single label. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Label>; getReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Label>>; getReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Label>>; getReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Label | HttpResponse<models.Label> | HttpEvent<models.Label>> { return super.getReposOwnerRepoLabelsName(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabel(res) || console.error(`TypeGuard for response 'models.Label' caught inconsistency.`, res))); } /** * Update a label. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Label>; patchReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Label>>; patchReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Label>>; patchReposOwnerRepoLabelsName( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoLabelsNameParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Label | HttpResponse<models.Label> | HttpEvent<models.Label>> { return super.patchReposOwnerRepoLabelsName(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabel(res) || console.error(`TypeGuard for response 'models.Label' caught inconsistency.`, res))); } /** * List languages. * List languages for the specified repository. The value on the right of a * language is the number of bytes of code written in that language. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoLanguages( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLanguagesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Languages>; getReposOwnerRepoLanguages( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLanguagesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Languages>>; getReposOwnerRepoLanguages( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLanguagesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Languages>>; getReposOwnerRepoLanguages( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoLanguagesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Languages | HttpResponse<models.Languages> | HttpEvent<models.Languages>> { return super.getReposOwnerRepoLanguages(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLanguages(res) || console.error(`TypeGuard for response 'models.Languages' caught inconsistency.`, res))); } /** * Perform a merge. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoMerges( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMergesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.MergesSuccessful>; postReposOwnerRepoMerges( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMergesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.MergesSuccessful>>; postReposOwnerRepoMerges( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMergesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.MergesSuccessful>>; postReposOwnerRepoMerges( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMergesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.MergesSuccessful | HttpResponse<models.MergesSuccessful> | HttpEvent<models.MergesSuccessful>> { return super.postReposOwnerRepoMerges(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isMergesSuccessful(res) || console.error(`TypeGuard for response 'models.MergesSuccessful' caught inconsistency.`, res))); } /** * List milestones for a repository. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Milestone>; getReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Milestone>>; getReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Milestone>>; getReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Milestone | HttpResponse<models.Milestone> | HttpEvent<models.Milestone>> { return super.getReposOwnerRepoMilestones(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isMilestone(res) || console.error(`TypeGuard for response 'models.Milestone' caught inconsistency.`, res))); } /** * Create a milestone. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Milestone>; postReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Milestone>>; postReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Milestone>>; postReposOwnerRepoMilestones( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoMilestonesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Milestone | HttpResponse<models.Milestone> | HttpEvent<models.Milestone>> { return super.postReposOwnerRepoMilestones(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isMilestone(res) || console.error(`TypeGuard for response 'models.Milestone' caught inconsistency.`, res))); } /** * Delete a milestone. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoMilestonesNumber(args, requestHttpOptions, observe); } /** * Get a single milestone. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Milestone>; getReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Milestone>>; getReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Milestone>>; getReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Milestone | HttpResponse<models.Milestone> | HttpEvent<models.Milestone>> { return super.getReposOwnerRepoMilestonesNumber(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isMilestone(res) || console.error(`TypeGuard for response 'models.Milestone' caught inconsistency.`, res))); } /** * Update a milestone. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Milestone>; patchReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Milestone>>; patchReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Milestone>>; patchReposOwnerRepoMilestonesNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoMilestonesNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Milestone | HttpResponse<models.Milestone> | HttpEvent<models.Milestone>> { return super.patchReposOwnerRepoMilestonesNumber(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isMilestone(res) || console.error(`TypeGuard for response 'models.Milestone' caught inconsistency.`, res))); } /** * Get labels for every issue in a milestone. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoMilestonesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Labels>; getReposOwnerRepoMilestonesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Labels>>; getReposOwnerRepoMilestonesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Labels>>; getReposOwnerRepoMilestonesNumberLabels( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoMilestonesNumberLabelsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Labels | HttpResponse<models.Labels> | HttpEvent<models.Labels>> { return super.getReposOwnerRepoMilestonesNumberLabels(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isLabels(res) || console.error(`TypeGuard for response 'models.Labels' caught inconsistency.`, res))); } /** * List your notifications in a repository * List all notifications for the current user. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Notifications>; getReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Notifications>>; getReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Notifications>>; getReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Notifications | HttpResponse<models.Notifications> | HttpEvent<models.Notifications>> { return super.getReposOwnerRepoNotifications(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isNotifications(res) || console.error(`TypeGuard for response 'models.Notifications' caught inconsistency.`, res))); } /** * Mark notifications as read in a repository. * Marking all notifications in a repository as "read" removes them from the * default view on GitHub.com. * * Response generated for [ 205 ] HTTP response code. 
*/ putReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; putReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; putReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; putReposOwnerRepoNotifications( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoNotificationsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.putReposOwnerRepoNotifications(args, requestHttpOptions, observe); } /** * List pull requests. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Pulls>; getReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Pulls>>; getReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Pulls>>; getReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Pulls | HttpResponse<models.Pulls> | HttpEvent<models.Pulls>> { return super.getReposOwnerRepoPulls(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPulls(res) || console.error(`TypeGuard for response 'models.Pulls' caught inconsistency.`, res))); } /** * Create a pull request. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Pulls>; postReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Pulls>>; postReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Pulls>>; postReposOwnerRepoPulls( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Pulls | HttpResponse<models.Pulls> | HttpEvent<models.Pulls>> { return super.postReposOwnerRepoPulls(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPulls(res) || console.error(`TypeGuard for response 'models.Pulls' caught inconsistency.`, res))); } /** * List comments in a repository. * By default, Review Comments are ordered by ascending ID. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPullsComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.IssuesComments>; getReposOwnerRepoPullsComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.IssuesComments>>; getReposOwnerRepoPullsComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.IssuesComments>>; getReposOwnerRepoPullsComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.IssuesComments | HttpResponse<models.IssuesComments> | HttpEvent<models.IssuesComments>> { return super.getReposOwnerRepoPullsComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isIssuesComments(res) || console.error(`TypeGuard for response 'models.IssuesComments' caught inconsistency.`, res))); } /** * Delete a comment. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoPullsCommentId(args, requestHttpOptions, observe); } /** * Get a single comment. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.PullsComment>; getReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.PullsComment>>; getReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.PullsComment>>; getReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.PullsComment | HttpResponse<models.PullsComment> | HttpEvent<models.PullsComment>> { return super.getReposOwnerRepoPullsCommentId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPullsComment(res) || console.error(`TypeGuard for response 'models.PullsComment' caught inconsistency.`, res))); } /** * Edit a comment. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.PullsComment>; patchReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.PullsComment>>; patchReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.PullsComment>>; patchReposOwnerRepoPullsCommentId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsCommentIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.PullsComment | HttpResponse<models.PullsComment> | HttpEvent<models.PullsComment>> { return super.patchReposOwnerRepoPullsCommentId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPullsComment(res) || console.error(`TypeGuard for response 'models.PullsComment' caught inconsistency.`, res))); } /** * Get a single pull request. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.PullRequest>; getReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.PullRequest>>; getReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.PullRequest>>; getReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.PullRequest | HttpResponse<models.PullRequest> | HttpEvent<models.PullRequest>> { return super.getReposOwnerRepoPullsNumber(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPullRequest(res) || console.error(`TypeGuard for response 'models.PullRequest' caught inconsistency.`, res))); } /** * Update a pull request. * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Repo>; patchReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Repo>>; patchReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Repo>>; patchReposOwnerRepoPullsNumber( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoPullsNumberParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Repo | HttpResponse<models.Repo> | HttpEvent<models.Repo>> { return super.patchReposOwnerRepoPullsNumber(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRepo(res) || console.error(`TypeGuard for response 'models.Repo' caught inconsistency.`, res))); } /** * List comments on a pull request. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.PullsComment>; getReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.PullsComment>>; getReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.PullsComment>>; getReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.PullsComment | HttpResponse<models.PullsComment> | HttpEvent<models.PullsComment>> { return super.getReposOwnerRepoPullsNumberComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPullsComment(res) || console.error(`TypeGuard for response 'models.PullsComment' caught inconsistency.`, res))); } /** * Create a comment. * * #TODO Alternative input * ( http://developer.github.com/v3/pulls/comments/ ) * * description: | * * Alternative Input. * * Instead of passing commit_id, path, and position you can reply to an * * existing Pull Request Comment like this: * * * * body * * Required string * * in_reply_to * * Required number - Comment id to reply to. * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.PullsComment>; postReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.PullsComment>>; postReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.PullsComment>>; postReposOwnerRepoPullsNumberComments( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoPullsNumberCommentsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.PullsComment | HttpResponse<models.PullsComment> | HttpEvent<models.PullsComment>> { return super.postReposOwnerRepoPullsNumberComments(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPullsComment(res) || console.error(`TypeGuard for response 'models.PullsComment' caught inconsistency.`, res))); } /** * List commits on a pull request. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPullsNumberCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Commits>; getReposOwnerRepoPullsNumberCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Commits>>; getReposOwnerRepoPullsNumberCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Commits>>; getReposOwnerRepoPullsNumberCommits( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberCommitsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Commits | HttpResponse<models.Commits> | HttpEvent<models.Commits>> { return super.getReposOwnerRepoPullsNumberCommits(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommits(res) || console.error(`TypeGuard for response 'models.Commits' caught inconsistency.`, res))); } /** * List pull requests files. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoPullsNumberFiles( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberFilesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Pulls>; getReposOwnerRepoPullsNumberFiles( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberFilesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Pulls>>; getReposOwnerRepoPullsNumberFiles( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberFilesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Pulls>>; getReposOwnerRepoPullsNumberFiles( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberFilesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Pulls | HttpResponse<models.Pulls> | HttpEvent<models.Pulls>> { return super.getReposOwnerRepoPullsNumberFiles(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isPulls(res) || console.error(`TypeGuard for response 'models.Pulls' caught inconsistency.`, res))); } /** * Get if a pull request has been merged. * Response generated for [ 204 ] HTTP response code. 
*/ getReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; getReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; getReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; getReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.getReposOwnerRepoPullsNumberMerge(args, requestHttpOptions, observe); } /** * Merge a pull request (Merge Button's) * Response generated for [ 200 ] HTTP response code. 
*/ putReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Merge>; putReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Merge>>; putReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Merge>>; putReposOwnerRepoPullsNumberMerge( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoPullsNumberMergeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Merge | HttpResponse<models.Merge> | HttpEvent<models.Merge>> { return super.putReposOwnerRepoPullsNumberMerge(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isMerge(res) || console.error(`TypeGuard for response 'models.Merge' caught inconsistency.`, res))); } /** * Get the README. * This method returns the preferred README for a repository. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoReadme( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReadmeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.ContentsPath>; getReposOwnerRepoReadme( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReadmeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.ContentsPath>>; getReposOwnerRepoReadme( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReadmeParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.ContentsPath>>; getReposOwnerRepoReadme( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReadmeParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.ContentsPath | HttpResponse<models.ContentsPath> | HttpEvent<models.ContentsPath>> { return super.getReposOwnerRepoReadme(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isContentsPath(res) || console.error(`TypeGuard for response 'models.ContentsPath' caught inconsistency.`, res))); } /** * Users with push access to the repository will receive all releases (i.e., published releases and draft releases). Users with pull access will receive published releases only * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Releases>; getReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Releases>>; getReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Releases>>; getReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Releases | HttpResponse<models.Releases> | HttpEvent<models.Releases>> { return super.getReposOwnerRepoReleases(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isReleases(res) || console.error(`TypeGuard for response 'models.Releases' caught inconsistency.`, res))); } /** * Create a release * Users with push access to the repository can create a release. * * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Release>; postReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Release>>; postReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Release>>; postReposOwnerRepoReleases( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoReleasesParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Release | HttpResponse<models.Release> | HttpEvent<models.Release>> { return super.postReposOwnerRepoReleases(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRelease(res) || console.error(`TypeGuard for response 'models.Release' caught inconsistency.`, res))); } /** * Delete a release asset * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoReleasesAssetsId(args, requestHttpOptions, observe); } /** * Get a single release asset * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Asset>; getReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Asset>>; getReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Asset>>; getReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Asset | HttpResponse<models.Asset> | HttpEvent<models.Asset>> { return super.getReposOwnerRepoReleasesAssetsId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isAsset(res) || console.error(`TypeGuard for response 'models.Asset' caught inconsistency.`, res))); } /** * Edit a release asset * Users with push access to the repository can edit a release asset. * * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Asset>; patchReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Asset>>; patchReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Asset>>; patchReposOwnerRepoReleasesAssetsId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesAssetsIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Asset | HttpResponse<models.Asset> | HttpEvent<models.Asset>> { return super.patchReposOwnerRepoReleasesAssetsId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isAsset(res) || console.error(`TypeGuard for response 'models.Asset' caught inconsistency.`, res))); } /** * Users with push access to the repository can delete a release. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoReleasesId(args, requestHttpOptions, observe); } /** * Get a single release * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Release>; getReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Release>>; getReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Release>>; getReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Release | HttpResponse<models.Release> | HttpEvent<models.Release>> { return super.getReposOwnerRepoReleasesId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRelease(res) || console.error(`TypeGuard for response 'models.Release' caught inconsistency.`, res))); } /** * Users with push access to the repository can edit a release * Response generated for [ 200 ] HTTP response code. 
*/ patchReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Release>; patchReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Release>>; patchReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Release>>; patchReposOwnerRepoReleasesId( args: Exclude<ReposAPIClientInterface['patchReposOwnerRepoReleasesIdParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Release | HttpResponse<models.Release> | HttpEvent<models.Release>> { return super.patchReposOwnerRepoReleasesId(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRelease(res) || console.error(`TypeGuard for response 'models.Release' caught inconsistency.`, res))); } /** * List assets for a release * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoReleasesIdAssets( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdAssetsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Assets>; getReposOwnerRepoReleasesIdAssets( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdAssetsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Assets>>; getReposOwnerRepoReleasesIdAssets( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdAssetsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Assets>>; getReposOwnerRepoReleasesIdAssets( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoReleasesIdAssetsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Assets | HttpResponse<models.Assets> | HttpEvent<models.Assets>> { return super.getReposOwnerRepoReleasesIdAssets(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isAssets(res) || console.error(`TypeGuard for response 'models.Assets' caught inconsistency.`, res))); } /** * List Stargazers. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStargazers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStargazersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Users>; getReposOwnerRepoStargazers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStargazersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Users>>; getReposOwnerRepoStargazers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStargazersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Users>>; getReposOwnerRepoStargazers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStargazersParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Users | HttpResponse<models.Users> | HttpEvent<models.Users>> { return super.getReposOwnerRepoStargazers(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isUsers(res) || console.error(`TypeGuard for response 'models.Users' caught inconsistency.`, res))); } /** * Get the number of additions and deletions per week. * Returns a weekly aggregate of the number of additions and deletions pushed * to a repository. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStatsCodeFrequency( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCodeFrequencyParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CodeFrequencyStats>; getReposOwnerRepoStatsCodeFrequency( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCodeFrequencyParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CodeFrequencyStats>>; getReposOwnerRepoStatsCodeFrequency( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCodeFrequencyParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CodeFrequencyStats>>; getReposOwnerRepoStatsCodeFrequency( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCodeFrequencyParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CodeFrequencyStats | HttpResponse<models.CodeFrequencyStats> | HttpEvent<models.CodeFrequencyStats>> { return super.getReposOwnerRepoStatsCodeFrequency(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCodeFrequencyStats(res) || console.error(`TypeGuard for response 'models.CodeFrequencyStats' caught inconsistency.`, res))); } /** * Get the last year of commit activity data. * Returns the last year of commit activity grouped by week. The days array * is a group of commits per day, starting on Sunday. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStatsCommitActivity( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCommitActivityParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CommitActivityStats>; getReposOwnerRepoStatsCommitActivity( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCommitActivityParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CommitActivityStats>>; getReposOwnerRepoStatsCommitActivity( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCommitActivityParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CommitActivityStats>>; getReposOwnerRepoStatsCommitActivity( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsCommitActivityParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CommitActivityStats | HttpResponse<models.CommitActivityStats> | HttpEvent<models.CommitActivityStats>> { return super.getReposOwnerRepoStatsCommitActivity(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCommitActivityStats(res) || console.error(`TypeGuard for response 'models.CommitActivityStats' caught inconsistency.`, res))); } /** * Get contributors list with additions, deletions, and commit counts. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStatsContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.ContributorsStats>; getReposOwnerRepoStatsContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.ContributorsStats>>; getReposOwnerRepoStatsContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.ContributorsStats>>; getReposOwnerRepoStatsContributors( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsContributorsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.ContributorsStats | HttpResponse<models.ContributorsStats> | HttpEvent<models.ContributorsStats>> { return super.getReposOwnerRepoStatsContributors(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isContributorsStats(res) || console.error(`TypeGuard for response 'models.ContributorsStats' caught inconsistency.`, res))); } /** * Get the weekly commit count for the repo owner and everyone else. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStatsParticipation( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsParticipationParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.ParticipationStats>; getReposOwnerRepoStatsParticipation( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsParticipationParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.ParticipationStats>>; getReposOwnerRepoStatsParticipation( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsParticipationParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.ParticipationStats>>; getReposOwnerRepoStatsParticipation( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsParticipationParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.ParticipationStats | HttpResponse<models.ParticipationStats> | HttpEvent<models.ParticipationStats>> { return super.getReposOwnerRepoStatsParticipation(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isParticipationStats(res) || console.error(`TypeGuard for response 'models.ParticipationStats' caught inconsistency.`, res))); } /** * Get the number of commits per hour in each day. * Each array contains the day number, hour number, and number of commits * 0-6 Sunday - Saturday * 0-23 Hour of day * Number of commits * * * For example, [2, 14, 25] indicates that there were 25 total commits, during * the 2.00pm hour on Tuesdays. All times are based on the time zone of * individual commits. * * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStatsPunchCard( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsPunchCardParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.CodeFrequencyStats>; getReposOwnerRepoStatsPunchCard( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsPunchCardParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.CodeFrequencyStats>>; getReposOwnerRepoStatsPunchCard( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsPunchCardParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.CodeFrequencyStats>>; getReposOwnerRepoStatsPunchCard( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatsPunchCardParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.CodeFrequencyStats | HttpResponse<models.CodeFrequencyStats> | HttpEvent<models.CodeFrequencyStats>> { return super.getReposOwnerRepoStatsPunchCard(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isCodeFrequencyStats(res) || console.error(`TypeGuard for response 'models.CodeFrequencyStats' caught inconsistency.`, res))); } /** * List Statuses for a specific Ref. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Ref>; getReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Ref>>; getReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Ref>>; getReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Ref | HttpResponse<models.Ref> | HttpEvent<models.Ref>> { return super.getReposOwnerRepoStatusesRef(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRef(res) || console.error(`TypeGuard for response 'models.Ref' caught inconsistency.`, res))); } /** * Create a Status. * Response generated for [ 201 ] HTTP response code. 
*/ postReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Ref>; postReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Ref>>; postReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Ref>>; postReposOwnerRepoStatusesRef( args: Exclude<ReposAPIClientInterface['postReposOwnerRepoStatusesRefParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Ref | HttpResponse<models.Ref> | HttpEvent<models.Ref>> { return super.postReposOwnerRepoStatusesRef(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isRef(res) || console.error(`TypeGuard for response 'models.Ref' caught inconsistency.`, res))); } /** * List watchers. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoSubscribers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscribersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Users>; getReposOwnerRepoSubscribers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscribersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Users>>; getReposOwnerRepoSubscribers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscribersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Users>>; getReposOwnerRepoSubscribers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscribersParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Users | HttpResponse<models.Users> | HttpEvent<models.Users>> { return super.getReposOwnerRepoSubscribers(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isUsers(res) || console.error(`TypeGuard for response 'models.Users' caught inconsistency.`, res))); } /** * Delete a Repository Subscription. * Response generated for [ 204 ] HTTP response code. 
*/ deleteReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; deleteReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['deleteReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.deleteReposOwnerRepoSubscription(args, requestHttpOptions, observe); } /** * Get a Repository Subscription. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Subscribition>; getReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Subscribition>>; getReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Subscribition>>; getReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Subscribition | HttpResponse<models.Subscribition> | HttpEvent<models.Subscribition>> { return super.getReposOwnerRepoSubscription(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isSubscribition(res) || console.error(`TypeGuard for response 'models.Subscribition' caught inconsistency.`, res))); } /** * Set a Repository Subscription * Response generated for [ 200 ] HTTP response code. 
*/ putReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Subscribition>; putReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Subscribition>>; putReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Subscribition>>; putReposOwnerRepoSubscription( args: Exclude<ReposAPIClientInterface['putReposOwnerRepoSubscriptionParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Subscribition | HttpResponse<models.Subscribition> | HttpEvent<models.Subscribition>> { return super.putReposOwnerRepoSubscription(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isSubscribition(res) || console.error(`TypeGuard for response 'models.Subscribition' caught inconsistency.`, res))); } /** * Get list of tags. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoTags( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Tags>; getReposOwnerRepoTags( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Tags>>; getReposOwnerRepoTags( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Tags>>; getReposOwnerRepoTags( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTagsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Tags | HttpResponse<models.Tags> | HttpEvent<models.Tags>> { return super.getReposOwnerRepoTags(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isTags(res) || console.error(`TypeGuard for response 'models.Tags' caught inconsistency.`, res))); } /** * Get list of teams * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoTeams( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Teams>; getReposOwnerRepoTeams( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Teams>>; getReposOwnerRepoTeams( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Teams>>; getReposOwnerRepoTeams( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Teams | HttpResponse<models.Teams> | HttpEvent<models.Teams>> { return super.getReposOwnerRepoTeams(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isTeams(res) || console.error(`TypeGuard for response 'models.Teams' caught inconsistency.`, res))); } /** * List Stargazers. New implementation. * Response generated for [ 200 ] HTTP response code. 
*/ getReposOwnerRepoWatchers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoWatchersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Users>; getReposOwnerRepoWatchers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoWatchersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Users>>; getReposOwnerRepoWatchers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoWatchersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Users>>; getReposOwnerRepoWatchers( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoWatchersParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<models.Users | HttpResponse<models.Users> | HttpEvent<models.Users>> { return super.getReposOwnerRepoWatchers(args, requestHttpOptions, observe) .pipe(tap((res: any) => guards.isUsers(res) || console.error(`TypeGuard for response 'models.Users' caught inconsistency.`, res))); } /** * Get archive link. * This method will return a 302 to a URL to download a tarball or zipball * archive for a repository. Please make sure your HTTP framework is * configured to follow redirects or you will need to use the Location header * to make a second GET request. * Note: For private repositories, these links are temporary and expire quickly. * * Response generated for [ default ] HTTP response code. 
*/ getReposOwnerRepoArchiveFormatPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoArchiveFormatPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; getReposOwnerRepoArchiveFormatPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoArchiveFormatPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; getReposOwnerRepoArchiveFormatPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoArchiveFormatPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; getReposOwnerRepoArchiveFormatPath( args: Exclude<ReposAPIClientInterface['getReposOwnerRepoArchiveFormatPathParams'], undefined>, requestHttpOptions?: HttpOptions, observe: any = 'body', ): Observable<void | HttpResponse<void> | HttpEvent<void>> { return super.getReposOwnerRepoArchiveFormatPath(args, requestHttpOptions, observe); } }
the_stack
import * as msRest from "@azure/ms-rest-js";
import * as Models from "../models";
import * as Mappers from "../models/providersMappers";
import * as Parameters from "../models/parameters";
import { ResourceManagementClientContext } from "../resourceManagementClientContext";

// NOTE(review): this looks like an AutoRest-generated service client
// (operation methods paired with module-level OperationSpec constants) —
// confirm before hand-editing, as a regeneration would clobber changes.

/** Class representing a Providers. */
export class Providers {
  // Shared client context: carries credentials, base URL and the
  // sendOperationRequest pipeline used by every method below.
  private readonly client: ResourceManagementClientContext;

  /**
   * Create a Providers.
   * @param {ResourceManagementClientContext} client Reference to the service client.
   */
  constructor(client: ResourceManagementClientContext) {
    this.client = client;
  }

  // Each operation exposes three call styles (promise, callback,
  // options+callback) via overloads; the implementation signature forwards
  // the parameters plus the matching OperationSpec to the client context.

  /**
   * Unregisters a subscription from a resource provider.
   * @param resourceProviderNamespace The namespace of the resource provider to unregister.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersUnregisterResponse>
   */
  unregister(resourceProviderNamespace: string, options?: msRest.RequestOptionsBase): Promise<Models.ProvidersUnregisterResponse>;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider to unregister.
   * @param callback The callback
   */
  unregister(resourceProviderNamespace: string, callback: msRest.ServiceCallback<Models.Provider>): void;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider to unregister.
   * @param options The optional parameters
   * @param callback The callback
   */
  unregister(resourceProviderNamespace: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Provider>): void;
  unregister(resourceProviderNamespace: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Provider>, callback?: msRest.ServiceCallback<Models.Provider>): Promise<Models.ProvidersUnregisterResponse> {
    return this.client.sendOperationRequest(
      {
        resourceProviderNamespace,
        options
      },
      unregisterOperationSpec,
      callback) as Promise<Models.ProvidersUnregisterResponse>;
  }

  /**
   * Registers a subscription with a resource provider.
   * @param resourceProviderNamespace The namespace of the resource provider to register.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersRegisterResponse>
   */
  register(resourceProviderNamespace: string, options?: msRest.RequestOptionsBase): Promise<Models.ProvidersRegisterResponse>;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider to register.
   * @param callback The callback
   */
  register(resourceProviderNamespace: string, callback: msRest.ServiceCallback<Models.Provider>): void;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider to register.
   * @param options The optional parameters
   * @param callback The callback
   */
  register(resourceProviderNamespace: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Provider>): void;
  register(resourceProviderNamespace: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Provider>, callback?: msRest.ServiceCallback<Models.Provider>): Promise<Models.ProvidersRegisterResponse> {
    return this.client.sendOperationRequest(
      {
        resourceProviderNamespace,
        options
      },
      registerOperationSpec,
      callback) as Promise<Models.ProvidersRegisterResponse>;
  }

  /**
   * Gets all resource providers for a subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersListResponse>
   */
  list(options?: Models.ProvidersListOptionalParams): Promise<Models.ProvidersListResponse>;
  /**
   * @param callback The callback
   */
  list(callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  list(options: Models.ProvidersListOptionalParams, callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  list(options?: Models.ProvidersListOptionalParams | msRest.ServiceCallback<Models.ProviderListResult>, callback?: msRest.ServiceCallback<Models.ProviderListResult>): Promise<Models.ProvidersListResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listOperationSpec,
      callback) as Promise<Models.ProvidersListResponse>;
  }

  /**
   * Gets all resource providers for the tenant.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersListAtTenantScopeResponse>
   */
  listAtTenantScope(options?: Models.ProvidersListAtTenantScopeOptionalParams): Promise<Models.ProvidersListAtTenantScopeResponse>;
  /**
   * @param callback The callback
   */
  listAtTenantScope(callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  listAtTenantScope(options: Models.ProvidersListAtTenantScopeOptionalParams, callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  listAtTenantScope(options?: Models.ProvidersListAtTenantScopeOptionalParams | msRest.ServiceCallback<Models.ProviderListResult>, callback?: msRest.ServiceCallback<Models.ProviderListResult>): Promise<Models.ProvidersListAtTenantScopeResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listAtTenantScopeOperationSpec,
      callback) as Promise<Models.ProvidersListAtTenantScopeResponse>;
  }

  /**
   * Gets the specified resource provider.
   * @param resourceProviderNamespace The namespace of the resource provider.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersGetResponse>
   */
  get(resourceProviderNamespace: string, options?: Models.ProvidersGetOptionalParams): Promise<Models.ProvidersGetResponse>;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider.
   * @param callback The callback
   */
  get(resourceProviderNamespace: string, callback: msRest.ServiceCallback<Models.Provider>): void;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceProviderNamespace: string, options: Models.ProvidersGetOptionalParams, callback: msRest.ServiceCallback<Models.Provider>): void;
  get(resourceProviderNamespace: string, options?: Models.ProvidersGetOptionalParams | msRest.ServiceCallback<Models.Provider>, callback?: msRest.ServiceCallback<Models.Provider>): Promise<Models.ProvidersGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceProviderNamespace,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.ProvidersGetResponse>;
  }

  /**
   * Gets the specified resource provider at the tenant level.
   * @param resourceProviderNamespace The namespace of the resource provider.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersGetAtTenantScopeResponse>
   */
  getAtTenantScope(resourceProviderNamespace: string, options?: Models.ProvidersGetAtTenantScopeOptionalParams): Promise<Models.ProvidersGetAtTenantScopeResponse>;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider.
   * @param callback The callback
   */
  getAtTenantScope(resourceProviderNamespace: string, callback: msRest.ServiceCallback<Models.Provider>): void;
  /**
   * @param resourceProviderNamespace The namespace of the resource provider.
   * @param options The optional parameters
   * @param callback The callback
   */
  getAtTenantScope(resourceProviderNamespace: string, options: Models.ProvidersGetAtTenantScopeOptionalParams, callback: msRest.ServiceCallback<Models.Provider>): void;
  getAtTenantScope(resourceProviderNamespace: string, options?: Models.ProvidersGetAtTenantScopeOptionalParams | msRest.ServiceCallback<Models.Provider>, callback?: msRest.ServiceCallback<Models.Provider>): Promise<Models.ProvidersGetAtTenantScopeResponse> {
    return this.client.sendOperationRequest(
      {
        resourceProviderNamespace,
        options
      },
      getAtTenantScopeOperationSpec,
      callback) as Promise<Models.ProvidersGetAtTenantScopeResponse>;
  }

  /**
   * Gets all resource providers for a subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.ProvidersListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProviderListResult>, callback?: msRest.ServiceCallback<Models.ProviderListResult>): Promise<Models.ProvidersListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.ProvidersListNextResponse>;
  }

  /**
   * Gets all resource providers for the tenant.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.ProvidersListAtTenantScopeNextResponse>
   */
  listAtTenantScopeNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.ProvidersListAtTenantScopeNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listAtTenantScopeNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listAtTenantScopeNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProviderListResult>): void;
  listAtTenantScopeNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProviderListResult>, callback?: msRest.ServiceCallback<Models.ProviderListResult>): Promise<Models.ProvidersListAtTenantScopeNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listAtTenantScopeNextOperationSpec,
      callback) as Promise<Models.ProvidersListAtTenantScopeNextResponse>;
  }
}

// Operation Specifications
// One declarative spec per operation above: HTTP method, URL template,
// parameter mappers, and response-body mappers keyed by status code
// (with `default` mapping error bodies to CloudError).
const serializer = new msRest.Serializer(Mappers);

const unregisterOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister",
  urlParameters: [
    Parameters.resourceProviderNamespace,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Provider
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

const registerOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register",
  urlParameters: [
    Parameters.resourceProviderNamespace,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Provider
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.top,
    Parameters.expand,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProviderListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

const listAtTenantScopeOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "providers",
  queryParameters: [
    Parameters.top,
    Parameters.expand,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProviderListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}",
  urlParameters: [
    Parameters.resourceProviderNamespace,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.expand,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Provider
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

const getAtTenantScopeOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "providers/{resourceProviderNamespace}",
  urlParameters: [
    Parameters.resourceProviderNamespace
  ],
  queryParameters: [
    Parameters.expand,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Provider
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// Paging specs: {nextLink} is an absolute URL returned by the service, so a
// baseUrl is supplied and the path is the raw next-link placeholder.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProviderListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

const listAtTenantScopeNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProviderListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
the_stack
import { appPrefix, gen, website } from '../../../../helpers'
import { routes as react } from '../../../../helpers/react'
import { routes as express } from '../../../../helpers/express'

// E2E spec: failure paths of the settings flow (profile + password) when the
// identity uses the email profile. The same suite runs once per app flavor.
context('Settings failures with email profile', () => {
  ;[
    {
      route: express.settings,
      base: express.base,
      app: 'express' as 'express',
      profile: 'email'
    },
    {
      route: react.settings,
      base: react.base,
      app: 'react' as 'react',
      profile: 'spa'
    }
  ].forEach(({ route, profile, app, base }) => {
    describe(`for app ${app}`, () => {
      // Primary account under test plus a second account used for the
      // "reauth as another person" scenarios.
      let email = gen.email()
      let password = gen.password()

      const emailSecond = `second-${gen.email()}`
      const passwordSecond = gen.password()

      // Derives a value that is guaranteed to differ from the original.
      const up = (value) => `not-${value}`

      before(() => {
        cy.proxy(app)
        cy.useConfigProfile(profile)
        cy.registerApi({
          email: emailSecond,
          password: passwordSecond,
          fields: { 'traits.website': 'https://github.com/ory/kratos' }
        })
        cy.registerApi({
          email,
          password,
          fields: { 'traits.website': website }
        })
      })

      beforeEach(() => {
        // Start every test with a fresh privileged session for the primary user.
        cy.longPrivilegedSessionTime()
        cy.visit(base)
        cy.clearAllCookies()
        cy.login({ email, password, cookieUrl: base })
        cy.visit(route)
      })

      describe('profile', () => {
        beforeEach(() => {
          cy.visit(route)
        })

        it('fails with validation errors', () => {
          cy.get('input[name="traits.website"]').clear().type('http://s')
          cy.get('[name="method"][value="profile"]').click()
          cy.get('[data-testid^="ui/message"]').should(
            'contain.text',
            'length must be >= 10'
          )
        })

        it('fails because reauth is another person', () => {
          // Force this because it is hidden
          cy.get('input[name="traits.email"]').clear().type(up(email))
          cy.shortPrivilegedSessionTime()
          cy.location().then((loc) => {
            cy.get('button[value="profile"]').click()
            cy.reauthWithOtherAccount({
              previousUrl: loc.toString(),
              expect: { email },
              type: { email: emailSecond, password: passwordSecond }
            })
            cy.location('pathname').should('contain', '/settings')
          })

          // We end up in a new settings flow for the second user
          cy.get('input[name="traits.email"]').should('have.value', emailSecond)

          // Try to log in with updated credentials -> should fail
          cy.clearAllCookies()
          cy.login({
            email: up(email),
            password,
            expectSession: false,
            cookieUrl: base
          })
        })

        it('does not update data because resumable session was removed', () => {
          cy.get('input[name="traits.email"]').clear().type(up(email))
          cy.shortPrivilegedSessionTime()
          cy.get('button[value="profile"]').click()
          // Clearing cookies discards the resumable settings flow; the email
          // must remain unchanged after logging back in.
          cy.clearAllCookies()
          cy.login({ email, password, cookieUrl: base })
          cy.getSession().should((session) => {
            const { identity } = session
            expect(identity.traits.email).to.equal(email)
          })
        })

        it('does not update without re-auth', () => {
          cy.get('input[name="traits.email"]').clear().type(up(email))
          cy.shortPrivilegedSessionTime() // wait for the privileged session to time out
          cy.get('button[value="profile"]').click()
          cy.visit(base)
          cy.getSession().should((session) => {
            const { identity } = session
            expect(identity.traits.email).to.equal(email)
          })
        })

        it('does not resume another failed request', () => {
          // checks here that we're checking settingsRequest.id == cookie.stored.id
          cy.get('input[name="traits.email"]').clear().type(up(email))
          cy.shortPrivilegedSessionTime() // wait for the privileged session to time out
          cy.get('button[value="profile"]').click()
          cy.location('pathname').should('not.contain', '/settings')

          cy.visit(route)
          cy.get('input[name="traits.website"]')
            .clear()
            .type('http://github.com/aeneasr')
          cy.get('button[value="profile"]').click()
          cy.expectSettingsSaved()
          cy.getSession().should((session) => {
            const { identity } = session
            expect(identity.traits.email).to.equal(email) // this is NOT up(email)
            expect(identity.traits.website).to.equal(
              'http://github.com/aeneasr'
            ) // this is NOT up(email)
          })
        })
      })

      describe('password', () => {
        beforeEach(() => {
          cy.longPrivilegedSessionTime()
        })

        afterEach(() => {
          cy.longPrivilegedSessionTime()
        })

        it('fails if password policy is violated', () => {
          cy.get('input[name="password"]').clear().type('12345678')
          cy.get('button[value="password"]').click()
          cy.get('*[data-testid^="ui/message"]').should(
            'contain.text',
            'data breaches'
          )
        })

        it('fails because reauth is another person', () => {
          cy.shortPrivilegedSessionTime() // wait for the privileged session to time out
          cy.get('input[name="password"]').clear().type(up(password))

          let firstSession
          cy.getSession().then((session) => {
            firstSession = session
          })

          cy.location().then((loc) => {
            cy.get('button[value="password"]').click()
            cy.reauthWithOtherAccount({
              previousUrl: loc.toString(),
              expect: { email },
              type: { email: emailSecond, password: passwordSecond }
            })
            cy.location('pathname').should('contain', '/settings')
          })

          // We want to ensure that the reauth session is completely different from the one we had in the first place.
          cy.getSession().then((session) => {
            expect(session.authentication_methods).to.have.length(1)
            expect(session.identity.traits.email).to.eq(emailSecond)
            expect(session.id).to.not.eq(firstSession.id)
            expect(session.identity.id).to.not.eq(firstSession.identity.id)
            expect(session.authenticated_at).to.not.eq(
              firstSession.authenticated_at
            )
          })

          // We end up in a new settings flow for the second user
          cy.get('input[name="traits.email"]').should('have.value', emailSecond)

          // Try to log in with updated credentials -> should fail
          cy.clearAllCookies()
          cy.login({
            email,
            password: up(password),
            expectSession: false,
            cookieUrl: base
          })
        })

        it('does not update without re-auth', () => {
          cy.get('input[name="password"]').clear().type(up(password))
          cy.shortPrivilegedSessionTime() // wait for the privileged session to time out
          cy.get('button[value="password"]').click()
          cy.visit(base)
          cy.clearAllCookies()
          cy.login({
            email,
            password: up(password),
            expectSession: false,
            cookieUrl: base
          })
        })

        it('does not update data because resumable session was removed', () => {
          cy.get('input[name="password"]').clear().type(up(password))
          cy.shortPrivilegedSessionTime() // wait for the privileged session to time out
          cy.get('button[value="password"]').click()
          cy.clearAllCookies()
          cy.login({
            email,
            password,
            cookieUrl: base
          })
          // The attempted new password must not have been persisted.
          cy.clearAllCookies()
          cy.login({
            email,
            password: up(password),
            expectSession: false,
            cookieUrl: base
          })
        })

        it('does not resume another queued request', () => {
          // Fresh throwaway account so earlier tests cannot interfere.
          const email = gen.email()
          const password = gen.password()
          cy.clearAllCookies()
          cy.register({
            email,
            password,
            fields: { 'traits.website': website }
          })
          cy.visit(route)

          // checks here that we're checking settingsRequest.id == cookie.stored.id
          const invalidPassword = 'invalid-' + gen.password()
          cy.get('input[name="password"]').clear().type(invalidPassword)
          cy.shortPrivilegedSessionTime() // wait for the privileged session to time out
          cy.get('button[value="password"]').click()
          cy.location('pathname').should('include', '/login')

          const validPassword = 'valid-' + gen.password()
          cy.visit(route)
          cy.get('input[name="password"]').clear().type(validPassword)
          cy.get('button[value="password"]').click()
          cy.location('pathname').should('include', '/login')
          cy.reauth({ expect: { email }, type: { password: password } })
          cy.location('pathname').should('include', '/settings')
          cy.get('input[name="password"]').should('exist')

          // This should pass because it is the correct password
          cy.clearAllCookies()
          cy.login({ email, password: validPassword, cookieUrl: base })

          // This should fail because it is the wrong password
          cy.clearAllCookies()
          cy.login({
            email,
            password: invalidPassword,
            expectSession: false,
            cookieUrl: base
          })

          cy.clearAllCookies()
          cy.login({
            email,
            password: password,
            expectSession: false,
            cookieUrl: base
          })
        })
      })

      describe('global errors', () => {
        it('fails when CSRF is incorrect', () => {
          cy.get(appPrefix(app) + 'input[name="password"]').type('12345678')
          cy.shouldHaveCsrfError({ app })
        })

        it('fails when a disallowed return_to url is requested', () => {
          cy.shouldErrorOnDisallowedReturnTo(
            route + '?return_to=https://not-allowed',
            { app }
          )
        })
      })
    })
  })
})
the_stack
import { Orbit } from '@orbit/core';
import {
  ClientError,
  NetworkError,
  requestOptionsForSource,
  ServerError
} from '@orbit/data';
import {
  RecordKeyMap,
  InitializedRecord,
  RecordSchema,
  RecordQueryExpression,
  RecordTransform,
  RecordQuery
} from '@orbit/records';
import { Dict } from '@orbit/utils';
import { InvalidServerResponse } from './lib/exceptions';
import { RecordTransformRequest } from './lib/transform-requests';
import { RecordQueryRequest } from './lib/query-requests';
import { deepMerge, toArray } from '@orbit/utils';
import { ResourceDocument } from './resource-document';
import { RecordDocument } from './record-document';
import {
  JSONAPIRequestOptions,
  buildFetchSettings
} from './lib/jsonapi-request-options';
import {
  JSONAPIURLBuilder,
  JSONAPIURLBuilderSettings
} from './jsonapi-url-builder';
import {
  JSONAPISerializer,
  JSONAPISerializerSettings
} from './jsonapi-serializer';
import {
  SerializerForFn,
  SerializerClassForFn,
  SerializerSettingsForFn
} from '@orbit/serializers';
import { buildJSONAPISerializerFor } from './serializers/jsonapi-serializer-builder';
import { JSONAPISerializers } from './serializers/jsonapi-serializers';
import { RecordOperation } from '@orbit/records';
import { JSONAPIResponse } from './jsonapi-response';

const { assert, deprecate } = Orbit;

/**
 * Options accepted by `JSONAPIRequestProcessor#fetch`. A superset of native
 * fetch init options: `json` is serialized into `body`, `params` are appended
 * to the URL as a query string, and `timeout` (ms) rejects slow requests with
 * a `NetworkError`. All three are consumed before the native fetch is invoked.
 */
export interface FetchSettings {
  headers?: Dict<any>;
  method?: string;
  json?: Dict<any>;
  body?: string;
  params?: Dict<any>;
  timeout?: number;
  credentials?: string;
  cache?: string;
  redirect?: string;
  referrer?: string;
  referrerPolicy?: string;
  integrity?: string;
}

/**
 * Settings for constructing a `JSONAPIRequestProcessor`.
 * `SerializerClass` is deprecated in favor of the `serializerFor` /
 * `serializerClassFor` / `serializerSettingsFor` builder functions.
 */
export interface JSONAPIRequestProcessorSettings {
  sourceName: string;
  serializerFor?: SerializerForFn;
  serializerClassFor?: SerializerClassForFn;
  serializerSettingsFor?: SerializerSettingsForFn;
  SerializerClass?: new (
    settings: JSONAPISerializerSettings
  ) => JSONAPISerializer;
  URLBuilderClass?: new (
    settings: JSONAPIURLBuilderSettings
  ) => JSONAPIURLBuilder;
  namespace?: string;
  host?: string;
  defaultFetchSettings?: FetchSettings;
  allowedContentTypes?: string[];
  schema: RecordSchema;
  keyMap?: RecordKeyMap;
}

/**
 * Issues HTTP requests for a JSON:API source: builds fetch settings, performs
 * the fetch (with optional timeout), validates the response content type, and
 * maps error statuses to `ClientError` / `ServerError` / `NetworkError`.
 */
export class JSONAPIRequestProcessor {
  sourceName: string;
  urlBuilder: JSONAPIURLBuilder;
  allowedContentTypes: string[];
  defaultFetchSettings!: FetchSettings;
  schema: RecordSchema;
  keyMap?: RecordKeyMap;
  protected _serializer?: JSONAPISerializer;
  protected _serializerFor: SerializerForFn;

  constructor(settings: JSONAPIRequestProcessorSettings) {
    let {
      sourceName,
      allowedContentTypes,
      schema,
      keyMap,
      SerializerClass,
      serializerFor,
      serializerClassFor,
      serializerSettingsFor
    } = settings;
    this.sourceName = sourceName;
    this.allowedContentTypes = allowedContentTypes || [
      'application/vnd.api+json',
      'application/json'
    ];
    this.schema = schema;
    this.keyMap = keyMap;
    if (SerializerClass) {
      deprecate(
        "The 'SerializerClass' setting for 'JSONAPIRequestProcessor' has been deprecated. Pass 'serializerFor', 'serializerClassFor', and/or 'serializerSettingsFor' instead."
      );
      this._serializer = new SerializerClass({ schema, keyMap });
    }
    this._serializerFor = buildJSONAPISerializerFor({
      schema,
      keyMap,
      serializerFor,
      serializerClassFor,
      serializerSettingsFor
    });
    const URLBuilderClass = settings.URLBuilderClass || JSONAPIURLBuilder;
    const urlBuilderOptions: JSONAPIURLBuilderSettings = {
      host: settings.host,
      namespace: settings.namespace,
      keyMap: settings.keyMap,
      serializer: this._serializer,
      serializerFor: this._serializerFor
    };
    this.urlBuilder = new URLBuilderClass(urlBuilderOptions);
    this.initDefaultFetchSettings(settings);
  }

  /** @deprecated Use `serializerFor` instead. */
  get serializer(): JSONAPISerializer {
    deprecate(
      "'JSONAPIRequestProcessor#serializer' has been deprecated. Use 'serializerFor' instead."
    );
    if (this._serializer) {
      return this._serializer;
    } else {
      return this._serializerFor(
        JSONAPISerializers.ResourceDocument
      ) as JSONAPISerializer;
    }
  }

  get serializerFor(): SerializerForFn {
    return this._serializerFor;
  }

  /**
   * Performs an HTTP request via the environment's fetch (or an installed
   * polyfill). `params` are appended to the URL; a positive `timeout` races
   * the request against a timer and rejects with `NetworkError` on expiry.
   */
  fetch(url: string, customSettings?: FetchSettings): Promise<JSONAPIResponse> {
    let settings = this.initFetchSettings(customSettings);

    let fullUrl = url;
    if (settings.params) {
      fullUrl = this.urlBuilder.appendQueryParams(fullUrl, settings.params);
      // `params` is not a native fetch option — remove it before the call.
      delete settings.params;
    }

    const fetchFn = (Orbit as any).fetch || Orbit.globals.fetch;

    if (settings.timeout !== undefined && settings.timeout > 0) {
      const timeout = settings.timeout;
      delete settings.timeout;

      return new Promise((resolve, reject) => {
        // FIX: initialize explicitly — previously declared uninitialized and
        // read via `!timedOut` in the callbacks below, which only worked
        // because `!undefined === true`. Also required under TS strict mode.
        let timedOut = false;

        let timer = Orbit.globals.setTimeout(() => {
          timedOut = true;
          reject(new NetworkError(`No fetch response within ${timeout}ms.`));
        }, timeout);

        fetchFn(fullUrl, settings)
          .catch((e: Error) => {
            Orbit.globals.clearTimeout(timer);

            if (!timedOut) {
              return this.handleFetchError(e);
            }
          })
          .then((response: any) => {
            Orbit.globals.clearTimeout(timer);

            if (!timedOut) {
              return this.handleFetchResponse(response);
            }
          })
          .then(resolve, reject);
      });
    } else {
      return fetchFn(fullUrl, settings)
        .catch((e: Error) => this.handleFetchError(e))
        .then((response: any) => this.handleFetchResponse(response));
    }
  }

  /**
   * Merges `customSettings` over the processor defaults, serializes `json`
   * into `body`, and strips the Content-Type header from body-less requests.
   */
  initFetchSettings(customSettings: FetchSettings = {}): FetchSettings {
    let settings: FetchSettings = deepMerge(
      {},
      this.defaultFetchSettings,
      customSettings
    );

    if (settings.json) {
      assert(
        "`json` and `body` can't both be set for fetch requests.",
        !settings.body
      );
      settings.body = JSON.stringify(settings.json);
      delete settings.json;
    }

    if (settings.headers && !settings.body) {
      delete (settings.headers as any)['Content-Type'];
    }

    return settings;
  }

  /**
   * Converts a deserialized document's primary + included records into
   * `updateRecord` operations.
   */
  operationsFromDeserializedDocument(
    deserialized: RecordDocument
  ): RecordOperation[] {
    const records: InitializedRecord[] = [];
    Array.prototype.push.apply(records, toArray(deserialized.data));

    if (deserialized.included) {
      Array.prototype.push.apply(records, deserialized.included);
    }

    return records.map((record) => {
      return {
        op: 'updateRecord',
        record
      };
    });
  }

  buildFetchSettings(
    options: JSONAPIRequestOptions = {},
    customSettings?: FetchSettings
  ): FetchSettings {
    return buildFetchSettings(options, customSettings);
  }

  /**
   * Extracts request options for this source from a query/transform and the
   * specific expression/operation being processed.
   */
  customRequestOptions(
    queryOrTransform: RecordQuery | RecordTransform,
    queryExpressionOrOperation: RecordQueryExpression | RecordOperation
  ): JSONAPIRequestOptions | undefined {
    return requestOptionsForSource(
      [queryOrTransform.options, queryExpressionOrOperation.options],
      this.sourceName
    ) as JSONAPIRequestOptions | undefined;
  }

  // Hook for subclasses to mutate/inspect a response document before it is
  // deserialized. Intentionally a no-op here.
  /* eslint-disable @typescript-eslint/no-unused-vars */
  preprocessResponseDocument(
    document: ResourceDocument | undefined,
    request: RecordQueryRequest | RecordTransformRequest
  ): void {}
  /* eslint-enable @typescript-eslint/no-unused-vars */

  /**
   * Returns true if the response has a body with one of the allowed JSON:API
   * content types. A 204 never has content.
   */
  protected responseHasContent(response: Response): boolean {
    if (response.status === 204) {
      return false;
    }

    let contentType = response.headers.get('Content-Type');
    if (contentType) {
      for (let allowedContentType of this.allowedContentTypes) {
        if (contentType.indexOf(allowedContentType) > -1) {
          return true;
        }
      }
    }
    return false;
  }

  protected initDefaultFetchSettings(
    settings: JSONAPIRequestProcessorSettings
  ): void {
    this.defaultFetchSettings = {
      headers: {
        Accept: 'application/vnd.api+json',
        'Content-Type': 'application/vnd.api+json'
      },
      timeout: 5000
    };

    if (settings.defaultFetchSettings) {
      deepMerge(this.defaultFetchSettings, settings.defaultFetchSettings);
    }
  }

  /**
   * Maps a fetch `Response` to a `JSONAPIResponse`:
   * - 201 must carry a JSON:API body (otherwise `InvalidServerResponse`);
   * - other 2xx bodies are parsed when present;
   * - non-2xx (except 304/404) are routed to `handleFetchResponseError`.
   */
  protected async handleFetchResponse(
    response: Response
  ): Promise<JSONAPIResponse> {
    const responseDetail: JSONAPIResponse = { response };

    if (response.status === 201) {
      if (this.responseHasContent(response)) {
        responseDetail.document = await response.json();
      } else {
        throw new InvalidServerResponse(
          `Server responses with a ${
            response.status
          } status should return content with one of the following content types: ${this.allowedContentTypes.join(
            ', '
          )}.`
        );
      }
    } else if (response.status >= 200 && response.status < 300) {
      if (this.responseHasContent(response)) {
        responseDetail.document = await response.json();
      }
    } else if (response.status !== 304 && response.status !== 404) {
      if (this.responseHasContent(response)) {
        const document = await response.json();
        await this.handleFetchResponseError(response, document);
      } else {
        await this.handleFetchResponseError(response);
      }
    }

    return responseDetail;
  }

  /** Always throws: 4xx → `ClientError`, everything else → `ServerError`. */
  protected async handleFetchResponseError(
    response: Response,
    data?: unknown
  ): Promise<Error> {
    let error: any;
    if (response.status >= 400 && response.status < 500) {
      error = new ClientError(response.statusText);
    } else {
      error = new ServerError(response.statusText);
    }
    error.response = response;
    error.data = data;
    throw error;
  }

  /** Always throws: wraps string errors in `NetworkError`, rethrows others. */
  protected async handleFetchError(e: Error | string): Promise<Error> {
    if (typeof e === 'string') {
      throw new NetworkError(e);
    } else {
      throw e;
    }
  }
}
the_stack
import { IPivotValues } from './engine';
import { isNullOrUndefined } from '@syncfusion/ej2-base';
import { ExcelExportProperties, ExcelRow } from '@syncfusion/ej2-grids';

/**
 * This is a file to perform common utility for OLAP and Relational datasource
 * @hidden
 */
export class PivotExportUtil {
    /**
     * Returns a shallow copy of `data` with all null/undefined-valued keys
     * removed. A null/undefined `data` is returned unchanged.
     */
    /* eslint-disable @typescript-eslint/no-explicit-any */
    private static getDefinedObj(data: { [key: string]: any }): { [key: string]: any } {
        let keyPos: number = 0;
        let framedSet: any = {};
        /* eslint-enable @typescript-eslint/no-explicit-any */
        if (!(data === null || data === undefined)) {
            let fields: string[] = Object.keys(data);
            while (keyPos < fields.length) {
                if (!(data[fields[keyPos]] === null || data[fields[keyPos]] === undefined)) {
                    framedSet[fields[keyPos]] = data[fields[keyPos]];
                }
                keyPos++;
            }
        } else {
            framedSet = data;
        }
        return framedSet;
    }

    /**
     * Returns a clone of the pivot value matrix. Rows/cells that are falsy
     * are skipped (left as holes), matching the sparse layout of the input.
     */
    public static getClonedPivotValues(pivotValues: IPivotValues): IPivotValues {
        let clonedSets: IPivotValues = [];
        for (let i: number = 0; i < pivotValues.length; i++) {
            if (pivotValues[i]) {
                clonedSets[i] = [];
                for (let j: number = 0; j < pivotValues[i].length; j++) {
                    if (pivotValues[i][j]) {
                        /* eslint-disable */
                        clonedSets[i][j] = this.getClonedPivotValueObj(pivotValues[i][j] as { [key: string]: Object });
                        /* eslint-enable */
                    }
                }
            }
        }
        return clonedSets;
    }

    // Shallow-clones a single pivot value cell, copying every own key
    // (unlike getDefinedObj, null/undefined values are kept).
    /* eslint-disable */
    private static getClonedPivotValueObj(data: { [key: string]: Object }): { [key: string]: Object } {
        /* eslint-enable */
        let keyPos: number = 0;
        /* eslint-disable @typescript-eslint/no-explicit-any */
        let framedSet: any = {};
        /* eslint-enable @typescript-eslint/no-explicit-any */
        if (!(data === null || data === undefined)) {
            let fields: string[] = Object.keys(data);
            while (keyPos < fields.length) {
                framedSet[fields[keyPos]] = data[fields[keyPos]];
                keyPos++;
            }
        } else {
            framedSet = data;
        }
        return framedSet;
    }

    /**
     * Returns true when the two collections share at least one element
     * (reference/strict equality).
     */
    /* eslint-disable */
    public static isContainCommonElements(collection1: Object[], collection2: Object[]): boolean {
        /* eslint-enable */
        for (let i: number = 0, cnt: number = collection1.length; i < cnt; i++) {
            for (let j: number = 0, lnt: number = collection2.length; j < lnt; j++) {
                if (collection2[j] === collection1[i]) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Converts server-cased (PascalCase) PDF header/footer content objects
     * into the camelCase shape expected by the grid's PDF export.
     */
    /* eslint-disable */
    public static formatPdfHeaderFooter(pdf: any): any {
        let contents: any = [];
        if (!isNullOrUndefined(pdf)) {
            for (let i: number = 0; i < pdf.length; i++) {
                let a = pdf[i];
                let content = {
                    /* eslint-enable */
                    type: a.Type,
                    pageNumberType: a.PageNumberType,
                    style: a.Style ? {
                        penColor: a.Style.PenColor,
                        penSize: a.Style.PenSize,
                        dashStyle: a.Style.DashStyle,
                        textBrushColor: a.Style.TextBrushColor,
                        textPenColor: a.Style.TextPenColor,
                        fontSize: a.Style.FontSize,
                        hAlign: a.Style.HAlign,
                        vAlign: a.Style.VAlign
                    } : a.Style,
                    points: a.Points !== null ? { x1: a.Points.X1, y1: a.Points.Y1, x2: a.Points.X2, y2: a.Points.Y2 } : null,
                    format: a.Format,
                    position: a.Position !== null ? { x: a.Position.X, y: a.Position.Y } : null,
                    size: a.Size !== null ? { height: a.Size.Height, width: a.Size.Width } : null,
                    src: a.Src,
                    value: a.Value,
                    font: a.Font
                };
                contents.push(content);
            }
        }
        return contents;
    }

    /**
     * Converts server-cased PDF export settings into camelCase
     * PdfExportProperties; keys that resolve to null/undefined are dropped.
     */
    /* eslint-disable */
    public static formatPdfExportProperties(pdf: any): any {
        let values: any;
        /* eslint-enable */
        values = this.getDefinedObj({
            pageOrientation: typeof pdf.PageOrientation === 'string' ? pdf.PageOrientation : null,
            pageSize: typeof pdf.PageSize === 'string' ? pdf.PageSize : null,
            header: !isNullOrUndefined(pdf.Header) ? {
                fromTop: pdf.Header.FromTop,
                height: pdf.Header.Height,
                contents: this.formatPdfHeaderFooter(pdf.Header.Contents)
            } : null,
            columns: pdf.Columns,
            footer: !isNullOrUndefined(pdf.Footer) ? {
                fromTop: pdf.Footer.FromBottom,
                height: pdf.Footer.Height,
                contents: this.formatPdfHeaderFooter(pdf.Footer.Contents)
            } : null,
            includeHiddenColumn: pdf.IncludeHiddenColumn,
            dataSource: pdf.DataSource,
            exportType: typeof pdf.ExportType === 'string' ? pdf.ExportType : null,
            theme: !isNullOrUndefined(pdf.Theme) ? {
                header: pdf.Theme.Header,
                record: pdf.Theme.Record,
                caption: pdf.Theme.Caption
            } : null,
            fileName: pdf.FileName,
            hierarchyExportMode: typeof pdf.HierarchyExportMode === 'string' ? pdf.HierarchyExportMode : null,
            allowHorizontalOverflow: pdf.AllowHorizontalOverflow
        });
        return values;
    }

    /**
     * Converts a server-cased Excel style object into camelCase export
     * style properties, dropping null/undefined entries.
     */
    /* eslint-disable */
    public static formatExcelStyle(style: any): any {
        let prop;
        /* eslint-enable */
        if (!isNullOrUndefined(style)) {
            prop = this.getDefinedObj({
                fontColor: style.FontColor,
                fontName: style.FontName,
                fontSize: style.FontSize,
                // FIX: the original compared against the String constructor
                // (`style.HAlign === String`), which is always false and
                // silently discarded any alignment; use a typeof check
                // consistent with the rest of this class.
                hAlign: typeof style.HAlign === 'string' ? style.HAlign : null,
                vAlign: typeof style.VAlign === 'string' ? style.VAlign : null,
                bold: style.Bold,
                indent: style.Indent,
                italic: style.Italic,
                underline: style.Underline,
                backColor: style.BackColor,
                wrapText: style.WrapText,
                borders: style.Borders,
                numberFormat: style.NumberFormat,
                type: style.Type
            });
        }
        return prop;
    }

    /**
     * Converts server-cased Excel cell definitions into camelCase cell
     * objects for the export rows.
     */
    /* eslint-disable */
    public static formatExcelCell(cell: any): any {
        let cells: ExcelRow[] = [];
        if (!isNullOrUndefined(cell)) {
            for (let i: number = 0; i < cell.length; i++) {
                // FIX: the original built this object but never added it to
                // `cells`, so the method always returned an empty array and
                // all header/footer cells were lost on export.
                cells.push(this.getDefinedObj({
                    index: !isNullOrUndefined(cell[i].Index) ? cell[i].Index : null,
                    colSpan: !isNullOrUndefined(cell[i].ColSpan) ? cell[i].ColSpan : null,
                    value: !isNullOrUndefined(cell[i].Value) ? cell[i].Value : null,
                    hyperlink: {
                        target: !isNullOrUndefined(cell[i].Hyperlink) ? cell[i].Hyperlink.Target : null,
                        displayText: !isNullOrUndefined(cell[i].Hyperlink) ? cell[i].Hyperlink.DisplayText : null
                    },
                    styles: this.formatExcelStyle(cell[i].Style),
                    rowSpan: !isNullOrUndefined(cell[i].RowSpan) ? cell[i].RowSpan : null
                }) as ExcelRow);
                /* eslint-enable */
            }
        }
        return cells;
    }

    /**
     * Converts server-cased Excel header/footer rows into camelCase
     * ExcelRow objects.
     */
    /* eslint-disable */
    public static formatExcelHeaderFooter(excel: any): any {
        let rows: ExcelRow[] = [];
        if (!isNullOrUndefined(excel)) {
            for (let i: number = 0; i < excel.Rows.length; i++) {
                let row = excel.Rows[i];
                let prop = this.getDefinedObj({
                    index: !isNullOrUndefined(row.Index) ? row.Index : null,
                    cells: this.formatExcelCell(row.Cells),
                    grouping: !isNullOrUndefined(row.Grouping) ? row.Grouping : null
                });
                rows.push(prop);
            }
        }
        return rows;
    }

    /**
     * Converts server-cased Excel export settings into camelCase
     * ExcelExportProperties; null/undefined keys are dropped.
     */
    public static formatExcelExportProperties(excel: any): any {
        /* eslint-enable */
        let prop: ExcelExportProperties;
        prop = this.getDefinedObj({
            dataSource: excel.DataSource,
            query: excel.Query,
            multipleExport: this.getDefinedObj({
                type: !isNullOrUndefined(excel.MultipleExport) ? excel.MultipleExport.Type : null,
                blankRows: !isNullOrUndefined(excel.MultipleExport) ? excel.MultipleExport.BlankRows : null
            }),
            header: this.getDefinedObj({
                headerRows: !isNullOrUndefined(excel.Header) ? excel.Header.HeaderRows : null,
                rows: this.formatExcelHeaderFooter(excel.Header)
            }),
            footer: this.getDefinedObj({
                footerRows: !isNullOrUndefined(excel.Footer) ? excel.Footer.FooterRows : null,
                rows: this.formatExcelHeaderFooter(excel.Footer)
            }),
            columns: excel.Columns,
            exportType: typeof excel.ExportType === 'string' ? excel.ExportType : undefined,
            includeHiddenColumn: excel.IncludeHiddenColumn,
            theme: !isNullOrUndefined(excel.Theme) ? {
                header: this.formatExcelStyle(excel.Theme.Header),
                record: this.formatExcelStyle(excel.Theme.Record),
                caption: this.formatExcelStyle(excel.Theme.Caption)
            } : undefined,
            fileName: excel.FileName,
            hierarchyExportMode: typeof excel.HierarchyExportMode === 'string' ? excel.HierarchyExportMode : undefined
        });
        return prop;
    }
}
the_stack
import { Ability, LogicError, UsesAbilities } from '@serenity-js/core'; import type { Browser } from 'webdriverio'; import { Key } from '../../input'; /** * @desc * An {@link @serenity-js/core/lib/screenplay~Ability} that enables the {@link @serenity-js/core/lib/screenplay/actor~Actor} * to interact with Web apps using [WebdriverIO](https://webdriver.io/). * * *Please note*: this class is still marked as experimental while new WebdriverIO Interactions and Questions are being developed. * This means that its interface can change without affecting the major version of Serenity/JS itself. * In particular, please don't rely on the `browser` field to remain `public` in future releases. * * @experimental * * @example <caption>Using the WebdriverIO browser</caption> * import { Actor } from '@serenity-js/core'; * import { BrowseTheWeb, by, Navigate, Target } from '@serenity-js/webdriverio' * import { Ensure, equals } from '@serenity-js/assertions'; * * const actor = Actor.named('Wendy').whoCan( * BrowseTheWeb.using(browser), * ); * * const HomePage = { * Title: Target.the('title').located(by.css('h1')), * }; * * actor.attemptsTo( * Navigate.to(`https://serenity-js.org`), * Ensure.that(Text.of(HomePage.Title), equals('Serenity/JS')), * ); * * @see https://webdriver.io/ * * @public * @implements {@serenity-js/core/lib/screenplay~Ability} * @see {@link @serenity-js/core/lib/screenplay/actor~Actor} */ export class BrowseTheWeb implements Ability { /** * @private */ private lastScriptExecutionSummary: LastScriptExecutionSummary; /** * @param {@wdio/types~Browser} browserInstance * @returns {BrowseTheWeb} */ static using(browserInstance: Browser<'async'>): BrowseTheWeb { return new BrowseTheWeb(browserInstance); } /** * @desc * Used to access the Actor's ability to {@link BrowseTheWeb} * from within the {@link @serenity-js/core/lib/screenplay~Interaction} classes, * such as {@link Navigate}. 
* * @param {@serenity-js/core/lib/screenplay/actor~UsesAbilities} actor * @return {BrowseTheWeb} */ static as(actor: UsesAbilities): BrowseTheWeb { return actor.abilityTo(BrowseTheWeb); } /** * @param {@wdio/types~Browser} browser */ constructor(public readonly browser: Browser<'async'>) { } /** * @desc * Navigate to a given destination, specified as an absolute URL * or a path relative to WebdriverIO `baseUrl`. * * @param {string} destination * @returns {Promise<void>} */ get(destination: string): Promise<void> { return this.browser.url(destination) as any; // todo: check if this returns a string or is mistyped } /** * @desc * Send a sequence of {@link Key} strokes to the active element. * * @param {Array<Key | string>} keys * Keys to enter * * @returns {Promise<void>} * * @see https://webdriver.io/docs/api/browser/keys/ */ sendKeys(keys: Array<Key | string>): Promise<void> { const keySequence = keys.map(key => { if (! Key.isKey(key)) { return key; } if (browser.isDevTools) { return key.devtoolsName; } return key.utf16codePoint; }); return this.browser.keys(keySequence); } /** * @desc * Take a screenshot of the top-level browsing context's viewport. * * @return {Promise<string>} * A promise that will resolve to a base64-encoded screenshot PNG */ takeScreenshot(): Promise<string> { return this.browser.takeScreenshot(); } /** * @desc * Schedules a command to execute JavaScript in the context of the currently selected frame or window. * The script fragment will be executed as the body of an anonymous function. * If the script is provided as a function object, that function will be converted to a string for injection * into the target window. * * Any arguments provided in addition to the script will be included as script arguments and may be referenced * using the `arguments` object. Arguments may be a `boolean`, `number`, `string` or `WebElement`. 
* Arrays and objects may also be used as script arguments as long as each item adheres * to the types previously mentioned. * * The script may refer to any variables accessible from the current window. * Furthermore, the script will execute in the window's context, thus `document` may be used to refer * to the current document. Any local variables will not be available once the script has finished executing, * though global variables will persist. * * If the script has a return value (i.e. if the script contains a `return` statement), * then the following steps will be taken for resolving this functions return value: * * For a HTML element, the value will resolve to a WebElement * - Null and undefined return values will resolve to null * - Booleans, numbers, and strings will resolve as is * - Functions will resolve to their string representation * - For arrays and objects, each member item will be converted according to the rules above * * @example <caption>Perform a sleep in the browser under test</caption> * BrowseTheWeb.as(actor).executeAsyncScript(` * return arguments[0].tagName; * `, Target.the('header').located(by.css(h1)) * * @see https://seleniumhq.github.io/selenium/docs/api/java/org/openqa/selenium/JavascriptExecutor.html#executeScript-java.lang.String-java.lang.Object...- * * @param {string | Function} script * @param {any[]} args * * @returns {Promise<any>} * * @see {@link BrowseTheWeb#getLastScriptExecutionResult} */ executeScript<Result, InnerArguments extends any[]>( script: string | ((...parameters: InnerArguments) => Result), ...args: InnerArguments ): Promise<Result> { return this.browser.execute(script, ...args) .then(result => { this.lastScriptExecutionSummary = new LastScriptExecutionSummary( result, ); return result; }); } /** * @desc * Schedules a command to execute asynchronous JavaScript in the context of the currently selected frame or window. * The script fragment will be executed as the body of an anonymous function. 
* If the script is provided as a function object, that function will be converted to a string for injection * into the target window. * * Any arguments provided in addition to the script will be included as script arguments and may be referenced * using the `arguments` object. Arguments may be a `boolean`, `number`, `string` or `WebElement` * Arrays and objects may also be used as script arguments as long as each item adheres * to the types previously mentioned. * * Unlike executing synchronous JavaScript with {@link BrowseTheWeb#executeScript}, * scripts executed with this function must explicitly signal they are finished by invoking the provided callback. * * This callback will always be injected into the executed function as the last argument, * and thus may be referenced with `arguments[arguments.length - 1]`. * * The following steps will be taken for resolving this functions return value against * the first argument to the script's callback function: * * - For a HTML element, the value will resolve to a WebElement * - Null and undefined return values will resolve to null * - Booleans, numbers, and strings will resolve as is * - Functions will resolve to their string representation * - For arrays and objects, each member item will be converted according to the rules above * * @example <caption>Perform a sleep in the browser under test</caption> * BrowseTheWeb.as(actor).executeAsyncScript(` * var delay = arguments[0]; * var callback = arguments[arguments.length - 1]; * * window.setTimeout(callback, delay); * `, 500) * * @example <caption>Return a value asynchronously</caption> * BrowseTheWeb.as(actor).executeAsyncScript(` * var callback = arguments[arguments.length - 1]; * * callback('some return value') * `).then(value => doSomethingWithThe(value)) * * @see https://seleniumhq.github.io/selenium/docs/api/java/org/openqa/selenium/JavascriptExecutor.html#executeAsyncScript-java.lang.String-java.lang.Object...- * * @param {string|Function} script * @param {any[]} 
args * * @returns {Promise<any>} * * @see {@link BrowseTheWeb#getLastScriptExecutionResult} */ executeAsyncScript<Result, Parameters extends any[]>( script: string | ((...args: [...parameters: Parameters, callback: (result: Result) => void]) => void), ...args: Parameters ): Promise<Result> { return this.browser.executeAsync<Result, Parameters>(script, ...args) .then(result => { this.lastScriptExecutionSummary = new LastScriptExecutionSummary<Result>( result, ); return result; }); } /** * @desc * Returns the last result of calling {@link BrowseTheWeb#executeAsyncScript} * or {@link BrowseTheWeb#executeScript} * * @returns {any} */ getLastScriptExecutionResult<Result = any>(): Result { if (! this.lastScriptExecutionSummary) { throw new LogicError(`Make sure to execute a script before checking on the result`); } return this.lastScriptExecutionSummary.result as Result; } } /** * @package */ class LastScriptExecutionSummary<Result = any> { constructor(public readonly result: Result) {} }
the_stack
import global from './global';
`!has('es6-iterator')`;
import { isArrayLike, isIterable, Iterable } from './iterator';
import has from '../core/has';
`!has('es6-symbol')`;
import './Symbol';

export interface Observable<T> extends ObservableObject {
    /**
     * Registers handlers for handling emitted values, error and completions from the observable, and
     * executes the observable's subscriber function, which will take action to set up the underlying data stream.
     *
     * @param observer The observer object that will handle events
     *
     * @return A Subscription object that can be used to manage the subscription.
     */
    subscribe(observer: Observer<T>): Subscription;

    /**
     * Registers handlers for handling emitted values, error and completions from the observable, and
     * executes the observable's subscriber function, which will take action to set up the underlying data stream.
     *
     * @param onNext A function to handle an emitted value. Value is passed in as the first parameter to the function.
     * @param onError A function to handle errors that occur during onNext, or during subscription.
     * @param onComplete A function that gets called when the subscription is complete, and will not send any more values. This function will also get called if an error occurs and onError is not defined.
     *
     * @return {Subscription} A Subscription object that can be used to manage the subscription.
     */
    subscribe(
        onNext: (value: T) => any,
        onError?: (error: any) => any,
        onComplete?: (completeValue?: any) => void
    ): Subscription;

    // Per the ES Observable proposal, an Observable is its own ObservableObject.
    [Symbol.observable](): this;
}

export interface ObservableConstructor {
    /**
     * Create a new observerable with a subscriber function. The subscriber function will get called with a
     * SubscriptionObserver parameter for controlling the subscription. I a function is returned, it will be
     * run when the subscription is complete.
     *
     * @param subscriber The subscription function to be called when observers are subscribed
     *
     * @example
     * ```ts
     * const source = new Observer<number>((observer) => {
     *     observer.next(1);
     *     observer.next(2);
     *     observer.next(3);
     * });
     * ```ts
     */
    new <T>(subscriber: Subscriber<T>): Observable<T>;

    /**
     * Create an Observable from another object. If the object is in itself Observable, the object will be returned.
     * Otherwise, the value will be wrapped in an Observable. If the object is iterable, an Observable will be created
     * that emits each item of the iterable.
     *
     * @param item The item to be turned into an Observable
     * @return An observable for the item you passed in
     */
    from<T>(item: Iterable<T> | ArrayLike<T> | ObservableObject): Observable<T>;

    /**
     * Create an Observable from a list of values.
     *
     * @param items The values to be emitted
     * @return An Observable that will emit the specified values
     *
     * @example
     * ```ts
     * let source = Observable.of(1, 2, 3);
     * // will emit three separate values, 1, 2, and 3.
     * ```
     */
    of<T>(...items: T[]): Observable<T>;
}

/**
 * An object that implements a Symbol.observerable method.
 */
export interface ObservableObject {
    [Symbol.observable]: () => any;
}

/**
 * Handles events emitted from the subscription
 */
export interface Observer<T> {
    /**
     * Called to handle a single emitted event.
     *
     * @param value The value that was emitted.
     */
    next?(value: T): any;

    /**
     * An optional method to be called when the subscription starts (before any events are emitted).
     * @param observer
     */
    start?(observer: Subscription): void;

    /**
     * An optional method to be called if an error occurs during subscription or handling.
     *
     * @param errorValue The error
     */
    error?(errorValue: any): any;

    /**
     * An optional method to be called when the subscription is completed (unless an error occurred and the error method was specified)
     *
     * @param completeValue The value passed to the completion method.
     */
    complete?(completeValue?: any): void;
}

/**
 * Describes an object that can be subscribed to
 */
export interface Subscribable<T> {
    subscribe(observer: Observer<T>): Subscription;
    subscribe(
        onNext: (value: T) => any,
        onError?: (error: any) => any,
        onComplete?: (completeValue?: any) => void
    ): Subscription;
}

export interface Subscriber<T> {
    (observer: SubscriptionObserver<T>): (() => void) | void | { unsubscribe: () => void };
}

/**
 * Handles an individual subscription to an Observable.
 */
export interface Subscription {
    /**
     * Whether or not the subscription is closed. Closed subscriptions will not emit values.
     */
    closed: boolean;

    /**
     * A function to call to close the subscription. Calling this will call any associated tear down methods.
     */
    unsubscribe: (() => void);
}

/**
 * An object used to control a single subscription and an observer.
 */
export interface SubscriptionObserver<T> {
    /**
     * Whether or not the subscription is closed.
     */
    readonly closed: boolean;

    /**
     * Emit an event to the observer.
     *
     * @param value The value to be emitted.
     */
    next(value: T): any;

    /**
     * Report an error. The subscription will be closed after an error has occurred.
     *
     * @param errorValue The error to be reported.
     */
    error(errorValue: any): any;

    /**
     * Report completion of the subscription. The subscription will be closed, and no new values will be emitted,
     * after completion.
     *
     * @param completeValue A value to pass to the completion handler.
     */
    complete(completeValue?: any): void;
}

// Prefer the native/global implementation when one exists; the shim below is
// only installed when the 'es-observable' feature test fails.
export let Observable: ObservableConstructor = global.Observable;

if (!has('es-observable')) {
    /*
     * Create a subscription observer for a given observer, and return the subscription. The "logic" for Observerables
     * is in here!
     */
    const startSubscription = function startSubscription<T>(
        executor: Subscriber<T>,
        observer: Observer<T>
    ): Subscription {
        // `closed` and `cleanUp` are shared state captured by all the
        // closures below; they implement the subscription lifecycle.
        let closed = false;
        let cleanUp: () => void | undefined;

        function unsubscribe() {
            if (!closed) {
                closed = true;
                if (cleanUp) {
                    cleanUp();
                }
            }
        }

        // Runs the subscriber (executor) and wires up its tear-down result.
        function start(subscriptionObserver: SubscriptionObserver<T>) {
            if (observer.start) {
                observer.start(subscription);
            }
            if (closed) {
                return;
            }
            try {
                const result: any = executor(subscriptionObserver);
                if (typeof result === 'function') {
                    cleanUp = result;
                } else if (result && 'unsubscribe' in result) {
                    cleanUp = result.unsubscribe;
                } else if (result !== undefined && result !== null) {
                    throw new TypeError('Subscriber must return a callable or subscription');
                }
                // The executor may have closed the subscription synchronously,
                // before cleanUp was assigned — run the tear-down now.
                if (closed) {
                    if (cleanUp) {
                        cleanUp();
                    }
                }
            } catch (e) {
                error(e);
            }
        }

        function next(value: T): any {
            if (closed) {
                return;
            }
            const next = observer.next;
            try {
                if (typeof next === 'function') {
                    return next(value);
                } else if (next !== undefined && next !== null) {
                    throw new TypeError('Observer.next is not a function');
                }
            } catch (e) {
                error(e);
            }
        }

        // Closes the subscription, then dispatches to observer.error (or
        // observer.complete as a fallback). A tear-down error is re-thrown
        // after the observer's handler has run.
        function error(errorValue?: any): any {
            if (!closed) {
                let cleanUpError: Error | undefined = undefined;
                try {
                    unsubscribe();
                } catch (e) {
                    cleanUpError = e;
                }
                const observerError = observer.error;
                if (observerError !== undefined && observerError !== null) {
                    if (typeof observerError === 'function') {
                        const errorResult = observerError(errorValue);
                        if (cleanUpError !== undefined) {
                            throw cleanUpError;
                        }
                        return errorResult;
                    } else {
                        throw new TypeError('Observer.error is not a function');
                    }
                } else if (observer.complete) {
                    return observer.complete(errorValue);
                } else {
                    throw errorValue;
                }
            } else {
                throw errorValue;
            }
        }

        // Closes the subscription, then dispatches to observer.complete.
        // Mirrors error() above, including the tear-down-error re-throw.
        function complete(completeValue?: any): any {
            if (!closed) {
                let cleanUpError: Error | undefined = undefined;
                try {
                    unsubscribe();
                } catch (e) {
                    cleanUpError = e;
                }
                const observerComplete = observer.complete;
                if (observerComplete !== undefined && observerComplete !== null) {
                    if (typeof observerComplete === 'function') {
                        const completeResult = observerComplete(completeValue);
                        if (cleanUpError !== undefined) {
                            throw cleanUpError;
                        }
                        return completeResult;
                    } else {
                        throw new TypeError('Observer.complete is not a function');
                    }
                } else if (cleanUpError) {
                    throw cleanUpError;
                }
            }
        }

        // The Subscription handed back to callers: exposes only `closed`
        // (live getter) and `unsubscribe`, both non-enumerable.
        const subscription = Object.create(
            Object.create(
                {},
                {
                    closed: {
                        enumerable: false,
                        configurable: true,
                        get() {
                            return closed;
                        }
                    },
                    unsubscribe: {
                        enumerable: false,
                        configurable: true,
                        writable: true,
                        value: unsubscribe
                    }
                }
            )
        );

        // Prototype for the SubscriptionObserver passed to the executor.
        const prototype = Object.create(
            {},
            {
                next: { enumerable: false, writable: true, value: next, configurable: true },
                error: { enumerable: false, writable: true, value: error, configurable: true },
                complete: { enumerable: false, writable: true, value: complete, configurable: true },
                closed: {
                    enumerable: false,
                    configurable: true,
                    get() {
                        return closed;
                    }
                }
            }
        );
        // create the SubscriptionObserver and kick things off
        start(Object.create(prototype));

        // the ONLY way to control the SubscriptionObserver is with the subscription or from a subscriber
        return subscription;
    };

    Observable = (function() {
        // Property decorator that hides the decorated member from enumeration.
        function nonEnumerable(target: any, key: string | symbol, descriptor: PropertyDescriptor) {
            descriptor.enumerable = false;
        }

        class Observable<T> {
            private _executor: Subscriber<T>;

            @nonEnumerable
            [Symbol.observable](): this {
                return this;
            }

            constructor(subscriber: Subscriber<T>) {
                if (typeof subscriber !== 'function') {
                    throw new TypeError('subscriber is not a function');
                }
                this._executor = subscriber;
            }

            // Accepts either an Observer object or (onNext, onError?, onComplete?)
            // callbacks; normalizes to an Observer before starting the subscription.
            @nonEnumerable
            subscribe(observerOrNext: any, ...listeners: any[]) {
                const [onError, onComplete] = [...listeners];
                if (
                    !observerOrNext ||
                    typeof observerOrNext === 'number' ||
                    typeof observerOrNext === 'string' ||
                    typeof observerOrNext === 'boolean'
                ) {
                    throw new TypeError('parameter must be a function or an observer');
                }
                let observer: Observer<T>;
                if (typeof observerOrNext === 'function') {
                    observer = {
                        next: observerOrNext
                    };
                    if (typeof onError === 'function') {
                        observer.error = onError;
                    }
                    if (typeof onComplete === 'function') {
                        observer.complete = onComplete;
                    }
                } else {
                    observer = observerOrNext;
                }
                return startSubscription(this._executor, observer);
            }

            @nonEnumerable
            static of<U>(...items: U[]): Observable<U> {
                // `this` may not be a constructor when of() is detached; fall
                // back to this Observable implementation in that case.
                let constructor: typeof Observable;
                if (typeof this !== 'function') {
                    constructor = Observable;
                } else {
                    constructor = this;
                }
                return new constructor((observer: SubscriptionObserver<U>) => {
                    for (const o of items) {
                        observer.next(o);
                    }
                    observer.complete();
                });
            }

            @nonEnumerable
            static from<U>(item: Iterable<U> | ArrayLike<U> | Observable<U>): Observable<U> {
                if (item === null || item === undefined) {
                    throw new TypeError('item cannot be null or undefined');
                }
                let constructor: typeof Observable;
                if (typeof this !== 'function') {
                    constructor = Observable;
                } else {
                    constructor = this;
                }
                const observableSymbol = (item as Observable<U>)[Symbol.observable];
                if (observableSymbol !== undefined) {
                    // Item claims to be observable: validate and adapt its
                    // Symbol.observable result per the proposal's rules.
                    if (typeof observableSymbol !== 'function') {
                        throw new TypeError('Symbol.observable must be a function');
                    }
                    const result: any = observableSymbol.call(item);
                    if (
                        result === undefined ||
                        result === null ||
                        typeof result === 'number' ||
                        typeof result === 'boolean' ||
                        typeof result === 'string'
                    ) {
                        throw new TypeError('Return value of Symbol.observable must be object');
                    }
                    if ((result.constructor && result.constructor === this) || result instanceof Observable) {
                        return result;
                    } else if (result.subscribe) {
                        return new constructor(result.subscribe);
                    } else {
                        if (constructor.of) {
                            return constructor.of(result);
                        } else {
                            return Observable.of(result);
                        }
                    }
                } else if (isIterable(item) || isArrayLike(item)) {
                    // Iterable/array-like: emit each element, then complete.
                    return new constructor((observer: SubscriptionObserver<U>) => {
                        if (isArrayLike(item)) {
                            for (let i = 0; i < item.length; i++) {
                                observer.next(item[i]);
                            }
                        } else {
                            for (const o of item) {
                                observer.next(o);
                            }
                        }
                        observer.complete();
                    });
                } else {
                    throw new TypeError('Parameter is neither Observable nor Iterable');
                }
            }
        }

        return Observable;
    })();
}

export default Observable;
the_stack
import * as ts from "typescript"; import { getIdentifyingTypeLiteralParent, isNodeWithDefinedTypeArguments, isNodeWithDefinedTypeParameters, isNodeWithIdentifierName, } from "../../../../shared/nodeTypes"; import { isTypeArgumentsType } from "../../../../shared/typeNodes"; import { getTypeAtLocationIfNotError } from "../../../../shared/types"; import { FileMutationsRequest } from "../../../fileMutator"; export type InterfaceOrTypeLiteral = ts.InterfaceDeclaration | ts.TypeLiteralNode; export const collectGenericNodeReferences = (request: FileMutationsRequest, node: InterfaceOrTypeLiteral) => { // Find all generic references purported to be of this type const referencingNodes = collectDirectNodeReferences(request, node); if (referencingNodes === undefined) { return undefined; } // For any reference that uses the node as a generic type, // expand the node's references to include references to that generic type return expandReferencesForGenericTypes(request, node, referencingNodes); }; const collectDirectNodeReferences = (request: FileMutationsRequest, node: InterfaceOrTypeLiteral) => { // Interfaces are referenced by themselves, as they provide their own name // Type literals are referenced by their parent, which is often a type alias or variable const identifyingNode = ts.isInterfaceDeclaration(node) ? node : getIdentifyingTypeLiteralParent(node); return request.fileInfoCache.getNodeReferencesAsNodes(identifyingNode); }; export const expandReferencesForGenericTypes = ( request: FileMutationsRequest, interfaceOrTypeLiteral: InterfaceOrTypeLiteral, referencingNodes: ReadonlyArray<ts.Node>, ) => { const expandedReferences: ts.Node[] = []; // For each of the references we'll look at, there are two relevant nodes we need to map together: // * The templated declaration with type parameters: e.g. class Container<T> { ... } // * The templated instantiation with type arguments: e.g. new Container<T>() { ... 
} for (const referencingNode of referencingNodes) { // We only care about type references within generics if (!ts.isTypeReferenceNode(referencingNode)) { continue; } // Nodes with a "typeArguments" are the only ones we care about const templatedParentInstantiation = referencingNode.parent; if (!isNodeWithDefinedTypeArguments(templatedParentInstantiation)) { continue; } // Find the corresponding type node in the templated parent's type arguments // Note that call expressions (e.g. `container<T>({})`) store the type in the expression (e.g. `container`), // while other types (e.g. `container.value = {};`) store the type on themselves const relevantNodeWithTypeSignatures = ts.isCallExpression(templatedParentInstantiation) ? templatedParentInstantiation.expression : templatedParentInstantiation; const templatedDeclarationSymbol = getTypeAtLocationIfNotError(request, relevantNodeWithTypeSignatures)?.getSymbol(); if (templatedDeclarationSymbol === undefined) { continue; } // The templated declaration is the backing value declaration for the instantiation's symbol const templatedDeclaration = templatedDeclarationSymbol.valueDeclaration; if ( templatedDeclaration === undefined || !isNodeWithDefinedTypeParameters(templatedDeclaration) || templatedParentInstantiation.typeArguments === undefined ) { continue; } // Find the index of the corresponding type argument in the instantation of the node, // and finally use that to grab the node for the template parameter from the type argument's index const typeArgumentIndex = templatedParentInstantiation.typeArguments.indexOf(referencingNode); const templateDeclaredTypeNode = templatedDeclaration.typeParameters[typeArgumentIndex]; // Collect all nodes that are of the template declared type node when its value is // the interface or type alias we're considering expanding expandedReferences.push(...collectGenericReferencesOfType(request, interfaceOrTypeLiteral, templateDeclaredTypeNode)); } return expandedReferences; }; const 
collectGenericReferencesOfType = ( request: FileMutationsRequest, interfaceOrTypeLiteral: InterfaceOrTypeLiteral, templateDeclaredTypeNode: ts.TypeNode, ) => { const genericNodeReferences: ts.Node[] = []; const originalType = getTypeAtLocationIfNotError(request, interfaceOrTypeLiteral); // Find all the references to the declared template type within its declaration // For example, in `class Container<T> { member: T; }`, that would be the T in `member: T;` const referencingTypeNodes = request.fileInfoCache.getNodeReferencesAsNodes(templateDeclaredTypeNode); if (referencingTypeNodes === undefined) { return genericNodeReferences; } for (const referencingTypeNode of referencingTypeNodes) { // From the referencing type node, grab the node it's declared as the type of if (!ts.isTypeNode(referencingTypeNode)) { continue; } // The parent node of the referencing type node will typically be something like a parameter or variable declaration // For example, in `class Container<T> { member: T; }`, that would be the `member: T;` const parent = referencingTypeNode.parent; if (!isNodeWithIdentifierName(parent)) { continue; } // Try finding all references to the name of the parent (declaration) // For example, if the parent is `member: T;`, that would be `member` const allReferencingInstantiationNodes = request.fileInfoCache.getNodeReferencesAsNodes(parent.name); if (allReferencingInstantiationNodes !== undefined) { for (const referencingInstantiationNode of allReferencingInstantiationNodes) { if (getTypeAtLocationIfNotError(request, referencingInstantiationNode) === originalType) { genericNodeReferences.push(referencingInstantiationNode); } } } // If the is a parameter, try all objects passed into its containing signature if (ts.isParameter(parent) || ts.isParameterPropertyDeclaration(parent, parent.parent)) { genericNodeReferences.push(...findProvidedTypesForParameter(request, interfaceOrTypeLiteral, parent.parent, parent)); } } return genericNodeReferences; }; const 
findProvidedTypesForParameter = ( request: FileMutationsRequest, interfaceOrTypeLiteral: InterfaceOrTypeLiteral, signature: ts.SignatureDeclaration, parameter: ts.ParameterDeclaration | ts.ParameterPropertyDeclaration, ) => { const providedNodes: ts.Node[] = []; const allReferencingNodes = request.fileInfoCache.getNodeReferencesAsNodes(signature); if (allReferencingNodes === undefined) { return providedNodes; } const originalType = getTypeAtLocationIfNotError(request, interfaceOrTypeLiteral); if (originalType === undefined) { return providedNodes; } const parameterIndex = signature.parameters.indexOf(parameter); for (let referencingNode of allReferencingNodes) { // Call signatures might be found to be referenced by an expression, e.g. `container<T>({});` // We want the call expression within, e.g. `container<T>({})` (note the lack of `;`) if (ts.isExpressionStatement(referencingNode)) { referencingNode = referencingNode.expression; } for (const potentialCallOrNewExpression of [ // Direct call expressions will have arguments on themselves, e.g. `container<T>({})` referencingNode, // New expressions will have arguments on their parents, e.g. `new Container<T>({})` referencingNode.parent, // Method expressions will have arguments on their grandparents, e.g. `container.setValues({})` referencingNode.parent.parent, ]) { if (!ts.isCallOrNewExpression(potentialCallOrNewExpression)) { continue; } const providedArguments = potentialCallOrNewExpression.arguments; if (providedArguments === undefined) { continue; } if (expressionRefersToOriginalType(request, originalType, potentialCallOrNewExpression)) { providedNodes.push(providedArguments[parameterIndex]); break; } } } return providedNodes; }; /** * Given: * * Declaration of a call or construct signature with a type argument (on it or a parent) * * Usage of that same signature * * An original interface or type literal * How can we determine whether the signature usage is on that same type argument? 
* These are definitely not the right ways to do this... * ...but I spent three hours wrangling with it and don't know how... */ const expressionRefersToOriginalType = ( request: FileMutationsRequest, originalType: ts.Type, potentialCallOrNewExpression: ts.CallExpression | ts.NewExpression, ) => { const expressionNodeType = getTypeAtLocationIfNotError(request, potentialCallOrNewExpression); if (expressionNodeType === undefined) { return false; } // If the expression node's type already has type arguments, do they match the original type? // This is more likely with classes that are instantiated with a type // This can go wrong easily: e.g. with multiple type arguments that have intermixed usages if (isTypeArgumentsType(expressionNodeType) && expressionNodeType.typeArguments?.includes(originalType)) { return true; } // Alternately, what about functions that themselves have types? // Again, this can go wrong: e.g. with multiple type arguments that have intermixed usages if (isNodeWithDefinedTypeArguments(potentialCallOrNewExpression)) { for (const typeArgument of potentialCallOrNewExpression.typeArguments || []) { if (getTypeAtLocationIfNotError(request, typeArgument) === originalType) { return true; } } } return false; };
the_stack
import {ParamInitValuesTypeMap} from '../../../../params/types/ParamInitValuesTypeMap'; import {ParamType} from '../../../../poly/ParamType'; import {RampParam} from '../../../../params/Ramp'; // import {BaseTypedConnection} from './_Base'; // TODO: simplify GLDataType into one enum // http://learnwebgl.brown37.net/12_shader_language/glsl_data_types.html // // // GL Data types // // export enum GlConnectionPointType { BOOL = 'bool', // BVEC2 = 'bvec2', // BVEC3 = 'bvec3', // BVEC4 = 'bvec4', INT = 'int', // IVEC2 = 'ivec2', // IVEC3 = 'ivec3', // IVEC4 = 'ivec4', FLOAT = 'float', VEC2 = 'vec2', VEC3 = 'vec3', VEC4 = 'vec4', // matrices to be used later // MAT2 = 'mat2', // MAT3 = 'mat3', // MAT4 = 'mat4', SAMPLER_2D = 'sampler2D', SSS_MODEL = 'SSSModel', } // interface IGlConnectionPointType { // [EnumGlConnectionPointType.BOOL]: Readonly<'bool'>; // // BVEC2 = 'bvec2', // // BVEC3 = 'bvec3', // // BVEC4 = 'bvec4', // [EnumGlConnectionPointType.INT]: Readonly<'int'>; // // IVEC2 = 'ivec2', // // IVEC3 = 'ivec3', // // IVEC4 = 'ivec4', // [EnumGlConnectionPointType.FLOAT]: Readonly<'float'>; // [EnumGlConnectionPointType.VEC2]: Readonly<'vec2'>; // [EnumGlConnectionPointType.VEC3]: Readonly<'vec3'>; // [EnumGlConnectionPointType.VEC4]: Readonly<'vec4'>; // // matrices to be used later // // MAT2 = 'mat2', // // MAT3 = 'mat3', // // MAT4 = 'mat4', // [EnumGlConnectionPointType.SAMPLER_2D]: Readonly<'sampler2D'>; // } // export const GlConnectionPointType: IGlConnectionPointType = { // [EnumGlConnectionPointType.BOOL]: 'bool', // // BVEC2 = 'bvec2', // // BVEC3 = 'bvec3', // // BVEC4 = 'bvec4', // [EnumGlConnectionPointType.INT]: 'int', // // IVEC2 = 'ivec2', // // IVEC3 = 'ivec3', // // IVEC4 = 'ivec4', // [EnumGlConnectionPointType.FLOAT]: 'float', // [EnumGlConnectionPointType.VEC2]: 'vec2', // [EnumGlConnectionPointType.VEC3]: 'vec3', // [EnumGlConnectionPointType.VEC4]: 'vec4', // // matrices to be used later // // MAT2 = 'mat2', // // MAT3 = 'mat3', // // MAT4 = 
'mat4', // [EnumGlConnectionPointType.SAMPLER_2D]: 'sampler2D', // }; // // // ALL GL Data types in an array // // export const GL_CONNECTION_POINT_TYPES: Array<GlConnectionPointType> = [ GlConnectionPointType.BOOL, // ConnectionPointType.BVEC2, // ConnectionPointType.BVEC3, // ConnectionPointType.BVEC4, GlConnectionPointType.INT, // ConnectionPointType.IVEC2, // ConnectionPointType.IVEC3, // ConnectionPointType.IVEC4, GlConnectionPointType.FLOAT, GlConnectionPointType.VEC2, GlConnectionPointType.VEC3, GlConnectionPointType.VEC4, ]; // // // Map to convert from a GL Data type to a ParamType // // type ConnectionPointTypeToParamTypeMapGeneric = {[key in GlConnectionPointType]: ParamType}; export interface IConnectionPointTypeToParamTypeMap extends ConnectionPointTypeToParamTypeMapGeneric { [GlConnectionPointType.BOOL]: ParamType.BOOLEAN; // [ConnectionPointType.BVEC2]: [ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC3]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC4]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] [GlConnectionPointType.INT]: ParamType.INTEGER; // [ConnectionPointType.IVEC2]: [ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC3]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC4]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; [GlConnectionPointType.FLOAT]: ParamType.FLOAT; [GlConnectionPointType.VEC2]: ParamType.VECTOR2; [GlConnectionPointType.VEC3]: ParamType.VECTOR3; [GlConnectionPointType.VEC4]: ParamType.VECTOR4; } export const GlConnectionPointTypeToParamTypeMap: IConnectionPointTypeToParamTypeMap = { [GlConnectionPointType.BOOL]: ParamType.BOOLEAN, // [ConnectionPointType.BVEC2]: [ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC3]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC4]: [ParamType.BOOLEAN, 
ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] [GlConnectionPointType.INT]: ParamType.INTEGER, // [ConnectionPointType.IVEC2]: [ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC3]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC4]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; [GlConnectionPointType.FLOAT]: ParamType.FLOAT, [GlConnectionPointType.VEC2]: ParamType.VECTOR2, [GlConnectionPointType.VEC3]: ParamType.VECTOR3, [GlConnectionPointType.VEC4]: ParamType.VECTOR4, [GlConnectionPointType.SAMPLER_2D]: ParamType.RAMP, [GlConnectionPointType.SSS_MODEL]: ParamType.STRING, }; // // // Map to convert from a ParamType to GL Data type // // type GLParamTypeToConnectionPointTypeMapGeneric = {[key in ParamType]: GlConnectionPointType | undefined}; export interface IGLParamTypeToConnectionPointTypeMap extends GLParamTypeToConnectionPointTypeMapGeneric { [ParamType.BOOLEAN]: GlConnectionPointType.BOOL; [ParamType.COLOR]: GlConnectionPointType.VEC3; // [ConnectionPointType.BVEC2]: [ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC3]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC4]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] [ParamType.INTEGER]: GlConnectionPointType.INT; // [ConnectionPointType.IVEC2]: [ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC3]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC4]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; [ParamType.FLOAT]: GlConnectionPointType.FLOAT; [ParamType.FOLDER]: undefined; [ParamType.VECTOR2]: GlConnectionPointType.VEC2; [ParamType.VECTOR3]: GlConnectionPointType.VEC3; [ParamType.VECTOR4]: GlConnectionPointType.VEC4; [ParamType.BUTTON]: undefined; [ParamType.OPERATOR_PATH]: undefined; [ParamType.NODE_PATH]: undefined; [ParamType.PARAM_PATH]: 
undefined; [ParamType.RAMP]: undefined; [ParamType.STRING]: undefined; } export const GLParamTypeToConnectionPointTypeMap: IGLParamTypeToConnectionPointTypeMap = { [ParamType.BOOLEAN]: GlConnectionPointType.BOOL, [ParamType.COLOR]: GlConnectionPointType.VEC3, // [ConnectionPointType.BVEC2]: [ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC3]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC4]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] [ParamType.INTEGER]: GlConnectionPointType.INT, // [ConnectionPointType.IVEC2]: [ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC3]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC4]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; [ParamType.FLOAT]: GlConnectionPointType.FLOAT, [ParamType.FOLDER]: undefined, [ParamType.VECTOR2]: GlConnectionPointType.VEC2, [ParamType.VECTOR3]: GlConnectionPointType.VEC3, [ParamType.VECTOR4]: GlConnectionPointType.VEC4, [ParamType.BUTTON]: undefined, [ParamType.OPERATOR_PATH]: undefined, [ParamType.PARAM_PATH]: undefined, [ParamType.NODE_PATH]: undefined, [ParamType.RAMP]: undefined, [ParamType.STRING]: undefined, }; // // // Map of GL Data type default values // // export type ConnectionPointInitValueMapGeneric = { [key in GlConnectionPointType]: ParamInitValuesTypeMap[IConnectionPointTypeToParamTypeMap[key]]; }; export const GlConnectionPointInitValueMap: ConnectionPointInitValueMapGeneric = { [GlConnectionPointType.BOOL]: false, // [ConnectionPointType.BVEC2]: [ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC3]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC4]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] [GlConnectionPointType.INT]: 0, // [ConnectionPointType.IVEC2]: [ParamType.INTEGER, ParamType.INTEGER]; // 
[ConnectionPointType.IVEC3]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC4]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; [GlConnectionPointType.FLOAT]: 0, [GlConnectionPointType.VEC2]: [0, 0], [GlConnectionPointType.VEC3]: [0, 0, 0], [GlConnectionPointType.VEC4]: [0, 0, 0, 0], [GlConnectionPointType.SAMPLER_2D]: RampParam.DEFAULT_VALUE_JSON, [GlConnectionPointType.SSS_MODEL]: 'SSSModel()', }; // // // Map of GL Data type component counts // // export type ConnectionPointComponentsCountMapGeneric = { [key in GlConnectionPointType]: number; }; export const GlConnectionPointComponentsCountMap: ConnectionPointComponentsCountMapGeneric = { [GlConnectionPointType.BOOL]: 1, // [ConnectionPointType.BVEC2]: [ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC3]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] // [ConnectionPointType.BVEC4]: [ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN, ParamType.BOOLEAN] [GlConnectionPointType.INT]: 1, // [ConnectionPointType.IVEC2]: [ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC3]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; // [ConnectionPointType.IVEC4]: [ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER, ParamType.INTEGER]; [GlConnectionPointType.FLOAT]: 1, [GlConnectionPointType.VEC2]: 2, [GlConnectionPointType.VEC3]: 3, [GlConnectionPointType.VEC4]: 4, [GlConnectionPointType.SAMPLER_2D]: 1, [GlConnectionPointType.SSS_MODEL]: 1, }; // import { // ConnectionPointType, // ConnectionPointInitValueMapGeneric, // ConnectionPointInitValueMap, // ConnectionPointTypeToParamTypeMap, // IConnectionPointTypeToParamTypeMap, // } from '../ConnectionPointType'; // import {ParamInitValuesTypeMap} from '../params/ParamsController'; export interface GlConnectionPointData<T extends GlConnectionPointType> { name: string; type: T; } import {BaseConnectionPoint} from './_Base'; export class 
GlConnectionPoint<T extends GlConnectionPointType> extends BaseConnectionPoint { protected _json: GlConnectionPointData<T> | undefined; // protected _init_value: any; constructor( protected _name: string, protected _type: T, protected _init_value?: ConnectionPointInitValueMapGeneric[T] ) { super(_name, _type); // if (this._init_value === undefined) { this._init_value = this._init_value || GlConnectionPointInitValueMap[this._type]; // } } type() { return this._type; } are_types_matched(src_type: string, dest_type: string): boolean { return src_type == dest_type; } get param_type(): IConnectionPointTypeToParamTypeMap[T] { return GlConnectionPointTypeToParamTypeMap[this._type]; } get init_value() { return this._init_value; } toJSON(): GlConnectionPointData<T> { return (this._json = this._json || this._create_json()); } protected _create_json(): GlConnectionPointData<T> { return { name: this._name, type: this._type, }; } } export type BaseGlConnectionPoint = GlConnectionPoint<GlConnectionPointType>;
the_stack
import { Component, Element, Prop, Watch, h, Event, EventEmitter } from '@stencil/core'; import { select, event } from 'd3-selection'; import { max, min } from 'd3-array'; import { scaleBand, scaleLinear } from 'd3-scale'; import { nest } from 'd3-collection'; import { easeCircleIn } from 'd3-ease'; import { IBoxModelType, IHoverStyleType, IClickStyleType, IAxisType, IReferenceStyleType, IDataLabelType, ITooltipLabelType, IAccessibilityType, IAnimationConfig, ILegendType } from '@visa/charts-types'; import { ClusteredBarChartDefaultValues } from './clustered-bar-chart-default-values'; import 'd3-transition'; import Utils from '@visa/visa-charts-utils'; import { v4 as uuid } from 'uuid'; const { verifyTextHasSpace, checkAttributeTransitions, createTextStrokeFilter, drawHoverStrokes, removeHoverStrokes, buildStrokes, convertColorsToTextures, initializeDescriptionRoot, initializeElementAccess, setElementFocusHandler, setElementAccessID, setAccessibilityController, hideNonessentialGroups, setAccessTitle, setAccessSubtitle, setAccessLongDescription, setAccessExecutiveSummary, setAccessPurpose, setAccessContext, setAccessStatistics, setAccessChartCounts, setAccessXAxis, setAccessYAxis, setAccessStructure, setAccessAnnotation, retainAccessFocus, checkAccessFocus, setElementInteractionAccessState, setAccessibilityDescriptionWidth, autoTextColor, annotate, chartAccessors, convertVisaColor, checkInteraction, checkClicked, checkHovered, drawAxis, drawGrid, drawLegend, setLegendInteractionState, drawTooltip, formatDataLabel, formatDate, getColors, getLicenses, getPadding, getScopedData, initTooltipStyle, overrideTitleTooltip, placeDataLabels, scopeDataKeys, transitionEndAll, visaColors, validateAccessibilityProps, findTagLevel, prepareRenderChange, roundTo, resolveLabelCollision } = Utils; @Component({ tag: 'clustered-bar-chart', styleUrl: 'clustered-bar-chart.scss' }) export class ClusteredBarChart { @Event() clickFunc: EventEmitter; @Event() hoverFunc: EventEmitter; @Event() 
mouseOutFunc: EventEmitter; // Chart Attributes (1/7) @Prop({ mutable: true }) mainTitle: string = ClusteredBarChartDefaultValues.mainTitle; @Prop({ mutable: true }) subTitle: string = ClusteredBarChartDefaultValues.subTitle; @Prop({ mutable: true }) height: number = ClusteredBarChartDefaultValues.height; @Prop({ mutable: true }) width: number = ClusteredBarChartDefaultValues.width; @Prop({ mutable: true }) layout: string = ClusteredBarChartDefaultValues.layout; @Prop({ mutable: true }) margin: IBoxModelType = ClusteredBarChartDefaultValues.margin; @Prop({ mutable: true }) padding: IBoxModelType = ClusteredBarChartDefaultValues.padding; @Prop({ mutable: true }) highestHeadingLevel: string | number = ClusteredBarChartDefaultValues.highestHeadingLevel; // Data (2/7) @Prop() data; @Prop() uniqueID; @Prop({ mutable: true }) ordinalAccessor: string = ClusteredBarChartDefaultValues.ordinalAccessor; @Prop({ mutable: true }) valueAccessor: string = ClusteredBarChartDefaultValues.valueAccessor; @Prop({ mutable: true }) groupAccessor: string = ClusteredBarChartDefaultValues.groupAccessor; @Prop({ mutable: true }) reverseOrder: boolean = ClusteredBarChartDefaultValues.reverseOrder; // Axis (3/7) @Prop({ mutable: true }) xAxis: IAxisType = ClusteredBarChartDefaultValues.xAxis; @Prop({ mutable: true }) yAxis: IAxisType = ClusteredBarChartDefaultValues.yAxis; @Prop({ mutable: true }) wrapLabel: boolean = ClusteredBarChartDefaultValues.wrapLabel; // Color & Shape (4/7) @Prop({ mutable: true }) colorPalette: string = ClusteredBarChartDefaultValues.colorPalette; @Prop({ mutable: true }) colors: string[]; @Prop({ mutable: true }) hoverStyle: IHoverStyleType = ClusteredBarChartDefaultValues.hoverStyle; @Prop({ mutable: true }) clickStyle: IClickStyleType = ClusteredBarChartDefaultValues.clickStyle; @Prop({ mutable: true }) referenceStyle: IReferenceStyleType = ClusteredBarChartDefaultValues.referenceStyle; @Prop({ mutable: true }) cursor: string = 
ClusteredBarChartDefaultValues.cursor; @Prop({ mutable: true }) roundedCorner: number = ClusteredBarChartDefaultValues.roundedCorner; @Prop({ mutable: true }) barIntervalRatio: number = ClusteredBarChartDefaultValues.barIntervalRatio; @Prop({ mutable: true }) groupIntervalRatio: number = ClusteredBarChartDefaultValues.groupIntervalRatio; @Prop({ mutable: true }) hoverOpacity: number = ClusteredBarChartDefaultValues.hoverOpacity; @Prop({ mutable: true }) animationConfig: IAnimationConfig = ClusteredBarChartDefaultValues.animationConfig; // Data label (5/7) @Prop({ mutable: true }) dataLabel: IDataLabelType = ClusteredBarChartDefaultValues.dataLabel; @Prop({ mutable: true }) showTooltip: boolean = ClusteredBarChartDefaultValues.showTooltip; @Prop({ mutable: true }) tooltipLabel: ITooltipLabelType = ClusteredBarChartDefaultValues.tooltipLabel; @Prop({ mutable: true }) accessibility: IAccessibilityType = ClusteredBarChartDefaultValues.accessibility; @Prop({ mutable: true }) legend: ILegendType = ClusteredBarChartDefaultValues.legend; @Prop({ mutable: true }) annotations: object[] = ClusteredBarChartDefaultValues.annotations; // Calculation (6/7) @Prop({ mutable: true }) minValueOverride: number; @Prop({ mutable: true }) maxValueOverride: number; @Prop({ mutable: true }) referenceLines: object[] = ClusteredBarChartDefaultValues.referenceLines; // Interactivity (7/7) @Prop({ mutable: true }) suppressEvents: boolean = ClusteredBarChartDefaultValues.suppressEvents; @Prop({ mutable: true }) hoverHighlight: object; @Prop({ mutable: true }) clickHighlight: object[] = ClusteredBarChartDefaultValues.clickHighlight; @Prop({ mutable: true }) interactionKeys: string[]; // Testing & Debug (8/8) @Prop() unitTest: boolean = false; // @Prop() debugMode: boolean = false; // Element @Element() clusteredBarChartEl: HTMLElement; shouldValidateAccessibility: boolean = true; svg: any; root: any; rootG: any; barG: any; gridG: any; bars: any; labelG: any; labels: any; references: any; 
defaults: boolean; current: any; enter: any; exit: any; update: any; enterBarWrappers: any; updateBarWrappers: any; exitBarWrappers: any; // vertical layout x0: any; x1: any; y: any; // horizontal layout y0: any; y1: any; x: any; innerHeight: number; innerWidth: number; innerPaddedHeight: number; innerPaddedWidth: number; innerXAxis: any; innerYAxis: any; nest: any = []; datakeys: any = []; legendData: any = []; colorArr: any; preparedColors: any; duration: number; legendG: any; tooltipG: any; updated: boolean = true; enterSize: number; exitSize: number; enterLabels: any; exitLabels: any; updateLabels: any; enterLabelWrappers: any; updateLabelWrappers: any; exitLabelWrappers: any; tableData: any; tableColumns: any; chartID: string; innerInteractionKeys: any; innerLabelAccessor: string; shouldValidate: boolean = false; shouldUpdateData: boolean = false; shouldSetDimensions: boolean = false; shouldUpdateScales: boolean = false; shouldValidateAxes: boolean = false; shouldUpdateAnnotations: boolean = false; shouldResetRoot: boolean = false; shouldUpdateTableData: boolean = false; shouldSetColors: boolean = false; shouldValidateLabelPlacement: boolean = false; shouldValidateDataLabelAccessor: boolean = false; shouldValidateInteractionKeys: boolean = false; shouldSetSelectionClass: boolean = false; shouldUpdateXAxis: boolean = false; shouldUpdateYAxis: boolean = false; shouldUpdateXGrid: boolean = false; shouldUpdateYGrid: boolean = false; shouldUpdateBaseline: boolean = false; shouldCheckValueAxis: boolean = false; shouldCheckLabelAxis: boolean = false; shouldSetGlobalSelections: boolean = false; shouldEnterUpdateExit: boolean = false; shouldUpdateGeometries: boolean = false; shouldUpdateCorners: boolean = false; shouldUpdateLegendData: boolean = false; shouldUpdateLegend: boolean = false; shouldUpdateLegendInteractivity: boolean = false; shouldSetLegendCursor: boolean = false; shouldUpdateReferenceLines: boolean = false; shouldUpdateCursor: boolean = false; 
shouldDrawInteractionState: boolean = false; shouldSetLabelOpacity: boolean = false; shouldSetLabelPosition: boolean = false; shouldSetLabelContent: boolean = false; shouldCheckLabelColor: boolean = false; shouldBindInteractivity: boolean = false; shouldUpdateDescriptionWrapper: boolean = false; shouldSetChartAccessibilityTitle: boolean = false; shouldSetChartAccessibilitySubtitle: boolean = false; shouldSetChartAccessibilityLongDescription: boolean = false; shouldSetChartAccessibilityExecutiveSummary: boolean = false; shouldSetChartAccessibilityStatisticalNotes: boolean = false; shouldSetChartAccessibilityStructureNotes: boolean = false; shouldSetParentSVGAccessibility: boolean = false; shouldSetGeometryAccessibilityAttributes: boolean = false; shouldSetGeometryAriaLabels: boolean = false; shouldSetGroupAccessibilityLabel: boolean = false; shouldSetChartAccessibilityPurpose: boolean = false; shouldSetChartAccessibilityContext: boolean = false; shouldSetTestingAttributes: boolean = false; shouldRedrawWrapper: boolean = false; shouldSetTagLevels: boolean = false; shouldSetChartAccessibilityCount: boolean = false; shouldSetYAxisAccessibility: boolean = false; shouldSetXAxisAccessibility: boolean = false; shouldSetAnnotationAccessibility: boolean = false; shouldSetTextures: boolean = false; shouldSetStrokes: boolean = false; strokes: any = {}; topLevel: string = 'h2'; bottomLevel: string = 'p'; bitmaps: any; @Watch('data') dataWatcher(_newData, _oldData) { this.updated = true; this.shouldUpdateData = true; this.shouldSetColors = true; this.shouldSetTextures = true; this.shouldUpdateTableData = true; this.shouldSetGlobalSelections = true; this.shouldSetTestingAttributes = true; this.shouldEnterUpdateExit = true; this.shouldSetLabelContent = true; this.shouldSetLabelPosition = true; // this.shouldDrawInteractionState = true; // called from updateGeometries // this.shouldCheckLabelColor = true; // called from updateGeometries this.shouldSetGeometryAccessibilityAttributes 
= true; this.shouldSetGeometryAriaLabels = true; this.shouldUpdateLegendData = true; this.shouldUpdateScales = true; this.shouldValidate = true; this.shouldUpdateGeometries = true; this.shouldUpdateXAxis = true; this.shouldSetXAxisAccessibility = true; this.shouldUpdateYAxis = true; this.shouldSetYAxisAccessibility = true; this.shouldUpdateXGrid = true; this.shouldUpdateYGrid = true; this.shouldUpdateLegend = true; this.shouldUpdateReferenceLines = true; this.shouldUpdateBaseline = true; this.shouldUpdateAnnotations = true; this.shouldSetStrokes = true; } @Watch('uniqueID') idWatcher(newID, _oldID) { this.chartID = newID || 'clustered-bar-chart-' + uuid(); this.clusteredBarChartEl.id = this.chartID; this.shouldValidate = true; this.shouldUpdateDescriptionWrapper = true; this.shouldSetParentSVGAccessibility = true; this.shouldDrawInteractionState = true; this.shouldUpdateLegend = true; this.shouldSetStrokes = true; } @Watch('highestHeadingLevel') headingWatcher(_newVal, _oldVal) { this.shouldRedrawWrapper = true; this.shouldSetTagLevels = true; this.shouldSetChartAccessibilityCount = true; this.shouldSetYAxisAccessibility = true; this.shouldSetXAxisAccessibility = true; this.shouldSetAnnotationAccessibility = true; this.shouldUpdateDescriptionWrapper = true; this.shouldSetChartAccessibilityTitle = true; this.shouldSetChartAccessibilitySubtitle = true; this.shouldSetChartAccessibilityLongDescription = true; this.shouldSetChartAccessibilityContext = true; this.shouldSetChartAccessibilityExecutiveSummary = true; this.shouldSetChartAccessibilityPurpose = true; this.shouldSetChartAccessibilityStatisticalNotes = true; this.shouldSetChartAccessibilityStructureNotes = true; } @Watch('mainTitle') titleWatcher(_newVal, _oldVal) { this.shouldValidate = true; this.shouldUpdateDescriptionWrapper = true; this.shouldSetChartAccessibilityTitle = true; this.shouldSetParentSVGAccessibility = true; } @Watch('subTitle') subtitleWatcher(_newVal, _oldVal) { 
this.shouldSetChartAccessibilitySubtitle = true; this.shouldSetParentSVGAccessibility = true; } @Watch('height') @Watch('width') @Watch('padding') @Watch('margin') dimensionWatcher(_newVal, _oldVal) { this.shouldSetDimensions = true; this.shouldUpdateScales = true; this.shouldResetRoot = true; this.shouldUpdateGeometries = true; this.shouldUpdateXAxis = true; this.shouldUpdateYAxis = true; this.shouldUpdateXGrid = true; this.shouldUpdateYGrid = true; this.shouldSetLabelPosition = true; this.shouldCheckLabelColor = true; this.shouldUpdateLegend = true; this.shouldUpdateReferenceLines = true; this.shouldUpdateBaseline = true; this.shouldUpdateAnnotations = true; } @Watch('layout') layoutWatcher(_newVal, _oldVal) { this.shouldValidateLabelPlacement = true; this.shouldUpdateScales = true; this.shouldValidateAxes = true; this.shouldResetRoot = true; this.shouldUpdateGeometries = true; this.shouldUpdateXAxis = true; this.shouldUpdateYAxis = true; this.shouldUpdateXGrid = true; this.shouldUpdateYGrid = true; this.shouldSetLabelPosition = true; this.shouldCheckLabelColor = true; this.shouldUpdateLegend = true; this.shouldUpdateReferenceLines = true; this.shouldUpdateBaseline = true; this.shouldUpdateAnnotations = true; this.shouldSetGeometryAccessibilityAttributes = true; this.shouldSetTestingAttributes = true; } @Watch('ordinalAccessor') ordinalAccessorWatcher(_newVal, _oldVal) { this.shouldUpdateTableData = true; this.shouldUpdateLegendData = true; this.shouldUpdateScales = true; this.shouldSetColors = true; this.shouldUpdateGeometries = true; this.shouldDrawInteractionState = true; this.shouldCheckLabelColor = true; this.shouldUpdateLegend = true; this.shouldUpdateReferenceLines = true; this.shouldUpdateAnnotations = true; this.shouldSetGeometryAriaLabels = true; this.shouldSetStrokes = true; this.shouldSetTextures = true; if (!(this.interactionKeys && this.interactionKeys.length)) { this.shouldValidateInteractionKeys = true; this.shouldSetSelectionClass = true; } } 
// Value accessor change: re-derive data, scales, colors, geometry, labels and
// the value axis; textures/strokes follow the new color mapping.
@Watch('valueAccessor')
valueAccessorWatcher(_newVal, _oldVal) {
  this.shouldUpdateData = true;
  this.shouldUpdateTableData = true;
  this.shouldUpdateScales = true;
  this.shouldSetColors = true;
  this.shouldUpdateGeometries = true;
  this.shouldDrawInteractionState = true;
  this.shouldCheckValueAxis = true;
  this.shouldValidateDataLabelAccessor = true;
  this.shouldSetLabelContent = true;
  this.shouldSetLabelPosition = true;
  this.shouldCheckLabelColor = true;
  this.shouldUpdateReferenceLines = true;
  this.shouldUpdateAnnotations = true;
  this.shouldSetGeometryAriaLabels = true;
  this.shouldSetStrokes = true;
  this.shouldSetTextures = true;
}

// Group accessor change: re-nest the data (full enter/update/exit cycle) and
// refresh group-level accessibility; revalidate interaction keys if defaulted.
@Watch('groupAccessor')
groupAccessorWatcher(_newVal, _oldVal) {
  this.shouldUpdateData = true;
  this.shouldSetGlobalSelections = true;
  this.shouldEnterUpdateExit = true;
  this.shouldUpdateTableData = true;
  this.shouldUpdateScales = true;
  this.shouldUpdateGeometries = true;
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
  this.shouldCheckLabelAxis = true;
  this.shouldUpdateBaseline = true;
  this.shouldUpdateReferenceLines = true;
  this.shouldUpdateAnnotations = true;
  this.shouldSetGeometryAriaLabels = true;
  this.shouldSetGroupAccessibilityLabel = true;
  this.shouldSetStrokes = true;
  this.shouldSetTextures = true;
  if (!(this.interactionKeys && this.interactionKeys.length)) {
    this.shouldValidateInteractionKeys = true;
    this.shouldSetSelectionClass = true;
  }
}

// Reverse order toggle: redraw scales/geometry/labels; only the ordinal axis
// for the current layout needs revalidation (x for vertical, y for horizontal).
@Watch('reverseOrder')
reverseOrderWatcher(_newVal, _oldVal) {
  this.shouldUpdateTableData = true;
  this.shouldUpdateScales = true;
  this.shouldUpdateGeometries = true;
  this.shouldSetLabelPosition = true;
  this.shouldCheckLabelColor = true;
  this.shouldUpdateAnnotations = true;
  this.shouldSetGeometryAccessibilityAttributes = true;
  this.shouldSetGeometryAriaLabels = true;
  if (this.layout === 'vertical') {
    this.shouldValidateAxes = true;
    this.shouldUpdateXAxis = true;
    this.shouldSetXAxisAccessibility = true;
  } else if (this.layout === 'horizontal') {
    this.shouldValidateAxes = true;
    this.shouldUpdateYAxis = true;
    this.shouldSetYAxisAccessibility = true;
  }
}

// xAxis config change: always redraw the axis; only redraw the x grid when
// gridVisible or tickInterval actually changed.
@Watch('xAxis')
xAxisWatcher(_newVal, _oldVal) {
  this.shouldValidateAxes = true;
  this.shouldUpdateXAxis = true;
  this.shouldSetXAxisAccessibility = true;
  const newGridVal = _newVal && _newVal.gridVisible;
  const oldGridVal = _oldVal && _oldVal.gridVisible;
  const newTickInterval = _newVal && _newVal.tickInterval ? _newVal.tickInterval : 0;
  const oldTickInterval = _oldVal && _oldVal.tickInterval ? _oldVal.tickInterval : 0;
  if (newGridVal !== oldGridVal || newTickInterval !== oldTickInterval) {
    this.shouldUpdateXGrid = true;
  }
}

// yAxis config change: mirror of xAxisWatcher for the y axis/grid.
@Watch('yAxis')
yAxisWatcher(_newVal, _oldVal) {
  this.shouldValidateAxes = true;
  this.shouldUpdateYAxis = true;
  this.shouldSetYAxisAccessibility = true;
  const newGridVal = _newVal && _newVal.gridVisible;
  const oldGridVal = _oldVal && _oldVal.gridVisible;
  const newTickInterval = _newVal && _newVal.tickInterval ? _newVal.tickInterval : 0;
  const oldTickInterval = _oldVal && _oldVal.tickInterval ? _oldVal.tickInterval : 0;
  if (newGridVal !== oldGridVal || newTickInterval !== oldTickInterval) {
    this.shouldUpdateYGrid = true;
  }
}

// Label wrapping toggle: both axes re-render their tick labels.
@Watch('wrapLabel')
wrapLabelWatcher(_newVal, _oldVal) {
  this.shouldUpdateXAxis = true;
  this.shouldUpdateYAxis = true;
}

// Color inputs (explicit colors array or palette name): rebuild color scale,
// textures, strokes, legend and interaction styling.
@Watch('colors')
@Watch('colorPalette')
colorsWatcher(_newVal, _oldVal) {
  this.shouldSetColors = true;
  this.shouldDrawInteractionState = true;
  this.shouldUpdateLegend = true;
  this.shouldCheckLabelColor = true;
  this.shouldSetStrokes = true;
  this.shouldSetTextures = true;
}

// Hover style change: redraw interaction state and strokes.
@Watch('hoverStyle')
hoverStyleWatcher(_newVal, _oldVal) {
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
  this.shouldSetStrokes = true;
}

// Click style change: same treatment as hover style.
@Watch('clickStyle')
clickStyleWatcher(_newVal, _oldVal) {
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
  this.shouldSetStrokes = true;
}

// Reference line data or styling change: redraw reference lines only.
@Watch('referenceLines')
@Watch('referenceStyle')
referenceWatcher(_newVal, _oldVal) {
  this.shouldUpdateReferenceLines = true;
}

// Cursor prop change: update cursor on both geometries and legend.
@Watch('cursor')
cursorWatcher(_newVal, _oldVal) {
  this.shouldUpdateCursor = true;
  this.shouldSetLegendCursor = true;
}

// Rounded corner radius change: restyle existing bars.
@Watch('roundedCorner')
cornerWatcher(_newVal, _oldVal) {
  this.shouldUpdateCorners = true;
}

// Bar spacing ratio change: scales/geometry/labels shift position.
@Watch('barIntervalRatio')
intervalRatioWatcher(_newVal, _oldVal) {
  this.shouldUpdateScales = true;
  this.shouldUpdateGeometries = true;
  this.shouldCheckLabelAxis = true;
  this.shouldSetLabelPosition = true;
  this.shouldCheckLabelColor = true;
  this.shouldUpdateAnnotations = true;
}

// Group (cluster) spacing ratio change: same pipeline minus the label-axis check.
@Watch('groupIntervalRatio')
groupIntervalRatioWatcher(_newVal, _oldVal) {
  this.shouldUpdateScales = true;
  this.shouldUpdateGeometries = true;
  this.shouldSetLabelPosition = true;
  this.shouldCheckLabelColor = true;
  this.shouldUpdateAnnotations = true;
}

// Hover opacity change: restyle interaction state only.
@Watch('hoverOpacity')
hoverOpacityWatcher(_newVal, _oldVal) {
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
}

// dataLabel config change: diff individual sub-props so only the cheapest
// sufficient update runs (opacity flip vs. reposition vs. content rebuild).
@Watch('dataLabel')
labelWatcher(_newVal, _oldVal) {
  const newPlacementVal = _newVal && _newVal.placement ? _newVal.placement : false;
  const oldPlacementVal = _oldVal && _oldVal.placement ? _oldVal.placement : false;
  const newCollisionPlacementVal = _newVal && _newVal.collisionPlacement ? _newVal.collisionPlacement : false;
  const oldCollisionPlacementVal = _oldVal && _oldVal.collisionPlacement ? _oldVal.collisionPlacement : false;
  const newVisibleVal = _newVal && _newVal.visible;
  const oldVisibleVal = _oldVal && _oldVal.visible;
  const newAccessor = _newVal && _newVal.labelAccessor ? _newVal.labelAccessor : false;
  const oldAccessor = _oldVal && _oldVal.labelAccessor ? _oldVal.labelAccessor : false;
  const newFormatVal = _newVal && _newVal.format ? _newVal.format : false;
  const oldFormatVal = _oldVal && _oldVal.format ? _oldVal.format : false;
  const newCollisionHideOnlyVal = _newVal && _newVal.collisionHideOnly ? _newVal.collisionHideOnly : false;
  const oldCollisionHideOnlyVal = _oldVal && _oldVal.collisionHideOnly ? _oldVal.collisionHideOnly : false;
  // if only visible changes we just flip opacity, but don't redraw
  if (newVisibleVal !== oldVisibleVal) {
    this.shouldSetLabelOpacity = true;
  }
  // any placement related stuff and we do a full redraw, no opacity
  if (
    newPlacementVal !== oldPlacementVal ||
    newCollisionPlacementVal !== oldCollisionPlacementVal ||
    newCollisionHideOnlyVal !== oldCollisionHideOnlyVal
  ) {
    this.shouldValidateLabelPlacement = true;
    this.shouldSetLabelPosition = true;
    this.shouldCheckLabelColor = true;
  }
  // text/format required redraw/table but no opacity
  if (newAccessor !== oldAccessor || newFormatVal !== oldFormatVal) {
    this.shouldValidateDataLabelAccessor = true;
    this.shouldUpdateTableData = true;
    this.shouldSetLabelContent = true;
    this.shouldCheckLabelColor = true;
  }
}

// Tooltip label config change: only the data table derivation is affected.
@Watch('tooltipLabel')
tooltipLabelWatcher(_newVal, _oldVal) {
  this.shouldUpdateTableData = true;
}

// accessibility config change: diff each sub-prop and flag only the matching
// accessibility section(s); keyboard-nav/interface changes trigger a full
// description-wrapper rebuild.
@Watch('accessibility')
accessibilityWatcher(_newVal, _oldVal) {
  this.shouldValidate = true;
  const newTitle = _newVal && _newVal.title ? _newVal.title : false;
  const oldTitle = _oldVal && _oldVal.title ? _oldVal.title : false;
  if (newTitle !== oldTitle) {
    this.shouldUpdateDescriptionWrapper = true;
    this.shouldSetChartAccessibilityTitle = true;
    this.shouldSetParentSVGAccessibility = true;
  }
  const newExecutiveSummary = _newVal && _newVal.executiveSummary ? _newVal.executiveSummary : false;
  const oldExecutiveSummary = _oldVal && _oldVal.executiveSummary ? _oldVal.executiveSummary : false;
  if (newExecutiveSummary !== oldExecutiveSummary) {
    this.shouldSetChartAccessibilityExecutiveSummary = true;
  }
  const newPurpose = _newVal && _newVal.purpose ? _newVal.purpose : false;
  const oldPurpose = _oldVal && _oldVal.purpose ? _oldVal.purpose : false;
  if (newPurpose !== oldPurpose) {
    this.shouldSetChartAccessibilityPurpose = true;
  }
  const newLongDescription = _newVal && _newVal.longDescription ? _newVal.longDescription : false;
  const oldLongDescription = _oldVal && _oldVal.longDescription ? _oldVal.longDescription : false;
  if (newLongDescription !== oldLongDescription) {
    this.shouldSetChartAccessibilityLongDescription = true;
  }
  const newContext = _newVal && _newVal.contextExplanation ? _newVal.contextExplanation : false;
  const oldContext = _oldVal && _oldVal.contextExplanation ? _oldVal.contextExplanation : false;
  if (newContext !== oldContext) {
    this.shouldSetChartAccessibilityContext = true;
  }
  const newStatisticalNotes = _newVal && _newVal.statisticalNotes ? _newVal.statisticalNotes : false;
  const oldStatisticalNotes = _oldVal && _oldVal.statisticalNotes ? _oldVal.statisticalNotes : false;
  if (newStatisticalNotes !== oldStatisticalNotes) {
    this.shouldSetChartAccessibilityStatisticalNotes = true;
  }
  const newStructureNotes = _newVal && _newVal.structureNotes ? _newVal.structureNotes : false;
  const oldStructureNotes = _oldVal && _oldVal.structureNotes ? _oldVal.structureNotes : false;
  if (newStructureNotes !== oldStructureNotes) {
    this.shouldSetChartAccessibilityStructureNotes = true;
  }
  const newincludeDataKeyNames = _newVal && _newVal.includeDataKeyNames;
  const oldincludeDataKeyNames = _oldVal && _oldVal.includeDataKeyNames;
  const newElementDescriptionAccessor = _newVal && _newVal.elementDescriptionAccessor ? _newVal.elementDescriptionAccessor : false;
  const oldElementDescriptionAccessor = _oldVal && _oldVal.elementDescriptionAccessor ? _oldVal.elementDescriptionAccessor : false;
  if (
    newincludeDataKeyNames !== oldincludeDataKeyNames ||
    newElementDescriptionAccessor !== oldElementDescriptionAccessor
  ) {
    if (newincludeDataKeyNames !== oldincludeDataKeyNames) {
      // this one is tricky because it needs to run after the lifecycle
      // AND it could run in the off-chance this prop is changed
      this.shouldSetGroupAccessibilityLabel = true;
    }
    this.shouldSetGeometryAriaLabels = true;
    this.shouldSetParentSVGAccessibility = true;
  }
  const newTextures = _newVal && _newVal.hideTextures ? _newVal.hideTextures : false;
  const oldTextures = _oldVal && _oldVal.hideTextures ? _oldVal.hideTextures : false;
  if (newTextures !== oldTextures) {
    this.shouldSetTextures = true;
    this.shouldUpdateLegend = true;
    this.shouldDrawInteractionState = true;
  }
  const newSmallValue = _newVal && _newVal.showSmallLabels ? _newVal.showSmallLabels : false;
  const oldSmallValue = _oldVal && _oldVal.showSmallLabels ? _oldVal.showSmallLabels : false;
  if (newSmallValue !== oldSmallValue) {
    this.shouldSetLabelOpacity = true;
  }
  const newStrokes = _newVal && _newVal.hideStrokes ? _newVal.hideStrokes : false;
  const oldStrokes = _oldVal && _oldVal.hideStrokes ? _oldVal.hideStrokes : false;
  if (newStrokes !== oldStrokes) {
    this.shouldUpdateLegend = true;
    this.shouldSetStrokes = true;
    this.shouldDrawInteractionState = true;
  }
  const newKeyNav = _newVal && _newVal.keyboardNavConfig && _newVal.keyboardNavConfig.disabled ? _newVal.keyboardNavConfig.disabled : false;
  const oldKeyNav = _oldVal && _oldVal.keyboardNavConfig && _oldVal.keyboardNavConfig.disabled ? _oldVal.keyboardNavConfig.disabled : false;
  const newInterface = _newVal && _newVal.elementsAreInterface ? _newVal.elementsAreInterface : false;
  const oldInterface = _oldVal && _oldVal.elementsAreInterface ? _oldVal.elementsAreInterface : false;
  if (newKeyNav !== oldKeyNav || newInterface !== oldInterface) {
    this.shouldSetGeometryAriaLabels = true;
    this.shouldSetParentSVGAccessibility = true;
    this.shouldUpdateDescriptionWrapper = true;
    this.shouldRedrawWrapper = true;
    this.shouldSetChartAccessibilityTitle = true;
    this.shouldSetChartAccessibilitySubtitle = true;
    this.shouldSetChartAccessibilityLongDescription = true;
    this.shouldSetChartAccessibilityContext = true;
    this.shouldSetChartAccessibilityExecutiveSummary = true;
    this.shouldSetChartAccessibilityPurpose = true;
    this.shouldSetChartAccessibilityStatisticalNotes = true;
    this.shouldSetChartAccessibilityStructureNotes = true;
  }
  if (newInterface !== oldInterface) {
    this.shouldDrawInteractionState = true;
  }
}

// Legend config change: always redraw the legend; rebind legend cursor and
// interactivity only when the interactive flag flipped.
@Watch('legend')
legendWatcher(_newVal, _oldVal) {
  this.shouldUpdateLegend = true;
  const newInteractiveVal = _newVal && _newVal.interactive;
  const oldInteractiveVal = _oldVal && _oldVal.interactive;
  if (newInteractiveVal !== oldInteractiveVal) {
    this.shouldSetLegendCursor = true;
    this.shouldUpdateLegendInteractivity = true;
  }
}

// Annotations change: revalidate, redraw annotations, refresh their a11y.
@Watch('annotations')
annotationsWatcher(_newVal, _oldVal) {
  this.shouldValidate = true;
  this.shouldUpdateAnnotations = true;
  this.shouldSetAnnotationAccessibility = true;
}

// Min/max value override change: rebuild the value scale and everything that
// depends on it (geometry, labels, reference lines, baseline, annotations).
@Watch('maxValueOverride')
@Watch('minValueOverride')
valueOverrideWatcher(_newVal, _oldVal) {
  this.shouldUpdateScales = true;
  this.shouldCheckValueAxis = true;
  this.shouldUpdateGeometries = true;
  this.shouldSetLabelPosition = true;
  this.shouldCheckLabelColor = true;
  this.shouldUpdateReferenceLines = true;
  this.shouldUpdateBaseline = true;
  this.shouldUpdateAnnotations = true;
}
// Click highlight change: restyle interaction state and the selection class.
@Watch('clickHighlight')
clickWatcher(_newVal, _oldVal) {
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
  this.shouldSetSelectionClass = true;
}

// Hover highlight change: restyle interaction state only.
@Watch('hoverHighlight')
hoverWatcher(_newVal, _oldVal) {
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
}

// Interaction keys change: revalidate keys and refresh selection styling,
// table data and geometry aria labels.
@Watch('interactionKeys')
interactionWatcher(_newVal, _oldVal) {
  this.shouldValidateInteractionKeys = true;
  this.shouldDrawInteractionState = true;
  this.shouldCheckLabelColor = true;
  this.shouldSetSelectionClass = true;
  this.shouldUpdateTableData = true;
  this.shouldSetGeometryAriaLabels = true;
}

// suppressEvents toggle: rebind all interactivity and rebuild the full
// accessibility description structure.
@Watch('suppressEvents')
suppressWatcher(_newVal, _oldVal) {
  this.shouldBindInteractivity = true;
  this.shouldUpdateCursor = true;
  this.shouldSetLegendCursor = true;
  this.shouldUpdateLegendInteractivity = true;
  this.shouldSetGeometryAriaLabels = true;
  this.shouldSetParentSVGAccessibility = true;
  this.shouldUpdateDescriptionWrapper = true;
  this.shouldRedrawWrapper = true;
  this.shouldValidate = true;
  this.shouldSetChartAccessibilityTitle = true;
  this.shouldSetChartAccessibilitySubtitle = true;
  this.shouldSetChartAccessibilityLongDescription = true;
  this.shouldSetChartAccessibilityContext = true;
  this.shouldSetChartAccessibilityExecutiveSummary = true;
  this.shouldSetChartAccessibilityPurpose = true;
  this.shouldSetChartAccessibilityStatisticalNotes = true;
  this.shouldSetChartAccessibilityStructureNotes = true;
}

// unitTest toggle: add/remove data-testid attributes.
@Watch('unitTest')
unitTestWatcher(_newVal, _oldVal) {
  this.shouldSetTestingAttributes = true;
}

// First-render preparation: derive ids, data, scales and validation state
// before the initial render.
componentWillLoad() {
  // contrary to componentWillUpdate, this method appears safe to use for
  // any calculations we need. Keeping them here reduces future refactor,
  // since componentWillUpdate should eventually mirror this method
  return new Promise(resolve => {
    this.duration = 0;
    this.defaults = true;
    this.chartID = this.uniqueID || 'clustered-bar-chart-' + uuid();
    this.clusteredBarChartEl.id = this.chartID;
    this.setTagLevels();
    this.prepareData();
    this.prepareLegendData();
    this.setDimensions();
    this.prepareScales();
    this.validateInteractionKeys();
    this.validateDataLabelAccessor();
    this.validateAxes();
    this.validateLabelPlacement();
    this.setTableData();
    this.shouldValidateAccessibilityProps();
    this.setColors();
    resolve('component will load');
  });
}

// Intentionally empty pre-update hook (see linked Stencil issue).
componentWillUpdate() {
  // NEVER put items in this method that rely on props (until stencil bug is resolved)
  // All items that belong here are currently at the top of render
  // see: https://github.com/ionic-team/stencil/issues/2061#issuecomment-578282178
  return new Promise(resolve => {
    resolve('component will update');
  });
}

// Initial DOM build: runs the full draw pipeline once after first render
// (accessibility scaffolding, root SVG, grids, geometries, labels, legend,
// axes, baseline), then hides non-essential groups from assistive tech.
componentDidLoad() {
  return new Promise(resolve => {
    this.shouldValidateAccessibilityProps();
    this.renderRootElements();
    this.setTooltipInitialStyle();
    this.setChartDescriptionWrapper();
    this.setChartAccessibilityTitle();
    this.setChartAccessibilitySubtitle();
    this.setChartAccessibilityLongDescription();
    this.setChartAccessibilityExecutiveSummary();
    this.setChartAccessibilityPurpose();
    this.setChartAccessibilityContext();
    this.setChartAccessibilityStatisticalNotes();
    this.setChartAccessibilityStructureNotes();
    this.setParentSVGAccessibility();
    this.reSetRoot();
    this.setTextures();
    this.setStrokes();
    this.drawXGrid();
    this.drawYGrid();
    this.setGlobalSelections();
    this.setTestingAttributes();
    this.enterGeometries();
    this.updateGeometries();
    this.exitGeometries();
    this.enterDataLabels();
    this.updateDataLabels();
    this.exitDataLabels();
    this.drawGeometries();
    this.setChartCountAccessibility();
    this.setGeometryAccessibilityAttributes();
    this.setGeometryAriaLabels();
    this.drawLegendElements();
    this.bindLegendInteractivity();
    this.setLabelContent();
    this.processLabelPosition(this.updateLabels, false, true, false);
    this.drawReferenceLines();
    this.setSelectedClass();
    this.checkLabelColorAgainstBackground();
    this.updateCursor();
    this.bindInteractivity();
    this.drawAnnotations();
    this.setAnnotationAccessibility();
    this.drawXAxis();
    this.setXAxisAccessibility();
    this.drawYAxis();
    this.setYAxisAccessibility();
    this.drawBaseline();
    this.onChangeHandler();
    // we want to hide all child <g> of this.root BUT we want to make sure not to hide the
    // parent<g> that contains our geometries! In a subGroup chart (like stacked bars),
    // we want to pass the PARENT of all the <g>s that contain bars
    hideNonessentialGroups(this.root.node(), this.barG.node());
    this.setGroupAccessibilityID();
    this.defaults = false;
    // catch all to remove entering class from labels once we have loaded component
    this.updateLabels.classed('entering', false);
    resolve('component did load');
  });
}

// Flag-driven update: drains every should* flag set by the watchers above, in
// a fixed order (accessibility text -> root -> textures/strokes -> selections
// -> grids -> geometry -> labels -> interactivity -> axes -> baseline).
componentDidUpdate() {
  return new Promise(resolve => {
    this.duration = !this.animationConfig || !this.animationConfig.disabled ? 750 : 0;
    if (this.shouldUpdateDescriptionWrapper) {
      this.setChartDescriptionWrapper();
      this.shouldUpdateDescriptionWrapper = false;
    }
    if (this.shouldSetChartAccessibilityCount) {
      this.setChartCountAccessibility();
      this.shouldSetChartAccessibilityCount = false;
    }
    if (this.shouldSetChartAccessibilityTitle) {
      this.setChartAccessibilityTitle();
      this.shouldSetChartAccessibilityTitle = false;
    }
    if (this.shouldSetChartAccessibilitySubtitle) {
      this.setChartAccessibilitySubtitle();
      this.shouldSetChartAccessibilitySubtitle = false;
    }
    if (this.shouldSetChartAccessibilityLongDescription) {
      this.setChartAccessibilityLongDescription();
      this.shouldSetChartAccessibilityLongDescription = false;
    }
    if (this.shouldSetChartAccessibilityExecutiveSummary) {
      this.setChartAccessibilityExecutiveSummary();
      this.shouldSetChartAccessibilityExecutiveSummary = false;
    }
    if (this.shouldSetChartAccessibilityPurpose) {
      this.setChartAccessibilityPurpose();
      this.shouldSetChartAccessibilityPurpose = false;
    }
    if (this.shouldSetChartAccessibilityContext) {
      this.setChartAccessibilityContext();
      this.shouldSetChartAccessibilityContext = false;
    }
    if (this.shouldSetChartAccessibilityStatisticalNotes) {
      this.setChartAccessibilityStatisticalNotes();
      this.shouldSetChartAccessibilityStatisticalNotes = false;
    }
    if (this.shouldSetChartAccessibilityStructureNotes) {
      this.setChartAccessibilityStructureNotes();
      this.shouldSetChartAccessibilityStructureNotes = false;
    }
    if (this.shouldSetParentSVGAccessibility) {
      this.setParentSVGAccessibility();
      this.shouldSetParentSVGAccessibility = false;
    }
    if (this.shouldResetRoot) {
      this.reSetRoot();
      this.shouldResetRoot = false;
    }
    if (this.shouldSetTextures) {
      this.setTextures();
      this.shouldSetTextures = false;
    }
    if (this.shouldSetStrokes) {
      this.setStrokes();
      this.shouldSetStrokes = false;
    }
    if (this.shouldSetGlobalSelections) {
      this.setGlobalSelections();
      this.shouldSetGlobalSelections = false;
    }
    if (this.shouldSetTestingAttributes) {
      this.setTestingAttributes();
      this.shouldSetTestingAttributes = false;
    }
    if (this.shouldUpdateXGrid) {
      this.drawXGrid();
      this.shouldUpdateXGrid = false;
    }
    if (this.shouldUpdateYGrid) {
      this.drawYGrid();
      this.shouldUpdateYGrid = false;
    }
    if (this.shouldEnterUpdateExit) {
      this.enterGeometries();
      this.updateGeometries();
      this.exitGeometries();
      this.enterDataLabels();
      this.updateDataLabels();
      this.exitDataLabels();
      this.shouldEnterUpdateExit = false;
    }
    if (this.shouldUpdateGeometries) {
      this.drawGeometries();
      this.shouldUpdateGeometries = false;
    }
    if (this.shouldSetGeometryAccessibilityAttributes) {
      this.setGeometryAccessibilityAttributes();
      this.shouldSetGeometryAccessibilityAttributes = false;
    }
    if (this.shouldSetGeometryAriaLabels) {
      this.setGeometryAriaLabels();
      this.shouldSetGeometryAriaLabels = false;
    }
    if (this.shouldSetGroupAccessibilityLabel) {
      this.setGroupAccessibilityID();
      this.shouldSetGroupAccessibilityLabel = false;
    }
    if (this.shouldUpdateCorners) {
      this.setRoundedCorners();
      this.shouldUpdateCorners = false;
    }
    if (this.shouldUpdateLegend) {
      this.drawLegendElements();
      this.shouldUpdateLegend = false;
    }
    if (this.shouldSetLabelContent) {
      this.setLabelContent();
      this.shouldSetLabelContent = false;
    }
    if (this.shouldSetLabelPosition) {
      this.setLabelPosition();
      this.shouldSetLabelPosition = false;
    }
    if (this.shouldUpdateReferenceLines) {
      this.drawReferenceLines();
      this.shouldUpdateReferenceLines = false;
    }
    if (this.shouldDrawInteractionState) {
      this.updateInteractionState();
      this.shouldDrawInteractionState = false;
    }
    if (this.shouldSetLabelOpacity) {
      this.setLabelOpacity();
      this.shouldSetLabelOpacity = false;
    }
    if (this.shouldCheckLabelColor) {
      this.checkLabelColorAgainstBackground();
      this.shouldCheckLabelColor = false;
    }
    if (this.shouldSetSelectionClass) {
      this.setSelectedClass();
      this.shouldSetSelectionClass = false;
    }
    if (this.shouldUpdateLegendInteractivity) {
      this.bindLegendInteractivity();
      this.shouldUpdateLegendInteractivity = false;
    }
    if (this.shouldSetLegendCursor) {
      this.setLegendCursor();
      this.shouldSetLegendCursor = false;
    }
    if (this.shouldUpdateCursor) {
      this.updateCursor();
      this.shouldUpdateCursor = false;
    }
    if (this.shouldBindInteractivity) {
      this.bindInteractivity();
      this.shouldBindInteractivity = false;
    }
    if (this.shouldUpdateAnnotations) {
      this.drawAnnotations();
      this.shouldUpdateAnnotations = false;
    }
    if (this.shouldSetAnnotationAccessibility) {
      this.setAnnotationAccessibility();
      this.shouldSetAnnotationAccessibility = false;
    }
    if (this.shouldUpdateXAxis) {
      this.drawXAxis();
      this.shouldUpdateXAxis = false;
    }
    if (this.shouldSetXAxisAccessibility) {
      this.setXAxisAccessibility();
      this.shouldSetXAxisAccessibility = false;
    }
    if (this.shouldUpdateYAxis) {
      this.drawYAxis();
      this.shouldUpdateYAxis = false;
    }
    if (this.shouldSetYAxisAccessibility) {
      this.setYAxisAccessibility();
      this.shouldSetYAxisAccessibility = false;
    }
    if (this.shouldUpdateBaseline) {
      this.drawBaseline();
      this.shouldUpdateBaseline = false;
    }
    this.onChangeHandler();
    this.updateLabels.classed('entering', false);
    resolve('component did update');
  });
}

// Run the (one-shot, opt-out) accessibility prop validation against the
// current props; clears its own flag so it only fires once per request.
shouldValidateAccessibilityProps() {
  if (this.shouldValidateAccessibility && !this.accessibility.disableValidation) {
    this.shouldValidateAccessibility = false;
    validateAccessibilityProps(
      this.chartID,
      { ...this.accessibility },
      {
        annotations: this.annotations,
        data: this.data,
        uniqueID: this.uniqueID,
        context: {
          mainTitle: this.mainTitle,
          // only expose the click emitter when events are not suppressed
          onClickFunc: !this.suppressEvents ? this.clickFunc.emit : undefined
        }
      }
    );
  }
}

// Default interaction keys to the ordinal accessor when none are supplied.
validateInteractionKeys() {
  this.innerInteractionKeys =
    this.interactionKeys && this.interactionKeys.length ? this.interactionKeys : [this.ordinalAccessor];
}

// Default the label accessor to the value accessor when not supplied.
// NOTE(review): the ternary continues past this chunk boundary (closed below).
validateDataLabelAccessor() {
  this.innerLabelAccessor = this.dataLabel.labelAccessor ?
// NOTE(review): closes the validateDataLabelAccessor ternary opened above this
// chunk boundary (fall back to valueAccessor when no labelAccessor is given).
this.dataLabel.labelAccessor : this.valueAccessor;
}

// Coerce dataLabel.placement into a value valid for the current layout
// (top/bottom/auto for vertical, right/left/auto for horizontal).
validateLabelPlacement() {
  // check data label placement assignment based on layout
  if (this.layout === 'vertical') {
    if (
      this.dataLabel.placement !== 'top' &&
      this.dataLabel.placement !== 'bottom' &&
      this.dataLabel.placement !== 'auto'
    ) {
      this.dataLabel.placement = 'top';
    }
  } else {
    if (
      this.dataLabel.placement !== 'right' &&
      this.dataLabel.placement !== 'left' &&
      this.dataLabel.placement !== 'auto'
    ) {
      this.dataLabel.placement = 'right';
    }
  }
}

// Compute inner/padded drawing area from height/width minus margin and padding;
// a string padding prop is first normalized via getPadding.
setDimensions() {
  this.padding = typeof this.padding === 'string' ? getPadding(this.padding) : this.padding;
  // before we render/load we need to set our height and width based on props
  this.innerHeight = this.height - this.margin.top - this.margin.bottom;
  this.innerWidth = this.width - this.margin.left - this.margin.right;
  this.innerPaddedHeight = this.innerHeight - this.padding.top - this.padding.bottom;
  this.innerPaddedWidth = this.innerWidth - this.padding.left - this.padding.right;
}

// Normalize data in place (numeric values, Date group keys formatted per the
// layout's axis format), then nest by groupAccessor and cache the group keys.
// NOTE(review): .map is used purely for side effects here (mutates this.data).
prepareData() {
  // check data format & offset time object
  this.data.map(d => {
    d[this.valueAccessor] = parseFloat(d[this.valueAccessor]);
    d[this.groupAccessor] =
      d[this.groupAccessor] instanceof Date
        ? formatDate({
            date: d[this.groupAccessor],
            format: this.layout === 'vertical' ? this.xAxis.format : this.yAxis.format,
            offsetTimezone: true
          })
        : d[this.groupAccessor];
  });
  this.nest = nest()
    .key(d => d[this.groupAccessor])
    .entries(this.data);
  // Get all item categories
  this.datakeys = this.nest.map(d => d.key);
}

// Build the scoped/formatted rows and column list for the data-table component.
setTableData() {
  // generate scoped and formatted data for data-table component
  const keys = scopeDataKeys(this, chartAccessors, 'clustered-bar-chart');
  this.tableData = getScopedData(this.data, keys);
  this.tableColumns = Object.keys(keys);
}

// Build the linear value scale and the two-level band scales (group band x0/y0,
// within-group band x1/y1) for the active layout; honors min/max overrides,
// reverseOrder, and the two interval ratios.
prepareScales() {
  // only apply minValueOverride when it is actually below the data minimum
  const minBarValue =
    this.minValueOverride && this.minValueOverride < min(this.data, d => d[this.valueAccessor])
      ? this.minValueOverride
      : min(this.data, d => d[this.valueAccessor]);
  // scale band based on layout of chart
  if (this.layout === 'vertical') {
    this.y = scaleLinear()
      .domain([Math.min(0, minBarValue), this.maxValueOverride || max(this.data, d => d[this.valueAccessor])])
      .range([this.innerPaddedHeight, 0]);
    this.x0 = scaleBand()
      .domain(this.datakeys)
      .range(this.reverseOrder ? [this.innerPaddedWidth, 0] : [0, this.innerPaddedWidth])
      .padding(this.groupIntervalRatio);
    // NOTE(review): inner-band domain comes from the first group's values —
    // presumably all groups share the same ordinal categories; verify upstream.
    this.x1 = scaleBand()
      .domain(this.nest[0].values.map(d => d[this.ordinalAccessor]))
      .rangeRound([0, this.x0.bandwidth()])
      .padding(this.barIntervalRatio);
  } else if (this.layout === 'horizontal') {
    this.x = scaleLinear()
      .domain([Math.min(0, minBarValue), this.maxValueOverride || max(this.data, d => d[this.valueAccessor])])
      .range([0, this.innerPaddedWidth]);
    this.y0 = scaleBand()
      .domain(this.datakeys)
      .range(this.reverseOrder ? [this.innerPaddedHeight, 0] : [0, this.innerPaddedHeight])
      .padding(this.groupIntervalRatio);
    this.y1 = scaleBand()
      .domain(this.nest[0].values.map(d => d[this.ordinalAccessor]))
      .rangeRound([0, this.y0.bandwidth()])
      .padding(this.barIntervalRatio);
  }
}

// Derive inner axis configs: only the value-axis grid is allowed for the
// current layout (y grid for vertical, x grid for horizontal).
validateAxes() {
  // check whether we are going to display axis and then update props
  this.innerXAxis = { ...this.xAxis, gridVisible: !(this.layout === 'vertical') && this.xAxis.gridVisible };
  this.innerYAxis = { ...this.yAxis, gridVisible: this.layout === 'vertical' && this.yAxis.gridVisible };
}

// Resolve the color scale: explicit colors win, otherwise palette sized to the
// number of bars per cluster.
setColors() {
  this.preparedColors = this.colors
    ? convertVisaColor(this.colors)
    : getColors(this.colorPalette, this.nest[0].values.length);
}

// Replace flat colors with accessible texture fills unless textures are hidden
// or there are more than 6 series (texture set limit).
setTextures() {
  const colorsArray = this.preparedColors.range ? this.preparedColors.range() : this.preparedColors;
  if (this.accessibility.hideTextures || colorsArray.length > 6) {
    this.colorArr = this.preparedColors;
  } else {
    const colorsToConvert = colorsArray;
    const textures = convertColorsToTextures({
      colors: colorsToConvert,
      rootSVG: this.svg.node(),
      id: this.chartID,
      scheme: 'categorical',
      disableTransitions: !this.duration
    });
    // keep a d3 scale when we started with one, else a plain array
    this.colorArr = this.preparedColors.range ? this.preparedColors.copy().range(textures) : textures;
  }
}

// Build hover/click stroke definitions matching the current color set.
setStrokes() {
  this.strokes = buildStrokes({
    root: this.svg.node(),
    id: this.chartID,
    colors: this.preparedColors.range ? this.preparedColors.range() : this.preparedColors,
    clickStyle: this.clickStyle,
    hoverStyle: this.hoverStyle
  });
}

// One-time DOM scaffolding: root svg, margin/padding containers, and the
// grid/bar/label/legend/tooltip/reference groups.
renderRootElements() {
  this.svg = select(this.clusteredBarChartEl)
    .select('.visa-viz-d3-clustered-bar-container')
    .append('svg')
    .attr('width', this.width)
    .attr('height', this.height)
    .attr('viewBox', '0 0 ' + this.width + ' ' + this.height);
  this.root = this.svg.append('g').attr('id', 'visa-viz-margin-container-g-' + this.chartID);
  this.rootG = this.root.append('g').attr('id', 'visa-viz-padding-container-g-' + this.chartID);
  this.gridG = this.rootG.append('g').attr('class', 'grid-group');
  this.barG = this.rootG.append('g').attr('class', 'clustered-bar-group');
  this.labelG = this.rootG.append('g').attr('class', 'clustered-bar-dataLabel-group');
  this.legendG = select(this.clusteredBarChartEl)
    .select('.clustered-bar-legend')
    .append('svg');
  this.tooltipG = select(this.clusteredBarChartEl).select('.clustered-bar-tooltip');
  this.references = this.rootG.append('g').attr('class', 'clustered-bar-reference-line-group');
}

// Stamp (unitTest=true) or strip (false) data-testid/data-id attributes on
// every chart element so e2e/unit tests can target them.
// NOTE(review): the method continues past this chunk boundary — the else
// branch is completed in the following lines.
setTestingAttributes() {
  if (this.unitTest) {
    select(this.clusteredBarChartEl)
      .select('.visa-viz-d3-clustered-bar-container')
      .attr('data-testid', 'chart-container');
    select(this.clusteredBarChartEl)
      .select('.clustered-bar-main-title')
      .attr('data-testid', 'main-title');
    select(this.clusteredBarChartEl)
      .select('.clustered-bar-sub-title')
      .attr('data-testid', 'sub-title');
    this.svg.attr('data-testid', 'root-svg');
    this.root.attr('data-testid', 'margin-container');
    this.rootG.attr('data-testid', 'padding-container');
    this.legendG.attr('data-testid', 'legend-container');
    this.tooltipG.attr('data-testid', 'tooltip-container');
    this.barG.attr('data-testid', 'clustered-bar-group');
    this.updateBarWrappers
      .attr('data-testid', 'clustered-bar-wrapper')
      .attr('data-id', d => `clustered-bar-wrapper-${d.key}`);
    this.update
      .attr('data-testid', 'bar')
      .attr('data-id', d => `bar-${d[this.groupAccessor]}-${d[this.ordinalAccessor]}`);
    this.labelG.attr('data-testid', 'clustered-bar-dataLabel-group');
    this.updateLabelWrappers
      .attr('data-testid', 'clustered-bar-dataLabel-wrapper')
      .attr('data-id', d => `clustered-bar-dataLabel-wrapper-${d.key}`);
    this.updateLabels
      .attr('data-testid', 'dataLabel')
      .attr('data-id', d => `dataLabel-${d[this.groupAccessor]}-${d[this.ordinalAccessor]}`);
    this.references.attr('data-testid', 'reference-line-group');
    this.svg.select('defs').attr('data-testid', 'pattern-defs');
    // reference lines do not have global selections
    this.references.selectAll('.clustered-bar-reference-line').attr('data-testid', 'reference-line');
    this.references.selectAll('.clustered-bar-reference-line-label').attr('data-testid', 'reference-line-label');
  } else {
    select(this.clusteredBarChartEl)
      .select('.visa-viz-d3-clustered-bar-container')
      .attr('data-testid', null);
    select(this.clusteredBarChartEl)
      .select('.clustered-bar-main-title')
      .attr('data-testid', null);
    select(this.clusteredBarChartEl)
      .select('.clustered-bar-sub-title')
      .attr('data-testid', null);
    this.svg.attr('data-testid', null);
    this.root.attr('data-testid', null);
    this.rootG.attr('data-testid', null);
    this.legendG.attr('data-testid', null);
    this.tooltipG.attr('data-testid', null);
    this.barG.attr('data-testid', null);
    this.updateBarWrappers.attr('data-testid', null).attr('data-id', null);
    this.update.attr('data-testid', null).attr('data-id',
null); this.labelG.attr('data-testid', null); this.updateLabelWrappers.attr('data-testid', null).attr('data-id', null); this.updateLabels.attr('data-testid', null).attr('data-id', null); this.references.attr('data-testid', null); this.svg.select('defs').attr('data-testid', null); // reference lines do not have global selections this.references.selectAll('.clustered-bar-reference-line').attr('data-testid', null); this.references.selectAll('.clustered-bar-reference-line-label').attr('data-testid', null); } } // reset graph size based on window size reSetRoot() { const changeSvg = prepareRenderChange({ selection: this.svg, duration: this.duration, namespace: 'root_reset', easing: easeCircleIn }); changeSvg .attr('width', this.width) .attr('height', this.height) .attr('viewBox', '0 0 ' + this.width + ' ' + this.height); const changeRoot = prepareRenderChange({ selection: this.root, duration: this.duration, namespace: 'root_reset', easing: easeCircleIn }); changeRoot.attr('transform', `translate(${this.margin.left}, ${this.margin.top})`); const changeRootG = prepareRenderChange({ selection: this.rootG, duration: this.duration, namespace: 'root_reset', easing: easeCircleIn }); changeRootG.attr('transform', `translate(${this.padding.left}, ${this.padding.top})`); setAccessibilityDescriptionWidth(this.chartID, this.width); } // draw axis line drawXAxis() { drawAxis({ root: this.rootG, height: this.innerPaddedHeight, width: this.innerPaddedWidth, axisScale: this.layout === 'vertical' ? this.x0 : this.x, left: false, wrapLabel: this.wrapLabel && this.layout === 'vertical' ? this.x0.bandwidth() : '', format: this.xAxis.format, tickInterval: this.xAxis.tickInterval, label: this.xAxis.label, padding: this.padding, hide: !this.innerXAxis.visible, duration: this.duration }); } drawYAxis() { drawAxis({ root: this.rootG, height: this.innerPaddedHeight, width: this.innerPaddedWidth, axisScale: this.layout === 'vertical' ? this.y : this.y0, left: true, wrapLabel: this.wrapLabel ? 
// drawYAxis (tail): remaining options for the left-axis drawAxis call.
this.padding.left || 100 : '',
    format: this.yAxis.format,
    tickInterval: this.yAxis.tickInterval,
    label: this.yAxis.label,
    padding: this.padding,
    hide: !this.innerYAxis.visible,
    duration: this.duration
  });
}

// Registers the bottom axis with the accessibility util; scale choice follows layout.
setXAxisAccessibility() {
  setAccessXAxis({
    rootEle: this.clusteredBarChartEl,
    hasXAxis: this.innerXAxis ? this.innerXAxis.visible : false,
    xAxis: this.layout === 'vertical' ? this.x0 || false : this.x || false, // this is optional for some charts, if hasXAxis is always false
    xAxisLabel: this.xAxis.label ? this.xAxis.label : '' // this is optional for some charts, if hasXAxis is always false
  });
}

// Registers the left axis with the accessibility util; scale choice follows layout.
setYAxisAccessibility() {
  setAccessYAxis({
    rootEle: this.clusteredBarChartEl,
    hasYAxis: this.innerYAxis ? this.innerYAxis.visible : false,
    yAxis: this.layout === 'vertical' ? this.y || false : this.y0 || false, // this is optional for some charts, if hasYAxis is always false
    yAxisLabel: this.yAxis.label ? this.yAxis.label : '' // this is optional for some charts, if hasYAxis is always false
  });
}

// Draws the zero-baseline for both orientations via two drawAxis calls; exactly one of the
// two is hidden depending on layout. markOffset places the line at the value-scale zero,
// with -1 as the fallback when scale(0) is falsy (e.g. undefined for an out-of-domain 0).
drawBaseline() {
  drawAxis({
    root: this.rootG,
    height: this.innerPaddedHeight,
    width: this.innerPaddedWidth,
    axisScale: this.layout === 'vertical' ? this.x0 : this.x,
    left: false,
    padding: this.padding,
    markOffset: this.layout === 'vertical' ? this.y(0) || -1 : this.y0(0) || -1,
    hide: !(this.layout === 'vertical'),
    duration: this.duration
  });
  drawAxis({
    root: this.rootG,
    height: this.innerPaddedHeight,
    width: this.innerPaddedWidth,
    axisScale: this.layout === 'vertical' ? this.y : this.y0,
    left: true,
    padding: this.padding,
    markOffset: this.layout === 'vertical' ? this.x0(0) || -1 : this.x(0) || -1,
    hide: this.layout === 'vertical',
    duration: this.duration
  });
}

// dashed line grid for chart
drawXGrid() {
  drawGrid(
    this.gridG,
    this.innerPaddedHeight,
    this.innerPaddedWidth,
    this.layout === 'vertical' ? this.x0 : this.x,
    false,
    !this.innerXAxis.gridVisible,
    this.xAxis.tickInterval,
    this.duration
  );
}

// Vertical grid counterpart of drawXGrid (left axis scale).
drawYGrid() {
  drawGrid(
    this.gridG,
    this.innerPaddedHeight,
    this.innerPaddedWidth,
    this.layout === 'vertical' ? this.y : this.y0,
    true,
    !this.innerYAxis.gridVisible,
    this.yAxis.tickInterval,
    this.duration
  );
}

// Builds all enter/update/exit selections used by the lifecycle methods below.
// Two-level join: outer <g> wrappers keyed by cluster (this.nest, d.key), inner <rect>
// bars and <text> labels keyed by the ordinal accessor within each cluster.
setGlobalSelections() {
  const dataBoundToWrappers = this.barG.selectAll('.clustered-bar-wrapper').data(this.nest, d => d.key);
  this.enterBarWrappers = dataBoundToWrappers.enter().append('g');
  this.exitBarWrappers = dataBoundToWrappers.exit();
  this.updateBarWrappers = dataBoundToWrappers.merge(this.enterBarWrappers);
  const dataBoundToGeometries = this.updateBarWrappers
    .selectAll('.clustered-bar')
    .data(d => d.values, d => d[this.ordinalAccessor]);
  this.enter = dataBoundToGeometries.enter().append('rect');
  this.exit = dataBoundToGeometries.exit();
  this.update = dataBoundToGeometries.merge(this.enter);
  this.enterSize = this.enter.size();
  this.exitSize = this.exit.size();
  const dataBoundToLabelWrappers = this.labelG.selectAll('g').data(this.nest, d => d.key);
  this.enterLabelWrappers = dataBoundToLabelWrappers.enter().append('g');
  this.exitLabelWrappers = dataBoundToLabelWrappers.exit();
  this.updateLabelWrappers = dataBoundToLabelWrappers.merge(this.enterLabelWrappers);
  const dataBoundToLabels = this.updateLabelWrappers
    .selectAll('text')
    .data(d => d.values, d => d[this.ordinalAccessor]);
  this.enterLabels = dataBoundToLabels.enter().append('text');
  this.exitLabels = dataBoundToLabels.exit();
  this.updateLabels = dataBoundToLabels.merge(this.enterLabels);
  // this.labels
}

// Initializes newly entering bar wrappers and rects: classes, accessibility registration,
// event handlers, interaction-aware fill, and start-of-animation geometry. Axis/dimension
// names are resolved once so the same code serves both layouts (continues next chunk).
enterGeometries() {
  this.enter.interrupt();
  const ordinalAxis = this.layout === 'vertical' ? 'x' : 'y';
  const ordinalDimension = this.layout === 'vertical' ? 'width' : 'height';
  const valueAxis = this.layout === 'vertical' ? 'y' : 'x';
  const valueDimension = this.layout === 'vertical' ? 'height' : 'width';
  const choice = this.layout === 'vertical' ?
// enterGeometries (continued): 'max' for vertical anchors bars at max(0, value) on y;
// 'min' for horizontal anchors at min side on x.
'max' : 'min';
this.enterBarWrappers
  .attr('class', 'clustered-bar-wrapper')
  .classed('entering', true)
  .attr('transform', d =>
    this.layout === 'vertical' ? 'translate(' + this.x0(d.key) + ',0)' : 'translate(0,' + this.y0(d.key) + ')'
  )
  .each((_, i, n) => {
    initializeElementAccess(n[i]);
  });
this.enter
  .attr('class', 'clustered-bar')
  .attr('cursor', !this.suppressEvents ? this.cursor : null)
  .attr('rx', this.roundedCorner)
  .attr('ry', this.roundedCorner)
  .each((_d, i, n) => {
    initializeElementAccess(n[i]);
  })
  .on('click', !this.suppressEvents ? d => this.onClickHandler(d) : null)
  .on('mouseover', !this.suppressEvents ? d => this.onHoverHandler(d) : null)
  .on('mouseout', !this.suppressEvents ? () => this.onMouseOutHandler() : null)
  .attr('fill', (d, i) => {
    // click styling wins over hover styling; both fall back to the series base color
    const clicked =
      this.clickHighlight &&
      this.clickHighlight.length > 0 &&
      checkClicked(d, this.clickHighlight, this.innerInteractionKeys);
    const hovered = this.hoverHighlight && checkHovered(d, this.hoverHighlight, this.innerInteractionKeys);
    const baseColor = this.colorArr[i];
    return clicked && this.clickStyle.color
      ? visaColors[this.clickStyle.color] || this.clickStyle.color
      : clicked
      ? baseColor
      : hovered && this.hoverStyle.color
      ? visaColors[this.hoverStyle.color] || this.hoverStyle.color
      : baseColor;
  })
  .attr('opacity', 0)
  .attr(valueAxis, d => this[valueAxis](Math[choice](0, d[this.valueAccessor])))
  .attr(valueDimension, d =>
    Math.abs(
      this.layout === 'vertical'
        ? this[valueAxis](0) - this[valueAxis](d[this.valueAccessor])
        : this[valueAxis](d[this.valueAccessor]) - this[valueAxis](0)
    )
  )
  .attr(ordinalAxis, d => this[ordinalAxis + '1'](d[this.ordinalAccessor]))
  .attr(ordinalDimension, this[ordinalAxis + '1'].bandwidth());
if (!this.defaults) {
  // not the initial render: start entering bars collapsed at a shifted position so the
  // draw transition grows them outward. NOTE(review): shift math appears to scale toward
  // the plot midpoint — confirm intent before changing.
  this.enter
    .classed('entering', true)
    .attr(ordinalAxis, (d, i, n) => {
      const p = select(n[i].parentNode);
      const groupEntering = p.classed('entering');
      let xStart = groupEntering
        ? this[ordinalAxis + '0'](p.datum().key)
        : this[ordinalAxis + '1'](d[this.ordinalAccessor]);
      const xCenter = groupEntering
        ? this[ordinalAxis + '0'].bandwidth() / 2
        : this[ordinalAxis + '1'].bandwidth() / 2;
      const xPercent = (xStart + xCenter) / (this.innerPaddedWidth / 2);
      xStart = groupEntering ? 0 : xStart;
      const shift = xStart + xCenter * xPercent;
      return shift;
    })
    .attr(ordinalDimension, 0);
}
this.enterBarWrappers.order();
this.enter.order();
}

// Fades entering bars in: bars still classed 'entering' animate over this.duration (and
// drop the class), already-present bars snap instantly (duration 0). Final opacity is
// interaction-aware via checkInteraction.
updateGeometries() {
  this.update.interrupt();
  this.update
    .transition('opacity')
    .duration((_, i, n) => {
      if (select(n[i]).classed('entering')) {
        select(n[i]).classed('entering', false);
        return this.duration;
      }
      return 0;
    })
    .ease(easeCircleIn)
    .attr('opacity', d =>
      checkInteraction(d, 1, this.hoverOpacity, this.hoverHighlight, this.clickHighlight, this.innerInteractionKeys)
    );
}

// Animates exiting bars out (collapse + fade), then removes them and refreshes the
// accessibility bookkeeping once all transitions end (continues next chunk).
exitGeometries() {
  this.exitBarWrappers.interrupt();
  this.exit.interrupt();
  const axisOfShift = this.layout === 'vertical' ? 'x' : 'y';
  const dimensionOfShift = this.layout === 'vertical' ?
// exitGeometries (continued): derive 'innerPaddedWidth'/'innerPaddedHeight' property name
// from the dimension being collapsed.
'width' : 'height';
const innerPaddedDimension = 'innerPadded' + dimensionOfShift[0].toUpperCase() + dimensionOfShift.substring(1);
this.exit
  .transition('exit')
  .duration(this.duration)
  .ease(easeCircleIn)
  .attr('opacity', 0)
  .attr(axisOfShift, (_d, i, n) => {
    // collapse each exiting bar toward its own center, scaled by its position in the plot
    const self = select(n[i]);
    let shift = +self.attr(axisOfShift) + +self.attr(dimensionOfShift) / 2;
    shift =
      +self.attr(axisOfShift) +
      (+self.attr(dimensionOfShift) / 2) * (shift / (this[innerPaddedDimension] / 2));
    return shift;
  })
  .attr(dimensionOfShift, 0);
this.exitSize += this.exitBarWrappers.selectAll('.clustered-bar').size();
this.exitBarWrappers
  .selectAll('.clustered-bar')
  .transition('exit_wrappers')
  .duration(this.duration * 0.75)
  .ease(easeCircleIn)
  .attr('opacity', 0)
  .attr(axisOfShift, (_, i, n) => {
    // bars inside exiting wrappers collapse relative to the wrapper's current translate
    const p = n[i].parentNode;
    const xStart = p.transform.baseVal.consolidate().matrix.e;
    const xCenter = this[axisOfShift + '0'].bandwidth() / 2;
    const xPercent = (xStart + xCenter) / (this[innerPaddedDimension] / 2);
    const shift = xCenter * xPercent;
    return shift;
  })
  .attr(dimensionOfShift, 0);
this.update
  .transition('accessibilityAfterExit')
  .duration(this.duration)
  .ease(easeCircleIn)
  .call(transitionEndAll, () => {
    // before we exit geometries, we need to check if a focus exists or not
    const focusDidExist = checkAccessFocus(this.rootG.node());
    // then we must remove the exiting elements
    this.exit.remove();
    this.exitBarWrappers.remove();
    // then our util can count geometries
    this.setChartCountAccessibility();
    // our group's label should update with new counts too
    this.setGroupAccessibilityID();
    // since items exited, labels must receive updated values
    this.setGeometryAriaLabels();
    // and also make sure the user's focus isn't lost
    retainAccessFocus({
      parentGNode: this.rootG.node(),
      focusDidExist
      // recursive: true
    });
  });
}

// Main geometry pass: moves cluster wrappers to their group position, flags bars whose
// position/size attributes are mid-transition ('geometryIsMoving'), writes the data-*
// attributes consumed by the collision/accessibility utils, then transitions every bar to
// its final position and size.
drawGeometries() {
  const ordinalAxis = this.layout === 'vertical' ? 'x' : 'y';
  const ordinalDimension = this.layout === 'vertical' ? 'width' : 'height';
  const valueAxis = this.layout === 'vertical' ? 'y' : 'x';
  const valueDimension = this.layout === 'vertical' ? 'height' : 'width';
  const choice = this.layout === 'vertical' ? 'max' : 'min';
  this.updateBarWrappers
    .transition('update')
    .duration(this.duration)
    .ease(easeCircleIn)
    .attr('transform', d =>
      this.layout === 'vertical' ? 'translate(' + this.x0(d.key) + ',0)' : 'translate(0,' + this.y0(d.key) + ')'
    )
    .call(transitionEndAll, () => {
      this.updateBarWrappers.classed('entering', false);
    });
  this.update
    .classed('geometryIsMoving', (d, i, n) => {
      const geometryIsUpdating = checkAttributeTransitions(select(n[i]), [
        {
          attr: ordinalAxis,
          numeric: true,
          newValue: this[ordinalAxis + '1'](d[this.ordinalAccessor])
        },
        {
          attr: ordinalDimension,
          numeric: true,
          newValue: this[ordinalAxis + '1'].bandwidth()
        },
        {
          attr: valueAxis,
          numeric: true,
          newValue: this[valueAxis](Math[choice](0, d[this.valueAccessor])) // this.y(d[this.valueAccessor]))
        },
        {
          attr: valueDimension,
          numeric: true,
          newValue: Math.abs(
            this.layout === 'vertical'
              ? this[valueAxis](0) - this[valueAxis](d[this.valueAccessor])
              : this[valueAxis](d[this.valueAccessor]) - this[valueAxis](0)
          )
        }
      ]);
      return geometryIsUpdating;
    })
    .attr(
      `data-${ordinalAxis}`,
      d =>
        this.layout === 'vertical'
          ? this[ordinalAxis + '1'](d[this.ordinalAccessor]) //+ this.x0(d[this.groupAccessor])
          : this[ordinalAxis + '1'](d[this.ordinalAccessor]) //+ this.y0(d[this.groupAccessor])
    )
    .attr(
      `data-translate-x`,
      d => (this.layout === 'vertical' ? this.x0(d[this.groupAccessor]) : 0) + this.padding.left + this.margin.left
    )
    .attr(
      `data-translate-y`,
      d => (this.layout === 'vertical' ? 0 : this.y0(d[this.groupAccessor])) + this.padding.top + this.margin.top
    )
    .attr(`data-${ordinalDimension}`, this[ordinalAxis + '1'].bandwidth())
    .attr(`data-${valueAxis}`, d => this[valueAxis](Math[choice](0, d[this.valueAccessor])))
    .attr(`data-${valueDimension}`, d =>
      Math.abs(
        this.layout === 'vertical'
          ? this[valueAxis](0) - this[valueAxis](d[this.valueAccessor])
          : this[valueAxis](d[this.valueAccessor]) - this[valueAxis](0)
      )
    )
    .transition('update')
    .duration((_, i, n) => {
      // entering bars animate in half the time (and after a half-duration delay below)
      return select(n[i]).classed('entering') ? this.duration / 2 : this.duration;
    })
    .delay((_, i, n) => {
      return select(n[i]).classed('entering') ? this.duration / 2 : 0;
    })
    .ease(easeCircleIn)
    .attr(valueAxis, d => this[valueAxis](Math[choice](0, d[this.valueAccessor])))
    .attr(valueDimension, d =>
      Math.abs(
        this.layout === 'vertical'
          ? this[valueAxis](0) - this[valueAxis](d[this.valueAccessor])
          : this[valueAxis](d[this.valueAccessor]) - this[valueAxis](0)
      )
    )
    .attr(ordinalAxis, d => this[ordinalAxis + '1'](d[this.ordinalAccessor]))
    .attr(ordinalDimension, this[ordinalAxis + '1'].bandwidth())
    .call(transitionEndAll, () => {
      this.update.classed('geometryIsMoving', false);
      this.updateInteractionState();
      this.checkLabelColorAgainstBackground();
      // we must make sure if geometries move, that our focus indicator does too
      retainAccessFocus({
        parentGNode: this.rootG.node()
      });
    });
}

// Re-applies hover/click visual state to bars, legend, and labels. Safe to run at any
// point in the lifecycle (continues next chunk).
updateInteractionState() {
  removeHoverStrokes(this.svg.node());
  // we created an "opacity" transition namespace in update's transition
  // we override it here to instantly display opacity state (below)
  this.update.interrupt('opacity');
  // we use this.update and this.updateLabels from setGlobalSelection here
  // the lifecycle state does not matter (enter/update/exit)
  // since interaction state can happen at any time
  // first we address interaction state on marks/bars
  this.update
    .attr('opacity', d =>
      checkInteraction(d, 1, this.hoverOpacity, this.hoverHighlight, this.clickHighlight, this.innerInteractionKeys)
    )
    .attr('fill', (d, i) => {
      const clicked =
        this.clickHighlight &&
        this.clickHighlight.length > 0 &&
        checkClicked(d, this.clickHighlight, this.innerInteractionKeys);
      const hovered = this.hoverHighlight && checkHovered(d, this.hoverHighlight, this.innerInteractionKeys);
      const baseColor = this.colorArr[i];
      return clicked &&
// updateInteractionState (continued): click style wins, then hover style, then base color.
this.clickStyle.color
        ? visaColors[this.clickStyle.color] || this.clickStyle.color
        : clicked
        ? baseColor
        : hovered && this.hoverStyle.color
        ? visaColors[this.hoverStyle.color] || this.hoverStyle.color
        : baseColor;
    })
    .attr('filter', (d, i, n) => {
      // stroke filters are skipped while a bar is mid-transition or strokes are hidden
      if (!this.accessibility.hideStrokes && !select(n[i]).classed('geometryIsMoving')) {
        const clicked =
          this.clickHighlight &&
          this.clickHighlight.length > 0 &&
          checkClicked(d, this.clickHighlight, this.innerInteractionKeys);
        const hovered = this.hoverHighlight && checkHovered(d, this.hoverHighlight, this.innerInteractionKeys);
        const baseColor = this.preparedColors[i];
        const state = clicked ? 'click' : hovered && !select(n[i]).classed('geometryIsMoving') ? 'hover' : 'rest';
        const color =
          clicked && this.clickStyle.color
            ? visaColors[this.clickStyle.color] || this.clickStyle.color
            : clicked
            ? baseColor
            : hovered && this.hoverStyle.color
            ? visaColors[this.hoverStyle.color] || this.hoverStyle.color
            : baseColor;
        if (state === 'hover') {
          drawHoverStrokes({
            inputElement: n[i],
            id: this.chartID,
            key: d[this.ordinalAccessor] + d[this.groupAccessor],
            strokeWidth: this.hoverStyle.strokeWidth,
            fill: color
          });
        }
        return this.strokes[state + color];
      }
      return null;
    });
  retainAccessFocus({
    parentGNode: this.rootG.node()
  });
  // then we set the legend interactive state
  setLegendInteractionState({
    root: this.legendG,
    uniqueID: this.chartID,
    interactionKeys: this.innerInteractionKeys,
    groupAccessor: this.ordinalAccessor,
    hoverHighlight: this.hoverHighlight,
    clickHighlight: this.clickHighlight,
    hoverStyle: this.hoverStyle,
    clickStyle: this.clickStyle,
    hoverOpacity: this.hoverOpacity
  });
  // and lastly we have to check for labels, especially when auto placement is in place
  this.updateLabels.interrupt('opacity');
  const addCollisionClass = this.dataLabel.placement === 'auto' || this.dataLabel.collisionHideOnly;
  const hideOnly = this.dataLabel.placement !== 'auto' && this.dataLabel.collisionHideOnly;
  this.processLabelOpacity(this.updateLabels, addCollisionClass);
  // if we have collision on, we need to update the bitmap on interaction
  if (addCollisionClass) {
    const labelsAdded = this.updateLabels.filter((_, i, n) => select(n[i]).classed('collision-added'));
    const labelsRemoved = this.updateLabels
      .filter((_, i, n) => select(n[i]).classed('collision-removed'))
      .attr('data-use-dx', hideOnly) // need to add this for remove piece of collision below
      .attr('data-use-dy', hideOnly); // .transition().duration(0);
    // we can now remove labels as well if we need to...
    if (labelsRemoved.size() > 0) {
      this.bitmaps = resolveLabelCollision({
        bitmaps: this.bitmaps,
        labelSelection: labelsRemoved,
        avoidMarks: [],
        validPositions: ['middle'],
        offsets: [1],
        accessors: ['key'],
        size: [roundTo(this.width, 0), roundTo(this.height, 0)],
        hideOnly: false,
        removeOnly: true
      });
      // remove temporary class now
      labelsRemoved.classed('collision-removed', false);
    }
    // we can now add labels as well if we need to...
    if (labelsAdded.size() > 0) {
      this.processLabelPosition(labelsAdded, false, false, true);
      // remove temporary class now
      labelsAdded.classed('collision-added', false);
    }
  }
}

// Convenience wrapper: recompute label opacity without collision-class bookkeeping.
setLabelOpacity() {
  this.processLabelOpacity(this.updateLabels);
}

// Computes the target opacity for each data label, honoring dataLabel.visible, interaction
// state, and (for inside placements) whether the formatted text physically fits the bar.
// When addCollisionClass is set, labels whose visibility toggles are tagged with
// 'collision-added' / 'collision-removed' for the bitmap update in updateInteractionState.
processLabelOpacity(selection, addCollisionClass?) {
  const opacity = this.dataLabel.visible ? 1 : 0;
  const ordinalAxis = this.layout === 'vertical' ? 'x' : 'y';
  const ordinalDimension = this.layout === 'vertical' ? 'width' : 'height';
  const valueAxis = this.layout === 'vertical' ? 'y' : 'x';
  const valueDimension = this.layout === 'vertical' ? 'height' : 'width';
  // width available per bar when labels sit outside the bar
  const fullBandwidth = this[ordinalAxis + '0'].bandwidth() / this.nest[0].values.length;
  selection.attr('opacity', (d, i, n) => {
    const prevOpacity = +select(n[i]).attr('opacity');
    const styleVisibility = select(n[i]).style('visibility');
    const dimensions = {};
    dimensions[ordinalDimension] =
      this.dataLabel.placement === 'left' || this.dataLabel.placement === 'bottom'
        ? this[ordinalAxis + '1'].bandwidth()
        : fullBandwidth;
    if (this.dataLabel.placement === 'left' || this.dataLabel.placement === 'bottom') {
      dimensions[valueDimension] = Math.abs(
        this.layout === 'vertical'
          ? this[valueAxis](0) - this[valueAxis](d[this.valueAccessor])
          : this[valueAxis](d[this.valueAccessor]) - this[valueAxis](0)
      );
    }
    const hasRoom =
      this.dataLabel.placement === 'auto' ||
      this.dataLabel.collisionHideOnly ||
      this.accessibility.showSmallLabels ||
      verifyTextHasSpace({
        text: formatDataLabel(d, this.innerLabelAccessor, this.dataLabel.format),
        dimensions,
        fontSize: 14
      });
    const targetOpacity = hasRoom
      ? checkInteraction(
          d,
          opacity,
          this.hoverOpacity,
          this.hoverHighlight,
          this.clickHighlight,
          this.innerInteractionKeys
        ) < 1
        ? 0
        : 1
      : 0;
    if (
      ((targetOpacity === 1 && styleVisibility === 'hidden') || prevOpacity !== targetOpacity) &&
      addCollisionClass
    ) {
      if (targetOpacity === 1) {
        select(n[i])
          .classed('collision-added', true)
          .style('visibility', null);
      } else {
        select(n[i]).classed('collision-removed', true);
      }
    }
    return targetOpacity;
  });
}

// Recolors all data labels for contrast against their (possibly highlighted) bars.
checkLabelColorAgainstBackground() {
  this.updateLabels.attr('fill', (d, i, n) => {
    return this.textTreatmentHandler(d, i, n);
  });
}

// Animates bar corner radii to the current roundedCorner setting.
setRoundedCorners() {
  this.update
    .transition('corners')
    .duration(this.duration)
    .ease(easeCircleIn)
    .attr('rx', this.roundedCorner)
    .attr('ry', this.roundedCorner);
}

// Determines a label's fill color (and text-stroke filter) based on the color behind it:
// the click/hover style color when the datum is highlighted, otherwise the series color.
// Labels rendered on top of a bar get auto-contrast text (continues next chunk).
textTreatmentHandler = (d, i, n) => {
  const me = select(n[i]);
  const bgColor =
    this.clickHighlight &&
    this.clickHighlight.length > 0 &&
    checkClicked(d, this.clickHighlight, this.innerInteractionKeys) &&
    this.clickStyle.color
      ? visaColors[this.clickStyle.color] || this.clickStyle.color
      : this.hoverHighlight &&
        checkHovered(d, this.hoverHighlight, this.innerInteractionKeys) &&
        this.hoverStyle.color
      ? visaColors[this.hoverStyle.color] || this.hoverStyle.color
      : this.preparedColors[i];
  const autoPlacementBackgroundColor =
    this.dataLabel.placement === 'auto' // can ignore this for collisionHideOnly
      ? this.layout === 'vertical'
        ?
// textTreatmentHandler (continued): decide whether an auto-placed label currently sits on
// top of its bar, using the data-baseline / data-align attributes set by placement.
(this.dataLabel.collisionPlacement === 'top' && me.attr('data-baseline') !== 'bottom') ||
          (this.dataLabel.collisionPlacement === 'middle' && me.attr('data-baseline') !== 'bottom') ||
          (this.dataLabel.collisionPlacement === 'bottom' &&
            me.attr('data-baseline') === 'bottom' &&
            ((d[this.valueAccessor] >= 0 && this.y(0) - this.y(d[this.valueAccessor]) > 20) ||
              (d[this.valueAccessor] < 0 && this.y(0) - this.y(d[this.valueAccessor]) < -20))) // if bottom we can check against baseline value of 0, helps to handle charts with negative values
        : (this.dataLabel.collisionPlacement === 'right' && me.attr('data-align') !== 'left') ||
          (this.dataLabel.collisionPlacement === 'middle' && me.attr('data-align') !== 'left') ||
          (this.dataLabel.collisionPlacement === 'left' && me.attr('data-align') !== 'right')
      : false;
  const color =
    autoPlacementBackgroundColor || this.dataLabel.placement === 'bottom' || this.dataLabel.placement === 'left'
      ? autoTextColor(bgColor)
      : visaColors.dark_text;
  me.attr(
    'filter',
    !me.classed('textIsMoving')
      ? createTextStrokeFilter({
          root: this.svg.node(),
          id: this.chartID,
          color:
            autoPlacementBackgroundColor ||
            this.dataLabel.placement === 'bottom' ||
            this.dataLabel.placement === 'left'
              ? bgColor
              : '#ffffff'
        })
      : null
  );
  return color;
};

// Applies the 'highlight' class + accessibility selection state to clicked bars.
setSelectedClass() {
  this.update.classed('highlight', (d, i, n) => {
    let selected = checkInteraction(d, true, false, '', this.clickHighlight, this.innerInteractionKeys);
    selected = this.clickHighlight && this.clickHighlight.length ? selected : false;
    const selectable = this.accessibility.elementsAreInterface;
    setElementInteractionAccessState(n[i], selected, selectable);
    return selected;
  });
}

// Syncs the pointer cursor on bars and labels with the suppressEvents setting.
updateCursor() {
  this.update.attr('cursor', !this.suppressEvents ? this.cursor : null);
  this.updateLabels.attr('cursor', !this.suppressEvents ? this.cursor : null);
}

// Initializes entering label wrappers and labels: classes, event handlers, contrast fill,
// and a near-zero starting opacity (continues next chunk).
enterDataLabels() {
  const ordinalAxis = this.layout === 'vertical' ? 'x' : 'y';
  const opacity = this.dataLabel.visible ? 1 : 0;
  this.enterLabelWrappers
    .attr('class', 'clustered-bar-label-wrapper')
    .classed('entering', true)
    .attr('transform', d =>
      this.layout === 'vertical' ? 'translate(' + this.x0(d.key) + ',0)' : 'translate(0,' + this.y0(d.key) + ')'
    );
  this.enterLabels
    .attr('class', 'clustered-bar-dataLabel')
    .classed('entering', true)
    .classed('clustered-bar-dataLabel-horizontal', false)
    .classed('clustered-bar-dataLabel-vertical', false)
    .classed('clustered-bar-dataLabel-' + this.layout, true)
    .attr('cursor', !this.suppressEvents ? this.cursor : null)
    .attr('opacity', d =>
      // Number.EPSILON keeps the label technically non-zero while visually hidden —
      // presumably so downstream transitions/measurement still treat it as rendered;
      // TODO confirm before simplifying to 0.
      checkInteraction(
        d,
        opacity,
        this.hoverOpacity,
        this.hoverHighlight,
        this.clickHighlight,
        this.innerInteractionKeys
      ) < 1
        ? 0
        : Number.EPSILON
    )
    .attr('fill', this.textTreatmentHandler)
    .on('click', !this.suppressEvents ? d => this.onClickHandler(d) : null)
    .on('mouseover', !this.suppressEvents ? d => this.onHoverHandler(d) : null)
    .on('mouseout', !this.suppressEvents ? () => this.onMouseOutHandler() : null);
  this.enterLabels.attr(ordinalAxis, (d, i, n) => {
    // start labels at a shifted position mirroring enterGeometries' entering animation
    const zeroScale = ordinalAxis + '0';
    const oneScale = ordinalAxis + '1';
    const breadth = ordinalAxis === 'x' ? 'innerPaddedWidth' : 'innerPaddedHeight';
    const p = select(n[i].parentNode);
    const groupEntering = p.classed('entering');
    let xStart = groupEntering ? this[zeroScale](p.datum().key) : this[oneScale](d[this.ordinalAccessor]);
    const xCenter = groupEntering ? this[zeroScale].bandwidth() / 2 : this[oneScale].bandwidth() / 2;
    const xPercent = (xStart + xCenter) / (this[breadth] / 2);
    xStart = groupEntering ? 0 : xStart;
    const shift = xStart + xCenter * xPercent;
    return shift;
  });
}

// Fades labels to their interaction-aware opacity; entering labels animate over the last
// quarter of the duration (quarter-length tween after a three-quarter delay).
updateDataLabels() {
  this.updateLabels.interrupt();
  const opacity = this.dataLabel.visible ? 1 : 0;
  this.updateLabels
    .transition('opacity')
    .duration((_, i, n) => {
      if (select(n[i]).classed('entering')) {
        // select(n[i]).classed('entering', false);
        return this.duration / 4;
      }
      return 0;
    })
    .delay((_, i, n) => {
      if (select(n[i]).classed('entering')) {
        // select(n[i]).classed('entering', false);
        return (this.duration / 4) * 3;
      }
      return 0;
    })
    .ease(easeCircleIn)
    .attr('opacity', d =>
      checkInteraction(
        d,
        opacity,
        this.hoverOpacity,
        this.hoverHighlight,
        this.clickHighlight,
        this.innerInteractionKeys
      ) < 1
        ? 0
        : 1
    )
    .call(transitionEndAll, () => {
      this.updateLabels.classed('entering', false);
      // this.checkLabelColorAgainstBackground();
    });
}

// Fades out and removes exiting label wrappers (and their texts) plus individually
// exiting labels.
exitDataLabels() {
  this.exitLabelWrappers
    .selectAll('text')
    .transition('exit')
    .ease(easeCircleIn)
    .duration(this.duration / 3)
    .attr('opacity', 0)
    .call(transitionEndAll, () => {
      this.exitLabelWrappers.remove();
    });
  this.exitLabels
    .transition('exit')
    .ease(easeCircleIn)
    .duration(this.duration / 3)
    .attr('opacity', 0)
    .remove();
}

// Writes the formatted text content for every data label.
setLabelContent() {
  this.updateLabels.text(d => formatDataLabel(d, this.innerLabelAccessor, this.dataLabel.format));
}

// Positions label wrappers at their cluster translate, then routes entering vs updating
// labels through processLabelPosition (entering: no transition; updating: transitioned).
setLabelPosition() {
  // position the label wrappers with/without transition
  this.updateLabelWrappers
    .classed('clustered-bar-dataLabel-horizontal', false)
    .classed('clustered-bar-dataLabel-vertical', false)
    .classed('clustered-bar-dataLabel-' + this.layout, true);
  const changeLabelWrappers = prepareRenderChange({
    selection: this.updateLabelWrappers,
    duration: this.duration,
    namespace: 'position-label-wrappers',
    easing: easeCircleIn
  });
  changeLabelWrappers
    .attr('transform', d =>
      this.layout === 'vertical' ? 'translate(' + this.x0(d.key) + ',0)' : 'translate(0,' + this.y0(d.key) + ')'
    )
    .call(transitionEndAll, () => {
      this.updateLabelWrappers.classed('entering', false);
    });
  // we have to run this filter as there are times when the enter/update selections will
  // contain one another's nodes.
// setLabelPosition (continued): split entering vs updating labels and position each group.
const enteringOnly = this.enterLabels.filter((_, i, n) => {
  return select(n[i]).classed('entering');
});
const updatingOnly = this.updateLabels.filter((_, i, n) => {
  return !select(n[i]).classed('entering');
});
// if we have enter and update we need to process them separately
// enter with no transition, update with transition
// the transition is the main reason we need to do this.
if (enteringOnly.size() > 0) {
  this.processLabelPosition(enteringOnly, false, true, false);
  this.processLabelPosition(updatingOnly, true, false, true);
} else {
  // otherwise we can just process update and do it all in one step with transition
  // doing this all at once saves processing time from calling resolveLabelCollision less
  this.processLabelPosition(updatingOnly, true, true, false);
}
}

// Positions a label selection via placeDataLabels, preparing the data-* attributes the
// collision algorithm reads. runTransition toggles animated placement, redrawBitmap forces
// the occupancy bitmap to be rebuilt, and suppressMarkDraw skips re-stamping marks into it.
processLabelPosition(selection, runTransition?, redrawBitmap?, suppressMarkDraw?) {
  const ordinalAxis = this.layout === 'vertical' ? 'x' : 'y';
  const ordinalDimension = this.layout === 'vertical' ? 'width' : 'height';
  const valueAxis = this.layout === 'vertical' ? 'y' : 'x';
  const valueDimension = this.layout === 'vertical' ? 'height' : 'width';
  const choice = this.layout === 'vertical' ? 'max' : 'min';
  const hideOnly = this.dataLabel.placement !== 'auto' && this.dataLabel.collisionHideOnly;
  let textHeight = 15; // default label is usually 15
  // candidate positions/offsets per layout for the collision solver
  const collisionSettings = {
    vertical: {
      top: {
        validPositions: ['top', 'bottom'],
        offsets: [4, 1]
      },
      middle: {
        validPositions: ['middle', 'top'],
        offsets: [1, textHeight / 2]
      },
      bottom: {
        validPositions: ['middle', 'top'],
        offsets: [1, textHeight / 2]
      }
    },
    horizontal: {
      right: {
        validPositions: ['right', 'left'],
        offsets: [4, 8]
      },
      middle: {
        validPositions: ['middle', 'right'],
        offsets: [1, 15]
      },
      left: {
        validPositions: ['left', 'right'],
        offsets: [1, 20]
      }
    }
  };
  const collisionPlacement = this.dataLabel && this.dataLabel.collisionPlacement;
  const boundsScope =
    collisionPlacement && collisionSettings[this.layout][collisionPlacement] // check whether placement provided maps correctly
      ? this.dataLabel.collisionPlacement
      : this.layout === 'vertical'
      ? 'top' // if we don't have collisionPlacement
      : 'right';
  // prep the data- attributes for label collision algorithm
  // only needs to be run if we are running collision though
  selection
    .style('visibility', (_, i, n) =>
      this.dataLabel.placement === 'auto' || this.dataLabel.collisionHideOnly
        ? select(n[i]).style('visibility')
        : null
    )
    .attr(`data-${ordinalAxis}`, (d, i, n) => {
      if (i === 0) {
        // we just need to check this on one element
        const textElement = n[i];
        const style = getComputedStyle(textElement);
        const fontSize = parseFloat(style.fontSize);
        textHeight = Math.max(fontSize - 1, 1); // clone.getBBox().height;
      }
      return this[ordinalAxis + '1'](d[this.ordinalAccessor]);
    })
    .attr(
      `data-translate-x`,
      d => (this.layout === 'vertical' ? this.x0(d[this.groupAccessor]) : 0) + this.padding.left + this.margin.left
    )
    .attr(
      `data-translate-y`,
      d => (this.layout === 'vertical' ? 0 : this.y0(d[this.groupAccessor])) + this.padding.top + this.margin.top
    )
    .attr(`data-${ordinalDimension}`, this[ordinalAxis + '1'].bandwidth())
    .attr(`data-${valueAxis}`, d => this[valueAxis](Math[choice](0, d[this.valueAccessor])))
    .attr(`data-${valueDimension}`, d =>
      Math.abs(
        this.layout === 'vertical'
          ? this[valueAxis](0) - this[valueAxis](d[this.valueAccessor])
          : this[valueAxis](d[this.valueAccessor]) - this[valueAxis](0)
      )
    );
  // we use prepareRenderChange to use or not use .transition()
  // immediate - label enter and interaction effects
  // transition - update effects (e.g., data change)
  const changeLabels = prepareRenderChange({
    selection: selection,
    duration: !runTransition ? 0 : this.duration,
    namespace: 'position-labels',
    easing: easeCircleIn
  });
  this.bitmaps = placeDataLabels({
    root: changeLabels,
    xScale: this.layout === 'vertical' ? this.x1 : this.x,
    yScale: this.layout === 'vertical' ? this.y : this.y1,
    ordinalAccessor: this.ordinalAccessor,
    valueAccessor: this.valueAccessor,
    placement: this.dataLabel.placement,
    layout: this.layout,
    chartType: 'bar',
    avoidCollision: {
      runOccupancyBitmap: this.dataLabel.visible && this.dataLabel.placement === 'auto',
      bitmaps: !redrawBitmap ? this.bitmaps : undefined,
      labelSelection: changeLabels,
      avoidMarks: [this.update],
      validPositions: hideOnly ? ['middle'] : collisionSettings[this.layout][boundsScope].validPositions,
      offsets: hideOnly ? [1] : collisionSettings[this.layout][boundsScope].offsets,
      accessors: [this.groupAccessor, this.ordinalAccessor, 'key'], // key is created for lines by nesting done in line,
      size: [roundTo(this.width, 0), roundTo(this.height, 0)], // for some reason the bitmap needs width instead of inner padded width here
      boundsScope: hideOnly ?
// processLabelPosition (tail): final collision options.
undefined : boundsScope,
      hideOnly: this.dataLabel.visible && this.dataLabel.collisionHideOnly,
      suppressMarkDraw: suppressMarkDraw
    }
  });
}

// Enter/update/exit rendering for user-supplied reference lines (keyed by label). Each
// reference is a <g> holding one <line> and one <text>, translated to the value's position
// on the value scale for the current layout.
drawReferenceLines() {
  const currentReferences = this.references.selectAll('g').data(this.referenceLines, d => d.label);
  const enterReferences = currentReferences
    .enter()
    .append('g')
    .attr('class', 'clustered-bar-reference')
    .attr('opacity', 1);
  const enterLines = enterReferences.append('line');
  enterLines
    // .attr('id', (_, i) => 'reference-line-' + i)
    .attr('class', 'clustered-bar-reference-line')
    .attr('opacity', 0);
  const enterLabels = enterReferences.append('text');
  enterLabels
    // .attr('id', (_, i) => 'reference-line-' + i + '-label')
    .attr('class', 'clustered-bar-reference-line-label')
    .attr('opacity', 0);
  const mergeReferences = currentReferences.merge(enterReferences);
  const mergeLines = mergeReferences
    .selectAll('.clustered-bar-reference-line')
    .data(d => [d])
    .transition('merge')
    .ease(easeCircleIn)
    .duration(this.duration);
  const mergeLabels = mergeReferences
    .selectAll('.clustered-bar-reference-line-label')
    .data(d => [d])
    .transition('merge')
    .ease(easeCircleIn)
    .duration(this.duration)
    .text(d => d.label);
  const exitReferences = currentReferences.exit();
  exitReferences
    .transition('exit')
    .ease(easeCircleIn)
    .duration(this.duration)
    .attr('opacity', 0)
    .remove();
  if (this.layout === 'vertical') {
    // horizontal line across the plot at y(value); label anchored per labelPlacementHorizontal
    enterReferences.attr('transform', d => {
      return 'translate(0,' + this.y(d.value) + ')';
    });
    mergeReferences
      .transition('merge')
      .ease(easeCircleIn)
      .duration(this.duration)
      .attr('transform', d => {
        return 'translate(0,' + this.y(d.value) + ')';
      });
    enterLines
      .attr('x1', 0)
      .attr('y1', 0)
      .attr('y2', 0)
      .attr('x2', this.innerPaddedWidth);
    enterLabels
      .attr('text-anchor', d => ((d.labelPlacementHorizontal || 'right') === 'right' ? 'start' : 'end'))
      .attr('x', d => ((d.labelPlacementHorizontal || 'right') === 'right' ? this.innerPaddedWidth : 0))
      .attr('y', 0)
      .attr('dx', d => ((d.labelPlacementHorizontal || 'right') === 'right' ? '0.1em' : '-0.1em'))
      .attr('dy', '0.3em');
    mergeLines
      .attr('x1', 0)
      .attr('y1', 0)
      .attr('y2', 0)
      .attr('x2', this.innerPaddedWidth);
    mergeLabels
      .attr('text-anchor', d => ((d.labelPlacementHorizontal || 'right') === 'right' ? 'start' : 'end'))
      .attr('x', d => ((d.labelPlacementHorizontal || 'right') === 'right' ? this.innerPaddedWidth : 0))
      .attr('y', 0)
      .attr('dx', d => ((d.labelPlacementHorizontal || 'right') === 'right' ? '0.1em' : '-0.1em'))
      .attr('dy', '0.3em');
  } else if (this.layout === 'horizontal') {
    // vertical line at x(value); label anchored per labelPlacementVertical
    enterReferences.attr('transform', d => {
      return 'translate(' + this.x(d.value) + ',0)';
    });
    mergeReferences
      .transition('merge')
      .ease(easeCircleIn)
      .duration(this.duration)
      .attr('transform', d => {
        return 'translate(' + this.x(d.value) + ',0)';
      });
    enterLines
      .attr('x1', 0)
      .attr('y1', this.innerPaddedHeight)
      .attr('x2', 0)
      .attr('y2', 0);
    mergeLines
      .attr('x1', 0)
      .attr('y1', this.innerPaddedHeight)
      .attr('x2', 0)
      .attr('y2', 0);
    enterLabels
      .attr('text-anchor', 'middle')
      .attr('x', 0)
      .attr('y', d => ((d.labelPlacementVertical || 'top') === 'top' ? 0 : this.innerPaddedHeight))
      .attr('dx', 0)
      .attr('dy', d => ((d.labelPlacementVertical || 'top') === 'top' ? '-0.3em' : '1em'));
    mergeLabels
      .attr('text-anchor', 'middle')
      .attr('x', 0)
      .attr('y', d => {
        return (d.labelPlacementVertical || 'top') === 'top' ? 0 : this.innerPaddedHeight;
      })
      .attr('dx', 0)
      .attr('dy', d => ((d.labelPlacementVertical || 'top') === 'top' ? '-0.3em' : '1em'));
  }
  mergeLines
    .style('stroke', visaColors[this.referenceStyle.color] || this.referenceStyle.color)
    .style('stroke-width', this.referenceStyle.strokeWidth)
    .attr('stroke-dasharray', this.referenceStyle.dashed ? this.referenceStyle.dashed : '')
    .attr('opacity', this.referenceStyle.opacity);
  mergeLabels.style('fill', visaColors[this.referenceStyle.color] || this.referenceStyle.color).attr('opacity', 1);
}

// Legend rows come from the first cluster's values (one entry per series).
prepareLegendData() {
  this.legendData = this.nest[0].values;
}

// Renders the legend in the top margin via the shared drawLegend util.
drawLegendElements() {
  drawLegend({
    root: this.legendG,
    uniqueID: this.chartID,
    width: this.innerPaddedWidth,
    height: this.margin.top + 20,
    colorArr: this.colorArr,
    baseColorArr: this.preparedColors,
    hideStrokes: this.accessibility.hideStrokes,
    margin: this.margin,
    padding: this.padding,
    duration: this.duration,
    type: 'bar',
    fontSize: 16,
    data: this.legendData,
    labelKey: this.ordinalAccessor,
    label: this.legend.labels,
    hide: !this.legend.visible,
    interactionKeys: this.innerInteractionKeys,
    groupAccessor: this.ordinalAccessor,
    hoverHighlight: this.hoverHighlight,
    clickHighlight: this.clickHighlight,
    hoverStyle: this.hoverStyle,
    clickStyle: this.clickStyle,
    hoverOpacity: this.hoverOpacity
  });
}

// (Re)binds click/hover handlers on legend items, gated by legend.interactive and
// suppressEvents. Note hover emits hoverFunc directly rather than onHoverHandler.
bindLegendInteractivity() {
  select(this.clusteredBarChartEl)
    .selectAll('.legend')
    .style('cursor', this.legend.interactive && !this.suppressEvents ? this.cursor : '')
    .on('click', this.legend.interactive && !this.suppressEvents ? d => this.onClickHandler(d) : null)
    .on('mouseover', this.legend.interactive && !this.suppressEvents ? d => this.hoverFunc.emit(d) : null)
    .on('mouseout', this.legend.interactive && !this.suppressEvents ? () => this.onMouseOutHandler() : null);
}

// Cursor-only refresh for legend items.
setLegendCursor() {
  select(this.clusteredBarChartEl)
    .selectAll('.legend')
    .style('cursor', this.legend.interactive && !this.suppressEvents ? this.cursor : null);
}

// (Re)binds click/hover handlers on bars and labels, gated by suppressEvents.
bindInteractivity() {
  this.update
    .on('click', !this.suppressEvents ? d => this.onClickHandler(d) : null)
    .on('mouseover', !this.suppressEvents ? d => this.onHoverHandler(d) : null)
    .on('mouseout', !this.suppressEvents ? () => this.onMouseOutHandler() : null);
  this.updateLabels
    .on('click', !this.suppressEvents ? d => this.onClickHandler(d) : null)
    .on('mouseover', !this.suppressEvents ? d => this.onHoverHandler(d) : null)
    .on('mouseout', !this.suppressEvents ? () => this.onMouseOutHandler() : null);
}

// Renders user annotations; accessor/scale pairing flips with layout (group axis vs value axis).
drawAnnotations() {
  annotate({
    source: this.rootG.node(),
    data: this.annotations,
    xScale: this.layout !== 'horizontal' ? this.x0 : this.x,
    xAccessor: this.layout !== 'horizontal' ? this.groupAccessor : this.valueAccessor,
    yScale: this.layout !== 'horizontal' ? this.y : this.y0,
    yAccessor: this.layout !== 'horizontal' ? this.valueAccessor : this.groupAccessor,
    width: this.width,
    height: this.height,
    padding: this.padding,
    margin: this.margin,
    bitmaps: this.bitmaps
  });
}

// Exposes annotations to assistive technology.
setAnnotationAccessibility() {
  setAccessAnnotation(this.clusteredBarChartEl, this.annotations);
}

// new accessibility functions added here
// Resolves heading levels for the description structure from highestHeadingLevel.
setTagLevels() {
  this.topLevel = findTagLevel(this.highestHeadingLevel);
  this.bottomLevel = findTagLevel(this.highestHeadingLevel, 3);
}

// Builds (or rebuilds, once, when shouldRedrawWrapper is set) the accessible description
// region around the chart.
setChartDescriptionWrapper() {
  initializeDescriptionRoot({
    rootEle: this.clusteredBarChartEl,
    title: this.accessibility.title || this.mainTitle,
    chartTag: 'clustered-bar-chart',
    uniqueID: this.chartID,
    highestHeadingLevel: this.highestHeadingLevel,
    redraw: this.shouldRedrawWrapper,
    disableKeyNav:
      this.suppressEvents &&
      this.accessibility.elementsAreInterface === false &&
      this.accessibility.keyboardNavConfig &&
      this.accessibility.keyboardNavConfig.disabled
  });
  this.shouldRedrawWrapper = false;
}

// Wires the root svg into the accessibility controller (keyboard navigation, aria
// structure). Continues beyond this chunk.
setParentSVGAccessibility() {
  setAccessibilityController({
    node: this.svg.node(),
    chartTag: 'clustered-bar-chart',
    title: this.accessibility.title || this.mainTitle,
    description: this.subTitle,
    uniqueID: this.chartID,
    geomType: 'bar',
    includeKeyNames: this.accessibility.includeDataKeyNames,
    dataKeys: scopeDataKeys(this, chartAccessors, 'clustered-bar-chart'),
    groupAccessor: this.groupAccessor,
    groupName: 'cluster',
    disableKeyNav:
      this.suppressEvents &&
      this.accessibility.elementsAreInterface === false &&
      this.accessibility.keyboardNavConfig &&
      this.accessibility.keyboardNavConfig.disabled
    // groupKeys: [],
    // nested: '',
    //
recursive: true }); } setGeometryAccessibilityAttributes() { this.update.each((_d, i, n) => { initializeElementAccess(n[i]); }); } setGeometryAriaLabels() { const keys = scopeDataKeys(this, chartAccessors, 'clustered-bar-chart'); this.update.each((_d, i, n) => { setElementFocusHandler({ node: n[i], geomType: 'bar', includeKeyNames: this.accessibility.includeDataKeyNames, dataKeys: keys, groupName: 'cluster', uniqueID: this.chartID, disableKeyNav: this.suppressEvents && this.accessibility.elementsAreInterface === false && this.accessibility.keyboardNavConfig && this.accessibility.keyboardNavConfig.disabled }); setElementAccessID({ node: n[i], uniqueID: this.chartID }); }); } setGroupAccessibilityID() { this.updateBarWrappers.each((_, i, n) => { setElementAccessID({ node: n[i], uniqueID: this.chartID }); }); } setChartAccessibilityTitle() { setAccessTitle(this.clusteredBarChartEl, this.accessibility.title || this.mainTitle); } setChartAccessibilitySubtitle() { setAccessSubtitle(this.clusteredBarChartEl, this.subTitle); } setChartAccessibilityLongDescription() { setAccessLongDescription(this.clusteredBarChartEl, this.accessibility.longDescription); } setChartAccessibilityExecutiveSummary() { setAccessExecutiveSummary(this.clusteredBarChartEl, this.accessibility.executiveSummary); } setChartAccessibilityPurpose() { setAccessPurpose(this.clusteredBarChartEl, this.accessibility.purpose); } setChartAccessibilityContext() { setAccessContext(this.clusteredBarChartEl, this.accessibility.contextExplanation); } setChartAccessibilityStatisticalNotes() { setAccessStatistics(this.clusteredBarChartEl, this.accessibility.statisticalNotes); } setChartCountAccessibility() { setAccessChartCounts({ rootEle: this.clusteredBarChartEl, parentGNode: this.barG.node(), // pass the wrapper to <g> or geometries here, should be single node selection chartTag: 'clustered-bar-chart', geomType: 'bar', groupName: 'cluster' // recursive: true }); } setChartAccessibilityStructureNotes() { 
setAccessStructure(this.clusteredBarChartEl, this.accessibility.structureNotes); } // new accessibility stuff ends here onChangeHandler() { if (this.accessibility && typeof this.accessibility.onChangeFunc === 'function') { const d = { updated: this.updated, added: this.enterSize, removed: this.exitSize }; this.accessibility.onChangeFunc(d); } this.updated = false; this.enterSize = 0; this.exitSize = 0; } onClickHandler(d) { this.clickFunc.emit(d); } onHoverHandler(d) { overrideTitleTooltip(this.chartID, true); this.hoverFunc.emit(d); if (this.showTooltip && d[this.ordinalAccessor]) { this.eventsTooltip({ data: d, evt: event, isToShow: true }); } } onMouseOutHandler() { overrideTitleTooltip(this.chartID, false); this.mouseOutFunc.emit(); if (this.showTooltip) { this.eventsTooltip({ isToShow: false }); } } // set initial style (instead of copying css class across the lib) setTooltipInitialStyle() { initTooltipStyle(this.tooltipG); } // tooltip eventsTooltip({ data, evt, isToShow }: { data?: any; evt?: any; isToShow: boolean }) { drawTooltip({ root: this.tooltipG, data, event: evt, isToShow, tooltipLabel: this.tooltipLabel, xAxis: this.xAxis, yAxis: this.yAxis, dataLabel: this.dataLabel, layout: this.layout, ordinalAccessor: this.groupAccessor, // on purpose - to match tooltip util format groupAccessor: this.ordinalAccessor, // on purpose - to match tooltip util format valueAccessor: this.valueAccessor, chartType: 'clustered' }); } render() { // hardcoded theme to light until we add this functionality const theme = 'light'; // everything between this comment and the third should eventually // be moved into componentWillUpdate (if the stenicl bug is fixed) this.init(); if (this.shouldSetTagLevels) { this.setTagLevels(); this.shouldSetTagLevels = false; } if (this.shouldCheckValueAxis) { if (this.layout === 'horizontal') { this.shouldUpdateXAxis = true; this.shouldUpdateXGrid = true; } else if (this.layout === 'vertical') { this.shouldUpdateYAxis = true; 
this.shouldUpdateYGrid = true; } this.shouldCheckValueAxis = false; } if (this.shouldCheckLabelAxis) { if (this.layout === 'vertical') { this.shouldUpdateXAxis = true; this.shouldUpdateXGrid = true; this.shouldSetXAxisAccessibility = true; } else if (this.layout === 'horizontal') { this.shouldUpdateYAxis = true; this.shouldUpdateYGrid = true; this.shouldSetYAxisAccessibility = true; } this.shouldCheckLabelAxis = false; } if (this.shouldUpdateData) { this.prepareData(); this.shouldUpdateData = false; } if (this.shouldUpdateLegendData) { this.prepareLegendData(); this.shouldUpdateLegendData = false; } if (this.shouldSetDimensions) { this.setDimensions(); this.shouldSetDimensions = false; } if (this.shouldUpdateScales) { this.prepareScales(); this.shouldUpdateScales = false; } if (this.shouldValidateInteractionKeys) { this.validateInteractionKeys(); this.shouldValidateInteractionKeys = false; } if (this.shouldValidateDataLabelAccessor) { this.validateDataLabelAccessor(); this.shouldValidateDataLabelAccessor = false; } if (this.shouldValidateAxes) { this.validateAxes(); this.shouldValidateAxes = false; } if (this.shouldUpdateTableData) { this.setTableData(); this.shouldUpdateTableData = false; } if (this.shouldValidate) { this.shouldValidateAccessibilityProps(); this.shouldValidate = false; } if (this.shouldSetColors) { this.setColors(); this.shouldSetColors = false; } if (this.shouldValidateLabelPlacement) { this.validateLabelPlacement(); this.shouldValidateLabelPlacement = false; } // Everything between this comment and the first should eventually // be moved into componentWillUpdate (if the stenicl bug is fixed) return ( <div class={`o-layout is--${this.layout} ${theme}`}> <div class="o-layout--chart"> <this.topLevel class="clustered-bar-main-title vcl-main-title">{this.mainTitle}</this.topLevel> <this.bottomLevel class="visa-ui-text--instructions clustered-bar-sub-title vcl-sub-title"> {this.subTitle} </this.bottomLevel> <div class="clustered-bar-legend vcl-legend" 
style={{ display: this.legend.visible ? 'block' : 'none' }} /> <keyboard-instructions uniqueID={this.chartID} geomType={'bar'} groupName={'cluster'} // taken from initializeDescriptionRoot, on bar this should be "bar group", stacked bar is "stack", and clustered is "cluster" chartTag={'clustered-bar-chart'} width={this.width - (this.margin ? this.margin.right || 0 : 0)} isInteractive={this.accessibility.elementsAreInterface} hasCousinNavigation // on bar this requires checking for groupAccessor disabled={ this.suppressEvents && this.accessibility.elementsAreInterface === false && this.accessibility.keyboardNavConfig && this.accessibility.keyboardNavConfig.disabled } // the chart is "simple" /> <div class="visa-viz-d3-clustered-bar-container" /> <div class="clustered-bar-tooltip vcl-tooltip" style={{ display: this.showTooltip ? 'block' : 'none' }} /> <data-table uniqueID={this.chartID} isCompact tableColumns={this.tableColumns} data={this.tableData} padding={this.padding} margin={this.margin} hideDataTable={this.accessibility.hideDataTableButton} unitTest={this.unitTest} /> </div> {/* <canvas id="bitmap-render" /> */} </div> ); } private init() { // reading properties const keys = Object.keys(ClusteredBarChartDefaultValues); let i = 0; // accept 0 or false as default value const exceptions = { mainTitle: { exception: '' }, subTitle: { exception: '' }, barIntervalRatio: { exception: 0 }, groupIntervalRatio: { exception: 0 }, showTooltip: { exception: false }, wrapLabel: { exception: false }, hoverOpacity: { exception: 0 } }; for (i = 0; i < keys.length; i++) { const exception = !exceptions[keys[i]] ? false : this[keys[i]] === exceptions[keys[i]].exception; this[keys[i]] = this[keys[i]] || exception ? this[keys[i]] : ClusteredBarChartDefaultValues[keys[i]]; } } } // incorporate OSS licenses into build window['VisaChartsLibOSSLicenses'] = getLicenses(); // tslint:disable-line no-string-literal
the_stack
import { BinaryReader, common, crypto, PrivateKey, UInt256 } from '@neo-one/client-common'; import { ChangeViewConsensusMessage, ChangeViewPayloadCompact, ChangeViewReason, CommitConsensusMessage, CommitPayloadCompact, ConsensusContext, ConsensusMessage, ExtensiblePayload, Node, PreparationPayloadCompact, PrepareRequestConsensusMessage, PrepareResponseConsensusMessage, RecoveryConsensusMessage, RecoveryRequestConsensusMessage, UnsignedExtensiblePayload, } from '@neo-one/node-core'; import { utils } from '@neo-one/utils'; import { BN } from 'bn.js'; import _ from 'lodash'; import { ensureHeader, ensureMaxBlockLimitation } from './context'; const getPreparationHash = (context: ConsensusContext, preparationPayloads: readonly ExtensiblePayload[]): UInt256 => { // tslint:disable-next-line: no-array-mutation const result = Object.entries( _.groupBy(preparationPayloads, (p) => common.uInt256ToHex(context.getMessage<PrepareResponseConsensusMessage>(p).preparationHash), ), ).sort(([, { length: aLength }], [, { length: bLength }]) => utils.numCompDescending(aLength, bLength))[0]; // tslint:disable-next-line: strict-type-predicates if (result === undefined) { return common.ZERO_UINT256; } return common.hexToUInt256(result[0]); }; export const makeSignedPayload = async ({ node, privateKey, consensusMessage, }: { readonly node: Node; readonly privateKey: PrivateKey; readonly context: ConsensusContext; readonly consensusMessage: ConsensusMessage; }) => ExtensiblePayload.sign( new UnsignedExtensiblePayload({ category: 'dBFT', validBlockStart: 0, validBlockEnd: consensusMessage.validatorIndex, data: consensusMessage.serializeWire(), sender: crypto.privateKeyToScriptHash(privateKey), // TODO: not sure this is correct network: node.blockchain.deserializeWireContext.network, }), privateKey, node.blockchain.deserializeWireContext.network, ); export const makeCommit = async ({ context: contextIn, node, privateKey, }: { readonly context: ConsensusContext; readonly node: Node; readonly 
privateKey: PrivateKey; }) => { let context = contextIn; const maybePayload = context.commitPayloads[context.myIndex]; if (maybePayload !== undefined) { return { context, payload: maybePayload }; } const { context: newContext, block } = ensureHeader(context); context = newContext; if (block === undefined) { throw new Error(); } const signature = crypto.sign({ message: context.blockBuilder.getBlock().message, privateKey }); const signedPayload = await makeSignedPayload({ node, privateKey, context, consensusMessage: new CommitConsensusMessage({ validatorIndex: context.myIndex, blockIndex: utils.nullthrows(block.index), viewNumber: context.viewNumber, signature, }), }); const mutablePayloads = [...context.commitPayloads]; mutablePayloads[context.myIndex] = signedPayload; context = context.clone({ commitPayloads: mutablePayloads }); return { context, payload: signedPayload }; }; export const makePrepareResponse = async ({ context: contextIn, node, privateKey, }: { readonly context: ConsensusContext; readonly node: Node; readonly privateKey: PrivateKey; }) => { let context = contextIn; const payload = context.preparationPayloads[utils.nullthrows(context.blockBuilder.primaryIndex)]; if (payload === undefined) { throw new Error('makePrepareResponse expected payload to be defined'); } const preparationHash = payload.hash; const signedPayload = await makeSignedPayload({ node, privateKey, context, consensusMessage: new PrepareResponseConsensusMessage({ validatorIndex: context.myIndex, blockIndex: utils.nullthrows(context.blockBuilder.index), viewNumber: context.viewNumber, preparationHash, }), }); const mutablePreparationPayloads = [...context.preparationPayloads]; mutablePreparationPayloads[context.myIndex] = signedPayload; context = context.clone({ preparationPayloads: mutablePreparationPayloads }); return { context, payload: signedPayload }; }; export const makeRecovery = async ({ context: contextIn, node, privateKey, }: { readonly context: ConsensusContext; readonly 
node: Node; readonly privateKey: PrivateKey; }) => { const context = contextIn; const prepareRequestMessage = context.transactionHashes !== undefined ? new PrepareRequestConsensusMessage({ version: utils.nullthrows(context.blockBuilder.version), prevHash: utils.nullthrows(context.blockBuilder.previousHash), validatorIndex: context.myIndex, // TODO: not sure this is correct blockIndex: utils.nullthrows(context.blockBuilder.index), viewNumber: context.viewNumber, timestamp: utils.nullthrows(context.blockBuilder.timestamp), nonce: utils.nullthrows(context.blockBuilder.nonce), transactionHashes: context.transactionHashes, }) : undefined; const changeViewMessages = _.fromPairs( _.take( context.lastChangeViewPayloads .filter(utils.notNull) .map((p) => getChangeViewPayloadCompact(context, p)) .map((p) => [p.validatorIndex, p]), context.M, ), ); const filteredPreparationPayloads = context.preparationPayloads.filter(utils.notNull); const preparationHash = context.transactionHashes === undefined ? getPreparationHash(context, filteredPreparationPayloads) : undefined; const preparationMessages = _.fromPairs( filteredPreparationPayloads.map((p) => getPreparationPayloadCompact(context, p)).map((p) => [p.validatorIndex, p]), ); const commitMessages = context.commitSent ? 
_.fromPairs( context.commitPayloads .filter(utils.notNull) .map((p) => getCommitPayloadCompact(context, p)) .map((p) => [p.validatorIndex, p]), ) : {}; return makeSignedPayload({ node, privateKey, context, consensusMessage: new RecoveryConsensusMessage({ validatorIndex: context.myIndex, blockIndex: utils.nullthrows(context.blockBuilder.index), viewNumber: context.viewNumber, changeViewMessages, prepareRequestMessage, preparationHash, preparationMessages, commitMessages, }), }); }; export const makePrepareRequest = async ({ node, privateKey, context: contextIn, }: { readonly node: Node; readonly privateKey: PrivateKey; readonly context: ConsensusContext; }) => { let context = contextIn; const { context: maxBlockContext } = await ensureMaxBlockLimitation(node, context, Object.values(node.memPool)); context = maxBlockContext; const previousHeader = await node.blockchain.getHeader(utils.nullthrows(context.blockBuilder.previousHash)); if (previousHeader === undefined) { throw new Error('makePrepareRequested expected previousHeader to be defined'); } const timestamp = BN.max(new BN(Date.now()), previousHeader.timestamp.addn(1)); context = context.clone({ blockOptions: { timestamp, nonce: getNonce() } }); const preparationPayload = await makeSignedPayload({ node, privateKey, context, consensusMessage: new PrepareRequestConsensusMessage({ version: utils.nullthrows(context.blockBuilder.version), prevHash: utils.nullthrows(context.blockBuilder.previousHash), validatorIndex: context.myIndex, blockIndex: utils.nullthrows(context.blockBuilder.index), viewNumber: context.viewNumber, timestamp, nonce: utils.nullthrows(context.blockBuilder.nonce), transactionHashes: context.transactionHashes ?? 
[], }), }); const mutablePreparationPayloads = [...context.preparationPayloads]; mutablePreparationPayloads[context.myIndex] = preparationPayload; context = context.clone({ preparationPayloads: mutablePreparationPayloads }); return { context, payload: preparationPayload }; }; export const makeRecoveryRequest = async ({ node, context, privateKey, }: { readonly node: Node; readonly context: ConsensusContext; readonly privateKey: PrivateKey; }): Promise<ExtensiblePayload> => { const consensusMessage = new RecoveryRequestConsensusMessage({ validatorIndex: context.myIndex, blockIndex: utils.nullthrows(context.blockBuilder.index), viewNumber: context.viewNumber, timestamp: new BN(Date.now()), }); return makeSignedPayload({ node, context, privateKey, consensusMessage, }); }; export const makeChangeView = async ({ node, privateKey, context: contextIn, reason, }: { readonly node: Node; readonly context: ConsensusContext; readonly privateKey: PrivateKey; readonly reason: ChangeViewReason; }) => { const context = contextIn; const payload = await makeSignedPayload({ node, privateKey, context, consensusMessage: new ChangeViewConsensusMessage({ validatorIndex: context.myIndex, blockIndex: utils.nullthrows(context.blockBuilder.index), viewNumber: context.viewNumber, reason, timestamp: new BN(Date.now()), }), }); const mutableChangeViewPayloads = [...context.changeViewPayloads]; mutableChangeViewPayloads[context.myIndex] = payload; return { context: context.clone({ changeViewPayloads: mutableChangeViewPayloads }), payload }; }; export const getChangeViewPayloadCompact = (context: ConsensusContext, payload: ExtensiblePayload) => { const message = context.getMessage<ChangeViewConsensusMessage>(payload); return new ChangeViewPayloadCompact({ validatorIndex: message.validatorIndex, originalViewNumber: message.viewNumber, timestamp: message.timestamp, invocationScript: payload.witness.invocation, }); }; export const getCommitPayloadCompact = (context: ConsensusContext, payload: 
ExtensiblePayload) => { const message = context.getMessage<CommitConsensusMessage>(payload); return new CommitPayloadCompact({ viewNumber: message.viewNumber, validatorIndex: message.validatorIndex, signature: message.signature, invocationScript: payload.witness.invocation, }); }; export const getPreparationPayloadCompact = (context: ConsensusContext, payload: ExtensiblePayload) => new PreparationPayloadCompact({ validatorIndex: context.getMessage(payload).validatorIndex, invocationScript: payload.witness.invocation, }); export const getNonce = () => { const getRandom = () => Math.floor(Math.random() * (2 ** 8 - 1)); const buf = new BinaryReader(Buffer.from(_.range(0, 8).map(getRandom))); return new BN(buf.readUInt64LE()); };
the_stack
import dayjs from 'dayjs';
import { Position, Range, Selection, TextDocument, TextEditorRevealType, TextLine, Uri, window, workspace, WorkspaceEdit } from 'vscode';
import { DueDate } from './dueDate';
import { extensionConfig } from './extension';
import { parseDocument } from './parse';
import { Count, TheTask } from './TheTask';
import { dateWithoutTime, DATE_FORMAT, durationTo, getDateInISOFormat } from './time/timeUtils';
import { updateArchivedTasks } from './treeViewProviders/treeViews';
import { DueState } from './types';
import { applyEdit, checkArchiveFileAndNotify, getActiveOrDefaultDocument, specialTag, SpecialTagName, taskToString } from './utils/extensionUtils';
import { forEachTask, getNestedTasksLineNumbers, getTaskAtLineExtension } from './utils/taskUtils';
import { unique } from './utils/utils';
// This file contains 2 types of functions
// 1) Performs an action on the document and applies an edit (saves the document)
// 2) Has a `WorkspaceEdit` suffix that accepts an edit and performs actions(insert/replace/delete) without applying
// ────────────────────────────────────────────────────────────
// ──── Apply Edit ────────────────────────────────────────────
// ────────────────────────────────────────────────────────────
/**
 * Replace entire line range with new text. (text is take from task transformed to string).
 */
export async function editTask(document: TextDocument, task: TheTask) {
	const edit = new WorkspaceEdit();
	const newTaskAsText = taskToString(task);
	const line = document.lineAt(task.lineNumber);
	edit.replace(document.uri, line.range, newTaskAsText);
	return applyEdit(edit, document);
}
/**
 * Add `{h}` special tag
 */
export async function hideTask(document: TextDocument, lineNumber: number) {
	const edit = new WorkspaceEdit();
	const line = document.lineAt(lineNumber);
	const task = getTaskAtLineExtension(lineNumber);
	if (!task) {
		return undefined;
	}
	if (!task.isHidden) {
		edit.insert(document.uri, line.range.end, ' {h}');
	}
	return applyEdit(edit, document);
}
/**
 * Replace entire line range with new text.
 */
export async function editTaskRawText(document: TextDocument, lineNumber: number, newRawText: string) {
	const edit = new WorkspaceEdit();
	const line = document.lineAt(lineNumber);
	edit.replace(document.uri, line.range, newRawText);
	return applyEdit(edit, document);
}
/**
 * Toggle `{c}` special tag
 */
export async function toggleTaskCollapse(document: TextDocument, lineNumber: number) {
	const edit = new WorkspaceEdit();
	toggleTaskCollapseWorkspaceEdit(edit, document, lineNumber);
	return applyEdit(edit, document);
}
/**
 * Recursively expand/collapse all nested tasks
 */
export async function toggleTaskCollapseRecursive(document: TextDocument, lineNumber: number) {
	const parentTask = getTaskAtLineExtension(lineNumber);
	if (!parentTask) {
		return undefined;
	}
	const edit = new WorkspaceEdit();
	// Align every nested task's collapse state with the parent's NEW state:
	// when the parent is collapsed (about to expand), expand collapsed subtasks,
	// otherwise collapse expanded ones.
	if (parentTask.isCollapsed) {
		forEachTask(task => {
			if (task.isCollapsed && task.subtasks.length) {
				toggleTaskCollapseWorkspaceEdit(edit, document, task.lineNumber);
			}
		}, parentTask.subtasks);
	} else {
		forEachTask(task => {
			if (!task.isCollapsed && task.subtasks.length) {
				toggleTaskCollapseWorkspaceEdit(edit, document, task.lineNumber);
			}
		}, parentTask.subtasks);
	}
	toggleTaskCollapseWorkspaceEdit(edit, document, lineNumber);
	return await applyEdit(edit, document);
}
/**
 * Insert/Replace due date
 */
export async function setDueDate(document: TextDocument, lineNumber: number, newDueDate: string) {
	const dueDate = `{due:${newDueDate}}`;
	const edit = new WorkspaceEdit();
	const task = getTaskAtLineExtension(lineNumber);
	// An existing {overdue} marker is stale once the due date changes
	if (task?.overdueRange) {
		edit.delete(document.uri, task.overdueRange);
	}
	if (task?.dueRange) {
		edit.replace(document.uri, task.dueRange, dueDate);
	} else {
		const line = document.lineAt(lineNumber);
		const isLineEndsWithWhitespace = line.text.endsWith(' ');
		edit.insert(document.uri, line.range.end, `${isLineEndsWithWhitespace ? '' : ' '}${dueDate}`);
	}
	return await applyEdit(edit, document);
}
/**
 * Start time tracking (task duration). Triggered manually by user.
 */
export async function startTask(document: TextDocument, lineNumber: number) {
	const edit = new WorkspaceEdit();
	const line = document.lineAt(lineNumber);
	const task = getTaskAtLineExtension(lineNumber);
	if (!task) {
		return undefined;
	}
	const newStartDate = specialTag(SpecialTagName.started, getDateInISOFormat(undefined, true));
	if (task.startRange) {
		// Restart: replace the existing {start:...} tag
		edit.replace(document.uri, task.startRange, newStartDate);
	} else {
		edit.insert(document.uri, line.range.end, ` ${newStartDate}`);
	}
	return await applyEdit(edit, document);
}
/**
 * Delete the task. Show confirmation dialog if necessary. Modal dialog shows all the tasks that will be deleted.
 */
export async function tryToDeleteTask(document: TextDocument, lineNumber: number) {
	const task = getTaskAtLineExtension(lineNumber);
	if (!task) {
		return undefined;
	}
	const edit = new WorkspaceEdit();
	let numberOfTasksToBeDeleted = '';
	let deletedTasksAsText = '';
	let showConfirmationDialog = false;

	// Deleting a parent also deletes every nested subtask
	const taskLineNumbersToDelete = [lineNumber];
	if (task.subtasks.length) {
		taskLineNumbersToDelete.push(...getNestedTasksLineNumbers(task.subtasks));
	}
	for (const ln of taskLineNumbersToDelete) {
		const taskAtLine = getTaskAtLineExtension(ln);
		if (!taskAtLine) {
			continue;
		}
		// Replace double spaces (nesting indent) with a visible marker for the dialog
		deletedTasksAsText += `${taskAtLine.rawText.replace(/\s\s/g, '┄')}\n`;
	}
	numberOfTasksToBeDeleted = `❗ [ ${taskLineNumbersToDelete.length} ] task${taskLineNumbersToDelete.length > 1 ? 's' : ''} will be deleted.`;

	if (extensionConfig.confirmTaskDelete === 'always') {
		showConfirmationDialog = true;
	} else if (extensionConfig.confirmTaskDelete === 'hasNestedTasks') {
		if (task.subtasks.length) {
			showConfirmationDialog = true;
		}
	}

	if (showConfirmationDialog) {
		const confirmBtnName = 'Delete';
		const button = await window.showWarningMessage(`${numberOfTasksToBeDeleted}\n${deletedTasksAsText}`, {
			modal: true,
		}, confirmBtnName);
		if (button !== confirmBtnName) {
			return undefined;
		}
	}

	for (const ln of taskLineNumbersToDelete) {
		deleteTaskWorkspaceEdit(edit, document, ln);
	}
	return applyEdit(edit, document);
}
/**
 * Either toggle done or increment count
 */
export async function toggleDoneOrIncrementCount(document: TextDocument, lineNumber: number) {
	const task = getTaskAtLineExtension(lineNumber);
	if (!task) {
		return undefined;
	}
	if (task.count) {
		return await incrementCountForTask(document, lineNumber, task);
	} else {
		await toggleDoneAtLine(document, lineNumber);
		return undefined;
	}
}
/**
 * Increment count special tag. If already max `3/3` then set it to `0/3`
 */
export async function incrementCountForTask(document: TextDocument, lineNumber: number, task: TheTask) {
	const line = document.lineAt(lineNumber);
	const edit = new WorkspaceEdit();
	const count = task.count;
	if (!count) {
		return Promise.resolve(undefined);
	}
	let newValue = 0;
	// TODO: this function must call toggleDoneAtLine() !!!
	if (count.current !== count.needed) {
		newValue = count.current + 1;
		if (newValue === count.needed) {
			// Reaching the target count completes the task
			insertCompletionDateWorkspaceEdit(edit, document, line, task);
			removeOverdueWorkspaceEdit(edit, document.uri, task);
		}
		setCountCurrentValueWorkspaceEdit(edit, document.uri, count, String(newValue));
	} else {
		// Already at max: wrap around to 0 and clear the completion date
		setCountCurrentValueWorkspaceEdit(edit, document.uri, count, '0');
		removeCompletionDateWorkspaceEdit(edit, document.uri, task);
	}
	return applyEdit(edit, document);
}
/**
 * Decrement count special tag. If alredy min `0/3` then do nothing.
 */
export async function decrementCountForTask(document: TextDocument, lineNumber: number, task: TheTask) {
	const edit = new WorkspaceEdit();
	const count = task.count;
	if (!count) {
		return undefined;
	}
	if (count.current === 0) {
		return undefined;
	} else if (count.current === count.needed) {
		// Dropping below max un-completes the task
		removeCompletionDateWorkspaceEdit(edit, document.uri, task);
	}
	setCountCurrentValueWorkspaceEdit(edit, document.uri, count, String(count.current - 1));
	return applyEdit(edit, document);
}
/**
 * Increment/Decrement a priority. Create it if the task doesn't have one.
 */
export async function incrementOrDecrementPriority(document: TextDocument, lineNumber: number, type: 'decrement' | 'increment') {
	const task = getTaskAtLineExtension(lineNumber);
	// 'A' is the highest priority, 'Z' the lowest — stop at the bounds
	if (!task || type === 'increment' && task.priority === 'A' || type === 'decrement' && task.priority === 'Z') {
		return undefined;
	}
	const newPriority = type === 'increment' ? String.fromCharCode(task.priority.charCodeAt(0) - 1) : String.fromCharCode(task.priority.charCodeAt(0) + 1);
	const edit = new WorkspaceEdit();
	if (task.priorityRange) {
		// Task has a priority
		edit.replace(document.uri, task.priorityRange, `(${newPriority})`);
	} else {
		// No priority, create one
		edit.insert(document.uri, new Position(lineNumber, 0), `(${newPriority}) `);
	}
	return applyEdit(edit, document);
}
/**
 * Remove overdue special tag
 */
async function removeOverdueFromLine(document: TextDocument, task: TheTask) {
	const edit = new WorkspaceEdit();
	removeOverdueWorkspaceEdit(edit, document.uri, task);
	return applyEdit(edit, document);
}
/**
 * Toggle task completion. Handle what to insert/delete.
 */
export async function toggleDoneAtLine(document: TextDocument, lineNumber: number) {
	const task = getTaskAtLineExtension(lineNumber);
	if (!task) {
		return;
	}
	if (task.overdue) {
		await removeOverdueFromLine(document, task);
	}
	const line = document.lineAt(lineNumber);
	const edit = new WorkspaceEdit();
	if (task.done) {
		// Un-complete: strip completion date, duration and start tags
		removeCompletionDateWorkspaceEdit(edit, document.uri, task);
		removeDurationWorkspaceEdit(edit, document.uri, task);
		removeStartWorkspaceEdit(edit, document.uri, task);
	} else {
		insertCompletionDateWorkspaceEdit(edit, document, line, task);
	}
	await applyEdit(edit, document);

	if (extensionConfig.autoArchiveTasks) {
		await archiveTasks([task], document);
	}
}
/**
 * - Warning and noop when default archive file path is not specified
 * - Archive only works for completed tasks
 * - When the task is non-root (has parent task) - noop
 * - When the task has subtasks -> archive them too
 */
export async function archiveTasks(tasks: TheTask[], document: TextDocument) {
	const isDefaultArchiveFileSpecified = await checkArchiveFileAndNotify();
	if (!isDefaultArchiveFileSpecified) {
		return undefined;
	}

	const fileEdit = new WorkspaceEdit();
	const archiveFileEdit = new WorkspaceEdit();
	const archiveFileUri = Uri.file(extensionConfig.defaultArchiveFile);
	const archiveDocument = await workspace.openTextDocument(archiveFileUri);
	let taskLineNumbersToArchive = [];

	for (const task of tasks) {
		// Only root tasks provided will be archived
		if (task.parentTaskLineNumber !== undefined) {
			continue;
		}
		// Recurring tasks cannot be archived
		if (task.due?.isRecurring) {
			continue;
		}
		taskLineNumbersToArchive.push(task.lineNumber);
		if (task.subtasks.length) {
			taskLineNumbersToArchive.push(...getNestedTasksLineNumbers(task.subtasks));
		}
	}

	// A subtask may have been collected twice (directly and via its parent)
	taskLineNumbersToArchive = unique(taskLineNumbersToArchive);
	for (const lineNumber of taskLineNumbersToArchive) {
		const task = getTaskAtLineExtension(lineNumber);
		if (!task) {
			continue;
		}
		const line = document.lineAt(lineNumber);
		archiveTaskWorkspaceEdit(fileEdit, archiveFileEdit, archiveDocument, document.uri, line, true);
	}

	await applyEdit(fileEdit, document);
	await applyEdit(archiveFileEdit, archiveDocument);
	updateArchivedTasks();
	return undefined;
}
/**
 * Reveal the line/task in the file.
 *
 * Move cursor, reveal range, highlight the line for a moment
 */
export async function revealTask(lineNumber: number, document?: TextDocument) {
	const documentToReveal = document ?? await getActiveOrDefaultDocument();
	const editor = await window.showTextDocument(documentToReveal);
	const range = new Range(lineNumber, 0, lineNumber, 0);
	editor.selection = new Selection(range.start, range.end);
	editor.revealRange(range, TextEditorRevealType.Default);
	// Highlight for a short time revealed range
	const lineHighlightDecorationType = window.createTextEditorDecorationType({
		backgroundColor: '#ffa30468',
		isWholeLine: true,
	});
	editor.setDecorations(lineHighlightDecorationType, [range]);
	setTimeout(() => {
		editor.setDecorations(lineHighlightDecorationType, []);
	}, 700);
}
/**
 * Recurring tasks completion state should reset every day.
 * This function goes through all tasks in a document and resets their completion/count, adds `{overdue}` tag when needed
 */
export async function resetAllRecurringTasks(document: TextDocument, lastVisit: Date | string = new Date()) {
	if (typeof lastVisit === 'string') {
		lastVisit = new Date(lastVisit);
	}
	const edit = new WorkspaceEdit();
	const tasks = (await parseDocument(document)).tasks;
	const now = new Date();
	const nowWithoutTime = dateWithoutTime(now);

	for (const task of tasks) {
		if (task.due?.isRecurring) {
			const line = document.lineAt(task.lineNumber);
			if (task.done) {
				// New day: clear completion state so the task recurs
				removeCompletionDateWorkspaceEdit(edit, document.uri, task);
				removeStartWorkspaceEdit(edit, document.uri, task);
				removeDurationWorkspaceEdit(edit, document.uri, task);
			} else {
				if (!task.overdue && !dayjs().isSame(lastVisit, 'day')) {
					// Walk each missed day since the last visit; the first day the
					// task was due marks it overdue (tagged with that date).
					const lastVisitWithoutTime = dateWithoutTime(lastVisit);
					const daysSinceLastVisit = dayjs(nowWithoutTime).diff(lastVisitWithoutTime, 'day');
					for (let i = daysSinceLastVisit; i > 0; i--) {
						const date = dayjs().subtract(i, 'day');
						const res = new DueDate(task.due.raw, {
							targetDate: date.toDate(),
						});
						if (res.isDue === DueState.due || res.isDue === DueState.overdue) {
							addOverdueSpecialTagWorkspaceEdit(edit, document.uri, line, date.format(DATE_FORMAT));
							break;
						}
					}
				}
			}
			const count = task.count;
			if (count) {
				setCountCurrentValueWorkspaceEdit(edit, document.uri, count, '0');
			}
		}
	}
	return applyEdit(edit, document);
}
/**
 * Insert line break `\n` and some text to the file
 */
export async function appendTaskToFile(text: string, filePath: string) {
	const uri = Uri.file(filePath);
	const document = await workspace.openTextDocument(uri);
	const edit = new WorkspaceEdit();
	const eofPosition = document.lineAt(document.lineCount - 1).rangeIncludingLineBreak.end;
	edit.insert(uri, eofPosition, `\n${text}`);
	return applyEdit(edit, document);
}
// ────────────────────────────────────────────────────────────
// ──── Do not apply edit ─────────────────────────────────────
// ────────────────────────────────────────────────────────────
export function toggleTaskCollapseWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, lineNumber: number) {
	const line = document.lineAt(lineNumber);
	const task = getTaskAtLineExtension(lineNumber);
	if (task?.collapseRange) {
		edit.delete(document.uri, task.collapseRange);
	} else {
		edit.insert(document.uri, line.range.end, ' {c}');
	}
}
export function deleteTaskWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, lineNumber: number) {
	edit.delete(document.uri, document.lineAt(lineNumber).rangeIncludingLineBreak);
}
export function removeOverdueWorkspaceEdit(edit: WorkspaceEdit, uri: Uri, task: TheTask) {
	if (task.overdueRange) {
		edit.delete(uri, task.overdueRange);
	}
}
export function insertCompletionDateWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, line: TextLine, task: TheTask, forceIncludeTime = false) {
	const dateInIso = getDateInISOFormat(new Date(), forceIncludeTime || extensionConfig.completionDateIncludeTime);
	const newCompletionDate = specialTag(SpecialTagName.completionDate, extensionConfig.completionDateIncludeDate ?
dateInIso : undefined); if (task.completionDateRange) { edit.replace(document.uri, task.completionDateRange, newCompletionDate); } else { edit.insert(document.uri, new Position(line.lineNumber, line.range.end.character), ` ${newCompletionDate}`); } if (task.start) { insertDurationWorkspaceEdit(edit, document, line, task); } } export function insertDurationWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, line: TextLine, task: TheTask) { if (!task.start) { return; } const newDurationDate = specialTag(SpecialTagName.duration, durationTo(task, true, extensionConfig.durationIncludeSeconds)); if (task.durationRange) { edit.replace(document.uri, task.durationRange, newDurationDate); } else { edit.insert(document.uri, line.range.end, ` ${newDurationDate}`); } } export function removeCompletionDateWorkspaceEdit(edit: WorkspaceEdit, uri: Uri, task: TheTask) { if (task.completionDateRange) { edit.delete(uri, task.completionDateRange); } } export function removeDurationWorkspaceEdit(edit: WorkspaceEdit, uri: Uri, task: TheTask) { if (task.durationRange) { edit.delete(uri, task.durationRange); } } export function removeStartWorkspaceEdit(edit: WorkspaceEdit, uri: Uri, task: TheTask) { if (task.startRange) { edit.delete(uri, task.startRange); } } export function archiveTaskWorkspaceEdit(edit: WorkspaceEdit, archiveFileEdit: WorkspaceEdit, archiveDocument: TextDocument, uri: Uri, line: TextLine, shouldDelete: boolean) { appendTaskToFileWorkspaceEdit(archiveFileEdit, archiveDocument, line.text);// Add task to archive file if (shouldDelete) { edit.delete(uri, line.rangeIncludingLineBreak);// Delete task from active file } } function addOverdueSpecialTagWorkspaceEdit(edit: WorkspaceEdit, uri: Uri, line: TextLine, overdueDateString: string) { edit.insert(uri, new Position(line.lineNumber, line.range.end.character), ` {overdue:${overdueDateString}}`); } export function setCountCurrentValueWorkspaceEdit(edit: WorkspaceEdit, uri: Uri, count: Count, value: string) { const 
charIndexWithOffset = count.range.start.character + 'count:'.length + 1; const currentRange = new Range(count.range.start.line, charIndexWithOffset, count.range.start.line, charIndexWithOffset + String(count.current).length); edit.replace(uri, currentRange, String(value)); } function appendTaskToFileWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, text: string) { const eofPosition = document.lineAt(document.lineCount - 1).rangeIncludingLineBreak.end; edit.insert(document.uri, eofPosition, `\n${text}`); } export function toggleCommentAtLineWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, lineNumber: number) { const line = document.lineAt(lineNumber); if (line.text.startsWith('# ')) { edit.delete(document.uri, new Range(lineNumber, 0, lineNumber, 2)); } else { edit.insert(document.uri, new Position(lineNumber, 0), '# '); } } export function editTaskWorkspaceEdit(edit: WorkspaceEdit, document: TextDocument, task: TheTask) { const newTaskAsText = taskToString(task); const line = document.lineAt(task.lineNumber); edit.replace(document.uri, line.range, newTaskAsText); }
the_stack
import { CSSResult, supportsAdoptingStyleSheets, } from '@spectrum-web-components/base'; import coreStyles from './theme.css.js'; declare global { interface Window { ShadyCSS: { nativeShadow: boolean; prepareTemplate( template: HTMLTemplateElement, elementName: string, typeExtension?: string ): void; styleElement(host: HTMLElement): void; ScopingShim: { prepareAdoptedCssText( cssTextArray: string[], elementName: string ): void; }; }; } interface ShadowRoot { adoptedStyleSheets?: CSSStyleSheet[]; } } type FragmentType = 'color' | 'scale' | 'core' | 'app'; type SettableFragmentTypes = 'color' | 'scale'; type FragmentMap = Map<string, { name: string; styles: CSSResult }>; export type ThemeFragmentMap = Map<FragmentType, FragmentMap>; export type Color = 'light' | 'lightest' | 'dark' | 'darkest'; export type Scale = 'medium' | 'large'; const ScaleValues = ['medium', 'large']; const ColorValues = ['light', 'lightest', 'dark', 'darkest']; type FragmentName = Color | Scale | 'core' | 'app'; export interface ThemeData { color?: Color; scale?: Scale; lang?: string; } type ThemeKindProvider = { [P in SettableFragmentTypes]: Color | Scale | ''; }; export interface ProvideLang { callback: (lang: string) => void; } /** * @element sp-theme * * @slot - Content on which to apply the CSS Custom Properties defined by the current theme configuration */ export class Theme extends HTMLElement implements ThemeKindProvider { private static themeFragmentsByKind: ThemeFragmentMap = new Map(); private static defaultFragments: Set<FragmentName> = new Set(['core']); private static templateElement?: HTMLTemplateElement; private static instances: Set<Theme> = new Set(); static get observedAttributes(): string[] { return ['color', 'scale', 'lang']; } protected attributeChangedCallback( attrName: SettableFragmentTypes, old: string | null, value: string | null ): void { if (old === value) { return; } if (attrName === 'color') { this.color = value as Color; } else if (attrName === 'scale') { 
this.scale = value as Scale; } else if (attrName === 'lang' && !!value) { this.lang = value; this._provideContext(); } } private requestUpdate(): void { if (window.ShadyCSS !== undefined && !window.ShadyCSS.nativeShadow) { window.ShadyCSS.styleElement(this); } else { this.shouldAdoptStyles(); } } public shadowRoot!: ShadowRoot; private _color: Color | '' = ''; get color(): Color | '' { const themeFragments = Theme.themeFragmentsByKind.get('color'); const { name } = (themeFragments && themeFragments.get('default')) || {}; return this._color || (name as Color) || ''; } set color(newValue: Color | '') { if (newValue === this._color) return; const color = !!newValue && ColorValues.includes(newValue) ? newValue : this.color; if (color !== this._color) { this._color = color; this.requestUpdate(); } if (color) { this.setAttribute('color', color); } else { this.removeAttribute('color'); } } private _scale: Scale | '' = ''; get scale(): Scale | '' { const themeFragments = Theme.themeFragmentsByKind.get('scale'); const { name } = (themeFragments && themeFragments.get('default')) || {}; return this._scale || (name as Scale) || ''; } set scale(newValue: Scale | '') { if (newValue === this._scale) return; const scale = !!newValue && ScaleValues.includes(newValue) ? 
newValue : this.scale; if (scale !== this._scale) { this._scale = scale; this.requestUpdate(); } if (scale) { this.setAttribute('scale', scale); } else { this.removeAttribute('scale'); } } private get styles(): CSSResult[] { const themeKinds: FragmentType[] = [ ...Theme.themeFragmentsByKind.keys(), ]; const styles = themeKinds.reduce((acc, kind) => { const kindFragments = Theme.themeFragmentsByKind.get( kind ) as FragmentMap; const addStyles = ( name: FragmentName, kind?: FragmentType ): void => { const currentStyles = kindFragments.get(name); if (currentStyles && (!kind || this.hasAttribute(kind))) { acc.push(currentStyles.styles); } }; if (kind === 'app' || kind === 'core') { addStyles(kind); } else { const { [kind]: name } = this; addStyles(<FragmentName>name, kind); } return acc; }, [] as CSSResult[]); return [...styles]; } private static get template(): HTMLTemplateElement { if (!this.templateElement) { this.templateElement = document.createElement('template'); this.templateElement.innerHTML = '<slot></slot>'; } return this.templateElement; } constructor() { super(); this.attachShadow({ mode: 'open' }); const node = document.importNode(Theme.template.content, true); this.shadowRoot.appendChild(node); this.shouldAdoptStyles(); this.addEventListener( 'sp-query-theme', this.onQueryTheme as EventListener ); this.addEventListener( 'sp-language-context', this._handleContextPresence as EventListener ); this.updateComplete = this.__createDeferredPromise(); } public updateComplete!: Promise<boolean>; private __resolve!: (compelted: boolean) => void; private __createDeferredPromise(): Promise<boolean> { return new Promise((resolve) => { this.__resolve = resolve; }); } private onQueryTheme(event: CustomEvent<ThemeData>): void { if (event.defaultPrevented) { return; } event.preventDefault(); const { detail: theme } = event; theme.color = this.color || undefined; theme.scale = this.scale || undefined; theme.lang = this.lang || document.documentElement.lang || 
navigator.language; } protected connectedCallback(): void { this.shouldAdoptStyles(); // Note, first update/render handles styleElement so we only call this if // connected after first update. /* c8 ignore next 3 */ if (window.ShadyCSS !== undefined) { window.ShadyCSS.styleElement(this); } // Add `this` to the instances array. Theme.instances.add(this); const manageDir = (): void => { const { dir } = this; this.trackedChildren.forEach((el) => { el.setAttribute('dir', dir === 'rtl' ? dir : 'ltr'); }); }; if (!this.observer) { this.observer = new MutationObserver(manageDir); } this.observer.observe(this, { attributes: true, attributeFilter: ['dir'], }); if (!this.hasAttribute('dir')) { let dirParent = ((this as HTMLElement).assignedSlot || this.parentNode) as HTMLElement | DocumentFragment | ShadowRoot; while ( dirParent !== document.documentElement && !(dirParent instanceof Theme) ) { dirParent = ((dirParent as HTMLElement).assignedSlot || // step into the shadow DOM of the parent of a slotted node dirParent.parentNode || // DOM Element detected (dirParent as ShadowRoot).host) as | HTMLElement | DocumentFragment | ShadowRoot; } this.dir = dirParent.dir === 'rtl' ? dirParent.dir : 'ltr'; } requestAnimationFrame(() => manageDir()); } protected disconnectedCallback(): void { // Remove `this` to the instances array. 
Theme.instances.delete(this); this.observer.disconnect(); } private observer!: MutationObserver; public startManagingContentDirection(el: HTMLElement): void { this.trackedChildren.add(el); } public stopManagingContentDirection(el: HTMLElement): void { this.trackedChildren.delete(el); } private trackedChildren: Set<HTMLElement> = new Set(); private _updateRequested = false; private async shouldAdoptStyles(): Promise<void> { if (!this._updateRequested) { this.updateComplete = this.__createDeferredPromise(); this._updateRequested = true; this._updateRequested = await false; this.adoptStyles(); this.__resolve(true); } } protected adoptStyles(): void { const styles = this.styles; // No test coverage on Edge // There are three separate cases here based on Shadow DOM support. // (1) shadowRoot polyfilled: use ShadyCSS // (2) shadowRoot.adoptedStyleSheets available: use it. // (3) shadowRoot.adoptedStyleSheets polyfilled: append styles after // rendering /* c8 ignore next */ if ( window.ShadyCSS !== undefined && !window.ShadyCSS.nativeShadow && window.ShadyCSS.ScopingShim ) { // For browsers using the shim, there seems to be one set of // processed styles per template, so it is hard to nest styles. So, // for those, we load in all style fragments and then switch using a // host selector (e.g. 
:host([color='dark'])) const fragmentCSS: string[] = []; for (const [kind, fragments] of Theme.themeFragmentsByKind) { for (const [name, { styles }] of fragments) { if (name === 'default') continue; let cssText = styles.cssText; if (!Theme.defaultFragments.has(name as FragmentName)) { cssText = cssText.replace( ':host', `:host([${kind}='${name}'])` ); } fragmentCSS.push(cssText); } } window.ShadyCSS.ScopingShim.prepareAdoptedCssText( fragmentCSS, this.localName ); window.ShadyCSS.prepareTemplate(Theme.template, this.localName); } else if (supportsAdoptingStyleSheets) { const styleSheets: CSSStyleSheet[] = []; for (const style of styles) { styleSheets.push(style.styleSheet as CSSStyleSheet); } this.shadowRoot.adoptedStyleSheets = styleSheets; } else { const styleNodes = this.shadowRoot.querySelectorAll('style'); styleNodes.forEach((element) => element.remove()); styles.forEach((s) => { const style = document.createElement('style'); style.textContent = s.cssText; this.shadowRoot.appendChild(style); }); } } static registerThemeFragment( name: FragmentName, kind: FragmentType, styles: CSSResult ): void { const fragmentMap = Theme.themeFragmentsByKind.get(kind) || new Map(); if (fragmentMap.size === 0) { Theme.themeFragmentsByKind.set(kind, fragmentMap); // we're adding our first fragment for this kind, set as default fragmentMap.set('default', { name, styles }); Theme.defaultFragments.add(name); } fragmentMap.set(name, { name, styles }); Theme.instances.forEach((instance) => instance.shouldAdoptStyles()); } private _contextConsumers = new Map<HTMLElement, ProvideLang['callback']>(); private _provideContext(): void { this._contextConsumers.forEach((consume) => consume(this.lang)); } private _handleContextPresence(event: CustomEvent<ProvideLang>): void { const target = event.composedPath()[0] as HTMLElement; if (this._contextConsumers.has(target)) { this._contextConsumers.delete(target); } else { this._contextConsumers.set(target, event.detail.callback); const callback = 
this._contextConsumers.get(target); if (callback) { callback( this.lang || document.documentElement.lang || navigator.language ); } } } } Theme.registerThemeFragment('core', 'core', coreStyles);
the_stack
import { test } from "@siteimprove/alfa-test"; import { h } from "@siteimprove/alfa-dom/h"; import { Device } from "@siteimprove/alfa-device"; import { Context } from "@siteimprove/alfa-selector"; import { Style } from "../src/style"; const device = Device.standard(); test("#cascaded() returns the cascaded value of a property", (t) => { const element = <div style={{ color: "red" }} />; const style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "red", }, source: h.declaration("color", "red").toJSON(), }); }); test("#cascaded() correctly handles duplicate properties", (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", [ h.declaration("color", "red"), h.declaration("color", "green"), ]), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "green", }, source: h.declaration("color", "green").toJSON(), }); }); test("#cascaded() returns the most specific property value", (t) => { const element = <div style={{ color: "green !important" }} />; h.document( [], [ h.sheet([ h.rule.style("div.foo", { color: "green" }), h.rule.style("div", { color: "red" }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "green", }, source: h.declaration("color", "green", true).toJSON(), }); }); test("#cascaded() correctly handles inline styles overriding the sheet", (t) => { const element = <div style={{ color: "green !important" }} />; h.document([element], [h.sheet([h.rule.style("div", { color: "red" })])]); const style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "green", }, source: h.declaration("color", "green", true).toJSON(), }); }); test(`#cascaded() correctly handles an 
important declaration overriding inline styles`, (t) => { const element = <div style={{ color: "green" }} />; h.document( [element], [h.sheet([h.rule.style("div", { color: "red !important" })])] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "red", }, source: h.declaration("color", "red", true).toJSON(), }); }); test(`#cascaded() correctly handles important inline styles overriding an important declaration`, (t) => { const element = <div style={{ color: "green !important" }} />; h.document( [element], [h.sheet([h.rule.style("div", { color: "red !important" })])] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "green", }, source: h.declaration("color", "green", true).toJSON(), }); }); test(`#cascaded() correctly handles a shorthand declaration overriding a longhand declaration`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflowX: "visible", overflow: "hidden", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow", "hidden").toJSON(), }); }); test(`#cascaded() correctly handles a longhand declaration overriding a shorthand declaration`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflow: "hidden", overflowX: "visible", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "visible", }, source: h.declaration("overflow-x", "visible").toJSON(), }); }); test(`#cascaded() expands a var() function`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { "--hidden": "hidden", overflowX: "var(--hidden)", }), ]), ] 
); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() expands a var() function with a fallback`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflowX: "var(--hidden, hidden)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var(--hidden, hidden)").toJSON(), }); }); test(`#cascaded() expands a var() function with an inherited value`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("main", { "--hidden": "hidden", }), h.rule.style("div", { overflowX: "var(--hidden)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() expands a var() function with an overridden value`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("main", { "--hidden": "hidden", }), h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "visible", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "visible", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() expands a var() function with a value that contains another var() function`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "var(--really-hidden)", "--really-hidden": "hidden", }), ]), ] ); const style = Style.from(element, device); 
t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() expands multiple var() functions in the same declaration`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("div", { overflow: "var(--hidden) var(--visible)", "--hidden": "hidden", "--visible": "visible", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow", "var(--hidden) var(--visible)").toJSON(), }); t.deepEqual(style.cascaded("overflow-y").get().toJSON(), { value: { type: "keyword", value: "visible", }, source: h.declaration("overflow", "var(--hidden) var(--visible)").toJSON(), }); }); test(`#cascaded() expands several var() function references to the same variable`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflow: "var(--hidden) var(--hidden)", "--hidden": "hidden", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow", "var(--hidden) var(--hidden)").toJSON(), }); t.deepEqual(style.cascaded("overflow-y").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow", "var(--hidden) var(--hidden)").toJSON(), }); }); test(`#cascaded() expands a var() function with a fallback with a var() function with a fallback`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflowX: "var(--foo, var(--bar, hidden))", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h .declaration("overflow-x", "var(--foo, var(--bar, 
hidden))") .toJSON(), }); }); test(`#cascaded() returns "unset" when a var() function variable isn't defined`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("div", { overflowX: "var(--visible)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--visible)").toJSON(), }); }); test(`#cascaded() returns "unset" when a var() function fallback is empty`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("div", { overflowX: "var(--visible,)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--visible,)").toJSON(), }); }); test(`#cascaded() returns "unset" when declaration with a var() function is invalid after substitution`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("div", { overflowX: "var(--visible)", "--visible": "foo", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--visible)").toJSON(), }); }); test(`#cascaded() returns "unset" when a var() function is invalid`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "var(foo)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() returns "unset" when var() functions contain cyclic references`, (t) => { const element = <div />; 
h.document( [element], [ h.sheet([ h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "var(--really-hidden)", "--really-hidden": "var(--hidden)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() returns "unset" when a custom property referenced by a var() function has its guaranteed-invalid initial value`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "initial", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); }); test(`#cascaded() returns "unset" when confronted with a billion laughs`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { overflowX: "var(--prop30)", "--prop1": "lol", "--prop2": "var(--prop1) var(--prop1)", "--prop3": "var(--prop2) var(--prop2)", "--prop4": "var(--prop3) var(--prop3)", "--prop5": "var(--prop4) var(--prop4)", "--prop6": "var(--prop5) var(--prop5)", "--prop7": "var(--prop6) var(--prop6)", "--prop8": "var(--prop7) var(--prop7)", "--prop9": "var(--prop8) var(--prop8)", "--prop10": "var(--prop9) var(--prop9)", "--prop11": "var(--prop10) var(--prop10)", "--prop12": "var(--prop11) var(--prop11)", "--prop13": "var(--prop12) var(--prop12)", "--prop14": "var(--prop13) var(--prop13)", "--prop15": "var(--prop14) var(--prop14)", "--prop16": "var(--prop15) var(--prop15)", "--prop17": "var(--prop16) var(--prop16)", "--prop18": "var(--prop17) var(--prop17)", "--prop19": "var(--prop18) var(--prop18)", "--prop20": "var(--prop19) var(--prop19)", "--prop21": "var(--prop20) var(--prop20)", "--prop22": "var(--prop21) var(--prop21)", 
"--prop23": "var(--prop22) var(--prop22)", "--prop24": "var(--prop23) var(--prop23)", "--prop25": "var(--prop24) var(--prop24)", "--prop26": "var(--prop25) var(--prop25)", "--prop27": "var(--prop26) var(--prop26)", "--prop28": "var(--prop27) var(--prop27)", "--prop29": "var(--prop28) var(--prop28)", "--prop30": "var(--prop29) var(--prop29)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--prop30)").toJSON(), }); }); test(`#cascaded() correctly resolves var() function references within context of the corresponding element`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("main", { "--really-hidden": "var(--hidden)", "--hidden": "hidden", }), // This declaration references `--really-hidden`, but inherits its value // from `main` above. The substitution of `--really-hidden` therefore // happens within context of `main` and the `--hidden` variable defined // for `div` will therefore not apply. 
// NOTE(review): tail of a test case that begins before this chunk — code left untouched.
h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "var(--really-hidden)", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); });
// An !important custom property declaration wins over a later non-important
// redeclaration, so the var() reference still resolves to "hidden".
test(`#cascaded() gives precedence to !important custom properties used in var() function references`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { "--hidden": "hidden !important", }), h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "visible", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); });
// "--hidden: initial" makes the var() reference invalid at computed-value time;
// the property becomes "unset" rather than using the inherited "--hidden" from <main>.
test(`#cascaded() does not fall back on the inherited value of a custom property referenced by a var() function if the first value is invalid`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("main", { "--hidden": "hidden", }), h.rule.style("div", { overflowX: "var(--hidden)", "--hidden": "initial", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--hidden)").toJSON(), }); });
// Same as above, but the var() fallback ("foo") is itself invalid for overflow-x.
test(`#cascaded() does not fall back on the inherited value of a custom property referenced by a var() function if the first value is invalid and its fallback is also invalid`, (t) => { const element = <div />; h.document( [<main>{element}</main>], [ h.sheet([ h.rule.style("main", { "--hidden": "hidden", }), h.rule.style("div", { overflowX: "var(--hidden, foo)", "--hidden": "initial", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "unset", }, source: h.declaration("overflow-x", "var(--hidden, foo)").toJSON(), }); });
// Whitespace around the custom-property name inside var() is tolerated.
test(`#cascaded() accept spaces around variable name in a var() function`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { "--hidden": "hidden", overflowX: "var( --hidden )", }), ]), ] ); const style = Style.from(element, device); t.deepEqual(style.cascaded("overflow-x").get().toJSON(), { value: { type: "keyword", value: "hidden", }, source: h.declaration("overflow-x", "var( --hidden )").toJSON(), }); });
// The :hover rule applies only when the style is computed with a hover Context.
test(`#cascaded() resolves :hover style for an element`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { color: "red", }), h.rule.style("div:hover", { color: "blue", }), ]), ] ); let style = Style.from(element, device, Context.hover(element)); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "blue", }, source: h.declaration("color", "blue").toJSON(), }); style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "red", }, source: h.declaration("color", "red").toJSON(), }); });
// Same shape as the :hover test, using a focus Context.
test(`#cascaded() resolves :focus style for an element`, (t) => { const element = <div />; h.document( [element], [ h.sheet([ h.rule.style("div", { color: "red", }), h.rule.style("div:focus", { color: "blue", }), ]), ] ); let style = Style.from(element, device, Context.focus(element)); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "blue", }, source: h.declaration("color", "blue").toJSON(), }); style = Style.from(element, device); t.deepEqual(style.cascaded("color").get().toJSON(), { value: { type: "color", format: "named", color: "red", }, source: h.declaration("color", "red").toJSON(), }); });
// "initial" resolves background-color to transparent black while the !important
// flag from the declaration is preserved on the specified value's source.
test(`#specified() keeps the !important flag of properties set to initial`, (t) => { const element = <div style={{ backgroundColor: "initial !important" }} />; const style = Style.from(element, device); t.deepEqual(style.specified("background-color").toJSON(), { value: { type: "color", format: "rgb", red: { type: "percentage", value: 0 }, green: { type: "percentage", value: 0 }, blue: { type: "percentage", value: 0 }, alpha: { type: "percentage", value: 0 }, }, source: { name: "background-color", value: "initial", important: true }, }); });
the_stack
// Upload-form auto-fill for the easy-seed userscript: takes the parsed torrent
// info from the source site and populates the target tracker's upload form,
// dispatching to per-site handlers and applying per-site quirks inline.
import { CURRENT_SITE_INFO, CURRENT_SITE_NAME, HDB_TEAM } from '../const';
import { getBDType, getTMDBIdByIMDBId, getIMDBIdByUrl, getBDInfoOrMediaInfo, } from '../common';
import { getTeamName, matchSelectForm, filterNexusDescription, isChineseTacker, } from './common';
import handleIts from './its';
import handleTJUPT from './tjupt';
import handleHDRoute from './hdr';
import handleBib from './bib';
import handleBb from './bB';
import handlePTP from './ptp';
import handlePTN from './ptn';
import handleGPW from './gpw';
import handleNPU from './npubits';
import handleBYR from './byr';
import handleSC from './sc';
import handleKG from './kg';
import handleBHD from './bhd';
import autoFill from './autofill';
// Select-box keys resolved through matchSelectForm.
type SelectKey = 'videoCodec'|'videoType'|'resolution'|'source'|'area'
// Entry point: fill the current site's upload form from the parsed info.
const fillTargetForm = (info:TorrentInfo.Info) => {
  autoFill(info || {});
  if (!info) {
    return;
  }
  console.log(info);
  // Sites with fully custom forms are delegated to dedicated handlers and
  // short-circuit here. NOTE(review): some branches return false, others
  // return undefined — callers appear to ignore the value, but confirm.
  if (CURRENT_SITE_NAME === 'bB') {
    handleBb(info);
    return false;
  }
  if (CURRENT_SITE_NAME === 'PTP') {
    handlePTP(info);
    return false;
  }
  if (CURRENT_SITE_NAME === 'GPW') {
    handleGPW(info);
    return false;
  }
  if (CURRENT_SITE_NAME === 'NPUBits') {
    handleNPU(info);
    return false;
  }
  if (CURRENT_SITE_NAME === 'BYR') {
    handleBYR(info);
    return false;
  }
  if (CURRENT_SITE_NAME === 'SC') {
    handleSC(info);
    return false;
  }
  if (CURRENT_SITE_NAME === 'KG') {
    handleKG(info);
    return;
  }
  if (CURRENT_SITE_NAME === 'BeyondHD') {
    handleBHD(info);
    return;
  }
  if (CURRENT_SITE_NAME === 'PTSBAO' && localStorage.getItem('autosave')) {
    localStorage.removeItem('autosave');
  }
  const currentSiteInfo = CURRENT_SITE_INFO as Site.SiteInfo;
  const imdbId = getIMDBIdByUrl(info.imdbUrl || '');
  const isBluray = info.videoType.match(/bluray/i);
  const { screenshots = [] } = info;
  const imdbSelector = currentSiteInfo?.imdb.selector;
  // HDRoute/HDSpace want the bare numeric id; other sites take the full URL.
  if (CURRENT_SITE_NAME.match(/HDRoute|HDSpace/)) {
    $(imdbSelector).val(imdbId?.replace('tt', '') ?? '');
  } else if (imdbSelector) {
    $(imdbSelector).val(info.imdbUrl || '');
  }
  // Rework the title to follow HDBits naming rules (movie name + year).
  if (CURRENT_SITE_NAME === 'HDBits') {
    let mediaTitle = info.title.replace(/([^\d]+)\s+([12][90]\d{2})/, (match, p1, p2) => {
      return `${info.movieName || info.movieAkaName} ${p2}`;
    });
    if (info.videoType === 'remux') {
      mediaTitle = mediaTitle.replace(/\s+(bluray|blu-ray)/ig, '');
    }
    info.title = mediaTitle;
  }
  // TJUPT has no "name" field configured, hence the guard.
  if (currentSiteInfo.name) {
    const { title, subtitle } = info;
    let torrentTitle = title;
    if (CURRENT_SITE_NAME === 'TTG' && subtitle) {
      torrentTitle += `[${subtitle}]`;
    } else if (CURRENT_SITE_NAME.match(/SSD|iTS|HDChina/)) {
      torrentTitle = title.replace(/\s/ig, '.');
    } else if (CURRENT_SITE_NAME.match(/PuTao/)) {
      torrentTitle = `[${getChineseName(info)}]${title}`;
    }
    $(currentSiteInfo.name.selector).val(torrentTitle);
  }
  // Prevent the torrent-file picker from auto-overwriting the name field.
  disableTorrentChange();
  // Fill the four common fields shared by most sites.
  const commonInfoKeys = ['subtitle', 'douban', 'area', 'audioCodec'];
  type Key = 'subtitle' | 'douban' | 'area' | 'audioCodec';
  commonInfoKeys.forEach(key => {
    const siteInfo = currentSiteInfo[key as Key];
    if (siteInfo && siteInfo.selector) {
      let value = info[key as 'subtitle' | 'area' | 'audioCodec'];
      if (key === 'douban') {
        value = info.doubanUrl;
      } else if (key === 'area' || key === 'audioCodec') {
        // area/audioCodec are mapped through the site's value map.
        value = (siteInfo as Site.SelectorMap).map[value as string];
      }
      $(siteInfo.selector).val(value as string);
    }
  });
  const mediaInfo = info.mediaInfo;
  let description = '';
  // Chinese ("Nexus") sites take the full description directly.
  if (info.description) {
    // Strip leading whitespace/newlines from the description.
    description = info.description.replace(/^(\s+)/g, '');
    if (isChineseTacker(currentSiteInfo.siteType) && CURRENT_SITE_NAME !== 'SSD') {
      // Chinese sites that want the douban block prepended.
      const { doubanInfo } = info;
      if (doubanInfo) {
        description = `${doubanInfo}\n${description}`;
      }
    } else {
      // Non-Chinese targets: drop the Chinese-only portions of the source description.
      const { sourceSiteType } = info;
      if (isChineseTacker(sourceSiteType) && CURRENT_SITE_NAME !== 'Bib') {
        description = filterNexusDescription(info);
      }
    }
  }
  if (currentSiteInfo.mediaInfo) {
    if (CURRENT_SITE_NAME.match(/^(Blutopia|Aither)/)) {
      // Blutopia/Aither have a dedicated bdinfo textarea for disc uploads.
      const selector = isBluray ? 'textarea[name="bdinfo"]' : currentSiteInfo.mediaInfo.selector;
      $(selector).val(mediaInfo);
      description = description.replace(mediaInfo.trim(), '');
    } else if (!(isBluray && CURRENT_SITE_NAME.match(/^(HDBits)/))) {
      // HDB only takes mediainfo in this field; bdinfo stays in the description.
      $(currentSiteInfo.mediaInfo.selector).val(mediaInfo);
      description = description.replace(mediaInfo.trim(), '');
    }
  }
  // Remove screenshots from the description when the site has its own field.
  if (currentSiteInfo.screenshots) {
    screenshots.forEach(img => {
      if (description.includes(img)) {
        description = description.replace(img, '');
        if (!img.match(/\[url=.+?\[url]/)) {
          description = description.replace(/\[img\]\[\/img\]\n*/g, '');
        }
      }
    });
  }
  // SSD-specific fields (thumbs up to SSD!).
  if (CURRENT_SITE_NAME === 'SSD') {
    $(currentSiteInfo.imdb.selector).val((info.doubanUrl || info.imdbUrl) as string);
    $(currentSiteInfo.screenshots.selector).val(screenshots.join('\n'));
    if (info.category === 'tvPack' || info.title.match(/Trilogy|Collection/i) || (info.subtitle && info.subtitle.match(/合集/))) {
      // NOTE(review): jQuery recommends .prop('checked', true) for checkboxes;
      // .attr only affects the initial state — confirm before changing (used in
      // several places below as well).
      $('input[name="pack"]').attr('checked', 'true');
    }
  }
  // HDAI ("海带") specific fields.
  if (CURRENT_SITE_NAME === 'HDAI') {
    $(currentSiteInfo.imdb.selector).val((info.doubanUrl || info.imdbUrl) as string);
    $(currentSiteInfo.screenshots.selector).val(screenshots.join('\n'));
    if (isBluray) {
      $('input[type="checkbox"][name="tag[o]"]').attr('checked', 'true');
    }
  }
  // Poster: prefer info.poster, then a douban poster from the description,
  // then the first [img] in the description.
  if (currentSiteInfo.poster) {
    let poster = info.poster;
    if (!poster) {
      const doubanPosterImage = (info.description + info.doubanInfo).match(/\[img\](http[^[]+?(poster|(img\d\.doubanio))[^[]+?)\[\/img\]/);
      if (doubanPosterImage && doubanPosterImage[1]) {
        poster = doubanPosterImage[1];
      } else {
        poster = description.match(/\[img\](.+?)\[\/img\]/)?.[1] ?? '';
      }
    }
    if (poster) {
      // NOTE(review): other fields pass `.selector` to $(); here the whole
      // `currentSiteInfo.poster` config entry is passed directly — verify it
      // is itself a selector string in the site config.
      $(currentSiteInfo.poster).val(poster);
      if (CURRENT_SITE_NAME === 'HDRoute') {
        $('input[name="poster"]').val(poster);
        description = description.replace(poster, '');
      }
    }
  }
  // Blutopia renders [url=..][img=..]..[/img][/url] wrapped screenshots as thumbnails.
  if (CURRENT_SITE_NAME.match(/Blutopia/)) {
    info.screenshots.forEach(img => {
      const regStr = new RegExp(`\\[img\\](${img})\\[\\/img\\]`);
      if (description.match(regStr)) {
        // NOTE(review): in a replace callback the FIRST argument is the full
        // match — so p1 is the match and p2 is capture group 1 (the URL). The
        // names are misleading but the result is correct.
        description = description.replace(regStr, (p1, p2) => {
          return `[url=${p2}][img=350x350]${p2}[/img][/url]`;
        });
      }
    });
  }
  // Drop empty BBCode tag pairs left over from the removals above.
  description = filterEmptyTags(description);
  if (CURRENT_SITE_NAME === 'PTer') {
    // PTer wants mediainfo/bdinfo inside [hide] instead of [quote].
    const { mediaInfo, bdinfo } = getBDInfoOrMediaInfo(description);
    description = description.replace(`[quote]${mediaInfo}[/quote]`, `[hide=mediainfo]${mediaInfo}[/hide]`);
    description = description.replace(`[quote]${bdinfo}[/quote]`, `[hide=BDInfo]${bdinfo}[/hide]`);
  }
  if (CURRENT_SITE_NAME === 'PTN') {
    description = `${info.imdbUrl}\n\n${description}`;
  }
  // Optional "thanks to the original uploader" quote (user-toggleable).
  const thanksQuoteClosed = GM_getValue('easy-seed.thanks-quote-closed') || '';
  if (!thanksQuoteClosed && info.sourceSite !== undefined) {
    description = getThanksQuote(info) + description.trim();
  }
  $(currentSiteInfo.description.selector).val(description);
  // Site-specific handling from here on.
  if (CURRENT_SITE_NAME.match(/Blutopia|HDPOST|ACM|Aither/)) {
    // UNIT3D sites want the bare numeric IMDB id.
    const fillIMDBId = currentSiteInfo.siteType === 'UNIT3D' ? imdbId.replace('tt', '') : imdbId;
    $(currentSiteInfo.imdb.selector).val(fillIMDBId);
    getTMDBIdByIMDBId(imdbId).then(data => {
      $(currentSiteInfo.tmdb.selector).val(data.id);
    });
    if (CURRENT_SITE_NAME.match(/ACM/i)) {
      const { category, videoType } = info;
      // ACM swaps the meaning of videoType and category.
      info.category = videoType;
      info.videoType = category;
      // Blu-ray uploads need the disc-size subtype (BD25/BD50/uhd50/...).
      if (isBluray) {
        let bdType = getBDType(info.size);
        if (videoType === 'uhdbluray' && bdType === 'BD50') {
          bdType = 'uhd50';
        }
        info.category = bdType || '';
      }
    }
  }
  if (currentSiteInfo.category) {
    const category = currentSiteInfo.category.map[info.category];
    const keyArray = ['videoCodec', 'videoType', 'resolution', 'source', 'area'];
    let finalSelectArray:string[] = [];
    if (Array.isArray(category)) {
      // Ambiguous mapping: narrow the candidate list with each select key until
      // exactly one option remains.
      finalSelectArray = [...category];
      keyArray.forEach(key => {
        finalSelectArray = matchSelectForm(currentSiteInfo, info, key as SelectKey, finalSelectArray);
        if (finalSelectArray.length === 1) {
          $(currentSiteInfo.category.selector).val(finalSelectArray[0]);
        }
      });
    } else {
      [...keyArray, 'category'].forEach(key => {
        matchSelectForm(currentSiteInfo, info, key as SelectKey, finalSelectArray);
      });
    }
  }
  if (currentSiteInfo.format) {
    const formatData = currentSiteInfo.format;
    $(formatData.selector).val(formatData.map[info.format as string]);
  }
  if (currentSiteInfo.image) {
    $(currentSiteInfo.image.selector).val(info.image || '');
  }
  // These sites react to a category "change" event; fire it after a delay so
  // the value set above is picked up. TODO confirm 1s is enough on slow pages.
  if (CURRENT_SITE_NAME.match(/HDHome|PTHome|SoulVoice|1PTBA|HDAtmos|3Wmg/i)) {
    setTimeout(() => {
      const event = new Event('change');
      document.querySelector(currentSiteInfo.category.selector)?.dispatchEvent(event);
    }, 1000);
  }
  // Anonymous-upload control (checkbox, or a select with a specific value).
  if (currentSiteInfo.anonymous) {
    const { selector, value = '' } = currentSiteInfo.anonymous;
    if (value) {
      $(selector).val(value);
    } else {
      $(selector).attr('checked', 'true');
    }
  }
  // Tag checkboxes.
  if (currentSiteInfo.tags) {
    Object.keys(info.tags).forEach(key => {
      if (info.tags[key] && currentSiteInfo.tags[key]) {
        $(currentSiteInfo.tags[key]).attr('checked', 'true');
      }
    });
  }
  // Release team.
  fillTeamName(info);
  // Special cases the generic config cannot express: DIY disc categories.
  if (CURRENT_SITE_NAME.match(/PTHome|HDSky|LemonHD|1PTBA|52pt|Audiences/i)) {
    if (info.tags.diy) {
      let categoryValue = '';
      if (CURRENT_SITE_NAME.match(/Audiences|PTHome/)) {
        categoryValue = info.videoType === 'bluray' ? '14' : '13';
      } else if (CURRENT_SITE_NAME === 'HDSky') {
        categoryValue = info.videoType === 'bluray' ? '12' : '14';
      } else if (CURRENT_SITE_NAME === 'LemonHD') {
        $('select[name="tag_diy"]').val('yes');
        return;
      } else if (CURRENT_SITE_NAME === '1PTBA') {
        categoryValue = info.videoType === 'bluray' ? '1' : '4';
      } else if (CURRENT_SITE_NAME === '52pt') {
        categoryValue = info.videoType === 'bluray' ? '2' : '12';
      }
      $(currentSiteInfo.videoType.selector).val(categoryValue);
    }
  }
  /*
   * HDU special handling:
   * why does the media-type list need yet another TV category??
   */
  if (CURRENT_SITE_NAME.match(/HDU/)) {
    let videoTypeValue = '';
    const { resolution, videoType, category } = info;
    const isTV = category.match(/tv/);
    if (videoType === 'remux') {
      if (resolution === '2160p') {
        videoTypeValue = isTV ? '16' : '15';
      } else {
        videoTypeValue = isTV ? '12' : '3';
      }
    }
    if (isTV) {
      if (videoType === 'encode') {
        videoTypeValue = '14';
      } else if (videoType === 'web') {
        videoTypeValue = '13';
      }
    }
    if (videoTypeValue) {
      $(currentSiteInfo.videoType.selector).val(videoTypeValue);
    }
    if (videoType.match(/bluray/)) {
      $(currentSiteInfo.category.selector).val('424');
    }
  }
  // TJUPT (北洋).
  if (CURRENT_SITE_NAME === 'TJUPT') {
    $('#browsecat').trigger('change');
    handleTJUPT(info);
  }
  // NYPT (南洋): category-specific title inputs appear after a delay.
  if (CURRENT_SITE_NAME === 'NYPT') {
    $('#browsecat').trigger('change');
    const domTimeout = setTimeout(() => {
      const catMap = {
        movie: '#movie_enname',
        tv: '#series_enname',
        tvPack: '#series_enname',
        documentary: '#doc_enname',
        variety: '#show_enname',
        cartoon: '#anime_enname',
      };
      const selector = catMap[info.category as keyof typeof catMap];
      if (selector) {
        $(selector).val(info.title);
      }
      clearTimeout(domTimeout);
    }, 2000);
  }
  // UNIT3D TV uploads: season/episode numbers parsed from the title.
  if (currentSiteInfo.siteType === 'UNIT3D' && info.category.match(/tv/)) {
    const season = info.title.match(/S0?(\d{1,2})/i)?.[1] ?? 1;
    const episode = info.title.match(/EP?0?(\d{1,3})/i)?.[1] ?? 0;
    $('#season_number').val(season);
    $('#episode_number').val(episode);
  }
  // HDRoute.
  if (CURRENT_SITE_NAME === 'HDRoute') {
    handleHDRoute(info);
  }
  // HDT.
  if (CURRENT_SITE_NAME === 'HDT') {
    if (info.category !== 'tvPack') {
      // NOTE(review): selector is missing its closing "]" — likely meant
      // 'select[name="season"]'; also .val('true') on what looks like a
      // <select> is suspect. Confirm against the HDT form before fixing.
      $('select[name="season"').val('true');
    }
    // The IMDB URL must end with a trailing "/".
    if (imdbId) {
      $(currentSiteInfo.imdb.selector).val(`https://www.imdb.com/title/${imdbId}/`);
    }
  }
  // PTer: EU releases that are not in English go to area "8".
  if (CURRENT_SITE_NAME === 'PTer') {
    const language = info.description.match(/(语\s+言)\s+(.+)/)?.[2] ?? '';
    if (!language.match(/英语/) && info.area === 'EU') {
      $(currentSiteInfo.area.selector).val('8');
    }
  }
  // HDHome iPad releases use dedicated category ids.
  if (CURRENT_SITE_NAME === 'HDHome') {
    if (info.title.match(/iPad/i)) {
      const categoryMap = {
        movie: '412',
        tv: '426',
        tvPack: '433',
        documentary: '418',
      };
      const ipadCat = categoryMap[info.category as keyof typeof categoryMap];
      if (ipadCat) {
        $('#browsecat').val(ipadCat);
      }
    }
  }
  if (CURRENT_SITE_NAME === 'Bib' && info.doubanBookInfo) {
    handleBib(info);
  }
  if (CURRENT_SITE_NAME === 'iTS') {
    handleIts(info);
  }
  if (CURRENT_SITE_NAME === 'UHDBits') {
    $(currentSiteInfo.imdb.selector).val(imdbId);
    const teamName = getTeamName(info);
    $('#team').val(teamName === 'other' ? 'Unknown' : teamName);
    if (info.title.match(/web-?rip/i)) {
      $(currentSiteInfo.videoType.selector).val('WEBRip');
    }
    $('#imdb_button').trigger('click');
  }
  if (CURRENT_SITE_NAME === '52PT') {
    const { tags, videoType, resolution } = info;
    let videoTypeValue = videoType;
    if (videoType.match(/bluray/)) {
      // Discs with Chinese audio/subs get their own categories.
      if (tags.chinese_audio || tags.cantonese_audio || tags.chinese_subtitle) {
        videoTypeValue = videoType === 'bluray' ? '14' : '15';
      }
    } else if (videoType === 'remux' && resolution === '2160p') {
      videoTypeValue = '5';
    }
    $(currentSiteInfo.videoType.selector).val(videoTypeValue);
  }
  if (CURRENT_SITE_NAME === 'BTSCHOOL') {
    $(imdbSelector).val(imdbId);
    if (info.doubanUrl) {
      const doubanId = info.doubanUrl.match(/\/(\d+)/)?.[1] ?? '';
      $(currentSiteInfo.douban.selector).val(doubanId);
    }
  }
  if (CURRENT_SITE_NAME === 'PTN') {
    handlePTN(info);
  }
  if (CURRENT_SITE_NAME === 'HDTime') {
    if (info.videoType.match(/bluray/i)) {
      $(currentSiteInfo.category.selector).val('424');
    }
  }
  if (CURRENT_SITE_NAME === 'HDFans') {
    const { videoType, resolution, tags } = info;
    if (videoType === 'remux') {
      $(currentSiteInfo.videoType.selector).val(resolution === '2160p' ? '10' : '8');
    } else if (videoType === 'encode') {
      const map = {
        '2160p': '9',
        '1080p': '5',
        '1080i': '5',
        '720p': '11',
      };
      $(currentSiteInfo.videoType.selector).val(map[resolution as keyof typeof map] || '16');
    }
    // DIY overrides the encode/remux mapping above.
    if (tags.diy) {
      $(currentSiteInfo.videoType.selector).val(resolution === '2160p' ? '2' : '4');
    }
  }
};
// Maps the release team parsed from the title to the site's team select value,
// with BTSCHOOL/HDAI special cases.
const fillTeamName = (info:TorrentInfo.Info) => {
  const teamConfig = (CURRENT_SITE_INFO as Site.SiteInfo).team;
  const teamName = getTeamName(info);
  interface Team {
    [key:string]:string
  }
  if (teamName && teamConfig) {
    const formateTeamName = teamConfig.map[teamName.toLowerCase() as keyof Team];
    const matchValue = formateTeamName || teamConfig.map.other;
    if (HDB_TEAM.includes(teamName) && CURRENT_SITE_NAME === 'BTSCHOOL') {
      $(teamConfig.selector).val(teamConfig.map.hdbint);
      return;
    }
    if (CURRENT_SITE_NAME === 'HDAI' && !formateTeamName) {
      // HDAI accepts free-text team names.
      $('input[name="team"]').val(teamName);
      return;
    }
    if (matchValue) {
      $(teamConfig.selector).val(matchValue.toLowerCase());
    }
  }
};
// Keeps the site from renaming the torrent when a file is picked: clones the
// name input without its id/name so the original keeps the filled value.
// (Continues on the next line — the ?? fallback is completed there.)
const disableTorrentChange = () => {
  const nameSelector = (CURRENT_SITE_INFO as Site.SiteInfo).name?.selector ??
''; // completes the `??` fallback begun on the previous line
  if (nameSelector.match(/^#\w+/)) {
    // Clone the name input, strip name/id so only the visible clone changes,
    // and keep the original (which the site scripts target) hidden.
    const nameDom = $(nameSelector).clone().attr('name', '').hide();
    $(nameSelector).attr('id', '').after(nameDom);
  }
};
/**
 * Builds the "thanks to the original uploader" quote prepended to the
 * description. Chinese trackers (plus HDPOST/GPW) get the Chinese wording.
 */
const getThanksQuote = (info:TorrentInfo.Info) => {
  const isChineseSite = isChineseTacker(CURRENT_SITE_INFO.siteType) || CURRENT_SITE_NAME.match(/HDPOST|GPW/);
  let thanksQuote = `转自[b]${info.sourceSite}[/b],感谢原发布者!`;
  if (!isChineseSite) {
    thanksQuote = `Torrent from [b]${info.sourceSite}[/b].\nAll thanks to the original uploader!`;
  }
  return `[quote]${thanksQuote}[/quote]\n\n`;
};
/**
 * Removes empty BBCode tag pairs (e.g. "[quote][/quote]", "[b] [/b]") from a
 * description, repeating so pairs that become empty after an inner removal
 * are also dropped.
 *
 * FIX: the previous version recursed whenever the pattern matched at all, but
 * a MISMATCHED pair (e.g. "[b][/i]") matches without being replaced, so the
 * string never changed and the recursion never terminated (stack overflow).
 * We now iterate until a pass leaves the string unchanged, which preserves
 * the old behavior for all well-formed inputs and terminates on the rest.
 */
const filterEmptyTags = (description:string):string => {
  // eslint-disable-next-line prefer-regex-literals
  const reg = new RegExp('\\[(\\w+)(?:=(?:\\w|\\s)+)?\\]\\s*\\[\\/(\\w+)\\]', 'g');
  let previous: string;
  do {
    previous = description;
    description = description.replace(reg, (_match, p1, p2) => {
      // Only drop the pair when opening and closing tag names agree.
      if (p1 === p2) {
        return '';
      }
      return _match;
    });
  } while (description !== previous);
  return description;
};
/**
 * Extracts a Chinese title from a NexusPHP-style description: prefers the
 * "片 名" (original title) row; if it contains no CJK characters, falls back
 * to the first "译 名" (translated title) entry, else returns ''.
 */
function getChineseName (info:TorrentInfo.Info) {
  const { description } = info;
  const originalName = description.match(/(片\s+名)\s+(.+)?/)?.[2] ?? '';
  const translateName = description.match(/(译\s+名)\s+(.+)/)?.[2]?.split('/')?.[0] ?? '';
  let chineseName = originalName;
  if (!originalName.match(/[\u4e00-\u9fa5]+/)) {
    chineseName = translateName.match(/[\u4e00-\u9fa5]+/) ? translateName : '';
  }
  return chineseName.trim();
}
export {
  fillTargetForm,
}
;
the_stack
import { SerializerError, SerializerErrorType } from '../errors'
import { CosmosTransaction } from '../protocols/cosmos/CosmosTransaction'
import { MainProtocolSymbols, ProtocolSymbols, SubProtocolSymbols } from '../utils/ProtocolSymbols'
import { IACMessageWrapper } from './iac-message-wrapper'
import { IACMessageType } from './interfaces'
import { IACMessageDefinitionObjectV3 } from './message'
import { SerializableUnsignedCosmosTransaction } from './schemas/definitions/unsigned-transaction-cosmos'
import { SchemaInfo, SchemaRoot } from './schemas/schema'
import { AeternityTransactionValidator } from './unsigned-transactions/aeternity-transactions.validator'
import { BitcoinTransactionValidator } from './unsigned-transactions/bitcoin-transactions.validator'
import { CosmosTransactionValidator } from './unsigned-transactions/cosmos-transactions.validator'
import { EthereumTransactionValidator } from './unsigned-transactions/ethereum-transactions.validator'
import { SubstrateTransactionValidator } from './unsigned-transactions/substrate-transactions.validator'
import { TezosTransactionValidator } from './unsigned-transactions/tezos-transactions.validator'
import { TezosBTCTransactionValidator } from './unsigned-transactions/xtz-btc-transactions.validator'
import { TransactionValidator } from './validators/transactions.validator'

// Generated JSON schemas for each IAC message type (per protocol where needed).
const accountShareResponse: SchemaRoot = require('./schemas/generated/account-share-response.json')
const messageSignRequest: SchemaRoot = require('./schemas/generated/message-sign-request.json')
const messageSignResponse: SchemaRoot = require('./schemas/generated/message-sign-response.json')

const unsignedTransactionAeternity: SchemaRoot = require('./schemas/generated/transaction-sign-request-aeternity.json')
const unsignedTransactionBitcoinSegwit: SchemaRoot = require('./schemas/generated/transaction-sign-request-bitcoin-segwit.json')
const unsignedTransactionBitcoin: SchemaRoot = require('./schemas/generated/transaction-sign-request-bitcoin.json')
const unsignedTransactionCosmos: SchemaRoot = require('./schemas/generated/transaction-sign-request-cosmos.json')
const unsignedTransactionEthereum: SchemaRoot = require('./schemas/generated/transaction-sign-request-ethereum.json')
const unsignedTransactionTezos: SchemaRoot = require('./schemas/generated/transaction-sign-request-tezos.json')
const unsignedTransactionTezosSapling: SchemaRoot = require('./schemas/generated/transaction-sign-request-tezos-sapling.json')
const unsignedTransactionSubstrate: SchemaRoot = require('./schemas/generated/transaction-sign-request-substrate.json')

const signedTransactionAeternity: SchemaRoot = require('./schemas/generated/transaction-sign-response-aeternity.json')
const signedTransactionBitcoinSegwit: SchemaRoot = require('./schemas/generated/transaction-sign-response-bitcoin-segwit.json')
const signedTransactionBitcoin: SchemaRoot = require('./schemas/generated/transaction-sign-response-bitcoin.json')
const signedTransactionCosmos: SchemaRoot = require('./schemas/generated/transaction-sign-response-cosmos.json')
const signedTransactionEthereum: SchemaRoot = require('./schemas/generated/transaction-sign-response-ethereum.json')
const signedTransactionTezos: SchemaRoot = require('./schemas/generated/transaction-sign-response-tezos.json')
const signedTransactionTezosSapling: SchemaRoot = require('./schemas/generated/transaction-sign-response-tezos-sapling.json')
const signedTransactionSubstrate: SchemaRoot = require('./schemas/generated/transaction-sign-response-substrate.json')

// Rebuilds the CosmosTransaction instance from its JSON form after schema validation.
function unsignedTransactionTransformerCosmos(value: SerializableUnsignedCosmosTransaction): SerializableUnsignedCosmosTransaction {
  value.transaction = CosmosTransaction.fromJSON(value) as any

  return value
}

/**
 * Serializes and deserializes IAC messages, validating them against JSON
 * schemas registered per message type and (optionally) per protocol.
 */
export class SerializerV3 {
  // Registry keyed by "<schemaId>-<protocol>" (or just "<schemaId>" when generic).
  private static readonly schemas: Map<string, SchemaInfo> = new Map()

  // Registers a schema; throws SCHEMA_ALREADY_EXISTS on duplicate registration.
  public static addSchema(schemaId: number, schema: SchemaInfo, protocol?: ProtocolSymbols): void {
    const protocolSpecificSchemaName: string = SerializerV3.getSchemaName(schemaId, protocol)

    if (this.schemas.has(protocolSpecificSchemaName)) {
      throw new SerializerError(SerializerErrorType.SCHEMA_ALREADY_EXISTS, `Schema ${protocolSpecificSchemaName} already exists`)
    }
    this.schemas.set(protocolSpecificSchemaName, schema)
  }

  // Looks up a schema; throws SCHEMA_DOES_NOT_EXISTS when neither the
  // protocol-specific nor the generic entry is registered.
  public static getSchema(schemaId: number, protocol?: ProtocolSymbols): SchemaInfo {
    const protocolSpecificSchemaName: string = SerializerV3.getSchemaName(schemaId, protocol)

    // Try to get the protocol specific scheme, if it doesn't exist fall back to the generic one
    const schema: SchemaInfo | undefined = this.schemas.get(protocolSpecificSchemaName) ?? this.schemas.get(SerializerV3.getSchemaName(schemaId))
    if (!schema) {
      throw new SerializerError(SerializerErrorType.SCHEMA_DOES_NOT_EXISTS, `Schema ${protocolSpecificSchemaName} does not exist`)
    }

    return schema
  }

  // Builds the registry key; ERC20 sub-protocols (e.g. "eth-erc20-xyz") all
  // collapse onto the shared "eth-erc20" transaction schemas.
  private static getSchemaName(schemaId: number, protocol?: ProtocolSymbols): string {
    const schemaName = `${schemaId}-${protocol}`
    // NOTE(review): because of the parenthesization, the `protocol !== undefined`
    // guard only applies to the TransactionSignRequest arm — for
    // TransactionSignResponse this branch runs even with protocol undefined.
    // Harmless today ("<id>-undefined" never splits into 3 parts), but confirm
    // whether `protocol !== undefined && (... || ...)` was intended.
    if (
      (protocol !== undefined && schemaId === IACMessageType.TransactionSignRequest) ||
      schemaId === IACMessageType.TransactionSignResponse
    ) {
      const split = schemaName.split('-')
      if (split.length >= 3 && `${split[1]}-${split[2]}` === SubProtocolSymbols.ETH_ERC20) {
        return `${schemaId}-${SubProtocolSymbols.ETH_ERC20}`
      }
    }

    return protocol ? `${schemaId}-${protocol}` : schemaId.toString()
  }

  // Encodes messages after checking that every message type/protocol has a schema.
  public async serialize(messages: IACMessageDefinitionObjectV3[]): Promise<string> {
    if (
      messages.every((message: IACMessageDefinitionObjectV3) => {
        return SerializerV3.getSchema(message.type, message.protocol)
      })
    ) {
      const iacps: IACMessageWrapper = IACMessageWrapper.fromDecoded(JSON.parse(JSON.stringify(messages)))

      return iacps.encoded()
    } else {
      throw new SerializerError(SerializerErrorType.SCHEMA_DOES_NOT_EXISTS, `Unknown schema`)
    }
  }

  // Decodes a serialized payload back into message definition objects.
  public async deserialize(data: string): Promise<IACMessageDefinitionObjectV3[]> {
    let result: IACMessageWrapper
    try {
      result = IACMessageWrapper.fromEncoded(data)
    } catch {
      throw new Error('Cannot decode data')
    }

    const deserializedIACMessageDefinitionObjects: IACMessageDefinitionObjectV3[] = result.payload.asJson()

    return deserializedIACMessageDefinitionObjects
  }

  // Picks the transaction validator for a protocol, first by exact identifier,
  // then by prefix (so sub-protocols inherit their main protocol's validator).
  public serializationValidatorByProtocolIdentifier(protocolIdentifier: ProtocolSymbols): TransactionValidator {
    const validators: { [key in ProtocolSymbols]?: any } = {
      // TODO: Exhaustive list?
      eth: EthereumTransactionValidator,
      btc: BitcoinTransactionValidator,
      grs: BitcoinTransactionValidator,
      ae: AeternityTransactionValidator,
      xtz: TezosTransactionValidator,
      cosmos: CosmosTransactionValidator,
      polkadot: SubstrateTransactionValidator,
      kusama: SubstrateTransactionValidator,
      'xtz-btc': TezosBTCTransactionValidator
    }

    const exactMatch = Object.keys(validators).find((protocol) => protocolIdentifier === protocol)
    const startsWith = Object.keys(validators).find((protocol) => protocolIdentifier.startsWith(protocol))
    const validator = exactMatch ? exactMatch : startsWith

    // TODO: Only use validator if it's a transaction
    // if (!validator) {
    //   throw Error(`Validator not implemented for ${protocolIdentifier}, ${exactMatch}, ${startsWith}, ${validator}`)
    // }

    // NOTE(review): unknown protocols silently fall back to the Ethereum validator.
    return new validators[validator ?? 'eth']()
  }
}

// SerializerV3.addSchema(IACMessageType.MetadataRequest, '')
// SerializerV3.addSchema(IACMessageType.MetadataResponse, '')

// SerializerV3.addSchema(IACMessageType.AccountShareRequest, accountShareRequest)
SerializerV3.addSchema(IACMessageType.AccountShareResponse, { schema: accountShareResponse })

SerializerV3.addSchema(IACMessageType.MessageSignRequest, { schema: messageSignRequest })
SerializerV3.addSchema(IACMessageType.MessageSignResponse, { schema: messageSignResponse })

// TODO: Make sure that we have a schema for every protocol we support
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionAeternity }, MainProtocolSymbols.AE)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionBitcoinSegwit }, MainProtocolSymbols.BTC_SEGWIT)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionBitcoin }, MainProtocolSymbols.BTC)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionBitcoin }, MainProtocolSymbols.GRS)
SerializerV3.addSchema(
  IACMessageType.TransactionSignRequest,
  { schema: unsignedTransactionCosmos, transformer: unsignedTransactionTransformerCosmos },
  MainProtocolSymbols.COSMOS
)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionEthereum }, MainProtocolSymbols.ETH)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionEthereum }, SubProtocolSymbols.ETH_ERC20)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, MainProtocolSymbols.XTZ)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezosSapling }, MainProtocolSymbols.XTZ_SHIELDED)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_BTC)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_ETHTZ)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_KUSD)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_KT)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_USD)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_UUSD)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionTezos }, SubProtocolSymbols.XTZ_YOU)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionSubstrate }, MainProtocolSymbols.POLKADOT)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionSubstrate }, MainProtocolSymbols.KUSAMA)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionSubstrate }, MainProtocolSymbols.MOONBASE)
SerializerV3.addSchema(IACMessageType.TransactionSignRequest, { schema: unsignedTransactionSubstrate }, MainProtocolSymbols.MOONRIVER)

SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionAeternity }, MainProtocolSymbols.AE)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionBitcoinSegwit }, MainProtocolSymbols.BTC_SEGWIT)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionBitcoin }, MainProtocolSymbols.BTC)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionBitcoin }, MainProtocolSymbols.GRS)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionCosmos }, MainProtocolSymbols.COSMOS)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionEthereum }, MainProtocolSymbols.ETH)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionEthereum }, SubProtocolSymbols.ETH_ERC20)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, MainProtocolSymbols.XTZ)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezosSapling }, MainProtocolSymbols.XTZ_SHIELDED)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_BTC)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_ETHTZ)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_KUSD)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_KT)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_USD)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_UUSD)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionTezos }, SubProtocolSymbols.XTZ_YOU)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionSubstrate }, MainProtocolSymbols.POLKADOT)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionSubstrate }, MainProtocolSymbols.KUSAMA)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionSubstrate }, MainProtocolSymbols.MOONBASE)
SerializerV3.addSchema(IACMessageType.TransactionSignResponse, { schema: signedTransactionSubstrate }, MainProtocolSymbols.MOONRIVER)
the_stack
import { extend, remove, isNullOrUndefined, setStyleAttribute, removeClass, addClass } from '@syncfusion/ej2-base';
import { Query, Predicate } from '@syncfusion/ej2-data';
import { IRenderer, IGrid, LazyLoadArgs, LazyLoadGroupArgs, NotifyArgs, IRow } from '../base/interface';
import { ServiceLocator } from '../services/service-locator';
import { ContentRender } from './content-renderer';
import { ReturnType } from '../base/type';
import { Row } from '../models/row';
import { Column } from '../models/column';
import * as events from '../base/constant';
import { isRowEnteredInGrid, parentsUntil, setDisplayValue, generateExpandPredicates, getPredicates, getGroupKeysAndFields } from '../base/util';
import { Grid } from '../base/grid';
import { RowRenderer } from '../renderer/row-renderer';
import { GroupModelGenerator, GroupedData } from '../services/group-model-generator';
import { GroupSummaryModelGenerator, CaptionSummaryModelGenerator } from '../services/summary-model-generator';
import { AggregateColumnModel } from '../models/aggregate-model';
import { Cell } from '../models/cell';
import * as literals from '../base/string-literals';

/**
 * GroupLazyLoadRenderer is used to perform lazy load grouping.
 *
 * Child rows of a group caption are fetched on demand (on caption expand and
 * on scrolling near block boundaries) instead of all at once. Fetched row
 * models are kept per page in `groupCache`, and scroll position is tracked via
 * marker CSS classes ('e-lazyload-middle-down', 'e-lazyload-middle-up',
 * 'e-not-lazyload-end', 'e-not-lazyload-first') placed on sentinel rows.
 *
 * @hidden
 */
export class GroupLazyLoadRenderer extends ContentRender implements IRenderer {
    private locator: ServiceLocator;
    private groupGenerator: GroupModelGenerator;
    private summaryModelGen: GroupSummaryModelGenerator;
    private captionModelGen: CaptionSummaryModelGenerator;
    private rowRenderer: RowRenderer<Column>;

    constructor(parent: IGrid, locator?: ServiceLocator) {
        super(parent, locator);
        this.locator = locator;
        this.groupGenerator = new GroupModelGenerator(this.parent);
        this.summaryModelGen = new GroupSummaryModelGenerator(this.parent);
        this.captionModelGen = new CaptionSummaryModelGenerator(this.parent);
        this.rowRenderer = new RowRenderer<Column>(this.locator, null, this.parent);
        this.eventListener();
    }

    // Number of child rows already loaded for the caption currently being scrolled;
    // 0 means no scroll-load is in flight.
    private childCount: number = 0;
    // Cached row models to re-render for the pending scroll request.
    private scrollData: Row<Column>[] = [];
    private rowIndex: number;
    private rowObjectIndex: number;
    private isFirstChildRow: boolean = false;
    private isScrollDown: boolean = false;
    private isScrollUp: boolean = false;
    // Sentinel uids remembered by removeBlock() when rows are evicted in cache mode.
    private uid1: string;
    private uid2: string;
    private uid3: string;
    // Half of pageSize; used to position the mid-block lazy-load trigger rows.
    private blockSize: number;
    // Per-page (pageSettings.currentPage keyed) row model caches.
    private groupCache: { [x: number]: Row<Column>[] } = {};
    private startIndexes: { [x: number]: number[] } = {};
    // NOTE(review): keyed by caption row uid (a string) at runtime despite the
    // number[] index type — holds loaded child counts per caption.
    private captionCounts: { [x: number]: number[] } = {};
    // uid -> row model and uid -> row object index lookups, per page.
    private rowsByUid: { [x: number]: Row<Column>[] } = {};
    private objIdxByUid: { [x: number]: Row<Column>[] } = {};
    private initialGroupCaptions: { [x: number]: Row<Column>[] } = {};
    // Request types that must NOT invalidate the group cache (see actionBegin).
    private requestType: string[] = ['paging', 'columnstate', 'reorder', 'cancel', 'save', 'beginEdit', 'add', 'delete', 'filterbeforeopen', 'filterchoicerequest'];

    /** @hidden */
    public pageSize: number;
    /** @hidden */
    public cacheMode: boolean = false;
    /** @hidden */
    public cacheBlockSize: number = 5;
    /** @hidden */
    public ignoreAccent: boolean = this.parent.allowFiltering ? this.parent.filterSettings.ignoreAccent : false;
    /** @hidden */
    public allowCaseSensitive: boolean = false;

    // Wire grid events; setCache/setVisible/scrollReset etc. are defined elsewhere in this class.
    private eventListener(): void {
        this.parent.addEventListener(events.actionBegin, this.actionBegin.bind(this));
        this.parent.addEventListener(events.actionComplete, this.actionComplete.bind(this));
        this.parent.on(events.initialEnd, this.setLazyLoadPageSize, this);
        this.parent.on(events.setGroupCache, this.setCache, this);
        this.parent.on(events.lazyLoadScrollHandler, this.scrollHandler, this);
        this.parent.on(events.columnVisibilityChanged, this.setVisible, this);
        this.parent.on(events.groupCollapse, this.collapseShortcut, this);
    }

    /**
     * Expands a group caption row, triggering the `lazyLoadGroupExpand` event and
     * requesting the first page of child rows (or reusing cached ones).
     *
     * @param {HTMLTableRowElement} tr - specifies the table row element
     * @returns {void}
     * @hidden
     */
    public captionExpand(tr: HTMLTableRowElement): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rowsObject: Row<Column>[] = this.groupCache[page];
        const uid: string = tr.getAttribute('data-uid');
        const oriIndex: number = this.getRowObjectIndexByUid(uid);
        // Children were previously loaded if the next cached row is nested deeper.
        const isRowExist: boolean = rowsObject[oriIndex + 1] ? rowsObject[oriIndex].indent < rowsObject[oriIndex + 1].indent : false;
        const data: Row<Column> = rowsObject[oriIndex];
        const key: { fields: string[], keys: string[] } = getGroupKeysAndFields(oriIndex, rowsObject);
        const e: LazyLoadGroupArgs = { captionRowElement: tr, groupInfo: data, enableCaching: true, cancel: false };
        this.parent.trigger(events.lazyLoadGroupExpand, e, (args: LazyLoadGroupArgs) => {
            if (args.cancel) { return; }
            args.keys = key.keys;
            args.fields = key.fields;
            args.rowIndex = tr.rowIndex;
            // Fetch from the data source unless caching is on and rows are already cached.
            args.makeRequest = !args.enableCaching || !isRowExist;
            if (!args.enableCaching && isRowExist) {
                this.clearCache([uid]);
            }
            args.skip = 0;
            args.take = this.pageSize;
            data.isExpand = this.rowsByUid[page][data.uid].isExpand = true;
            this.captionRowExpand(args);
        });
    }

    /**
     * Collapses a group caption row, triggering the `lazyLoadGroupCollapse` event
     * and removing its rendered child rows from the DOM (cache is kept).
     *
     * @param {HTMLTableRowElement} tr - specifies the table row element
     * @returns {void}
     * @hidden
     */
    public captionCollapse(tr: HTMLTableRowElement): void {
        const cache: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        const rowIdx: number = tr.rowIndex;
        const uid: string = tr.getAttribute('data-uid');
        const captionIndex: number = this.getRowObjectIndexByUid(uid);
        const e: LazyLoadArgs = { captionRowElement: tr, groupInfo: cache[captionIndex], cancel: false };
        this.parent.trigger(events.lazyLoadGroupCollapse, e, (args: LazyLoadGroupArgs) => {
            if (args.cancel) { return; }
            args.isExpand = false;
            this.removeRows(captionIndex, rowIdx);
        });
    }

    /**
     * Derives the lazy-load page size from the viewport height: three times the
     * number of visible rows (unless a pageSize was set explicitly).
     *
     * @returns {void}
     * @hidden
     */
    public setLazyLoadPageSize(): void {
        const scrollEle: Element = this.parent.getContent().firstElementChild;
        const blockSize: number = Math.floor((scrollEle as HTMLElement).offsetHeight / this.parent.getRowHeight()) - 1;
        this.pageSize = this.pageSize ? this.pageSize : blockSize * 3;
        this.blockSize = Math.ceil(this.pageSize / 2);
    }

    /**
     * @returns {void}
     * @hidden
     */
    public clearLazyGroupCache(): void {
        this.clearCache();
    }

    // Drops cached child rows (and their loaded-count bookkeeping) for the given
    // caption uids; with no argument, clears all initially rendered captions.
    // Expanded captions are collapsed first so DOM and cache stay in sync.
    private clearCache(uids?: string[]): void {
        uids = uids ? uids : this.getInitialCaptionIndexes();
        const cache: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        if (uids.length) {
            for (let i: number = 0; i < uids.length; i++) {
                const capIdx: number = this.getRowObjectIndexByUid(uids[i]);
                const capRow: Row<Column> = cache[capIdx];
                if (!capRow) { continue; }
                if (this.captionCounts[this.parent.pageSettings.currentPage][capRow.uid]) {
                    // Also drop counts of nested captions until we leave this caption's subtree.
                    // NOTE(review): inner loop variable shadows the outer `i` intentionally-looking;
                    // it only walks the cache, the outer loop index is untouched.
                    for (let i: number = capIdx + 1; i < cache.length; i++) {
                        if (cache[i].indent === capRow.indent || cache[i].indent < capRow.indent) {
                            delete this.captionCounts[this.parent.pageSettings.currentPage][capRow.uid];
                            break;
                        }
                        if (cache[i].isCaptionRow) {
                            delete this.captionCounts[this.parent.pageSettings.currentPage][cache[i].uid];
                        }
                    }
                }
                if (capRow.isExpand) {
                    const tr: Element = this.parent.getRowElementByUID(capRow.uid);
                    if (!tr) { return; }
                    (this.parent as Grid).groupModule.expandCollapseRows(tr.querySelector('.e-recordplusexpand'));
                }
                const child: Row<Column>[] = this.getNextChilds(capIdx);
                if (!child.length) { continue; }
                // Include the subtree of a trailing nested caption in the splice range.
                let subChild: Row<Column>[] = [];
                if (child[child.length - 1].isCaptionRow) {
                    subChild = this.getChildRowsByParentIndex(cache.indexOf(child[child.length - 1]), false, false, null, true, true);
                }
                const start: number = cache.indexOf(child[0]);
                const end: number = subChild.length ? cache.indexOf(subChild[subChild.length - 1]) : cache.indexOf(child[child.length - 1]);
                cache.splice(start, end - (start - 1));
                this.refreshCaches();
            }
        }
    }

    // Rebuilds the uid -> row and uid -> index lookup maps from the current cache.
    private refreshCaches(): void {
        const page: number = this.parent.pageSettings.currentPage;
        const cache: Row<Column>[] = this.groupCache[page];
        this.rowsByUid = {};
        this.objIdxByUid = {};
        for (let i: number = 0; i < cache.length; i++) {
            this.maintainRows(cache[i], i);
        }
    }

    // Returns the uids of the caption rows captured at initial render for this page.
    private getInitialCaptionIndexes(): string[] {
        const page: number = this.parent.pageSettings.currentPage;
        const uids: string[] = [];
        for (let i: number = 0; i < this.initialGroupCaptions[page].length; i++) {
            uids.push(this.initialGroupCaptions[page][i].uid);
        }
        return uids;
    }

    /**
     * @param {string} uid - specifies the uid
     * @returns {number} returns the row object uid
     * @hidden
     */
    public getRowObjectIndexByUid(uid: string): number {
        return this.objIdxByUid[this.parent.pageSettings.currentPage][uid] as number;
    }

    // Keyboard shortcut path: expand/collapse the caption that owns the focused row.
    private collapseShortcut(args: { target: Element, collapse: boolean }): void {
        if (this.parent.groupSettings.columns.length && args.target && parentsUntil(args.target, literals.content) && args.target.parentElement.tagName === 'TR') {
            if (!args.collapse && parentsUntil(args.target, literals.row)) { return; }
            const row: Element = args.target.parentElement;
            const uid: string = row.getAttribute('data-uid');
            if (args.collapse) {
                // Collapse the parent caption of the focused data row.
                const rowObj: Row<Column> = this.getRowByUid(uid);
                const capRow: Row<Column> = this.getRowByUid(rowObj.parentUid);
                if (capRow.isCaptionRow && capRow.isExpand) {
                    const capEle: HTMLTableRowElement = this.getRowElementByUid(rowObj.parentUid);
                    (this.parent as Grid).groupModule.expandCollapseRows(capEle.cells[rowObj.indent - 1]);
                }
            } else {
                // Expand the focused caption row itself.
                const capRow: Row<Column> = this.getRowByUid(uid);
                if (capRow.isCaptionRow && !capRow.isExpand) {
                    const capEle: HTMLTableRowElement = this.getRowElementByUid(uid);
                    (this.parent as Grid).groupModule.expandCollapseRows(capEle.cells[capRow.indent]);
                }
            }
        }
    }

    // uid -> cached row model for the current page.
    private getRowByUid(uid: string): Row<Column> {
        return this.rowsByUid[this.parent.pageSettings.currentPage][uid] as Row<Column>;
    }

    // Invalidates or fixes up the cache before a grid action runs. Actions not in
    // `requestType` rebuild the cache; column reorder shifts cached cell positions
    // in place; add/delete always invalidate.
    private actionBegin(args: NotifyArgs): void {
        if (!args.cancel) {
            if (!this.requestType.some((value: string) => value === args.requestType)) {
                this.groupCache = {};
                this.resetRowMaintenance();
            }
            if (args.requestType === 'reorder' && this.parent.groupSettings.columns.length) {
                const keys: string[] = Object.keys(this.groupCache);
                for (let j: number = 0; j < keys.length; j++) {
                    const cache: Row<Column>[] = this.groupCache[keys[j]];
                    for (let i: number = 0; i < cache.length; i++) {
                        if (cache[i].isCaptionRow && !this.captionModelGen.isEmpty()) {
                            this.changeCaptionRow(cache[i], null, keys[j]);
                        }
                        if (cache[i].isDataRow) {
                            // Cell indexes are offset by the row's group indent level.
                            const from: number = (<{ fromIndex?: number }>args).fromIndex + cache[i].indent;
                            const to: number = (<{ toIndex?: number }>args).toIndex + cache[i].indent;
                            this.moveCells(cache[i].cells, from, to);
                        }
                    }
                }
            }
            if (args.requestType === 'delete' || ((<{ action?: string }>args).action === 'add' && args.requestType === 'save')) {
                this.groupCache = {};
                this.resetRowMaintenance();
            }
        }
    }

    // Resets scroll position after actions that re-render content.
    private actionComplete(args: NotifyArgs): void {
        if (!args.cancel && args.requestType !== 'columnstate' && args.requestType !== 'beginEdit' && args.requestType !== 'delete' && args.requestType !== 'save' && args.requestType !== 'reorder') {
            this.scrollReset();
        }
    }

    // Drops all per-page bookkeeping structures.
    private resetRowMaintenance(): void {
        this.startIndexes = {};
        this.captionCounts = {};
        this.rowsByUid = {};
        this.objIdxByUid = {};
        this.initialGroupCaptions = {};
    }

    // Moves a cached cell from `to` to `from`, padding with undefined when the
    // target index lies beyond the current cell list.
    private moveCells(arr: Cell<Column>[], from: number, to: number): void {
        if (from >= arr.length) {
            let k: number = from - arr.length;
            while ((k--) + 1) {
                arr.push(undefined);
            }
        }
        arr.splice(from, 0, arr.splice(to, 1)[0]);
    }

    // Removes the rendered child <tr>s of a collapsing caption (cache row models
    // stay). Stops at the next sibling caption, or after the trailing aggregate row.
    private removeRows(idx: number, trIdx: number): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rows: Row<Column>[] = this.groupCache[page];
        const trs: Element[] = [].slice.call(this.parent.getContent().querySelectorAll('tr'));
        let aggUid: string;
        if (this.parent.aggregates.length) {
            const agg: Row<Column>[] = this.getAggregateByCaptionIndex(idx);
            aggUid = agg.length ? agg[agg.length - 1].uid : undefined;
        }
        const indent: number = rows[idx].indent;
        this.addClass(this.getNextChilds(idx));
        rows[idx].isExpand = this.rowsByUid[page][rows[idx].uid].isExpand = false;
        let capUid: string;
        for (let i: number = idx + 1; i < rows.length; i++) {
            // First row at the same or shallower indent marks the end of this subtree.
            if (rows[i].indent === indent || rows[i].indent < indent) {
                capUid = rows[i].uid;
                break;
            }
            if (rows[i].isCaptionRow && rows[i].isExpand) {
                this.addClass(this.getNextChilds(i));
            }
        }
        for (let i: number = trIdx + 1; i < trs.length; i++) {
            if (trs[i].getAttribute('data-uid') === capUid) {
                break;
            } else if (trs[i].getAttribute('data-uid') === aggUid) {
                remove(trs[i]);
                break;
            } else {
                remove(trs[i]);
            }
        }
    }

    // Marks the mid-block row so the next expand re-installs the scroll trigger.
    private addClass(rows: Row<Column>[]): void {
        const last: Row<Column> = rows[this.blockSize];
        if (last) {
            last.lazyLoadCssClass = 'e-lazyload-middle-down';
        }
    }

    // Direct (one level deeper) children of the caption at `index`, read either
    // from the page cache or from the supplied row list.
    private getNextChilds(index: number, rowObjects?: Row<Column>[]): Row<Column>[] {
        const group: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        const rows: Row<Column>[] = rowObjects ? rowObjects : group;
        const indent: number = group[index].indent + 1;
        const childRows: Row<Column>[] = [];
        for (let i: number = rowObjects ? 0 : index + 1; i < rows.length; i++) {
            if (rows[i].indent < indent) {
                break;
            }
            if (rows[i].indent === indent) {
                childRows.push(rows[i]);
            }
        }
        return childRows;
    }

    // Central response handler for a lazy-load request: builds row models from the
    // fetched data (nested captions via generateCaptionRow, leaf rows via
    // generateDataRows), installs the block-boundary marker classes, merges the new
    // rows into the cache, renders them, then clears the transient scroll state.
    private lazyLoadHandler(args: {
        data: Object[], count: number, level: number, index: number,
        isRowExist: boolean, isScroll: boolean, up?: boolean, rowIndex?: number
    }): void {
        this.setStartIndexes();
        const tr: HTMLElement = this.parent.getContent().querySelectorAll('tr')[args.index];
        const uid: string = tr.getAttribute('data-uid');
        const captionIndex: number = this.getRowObjectIndexByUid(uid);
        const captionRow: IRow<Column> = this.groupCache[this.parent.pageSettings.currentPage][captionIndex];
        let rows: Row<Column>[] = args.isRowExist ? args.isScroll ? this.scrollData
            : this.getChildRowsByParentIndex(captionIndex, true, true, null, true) : [];
        this.scrollData = [];
        if (!args.isRowExist) {
            this.setRowIndexes(captionIndex, captionRow);
            this.refreshCaptionRowCount(this.groupCache[this.parent.pageSettings.currentPage][captionIndex], args.count);
            // 'GroupGuid' key signals grouped (nested caption) data rather than leaf records.
            if (Object.keys(args.data).indexOf('GroupGuid') !== -1) {
                for (let i: number = 0; i < args.data.length; i++) {
                    const data: Row<Column> = this.groupGenerator.generateCaptionRow(
                        args.data[i] as GroupedData, args.level, captionRow.parentGid, undefined, 0, captionRow.uid);
                    rows.push(data);
                    if (this.parent.aggregates.length) {
                        rows = rows.concat(<Row<Column>[]>(this.summaryModelGen.generateRows(args.data[i], { level: args.level + 1, parentUid: data.uid })));
                    }
                }
            } else {
                this.groupGenerator.index = this.getStartIndex(captionIndex, args.isScroll);
                rows = this.groupGenerator.generateDataRows(args.data, args.level, captionRow.parentGid, 0, captionRow.uid);
            }
        }
        const trIdx: number = args.isScroll ? this.rowIndex : args.index;
        const nxtChild: Row<Column>[] = this.getNextChilds(captionIndex, rows);
        const lastRow: boolean = !args.up ? this.hasLastChildRow(args.isScroll, args.count, nxtChild.length) : true;
        if (!args.isRowExist && !lastRow) {
            nxtChild[this.blockSize].lazyLoadCssClass = 'e-lazyload-middle-down';
        }
        if (!lastRow) {
            nxtChild[nxtChild.length - 1].lazyLoadCssClass = 'e-not-lazyload-end';
        }
        const aggregates: Row<Column>[] = !args.isScroll && !args.isRowExist ? this.getAggregateByCaptionIndex(captionIndex) : [];
        if (!args.up) {
            if (!args.isRowExist) {
                this.refreshRowObjects(rows, args.isScroll ? this.rowObjectIndex : captionIndex);
            }
        }
        this.render(trIdx, rows, lastRow, aggregates);
        if (this.isFirstChildRow && !args.up) {
            this.parent.getContent().firstElementChild.scrollTop = rows.length * this.parent.getRowHeight();
        }
        // Reset transient per-request state.
        this.isFirstChildRow = false;
        this.rowIndex = undefined;
        this.rowObjectIndex = undefined;
        this.childCount = 0;
    }

    // Records the total child count reported by the server for a caption row.
    private setRowIndexes(capIdx: number, row: IRow<Column>): void {
        if (!this.captionCounts[this.parent.pageSettings.currentPage]) {
            this.captionCounts[this.parent.pageSettings.currentPage] = {} as number[];
        }
        if (row.isCaptionRow) {
            this.captionCounts[this.parent.pageSettings.currentPage][row.uid] = (row.data as GroupedData).count;
        }
    }

    // Computes the absolute record index at which newly generated data rows start:
    // preceding-sibling counts + preceding-caption totals + the group's start offset.
    private getStartIndex(capIdx: number, isScroll: boolean): number {
        const page: number = this.parent.pageSettings.currentPage;
        const cache: Row<Column>[] = this.groupCache[page];
        if (isScroll) {
            return cache[this.rowObjectIndex].index + 1;
        }
        let count: number = 0;
        let idx: number = 0;
        const prevCapRow: Row<Column> = this.getRowByUid(cache[capIdx].parentUid);
        if (prevCapRow) {
            idx = this.prevCaptionCount(prevCapRow);
        }
        if (cache[capIdx].indent > 0) {
            for (let i: number = capIdx - 1; i >= 0; i--) {
                if (cache[i].indent < cache[capIdx].indent) {
                    break;
                }
                if (cache[i].isCaptionRow && cache[i].indent === cache[capIdx].indent) {
                    count = count + (cache[i].data as GroupedData).count;
                }
            }
        }
        const index: number = count + idx + this.startIndexes[page][(cache[capIdx] as IRow<Column>).parentGid];
        return index;
    }

    // Recursively sums record counts of captions preceding `prevCapRow` at its
    // indent level (and its ancestors), preferring recorded loaded counts.
    private prevCaptionCount(prevCapRow: Row<Column>): number {
        const page: number = this.parent.pageSettings.currentPage;
        const cache: Row<Column>[] = this.groupCache[page];
        let idx: number = 0;
        for (let i: number = cache.indexOf(prevCapRow) - 1; i >= 0; i--) {
            if (cache[i].indent === 0) {
                break;
            }
            if (cache[i].indent < prevCapRow.indent) {
                break;
            }
            if (cache[i].isCaptionRow && cache[i].indent === prevCapRow.indent) {
                const count: number = this.captionCounts[page][cache[i].uid];
                idx = idx + (count ? count : (cache[i].data as GroupedData).count);
            }
        }
        const capRow: Row<Column> = this.getRowByUid(prevCapRow.parentUid);
        if (capRow) {
            idx = idx + this.prevCaptionCount(capRow);
        }
        return idx;
    }

    // Builds cumulative start offsets for the top-level captions of this page
    // (each caption's offset = previous caption's offset + its record count).
    private setStartIndexes(): void {
        const cache: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        if (!this.startIndexes[this.parent.pageSettings.currentPage]) {
            const indexes: number[] = [];
            let idx: number;
            for (let i: number = 0; i < cache.length; i++) {
                if (cache[i].isCaptionRow) {
                    if (!indexes.length) {
                        indexes.push(0);
                    } else {
                        indexes.push((cache[idx].data as GroupedData).count + indexes[indexes.length - 1]);
                    }
                    idx = i;
                }
            }
            this.startIndexes[this.parent.pageSettings.currentPage] = indexes;
        }
    }

    // True when the just-received block completes the caption's child set.
    private hasLastChildRow(isScroll: boolean, captionCount: number, rowCount: number): boolean {
        return isScroll ? captionCount === this.childCount + rowCount : captionCount === rowCount;
    }

    private refreshCaptionRowCount(row: Row<Column>, count: number): void {
        (row.data as GroupedData).count = count;
    }

    // Inserts the generated aggregate and data <tr>s after the trigger row,
    // compensating scrollTop per inserted row while a cache-mode scroll is active.
    private render(trIdx: number, rows: Row<Column>[], hasLastChildRow: boolean, aggregates: Row<Column>[]): void {
        const tr: HTMLElement = this.parent.getContent().querySelectorAll('tr')[trIdx];
        const scrollEle: Element = this.parent.getContent().firstElementChild;
        const rowHeight: number = this.parent.getRowHeight();
        if (tr && aggregates.length) {
            for (let i: number = aggregates.length - 1; i >= 0; i--) {
                tr.insertAdjacentElement('afterend', this.rowRenderer.render(aggregates[i], this.parent.getColumns()));
            }
        }
        if (tr && rows.length) {
            // Insert in reverse so 'afterend' preserves original row order.
            for (let i: number = rows.length - 1; i >= 0; i--) {
                if (this.confirmRowRendering(rows[i])) {
                    tr.insertAdjacentElement('afterend', this.rowRenderer.render(rows[i], this.parent.getColumns()));
                    if (this.isScrollDown) {
                        scrollEle.scrollTop = scrollEle.scrollTop - rowHeight;
                    }
                    if (this.isScrollUp) {
                        scrollEle.scrollTop = scrollEle.scrollTop + rowHeight;
                    }
                }
            }
        }
        this.isScrollDown = false;
        this.isScrollUp = false;
    }

    /**
     * Registers a row model in the per-page uid lookup maps.
     *
     * @param {Row<Column>} row - specifies the row
     * @param {number} index - specifies the index
     * @returns {void}
     * @hidden
     */
    public maintainRows(row: Row<Column>, index?: number): void {
        const page: number = this.parent.pageSettings.currentPage;
        if (!this.rowsByUid[page]) {
            this.rowsByUid[page] = {} as Row<Column>[];
            this.objIdxByUid[page] = {} as Row<Column>[];
        }
        if (row.uid) {
            this.rowsByUid[page][row.uid] = row;
        }
        this.objIdxByUid[page][row.uid] = index;
    }

    // Skips rendering aggregate rows (no indent, neither data nor caption) whose
    // parent caption is collapsed.
    private confirmRowRendering(row: Row<Column>): boolean {
        let check: boolean = true;
        if (isNullOrUndefined(row.indent) && !row.isDataRow && !row.isCaptionRow) {
            const cap: Row<Column> = this.getRowByUid(row.parentUid);
            if (cap.isCaptionRow && !cap.isExpand) {
                check = false;
            }
        }
        return check;
    }

    // Splices `newRows` into the cache right after `index`, renumbering the uid
    // lookup maps, then refreshes the grid's current view data.
    private refreshRowObjects(newRows: Row<Column>[], index: number): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rowsObject: Row<Column>[] = this.groupCache[page];
        this.rowsByUid[page] = {} as Row<Column>[];
        this.objIdxByUid[page] = {} as Row<Column>[];
        const newRowsObject: Row<Column>[] = [];
        let k: number = 0;
        for (let i: number = 0; i < rowsObject.length; i++) {
            if (i === index) {
                this.maintainRows(rowsObject[i], k);
                newRowsObject.push(rowsObject[i]);
                k++;
                for (let j: number = 0; j < newRows.length; j++) {
                    this.maintainRows(newRows[j], k);
                    newRowsObject.push(newRows[j]);
                    k++;
                }
            } else {
                this.maintainRows(rowsObject[i], k);
                newRowsObject.push(rowsObject[i]);
                k++;
            }
        }
        this.groupCache[this.parent.pageSettings.currentPage] = extend([], newRowsObject) as Row<Column>[];
        this.updateCurrentViewData();
    }

    // Collects the aggregate (summary) rows that belong directly to the caption
    // at `index` — rows with no indent whose parentUid matches the caption.
    private getAggregateByCaptionIndex(index: number): Row<Column>[] {
        const cache: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        const parent: Row<Column> = cache[index];
        const indent: number = parent.indent;
        const uid: string = parent.uid;
        const agg: Row<Column>[] = [];
        for (let i: number = index + 1; i < cache.length; i++) {
            if (cache[i].indent === indent) {
                break;
            }
            if (isNullOrUndefined(cache[i].indent) && cache[i].parentUid === uid) {
                agg.push(cache[i]);
            }
        }
        return agg;
    }

    // Collects cached child rows of the caption at `index`.
    //   deep   - recurse into nested captions;
    //   block  - stop after one pageSize worth of direct children;
    //   includeAgg / includeCollapseAgg - append the caption's aggregate rows
    //   (the latter even when the caption is collapsed).
    private getChildRowsByParentIndex(
        index: number, deep?: boolean, block?: boolean, data?: Row<Column>[],
        includeAgg?: boolean, includeCollapseAgg?: boolean
    ): Row<Column>[] {
        const cache: Row<Column>[] = data ? data : this.groupCache[this.parent.pageSettings.currentPage];
        const parentRow: Row<Column> = cache[index];
        let agg: Row<Column>[] = [];
        if (!parentRow.isCaptionRow || (parentRow.isCaptionRow && !parentRow.isExpand && !includeCollapseAgg)) {
            return [];
        }
        if (includeAgg && this.parent.aggregates.length) {
            agg = this.getAggregateByCaptionIndex(index);
        }
        const indent: number = parentRow.indent;
        const uid: string = parentRow.uid;
        let rows: Row<Column>[] = [];
        let count: number = 0;
        for (let i: number = index + 1; i < cache.length; i++) {
            if (cache[i].parentUid === uid) {
                if (isNullOrUndefined(cache[i].indent)) {
                    continue;
                }
                count++;
                rows.push(cache[i]);
                if (deep && cache[i].isCaptionRow) {
                    rows = rows.concat(this.getChildRowsByParentIndex(i, deep, block, data, includeAgg));
                }
                if (block && count === this.pageSize) {
                    break;
                }
            }
            if (cache[i].indent === indent) {
                break;
            }
        }
        return rows.concat(agg);
    }

    /**
     * Returns the row models that should be rendered initially: either the
     * currently rendered rows (reorder) or each top-level caption plus its first
     * block of cached children.
     *
     * @param {boolean} isReorder - specifies the isreorder
     * @returns {Row<Column>[]} returns the row
     * @hidden
     */
    public initialGroupRows(isReorder?: boolean): Row<Column>[] {
        let rows: Row<Column>[] = [];
        const cache: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        if (isReorder) {
            return this.getRenderedRowsObject();
        }
        for (let i: number = 0; i < cache.length; i++) {
            if (cache[i].indent === 0) {
                rows.push(cache[i]);
                rows = rows.concat(this.getChildRowsByParentIndex(i, true, true, cache, true));
            }
        }
        return rows;
    }

    /**
     * Maps the currently rendered <tr> elements back to their cached row models.
     *
     * @returns {Row<Column>[]} retruns the row
     * @hidden
     */
    public getRenderedRowsObject(): Row<Column>[] {
        const rows: Row<Column>[] = [];
        const trs: HTMLTableRowElement[] = [].slice.call(this.parent.getContent().querySelectorAll('tr'));
        for (let i: number = 0; i < trs.length; i++) {
            rows.push(this.getRowByUid(trs[i].getAttribute('data-uid')));
        }
        return rows;
    }

    // Gathers one pageSize of cached sibling rows starting at `index` (expanded
    // nested captions contribute their subtree without consuming the budget —
    // `i--` keeps the count to same-indent siblings only).
    private getCacheRowsOnDownScroll(index: number): Row<Column>[] {
        let rows: Row<Column>[] = [];
        const rowsObject: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        let k: number = index;
        for (let i: number = 0; i < this.pageSize; i++) {
            if (!rowsObject[k] || rowsObject[k].indent < rowsObject[index].indent) {
                break;
            }
            if (rowsObject[k].indent === rowsObject[index].indent) {
                rows.push(rowsObject[k]);
                if (rowsObject[k].isCaptionRow && rowsObject[k].isExpand) {
                    rows = rows.concat(this.getChildRowsByParentIndex(k, true, true, null, true));
                }
            }
            if (rowsObject[k].indent > rowsObject[index].indent || isNullOrUndefined(rowsObject[k].indent)) {
                i--;
            }
            k++;
        }
        return rows;
    }

    // Gathers cached sibling rows between the `start` and `end` uids (exclusive of
    // `end`), including subtrees of expanded nested captions.
    private getCacheRowsOnUpScroll(start: string, end: string, index: number): Row<Column>[] {
        let rows: Row<Column>[] = [];
        const rowsObject: Row<Column>[] = this.groupCache[this.parent.pageSettings.currentPage];
        let str: boolean = false;
        for (let i: number = 0; i < rowsObject.length; i++) {
            if (str && (!rowsObject[i] || rowsObject[i].indent < rowsObject[index].indent || rowsObject[i].uid === end)) {
                break;
            }
            if (!str && rowsObject[i].uid === start) {
                str = true;
            }
            if (str && rowsObject[i].indent === rowsObject[index].indent) {
                rows.push(rowsObject[i]);
                if (rowsObject[i].isCaptionRow && rowsObject[i].isExpand) {
                    rows = rows.concat(this.getChildRowsByParentIndex(i, true, true, null, true));
                }
            }
        }
        return rows;
    }

    // Scroll dispatcher: finds the sentinel row that entered the viewport and
    // routes to the down-scroll, up-end, or (cache mode) up-scroll handler.
    // Bails out while a previous request is still pending (childCount !== 0).
    private scrollHandler(e: { scrollDown: boolean }): void {
        if (this.parent.isDestroyed || this.childCount) {
            return;
        }
        const downTrs: Element[] = [].slice.call(this.parent.getContent().getElementsByClassName('e-lazyload-middle-down'));
        const upTrs: Element[] = [].slice.call(this.parent.getContent().getElementsByClassName('e-lazyload-middle-up'));
        const endTrs: Element[] = [].slice.call(this.parent.getContent().getElementsByClassName('e-not-lazyload-end'));
        let tr: Element;
        let lazyLoadDown: boolean = false;
        let lazyLoadUp: boolean = false;
        let lazyLoadEnd: boolean = false;
        if (e.scrollDown && downTrs.length) {
            const result: { entered: boolean, tr: Element } = this.findRowElements(downTrs);
            tr = result.tr;
            lazyLoadDown = result.entered;
        }
        if (!e.scrollDown && endTrs) {
            for (let i: number = 0; i < endTrs.length; i++) {
                const top: number = endTrs[i].getBoundingClientRect().top;
                const scrollHeight: number = this.parent.getContent().scrollHeight;
                if (top > 0 && top < scrollHeight) {
                    tr = endTrs[i];
                    lazyLoadEnd = true;
                    this.rowIndex = (tr as HTMLTableRowElement).rowIndex;
                    break;
                }
            }
        }
        if (!e.scrollDown && upTrs.length && !lazyLoadEnd) {
            const result: { entered: boolean, tr: Element } = this.findRowElements(upTrs);
            tr = result.tr;
            lazyLoadUp = result.entered;
        }
        if (tr) {
            if (lazyLoadDown && e.scrollDown && lazyLoadDown && tr) {
                this.scrollDownHandler(tr);
            }
            if (!e.scrollDown && lazyLoadEnd && tr) {
                this.scrollUpEndRowHandler(tr);
            }
            if (this.cacheMode && !e.scrollDown && !lazyLoadEnd && lazyLoadUp && tr) {
                this.scrollUpHandler(tr);
            }
        }
    }

    // Handles upward scroll hitting an 'e-not-lazyload-end' sentinel: reloads the
    // next block for that caption and, in cache mode, evicts the oldest block.
    private scrollUpEndRowHandler(tr: Element): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rows: Row<Column>[] = this.groupCache[page];
        const uid: string = tr.getAttribute('data-uid');
        let index: number = this.rowObjectIndex = this.getRowObjectIndexByUid(uid);
        const idx: number = index;
        const childRow: Row<Column> = rows[index];
        const parentCapRow: Row<Column> = this.getRowByUid(childRow.parentUid);
        const capRowObjIdx: number = this.getRowObjectIndexByUid(parentCapRow.uid);
        const captionRowEle: Element = this.parent.getContent().querySelector('tr[data-uid=' + parentCapRow.uid + ']');
        const capRowEleIndex: number = (captionRowEle as HTMLTableRowElement).rowIndex;
        const child: Row<Column>[] = this.getChildRowsByParentIndex(capRowObjIdx);
        const childIdx: number = child.indexOf(childRow);
        const currentPage: number = Math.ceil(childIdx / this.pageSize);
        if (currentPage === 1) {
            return;
        }
        this.childCount = currentPage * this.pageSize;
        index = this.getCurrentBlockEndIndex(childRow, index);
        if (this.childCount < (parentCapRow.data as GroupedData).count) {
            tr.classList.remove('e-not-lazyload-end');
            childRow.lazyLoadCssClass = '';
            const isRowExist: boolean = rows[index + 1] ? childRow.indent === rows[index + 1].indent : false;
            this.scrollData = isRowExist ? this.getCacheRowsOnDownScroll(index + 1) : [];
            const key: { fields: string[], keys: string[] } = getGroupKeysAndFields(capRowObjIdx, rows);
            const args: LazyLoadGroupArgs = {
                rowIndex: capRowEleIndex, makeRequest: !isRowExist, groupInfo: parentCapRow, fields: key.fields,
                keys: key.keys, skip: this.childCount, take: this.pageSize, isScroll: true
            };
            if (this.cacheMode && this.childCount >= (this.pageSize * this.cacheBlockSize)) {
                const child: Row<Column>[] = this.getChildRowsByParentIndex(capRowObjIdx);
                const currenBlock: number = Math.ceil((child.indexOf(rows[idx]) / this.pageSize));
                const removeBlock: number = currenBlock - (this.cacheBlockSize - 1);
                this.removeBlock(uid, isRowExist, removeBlock, child);
                args.cachedRowIndex = (removeBlock * this.pageSize);
            }
            this.captionRowExpand(args);
        } else {
            this.childCount = 0;
        }
    }

    // Handles downward scroll past the mid-block sentinel: loads (or re-renders
    // from cache) the next block and, in cache mode, evicts the topmost block.
    private scrollDownHandler(tr: Element): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rows: Row<Column>[] = this.groupCache[page];
        const uid: string = tr.getAttribute('data-uid');
        let index: number = this.getRowObjectIndexByUid(uid);
        const idx: number = index;
        const childRow: Row<Column> = rows[index];
        const parentCapRow: Row<Column> = this.getRowByUid(childRow.parentUid);
        const capRowObjIdx: number = this.getRowObjectIndexByUid(parentCapRow.uid);
        const captionRowEle: Element = this.getRowElementByUid(parentCapRow.uid);
        const capRowEleIndex: number = (captionRowEle as HTMLTableRowElement).rowIndex;
        const child: Row<Column>[] = this.getChildRowsByParentIndex(capRowObjIdx);
        const childIdx: number = child.indexOf(childRow);
        const currentPage: number = Math.ceil(childIdx / this.pageSize);
        this.childCount = currentPage * this.pageSize;
        index = this.rowObjectIndex = this.getRowObjectIndexByUid(child[this.childCount - 1].uid);
        const lastchild: Row<Column> = rows[index];
        const lastRow: HTMLTableRowElement = this.getRowElementByUid(lastchild.uid);
        this.rowIndex = lastRow.rowIndex;
        index = this.getCurrentBlockEndIndex(lastchild, index);
        if (this.childCount < (parentCapRow.data as GroupedData).count) {
            const isRowExist: boolean = rows[index + 1] ? childRow.indent === rows[index + 1].indent : false;
            // Next block already rendered — nothing to do.
            if (isRowExist && !isNullOrUndefined(this.getRowElementByUid(rows[index + 1].uid))) {
                this.childCount = 0;
                return;
            }
            if (currentPage > 1 || !this.cacheMode) {
                tr.classList.remove('e-lazyload-middle-down');
                lastRow.classList.remove('e-not-lazyload-end');
                lastchild.lazyLoadCssClass = '';
            }
            this.scrollData = isRowExist ? this.getCacheRowsOnDownScroll(this.rowObjectIndex + 1) : [];
            const query: { fields: string[], keys: string[] } = getGroupKeysAndFields(capRowObjIdx, rows);
            const args: LazyLoadGroupArgs = {
                rowIndex: capRowEleIndex, makeRequest: !isRowExist, groupInfo: parentCapRow, fields: query.fields,
                keys: query.keys, skip: this.childCount, take: this.pageSize, isScroll: true
            };
            if (this.cacheMode && (this.childCount - this.pageSize) >= (this.pageSize * this.cacheBlockSize)) {
                this.isScrollDown = true;
                const child: Row<Column>[] = this.getChildRowsByParentIndex(capRowObjIdx);
                const currenBlock: number = Math.ceil((child.indexOf(rows[idx]) / this.pageSize)) - 1;
                const removeBlock: number = (currenBlock - (this.cacheBlockSize - 1)) + 1;
                this.removeBlock(uid, isRowExist, removeBlock, child, lastchild);
                args.cachedRowIndex = (removeBlock * this.pageSize);
            }
            this.captionRowExpand(args);
        } else {
            this.childCount = 0;
        }
    }

    // If `row` is an expanded caption, advances `index` (and the tracked
    // rowIndex/rowObjectIndex) past its rendered subtree and aggregate rows.
    private getCurrentBlockEndIndex(row: Row<Column>, index: number): number {
        const page: number = this.parent.pageSettings.currentPage;
        const rows: Row<Column>[] = this.groupCache[page];
        if (row.isCaptionRow) {
            if (row.isExpand) {
                const childCount: number = this.getChildRowsByParentIndex(index, true).length;
                this.rowIndex = this.rowIndex + childCount;
            }
            const agg: Row<Column>[] = this.getAggregateByCaptionIndex(index);
            this.rowObjectIndex = this.rowObjectIndex + agg.length;
            let idx: number = index;
            for (let i: number = idx + 1; i < rows.length; i++) {
                if (rows[i].indent === rows[index].indent || rows[i].indent < rows[index].indent) {
                    index = idx;
                    break;
                } else {
                    idx++;
                }
            }
        }
        return index;
    }

    // Cache-mode eviction bookkeeping: marks the new up-scroll sentinel and
    // first-row marker, then either removes the evicted rows immediately
    // (isRowExist) or stores the boundary uids for removal after the fetch.
    private removeBlock(uid: string, isRowExist: boolean, removeBlock: number, child: Row<Column>[], lastchild?: Row<Column>): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rows: Row<Column>[] = this.groupCache[page];
        const uid1: string = child[(((removeBlock + 1) * this.pageSize) - 1) - this.blockSize].uid;
        const uid2: string = child[(removeBlock * this.pageSize) - this.pageSize].uid;
        const uid3: string = child[(removeBlock * this.pageSize)].uid;
        const firstIdx: number = this.getRowObjectIndexByUid(uid1);
        rows[firstIdx].lazyLoadCssClass = 'e-lazyload-middle-up';
        this.getRowElementByUid(uid1).classList.add('e-lazyload-middle-up');
        if (lastchild) {
            this.getRowElementByUid(uid3).classList.add('e-not-lazyload-first');
            this.getRowByUid(uid3).lazyLoadCssClass = 'e-not-lazyload-first';
            this.getRowByUid(uid2).lazyLoadCssClass = '';
        }
        if (isRowExist) {
            this.removeTopRows(lastchild ? lastchild.uid : uid, uid2, uid3);
        } else {
            this.uid1 = uid2;
            this.uid2 = uid3;
            this.uid3 = lastchild ? lastchild.uid : uid;
        }
    }

    // Cache-mode upward scroll: re-renders the previous block above the viewport
    // and evicts the block that scrolled off the bottom, shuffling the sentinel
    // marker classes to the new block boundaries.
    private scrollUpHandler(tr: Element): void {
        const page: number = this.parent.pageSettings.currentPage;
        const rows: Row<Column>[] = this.groupCache[page];
        const uid: string = tr.getAttribute('data-uid');
        const row: IRow<Column> = this.getRowByUid(uid);
        const index: number = this.rowObjectIndex = this.getRowObjectIndexByUid(uid);
        const parentCapRow: Row<Column> = this.getRowByUid(row.parentUid);
        const capRowObjIdx: number = this.rowIndex = this.getRowObjectIndexByUid(parentCapRow.uid);
        const captionRowEle: Element = this.parent.getRowElementByUID(parentCapRow.uid) as HTMLTableRowElement;
        const capRowEleIndex: number = (captionRowEle as HTMLTableRowElement).rowIndex;
        const child: Row<Column>[] = this.getChildRowsByParentIndex(capRowObjIdx);
        const childIdx: number = child.indexOf(rows[index]);
        const currenBlock: number = Math.floor((childIdx / this.pageSize));
        // NOTE(review): `idx` compensates for odd pageSize where blockSize*2 > pageSize.
        let idx: number = this.blockSize;
        if ((this.blockSize * 2) > this.pageSize) {
            idx = (this.blockSize * 2) - this.pageSize;
            idx = this.blockSize - idx;
        }
        const start: string = child[(childIdx - (idx - 1)) - this.pageSize].uid;
        const end: string = child[childIdx - (idx - 1)].uid;
        this.scrollData = this.getCacheRowsOnUpScroll(start, end, index - (idx - 1));
        this.isFirstChildRow = currenBlock > 1;
        if (this.isFirstChildRow) {
            this.scrollData[0].lazyLoadCssClass = 'e-not-lazyload-first';
        }
        this.getRowByUid(end).lazyLoadCssClass = '';
        this.getRowElementByUid(end).classList.remove('e-not-lazyload-first');
        const removeBlock: number = currenBlock + this.cacheBlockSize;
        // Abort if the bottom block to evict is not fully loaded yet.
        if (child.length !== (parentCapRow.data as GroupedData).count && (removeBlock * this.pageSize > child.length)) {
            this.isFirstChildRow = false;
            this.scrollData[0].lazyLoadCssClass = '';
            this.getRowElementByUid(end).classList.add('e-not-lazyload-first');
            return;
        }
        const count: number = removeBlock * this.pageSize > (parentCapRow.data as GroupedData).count ?
            (parentCapRow.data as GroupedData).count : removeBlock * this.pageSize;
        const size: number = removeBlock * this.pageSize > (parentCapRow.data as GroupedData).count ?
            (this.pageSize - ((this.pageSize * removeBlock) - (parentCapRow.data as GroupedData).count)) : this.pageSize;
        const childRows: Row<Column>[] = this.getChildRowsByParentIndex(rows.indexOf(child[count - 1]), true, false, null, true);
        const uid1: string = childRows.length ? childRows[childRows.length - 1].uid : child[(count - 1)].uid;
        const uid2: string = child[count - size].uid;
        const uid3: string = child[(count - size) - 1].uid;
        const lastIdx: number = this.objIdxByUid[page][uid2] - idx;
        if (rows[lastIdx].lazyLoadCssClass === 'e-lazyload-middle-down') {
            const trEle: Element = this.getRowElementByUid(rows[lastIdx].uid);
            if (trEle) {
                trEle.classList.add('e-lazyload-middle-down');
            }
        }
        this.getRowByUid(uid1).lazyLoadCssClass = '';
        this.getRowByUid(uid3).lazyLoadCssClass = 'e-not-lazyload-end';
        this.getRowElementByUid(uid3).classList.add('e-not-lazyload-end');
        this.removeBottomRows(uid1, uid2, uid3);
        this.rowIndex = (tr as HTMLTableRowElement).rowIndex - idx;
        if (tr.classList.length > 1) {
            tr.classList.remove('e-lazyload-middle-up');
        } else {
            tr.removeAttribute('class');
        }
        // Target block is already rendered — clear state and stop.
        if (!isNullOrUndefined(this.getRowElementByUid(start))) {
            this.childCount = 0;
            this.scrollData = [];
            return;
        }
        const key: { fields: string[], keys: string[] } = getGroupKeysAndFields(this.getRowObjectIndexByUid(parentCapRow.uid), rows);
        const args: LazyLoadGroupArgs = {
            rowIndex: capRowEleIndex, makeRequest: false, groupInfo: parentCapRow, fields: key.fields,
            keys: key.keys, skip: this.childCount, take: this.pageSize, isScroll: true, scrollUp: true
        };
        this.isScrollUp = true;
        this.captionRowExpand(args);
    }

    // Returns the first sentinel row currently inside the viewport.
    private findRowElements(rows: Element[]): { entered: boolean, tr: Element } {
        let entered: boolean = false;
        let tr: Element;
        for (let i: number = 0; i < rows.length; i++) {
            const rowIdx: number = (rows[i] as HTMLTableRowElement).rowIndex;
if (isRowEnteredInGrid(rowIdx, this.parent)) { entered = true; this.rowIndex = rowIdx; tr = rows[i]; break; } } return { entered, tr }; } private getRowElementByUid(uid: string): HTMLTableRowElement { return this.parent.getContent().querySelector('tr[data-uid=' + uid + ']'); } private removeTopRows(uid1: string, uid2: string, uid3: string): void { const trs: Element[] = [].slice.call(this.parent.getContent().querySelectorAll('tr')); let start: boolean = false; for (let i: number = 0; i < trs.length; i++) { if (trs[i].getAttribute('data-uid') === uid3) { const tr: HTMLTableRowElement = this.parent.getContent().querySelector('tr[data-uid=' + uid1 + ']') as HTMLTableRowElement; if (tr) { this.rowIndex = tr.rowIndex; } break; } if (trs[i].getAttribute('data-uid') === uid2) { start = true; } if (start) { remove(trs[i]); } } } // eslint-disable-next-line @typescript-eslint/no-unused-vars private removeBottomRows(uid1: string, uid2: string, uid3: string): void { const trs: Element[] = [].slice.call(this.parent.getContent().querySelectorAll('tr')); let trigger: boolean = false; for (let i: number = 0; i < trs.length; i++) { if (trs[i].getAttribute('data-uid') === uid2) { trigger = true; } if (trigger) { remove(trs[i]); if (trs[i].getAttribute('data-uid') === uid1) { break; } } } } private setCache(e?: { args: NotifyArgs, data: Row<Column>[] }): void { const page: number = this.parent.pageSettings.currentPage; this.groupCache[page] = this.initialGroupCaptions[page] = extend([], e.data) as Row<Column>[]; } private captionRowExpand(args: LazyLoadGroupArgs): void { const captionRow: Row<Column> = args.groupInfo; const level: number = this.parent.groupSettings.columns.indexOf((captionRow.data as GroupedData).field) + 1; const pred: Predicate = generateExpandPredicates(args.fields, args.keys, this); const predicateList: Predicate[] = getPredicates(pred); const lazyLoad: Object = { level: level, skip: args.skip, take: args.take, where: predicateList }; if (args.makeRequest) { 
const query: Query = this.parent.renderModule.data.generateQuery(true); if (!query.isCountRequired) { query.isCountRequired = true; } query.lazyLoad.push({ key: 'onDemandGroupInfo', value: lazyLoad }); this.parent.showSpinner(); this.parent.renderModule.data.getData({}, query).then((e: ReturnType) => { this.parent.hideSpinner(); if (e.result.length === 0) { return; } if (this.cacheMode && this.uid1 && this.uid2) { this.removeTopRows(this.uid3, this.uid1, this.uid2); this.uid1 = this.uid2 = this.uid3 = undefined; } this.lazyLoadHandler( { data: e.result, count: e.count, level: level, index: args.rowIndex, isRowExist: false, isScroll: args.isScroll, up: false, rowIndex: args.cachedRowIndex }); }) .catch((e: ReturnType) => this.parent.renderModule.dataManagerFailure(e, { requestType: 'grouping' })); } else { this.lazyLoadHandler( { data: null, count: (args.groupInfo.data as GroupedData).count, level: level, index: args.rowIndex, isRowExist: true, isScroll: args.isScroll, up: args.scrollUp, rowIndex: args.cachedRowIndex }); } } private scrollReset(top?: number): void { this.parent.getContent().firstElementChild.scrollTop = top ? this.parent.getContent().firstElementChild.scrollTop + top : 0; } private updateCurrentViewData(): void { const records: Object[] = []; this.getRows().filter((row: Row<Column>) => { if (row.isDataRow) { records[row.index] = row.data; } }); this.parent.currentViewData = records.length ? 
records : this.parent.currentViewData; } /** * @returns {Row<Column>[]} returns the row * @hidden */ public getGroupCache(): { [x: number]: Row<Column>[] } { return this.groupCache; } /** * @returns {Row<Column>[]} returns the row * @hidden */ public getRows(): Row<Column>[] { return this.groupCache[this.parent.pageSettings.currentPage] || []; } /** * @returns {Element} returns the element * @hidden */ public getRowElements(): Element[] { return [].slice.call(this.parent.getContent().getElementsByClassName(literals.row)); } /** * @param {number} index - specifies the index * @returns {Element} returns the element * @hidden */ public getRowByIndex(index: number): Element { const tr: Element[] = [].slice.call(this.parent.getContent().getElementsByClassName(literals.row)); let row: Element; for (let i: number = 0; !isNullOrUndefined(index) && i < tr.length; i++) { if (tr[i].getAttribute(literals.ariaRowIndex) === index.toString()) { row = tr[i]; break; } } return row; } /** * Tucntion to set the column visibility * * @param {Column[]} columns - specifies the column * @returns {void} * @hidden */ public setVisible(columns?: Column[]): void { const gObj: IGrid = this.parent; const rows: Row<Column>[] = this.getRows(); let testRow: Row<Column>; rows.some((r: Row<Column>) => { if (r.isDataRow) { testRow = r; } return r.isDataRow; }); const contentrows: Row<Column>[] = this.getRows().filter((row: Row<Column>) => !row.isDetailRow); for (let i: number = 0; i < columns.length; i++) { const column: Column = columns[i]; const idx: number = this.parent.getNormalizedColumnIndex(column.uid); const colIdx: number = this.parent.getColumnIndexByUid(column.uid); const displayVal: string = column.visible === true ? 
'' : 'none'; if (idx !== -1 && testRow && idx < testRow.cells.length) { setStyleAttribute(<HTMLElement>this.getColGroup().childNodes[idx], { 'display': displayVal }); } this.setDisplayNone(gObj.getDataRows(), colIdx, displayVal, contentrows, idx); if (!this.parent.invokedFromMedia && column.hideAtMedia) { this.parent.updateMediaColumns(column); } this.parent.invokedFromMedia = false; } } /** * Function to set display. * * @param {Object} tr - specifies the row object * @param {number} idx - specifies the index * @param {string} displayVal - specifies the display value * @param {Row<Column>[]} rows - specifies the array of rows * @param {number} oriIdx - specifies the index * @returns {void} * @hidden */ public setDisplayNone(tr: Object, idx: number, displayVal: string, rows: Row<Column>[], oriIdx?: number): void { if (!this.parent.groupSettings.columns.length) { setDisplayValue(tr, idx, displayVal, rows); } else { const keys: string[] = Object.keys(this.groupCache); for (let j: number = 0; j < keys.length; j++) { const uids: Row<Column>[] = this.rowsByUid[keys[j]] as Row<Column>[]; const idxs: string[] = Object.keys(uids); for (let i: number = 0; i < idxs.length; i++) { const tr: HTMLTableRowElement = this.parent.getContent().querySelector('tr[data-uid=' + idxs[i] + ']'); const row: Row<Column> = uids[idxs[i]]; if (row.isCaptionRow) { if (!this.captionModelGen.isEmpty()) { this.changeCaptionRow(row, tr, keys[j]); } else { row.cells[row.indent + 1].colSpan = displayVal === '' ? row.cells[row.indent + 1].colSpan + 1 : row.cells[row.indent + 1].colSpan - 1; if (tr) { tr.cells[row.indent + 1].colSpan = row.cells[row.indent + 1].colSpan; } } } if (row.isDataRow) { this.showAndHideCells(tr, idx, displayVal, false); row.cells[oriIdx].visible = displayVal === '' ? true : false; } if (!row.isCaptionRow && !row.isDataRow && isNullOrUndefined(row.indent)) { row.cells[oriIdx].visible = displayVal === '' ? 
true : false; row.visible = row.cells.some((cell: Cell<AggregateColumnModel>) => cell.isDataCell && cell.visible); this.showAndHideCells(tr, idx, displayVal, true, row); } } } } } private changeCaptionRow(row: Row<Column>, tr: HTMLTableRowElement, index: string): void { const capRow: IRow<Column> = row; const captionData: GroupedData = row.data as GroupedData; const data: Row<Column> = this.groupGenerator.generateCaptionRow( captionData, capRow.indent, capRow.parentGid, undefined, capRow.tIndex, capRow.parentUid ); data.uid = row.uid; data.isExpand = row.isExpand; data.lazyLoadCssClass = row.lazyLoadCssClass; this.rowsByUid[index][row.uid] = data; this.groupCache[index][this.objIdxByUid[index][row.uid]] = data; if (tr) { const tbody: Element = this.parent.getContentTable().querySelector( literals.tbody); tbody.replaceChild(this.rowRenderer.render(data, this.parent.getColumns()), tr); } } private showAndHideCells(tr: HTMLTableRowElement, idx: number, displayVal: string, isSummary: boolean, row?: Row<Column>): void { if (tr) { const cls: string = isSummary ? 'td.e-summarycell' : 'td.e-rowcell'; setStyleAttribute(tr.querySelectorAll(cls)[idx] as HTMLElement, { 'display': displayVal }); if (tr.querySelectorAll(cls)[idx].classList.contains('e-hide')) { removeClass([tr.querySelectorAll(cls)[idx]], ['e-hide']); } if (isSummary) { if (row.visible && tr.classList.contains('e-hide')) { removeClass([tr], ['e-hide']); } else if (!row.visible) { addClass([tr], ['e-hide']); } } } } }
the_stack
import { expect } from '../../../setup' /* External Imports */ import { ethers } from 'hardhat' import { Signer, ContractFactory, Contract } from 'ethers' import { smockit, MockContract } from '@eth-optimism/smock' import { AppendSequencerBatchParams, encodeAppendSequencerBatch, } from '@eth-optimism/core-utils' import { TransactionResponse } from '@ethersproject/abstract-provider' import { keccak256 } from 'ethers/lib/utils' import _ from 'lodash' /* Internal Imports */ import { makeAddressManager, setProxyTarget, L2_GAS_DISCOUNT_DIVISOR, ENQUEUE_GAS_COST, setEthTime, NON_ZERO_ADDRESS, getEthTime, getNextBlockNumber, } from '../../../helpers' import { names } from '../../../../src/address-names' const ELEMENT_TEST_SIZES = [1, 2, 4, 8, 16] const MAX_GAS_LIMIT = 8_000_000 const getTransactionHash = ( sender: string, target: string, gasLimit: number, data: string ): string => { return keccak256(encodeQueueTransaction(sender, target, gasLimit, data)) } const encodeQueueTransaction = ( sender: string, target: string, gasLimit: number, data: string ): string => { return ethers.utils.defaultAbiCoder.encode( ['address', 'address', 'uint256', 'bytes'], [sender, target, gasLimit, data] ) } const appendSequencerBatch = async ( CanonicalTransactionChain: Contract, batch: AppendSequencerBatchParams ): Promise<TransactionResponse> => { const methodId = keccak256(Buffer.from('appendSequencerBatch()')).slice(2, 10) const calldata = encodeAppendSequencerBatch(batch) return CanonicalTransactionChain.signer.sendTransaction({ to: CanonicalTransactionChain.address, data: '0x' + methodId + calldata, }) } describe('CanonicalTransactionChain', () => { let addressManagerOwner: Signer let sequencer: Signer let otherSigner: Signer before(async () => { ;[addressManagerOwner, sequencer, otherSigner] = await ethers.getSigners() }) let AddressManager: Contract let Mock__StateCommitmentChain: MockContract before(async () => { AddressManager = await makeAddressManager() await 
AddressManager.setAddress( 'OVM_Sequencer', await sequencer.getAddress() ) Mock__StateCommitmentChain = await smockit( await ethers.getContractFactory('StateCommitmentChain') ) await setProxyTarget( AddressManager, 'StateCommitmentChain', Mock__StateCommitmentChain ) }) let Factory__CanonicalTransactionChain: ContractFactory let Factory__ChainStorageContainer: ContractFactory before(async () => { Factory__CanonicalTransactionChain = await ethers.getContractFactory( 'CanonicalTransactionChain' ) Factory__ChainStorageContainer = await ethers.getContractFactory( 'ChainStorageContainer' ) }) let CanonicalTransactionChain: Contract beforeEach(async () => { CanonicalTransactionChain = await Factory__CanonicalTransactionChain.deploy( AddressManager.address, MAX_GAS_LIMIT, L2_GAS_DISCOUNT_DIVISOR, ENQUEUE_GAS_COST ) const batches = await Factory__ChainStorageContainer.deploy( AddressManager.address, 'CanonicalTransactionChain' ) await AddressManager.setAddress( 'ChainStorageContainer-CTC-batches', batches.address ) await AddressManager.setAddress( names.managed.contracts.CanonicalTransactionChain, CanonicalTransactionChain.address ) }) describe('Gas param setters', () => { describe('setGasParams', async () => { it('should revert when not called by the Burn Admin', async () => { await expect( CanonicalTransactionChain.connect(otherSigner).setGasParams(60000, 32) ).to.be.revertedWith('Only callable by the Burn Admin.') }) it('should update the enqueueGasCost and enqueueL2GasPrepaid correctly', async () => { const newEnqueueGasCost = 31113 const newGasDivisor = 19 await CanonicalTransactionChain.connect( addressManagerOwner ).setGasParams(newGasDivisor, newEnqueueGasCost) await CanonicalTransactionChain.l2GasDiscountDivisor() const enqueueL2GasPrepaid = await CanonicalTransactionChain.enqueueL2GasPrepaid() expect(enqueueL2GasPrepaid).to.equal(newGasDivisor * newEnqueueGasCost) }) it('should emit an L2GasParamsUpdated event', async () => { await expect( 
CanonicalTransactionChain.connect(addressManagerOwner).setGasParams( 88, 31514 ) ).to.emit(CanonicalTransactionChain, 'L2GasParamsUpdated') }) }) }) describe('enqueue', () => { const target = NON_ZERO_ADDRESS const gasLimit = 500_000 it('should revert when trying to input more data than the max data size', async () => { const MAX_ROLLUP_TX_SIZE = await CanonicalTransactionChain.MAX_ROLLUP_TX_SIZE() const data = '0x' + '12'.repeat(MAX_ROLLUP_TX_SIZE + 1) await expect( CanonicalTransactionChain.enqueue(target, gasLimit, data, { gasLimit: 40000000, }) ).to.be.revertedWith( 'Transaction data size exceeds maximum for rollup transaction.' ) }) it('should revert when trying to enqueue a transaction with a higher gasLimit than the max', async () => { const data = '0x1234567890' await expect( CanonicalTransactionChain.enqueue(target, MAX_GAS_LIMIT + 1, data) ).to.be.revertedWith( 'Transaction gas limit exceeds maximum for rollup transaction.' ) }) it('should revert if gas limit parameter is not at least MIN_ROLLUP_TX_GAS', async () => { const MIN_ROLLUP_TX_GAS = await CanonicalTransactionChain.MIN_ROLLUP_TX_GAS() const customGasLimit = MIN_ROLLUP_TX_GAS / 2 const data = '0x' + '12'.repeat(1234) await expect( CanonicalTransactionChain.enqueue(target, customGasLimit, data) ).to.be.revertedWith('Transaction gas limit too low to enqueue.') }) it('should revert if transaction gas limit does not cover rollup burn', async () => { const _enqueueL2GasPrepaid = await CanonicalTransactionChain.enqueueL2GasPrepaid() const l2GasDiscountDivisor = await CanonicalTransactionChain.l2GasDiscountDivisor() const data = '0x' + '12'.repeat(1234) // Create a tx with high L2 gas limit, but insufficient L1 gas limit to cover burn. const l2GasLimit = 2 * _enqueueL2GasPrepaid // This l1GasLimit is equivalent to the gasToConsume amount calculated in the CTC. After // additional gas overhead, it will be enough trigger the gas burn, but not enough to cover // it. 
const l1GasLimit = (l2GasLimit - _enqueueL2GasPrepaid) / l2GasDiscountDivisor await expect( CanonicalTransactionChain.enqueue(target, l2GasLimit, data, { gasLimit: l1GasLimit, }) ).to.be.revertedWith('Insufficient gas for L2 rate limiting burn.') }) describe('with valid input parameters', () => { it('should emit a TransactionEnqueued event', async () => { const timestamp = (await getEthTime(ethers.provider)) + 100 const data = '0x' + '12'.repeat(1234) await setEthTime(ethers.provider, timestamp) await expect( CanonicalTransactionChain.enqueue(target, gasLimit, data) ).to.emit(CanonicalTransactionChain, 'TransactionEnqueued') }) describe('when enqueing multiple times', () => { const data = '0x' + '12'.repeat(1234) for (const size of ELEMENT_TEST_SIZES) { it(`should be able to enqueue ${size} elements`, async () => { for (let i = 0; i < size; i++) { await expect( CanonicalTransactionChain.enqueue(target, gasLimit, data) ).to.not.be.reverted } }) } }) }) describe('with _gaslimit below the enqueueL2GasPrepaid threshold', async () => { it('the cost to enqueue transactions is consistent for different L2 gas amounts below the prepaid threshold', async () => { const enqueueL2GasPrepaid = await CanonicalTransactionChain.enqueueL2GasPrepaid() const data = '0x' + '12'.repeat(1234) const l2GasLimit1 = enqueueL2GasPrepaid - 1 const l2GasLimit2 = enqueueL2GasPrepaid - 100 // The first enqueue is more expensive because it's writing to an empty slot, // so we need to pre-load the buffer or the test will fail. 
await CanonicalTransactionChain.enqueue( NON_ZERO_ADDRESS, l2GasLimit1, data ) const res1 = await CanonicalTransactionChain.enqueue( NON_ZERO_ADDRESS, l2GasLimit1, data ) const receipt1 = await res1.wait() const res2 = await CanonicalTransactionChain.enqueue( NON_ZERO_ADDRESS, l2GasLimit2, data ) const receipt2 = await res2.wait() expect(receipt1.gasUsed).to.equal(receipt2.gasUsed) }) }) }) describe('getQueueElement', () => { it('should revert when accessing a non-existent element', async () => { await expect( CanonicalTransactionChain.getQueueElement(0) ).to.be.revertedWith( 'reverted with panic code 0x32 (Array accessed at an out-of-bounds or negative index)' ) }) describe('when the requested element exists', () => { const target = NON_ZERO_ADDRESS const gasLimit = 500_000 const data = '0x' + '12'.repeat(1234) describe('when getting the first element', () => { for (const size of ELEMENT_TEST_SIZES) { it(`gets the element when ${size} + 1 elements exist`, async () => { const timestamp = (await getEthTime(ethers.provider)) + 100 const blockNumber = await getNextBlockNumber(ethers.provider) await setEthTime(ethers.provider, timestamp) const transactionHash = getTransactionHash( await addressManagerOwner.getAddress(), target, gasLimit, data ) await CanonicalTransactionChain.enqueue(target, gasLimit, data) for (let i = 0; i < size; i++) { await CanonicalTransactionChain.enqueue( target, gasLimit, '0x' + '12'.repeat(i + 1) ) } expect( _.toPlainObject( await CanonicalTransactionChain.getQueueElement(0) ) ).to.deep.include({ transactionHash, timestamp, blockNumber, }) }) } }) describe('when getting the middle element', () => { for (const size of ELEMENT_TEST_SIZES) { it(`gets the element when ${size} elements exist`, async () => { let timestamp: number let blockNumber: number let transactionHash: string const middleIndex = Math.floor(size / 2) for (let i = 0; i < size; i++) { if (i === middleIndex) { timestamp = (await getEthTime(ethers.provider)) + 100 blockNumber = 
await getNextBlockNumber(ethers.provider) await setEthTime(ethers.provider, timestamp) transactionHash = getTransactionHash( await addressManagerOwner.getAddress(), target, gasLimit, data ) await CanonicalTransactionChain.enqueue(target, gasLimit, data) } else { await CanonicalTransactionChain.enqueue( target, gasLimit, '0x' + '12'.repeat(i + 1) ) } } expect( _.toPlainObject( await CanonicalTransactionChain.getQueueElement(middleIndex) ) ).to.deep.include({ transactionHash, timestamp, blockNumber, }) }) } }) describe('when getting the last element', () => { for (const size of ELEMENT_TEST_SIZES) { it(`gets the element when ${size} elements exist`, async () => { let timestamp: number let blockNumber: number let transactionHash: string for (let i = 0; i < size; i++) { if (i === size - 1) { timestamp = (await getEthTime(ethers.provider)) + 100 blockNumber = await getNextBlockNumber(ethers.provider) await setEthTime(ethers.provider, timestamp) transactionHash = getTransactionHash( await addressManagerOwner.getAddress(), target, gasLimit, data ) await CanonicalTransactionChain.enqueue(target, gasLimit, data) } else { await CanonicalTransactionChain.enqueue( target, gasLimit, '0x' + '12'.repeat(i + 1) ) } } expect( _.toPlainObject( await CanonicalTransactionChain.getQueueElement(size - 1) ) ).to.deep.include({ transactionHash, timestamp, blockNumber, }) }) } }) }) }) describe('appendSequencerBatch', () => { beforeEach(() => { CanonicalTransactionChain = CanonicalTransactionChain.connect(sequencer) }) it('should revert if expected start does not match current total batches', async () => { await expect( appendSequencerBatch(CanonicalTransactionChain, { transactions: ['0x1234'], contexts: [ { numSequencedTransactions: 0, numSubsequentQueueTransactions: 0, timestamp: 0, blockNumber: 0, }, ], shouldStartAtElement: 1234, totalElementsToAppend: 1, }) ).to.be.revertedWith( 'Actual batch start index does not match expected start index.' 
) }) it('should revert if attempting to append more elements than are available in the queue.', async () => { await expect( appendSequencerBatch(CanonicalTransactionChain, { transactions: ['0x1234'], contexts: [ { numSequencedTransactions: 1, numSubsequentQueueTransactions: 1, timestamp: 0, blockNumber: 0, }, ], shouldStartAtElement: 0, totalElementsToAppend: 2, }) ).to.be.revertedWith( 'Attempted to append more elements than are available in the queue.' ) }) it('should revert if not called by the sequencer', async () => { await expect( appendSequencerBatch( CanonicalTransactionChain.connect(addressManagerOwner), { transactions: ['0x1234'], contexts: [ { numSequencedTransactions: 0, numSubsequentQueueTransactions: 0, timestamp: 0, blockNumber: 0, }, ], shouldStartAtElement: 0, totalElementsToAppend: 1, } ) ).to.be.revertedWith('Function can only be called by the Sequencer.') }) it('should emit the previous blockhash in the TransactionBatchAppended event', async () => { const timestamp = await getEthTime(ethers.provider) const currentBlockHash = await ( await ethers.provider.getBlock('latest') ).hash const blockNumber = await getNextBlockNumber(ethers.provider) const res = await appendSequencerBatch(CanonicalTransactionChain, { transactions: ['0x1234'], contexts: [ { numSequencedTransactions: 1, numSubsequentQueueTransactions: 0, timestamp, blockNumber, }, ], shouldStartAtElement: 0, totalElementsToAppend: 1, }) const receipt = await res.wait() // Because the res value is returned by a sendTransaction type, we need to manually // decode the logs. 
const eventArgs = ethers.utils.defaultAbiCoder.decode( ['uint256', 'bytes32', 'uint256', 'uint256', 'bytes'], receipt.logs[0].data ) await expect(eventArgs[0]).to.eq(currentBlockHash) }) for (const size of ELEMENT_TEST_SIZES) { const target = NON_ZERO_ADDRESS const gasLimit = 500_000 const data = '0x' + '12'.repeat(1234) describe(`Happy path: when appending ${size} sequencer transactions`, () => { describe('when not inserting queue elements in between', () => { describe('when using a single batch context', () => { let contexts: any[] let transactions: any[] beforeEach(async () => { const timestamp = (await getEthTime(ethers.provider)) - 100 const blockNumber = (await getNextBlockNumber(ethers.provider)) - 10 contexts = [ { numSequencedTransactions: size, numSubsequentQueueTransactions: 0, timestamp, blockNumber, }, ] transactions = [...Array(size)].map((el, idx) => { return '0x' + '12' + '34'.repeat(idx) }) }) it('should append the given number of transactions', async () => { await expect( appendSequencerBatch(CanonicalTransactionChain, { transactions, contexts, shouldStartAtElement: 0, totalElementsToAppend: size, }) ) .to.emit(CanonicalTransactionChain, 'SequencerBatchAppended') .withArgs(0, 0, size) }) }) }) describe('when inserting queue elements in between', () => { beforeEach(async () => { for (let i = 0; i < size; i++) { await CanonicalTransactionChain.enqueue(target, gasLimit, data) } }) describe('between every other sequencer transaction', () => { let contexts: any[] let transactions: any[] beforeEach(async () => { const timestamp = (await getEthTime(ethers.provider)) - 100 const blockNumber = (await getNextBlockNumber(ethers.provider)) - 50 contexts = [...Array(size)].map(() => { return { numSequencedTransactions: 1, numSubsequentQueueTransactions: 1, timestamp, blockNumber: Math.max(blockNumber, 0), } }) transactions = [...Array(size)].map((el, idx) => { return '0x' + '12' + '34'.repeat(idx) }) }) it('should append the batch', async () => { await expect( 
appendSequencerBatch(CanonicalTransactionChain, { transactions, contexts, shouldStartAtElement: 0, totalElementsToAppend: size * 2, }) ) .to.emit(CanonicalTransactionChain, 'SequencerBatchAppended') .withArgs(0, size, size * 2) }) }) const spacing = Math.max(Math.floor(size / 4), 1) describe(`between every ${spacing} sequencer transaction`, () => { let contexts: any[] let transactions: any[] beforeEach(async () => { const timestamp = (await getEthTime(ethers.provider)) - 100 const blockNumber = (await getNextBlockNumber(ethers.provider)) - 50 contexts = [...Array(spacing)].map(() => { return { numSequencedTransactions: size / spacing, numSubsequentQueueTransactions: 1, timestamp, blockNumber: Math.max(blockNumber, 0), } }) transactions = [...Array(size)].map((el, idx) => { return '0x' + '12' + '34'.repeat(idx) }) }) it('should append the batch', async () => { await expect( appendSequencerBatch(CanonicalTransactionChain, { transactions, contexts, shouldStartAtElement: 0, totalElementsToAppend: size + spacing, }) ) .to.emit(CanonicalTransactionChain, 'SequencerBatchAppended') .withArgs(0, spacing, size + spacing) }) }) }) }) } }) describe('getTotalElements', () => { it('should return zero when no elements exist', async () => { expect(await CanonicalTransactionChain.getTotalElements()).to.equal(0) }) for (const size of ELEMENT_TEST_SIZES) { describe(`when the sequencer inserts a batch of ${size} elements`, () => { beforeEach(async () => { const timestamp = (await getEthTime(ethers.provider)) - 100 const blockNumber = (await getNextBlockNumber(ethers.provider)) - 10 const contexts = [ { numSequencedTransactions: size, numSubsequentQueueTransactions: 0, timestamp, blockNumber: Math.max(blockNumber, 0), }, ] const transactions = [...Array(size)].map((el, idx) => { return '0x' + '12' + '34'.repeat(idx) }) const res = await appendSequencerBatch( CanonicalTransactionChain.connect(sequencer), { transactions, contexts, shouldStartAtElement: 0, totalElementsToAppend: size, } ) 
await res.wait() expect(await CanonicalTransactionChain.getLastTimestamp()).to.equal( timestamp ) expect(await CanonicalTransactionChain.getLastBlockNumber()).to.equal( blockNumber ) expect( await CanonicalTransactionChain.getNumPendingQueueElements() ).to.equal(0) }) it(`should return ${size}`, async () => { expect(await CanonicalTransactionChain.getTotalElements()).to.equal( size ) }) it('should return zero after queue is emptied', async () => { expect(await CanonicalTransactionChain.getNextQueueIndex()).to.equal( 0 ) }) }) } it('should return zero', async () => { expect(await CanonicalTransactionChain.getTotalBatches()).to.equal(0) }) }) })
the_stack
import initWx from '@/utils/wx'; import { connect } from 'dva'; import LS from 'parsec-ls'; import React, { RefObject } from 'react'; import CountUp from 'react-countup'; import router from 'umi/router'; const styles = require('./index.less'); const shareIcon = require('@/assets/share-icon.jpeg'); function isWeixn() { const ua = navigator.userAgent.toLowerCase(); return ua.includes('micromessenger'); } interface IProps { dispatch: any; result: { gender: number; score: number; }; } interface IResultState { mainDom?: RefObject<HTMLDivElement>; resultSrc: string; resultData: Array<{ minScore: number; maxScore: number; info: Array<{ gender: 0 | 1; title: string; src: string; imgSrc?: string; desc: string | React.ReactNode; msg: string | React.ReactNode; mapData: { mapSrc: string; time: number; distance: number; }; }>; }>; } @connect(({ result, loading }) => ({ result, loading: loading.models.result, })) class Result extends React.PureComponent<IProps, IResultState> { constructor(props) { super(props); this.state = { resultSrc: '', mainDom: React.createRef(), resultData: [ { minScore: 0, maxScore: 10, info: [ { gender: 0, title: '佛系小仙女', src: require('../../assets/result/3-2.png'), desc: ( <span> 态度要端正,姿势要优美 <br/> 马路上你是佛系小仙女 <br/> 开慢车不是技术不行,只是沉醉于沿途风景 </span> ), msg: ( <span> 驾驶ES8,你最适合一条 最美试驾路线 <br/> 尽享四季西子湖畔 </span> ), mapData: { mapSrc: require('../../assets/result/3-1.png'), time: 30, distance: 10 }, }, { gender: 1, title: '公路旅行家', src: require('../../assets/result/1-2.png'), desc: ( <span> 拒绝路怒症,路遇拥堵放首歌 <br/> 有车插队我让行,不闻马路喧嚣鸣笛 <br/> 只见窗外风景怡然 </span> ), msg: ( <span> 驾驶ES8,你最适合一条 最美试驾路线 <br/> 尽享四季西子湖畔 </span> ), mapData: { mapSrc: require('../../assets/result/1-1.png'), time: 30, distance: 10 }, }, ], }, { minScore: 11, maxScore: 15, info: [ { gender: 0, title: '公路哲学家', src: require('../../assets/result/4-2.png'), desc: ( <span> 对于驾驶总有自己的哲学思考 <br/> 身体和灵魂两个都要在路上 <br/> 不走寻常路是你的人生态度 </span> ), msg: ( <span> 驾驶ES8,你最适合一条品质小众路线 <br/> 深度体验黑科技 </span> ), mapData: { mapSrc: 
require('../../assets/result/4-1.png'), time: 40, distance: 24 }, }, { gender: 1, title: '公路极客', src: require('../../assets/result/6-2.png'), desc: ( <span> 朋友圈里人称活地图,另辟蹊径你最拿手 <br/> 不执著于速度的刺激 <br/> 而追求于科技带来的惊喜 </span> ), msg: ( <span> 驾驶ES8,你最适合一条 品质小众路线, <br/> 深度体验黑科技 </span> ), mapData: { mapSrc: require('../../assets/result/6-1.png'), time: 40, distance: 15 }, }, ], }, { minScore: 16, maxScore: 25, info: [ { gender: 0, title: '公路女王', src: require('../../assets/result/2-2.png'), desc: ( <span> 你主宰生活,也要主宰马路。谁说女司机不 <br/> 靠谱,开自己的路,让别人说去吧 <br/> Catch me,if you can </span> ), msg: ( <span> 驾驶ES8,你最适合一条极致体验路线 <br/> 百公里加速定义速度与激情 </span> ), mapData: { mapSrc: require('../../assets/result/2-1.png'), time: 30, distance: 8.3 }, }, { gender: 1, title: '天生赛车手', src: require('../../assets/result/5-2.png'), desc: ( <span> 出了停车场就是赛车场 <br/> 超车是你的态度,加速是你的风度 <br/> 一出发,再也没有慢下来的理由 </span> ), msg: ( <span> 驾驶ES8,你最适合一条极致体验路线 <br/> 百公里加速定义速度与激情 </span> ), mapData: { mapSrc: require('../../assets/result/5-1.png'), time: 30, distance: 8.3 }, }, ], }, ], }; } public componentDidMount(): void { const { dispatch } = this.props; dispatch({ type: 'result/fetch', }); } public render() { const { mainDom, resultSrc } = this.state; // @ts-ignore const { title, src, imgSrc, desc, msg, mapData: { mapSrc, time, distance } } = this.getResultData(); if (isWeixn() && !!title) { initWx({ title: `试驾蔚来ES8,我的驾驶人格是${title}`, imgUrl: shareIcon, isNeedLogin: true, desc: '试驾蔚来ES8,认识路上的自己', openid: process.env.NODE_ENV === 'development' ? 
'oEgayjggrU06oORZJVeFUJ_KF1Mk' : undefined, }); } return ( <React.Fragment> <div className={styles.result} onTouchStart={() => { return false; }}> <div className={styles.domain} ref={mainDom}> <div className={styles.headerWrapper}> <img src={require('../../assets/logo.png')} alt="logo" className={styles.logo}/> <img src={src} alt="logo" className={styles.innerBg}/> <div className={styles['top-text-wrapper']}> <p className={styles.info}>你的驾驶人格是</p> <p className={styles.resultTitle}>{title}</p> </div> <div className={styles['result-desc']}>{desc}</div> </div> <div className={styles['map-wrapper']}> <div className={styles['map-box']}> <img src={mapSrc} alt="map"/> </div> <div className={styles['map-info']}> <div className={styles['info-item']}> <span>DISTANCE</span> <div className={styles.unit}> <CountUp start={0} end={distance} delay={1}/> km </div> </div> <div className={styles['info-item']}> <span>TIME</span> <div className={styles.unit}> <CountUp start={0} end={time} delay={1.3} onEnd={() => { this.handleCreate(); }} /> min </div> </div> </div> </div> <div className={styles['footer-text']}>{msg}</div> <div className={styles['footer-text-info']}> *结果页显示的试驾路线位于杭州,其他城市仅供示意 </div> <div className={styles['share-wrapper']}> <div className={styles['share-info']}> <p>长按保存图片分享好友</p> <p>(蔚来APP内请点击保存)</p> </div> <div className={styles.qrcode}> <img src={require('../../assets/1548314593.png')} alt=""/> </div> </div> </div> {resultSrc && <img src={resultSrc} alt="" style={{ width: '100%', objectFit: 'contain', pointerEvents: 'visible' }}/>} </div> </React.Fragment> ); } private getResultData = () => { const { resultData } = this.state; let { result: { score, gender }, } = this.props; if (LS.getObj('answers') === null || LS.get('gender') === null) { router.push('/'); return ''; } const answers = LS.getObj('answers'); gender = LS.getObj('gender'); score = 0; answers.map(answer => { return (score += answer.score); }); const obj = resultData.filter(item => item.minScore <= score 
&& item.maxScore >= score)[0] || { info: [], }; let data: { minScore: number; maxScore: number; info: Array<{ gender: 0 | 1; title: string; src: string; imgSrc?: string; desc: string | React.ReactNode; msg: string | React.ReactNode; mapData: { mapSrc: string; time: number; distance: number; }; }> }; if (obj) { data = obj.info.filter(item => item.gender === gender)[0] || {}; } return data; }; /** * 生成图片 */ private handleCreate = () => { const { mainDom: { current: mainDom }, } = this.state; if (mainDom === null) { return; } let isCreacting = true; mainDom.style.display = 'block'; // @ts-ignore import('@/utils/html2canvas.min.js') .then(({ default: html2canvas }) => html2canvas(mainDom, { useCORS: true, allowTaint: true, // 允许加载跨域的图片 taintTest: true, // 检测每张图片都已经加载完成 height: mainDom.clientHeight, width: mainDom.clientWidth, backgroundColor: '#00263c', scale: 2, letterRendering: true, // 在设置了字间距的时候有用 logging: true, }), ) .then(canvas => { setTimeout(() => { if (isCreacting) { isCreacting = false; alert('您的手机版本过低,请升级至最新版本后重新打开。'); } }, 2000); const resultSrc = canvas.toDataURL('image/png'); console.log('resultSrc', resultSrc); this.setState({ resultSrc }, () => { mainDom.style.display = 'none'; isCreacting = false; }); }); }; } export default Result;
the_stack
import { assert } from "chai"; import { join } from "path"; import * as semver from "semver"; import { BisCoreSchema, BriefcaseDb, BriefcaseManager, ECSqlStatement, Element, ElementRefersToElements, ExternalSourceAspect, GenericSchema, IModelDb, IModelHost, IModelJsFs, IModelJsNative, NativeLoggerCategory, PhysicalModel, PhysicalObject, PhysicalPartition, SnapshotDb, SpatialCategory, } from "@itwin/core-backend"; import { ExtensiveTestScenario, HubMock, HubWrappers, IModelTestUtils, KnownTestLocations, TestUserType } from "@itwin/core-backend/lib/cjs/test"; import { AccessToken, DbResult, Guid, GuidString, Id64, Id64String, IModelStatus, Logger, LogLevel } from "@itwin/core-bentley"; import { Code, ColorDef, IModel, IModelVersion, PhysicalElementProps, SubCategoryAppearance } from "@itwin/core-common"; import { Point3d, YawPitchRollAngles } from "@itwin/core-geometry"; import { IModelExporter, IModelTransformer, TransformerLoggerCategory } from "../../core-transformer"; import { CountingIModelImporter, IModelToTextFileExporter, IModelTransformerTestUtils, TestIModelTransformer, TransformerExtensiveTestScenario as TransformerExtensiveTestScenario, } from "../IModelTransformerUtils"; describe("IModelTransformerHub", () => { const outputDir = join(KnownTestLocations.outputDir, "IModelTransformerHub"); let iTwinId: GuidString; let accessToken: AccessToken; before(async () => { HubMock.startup("IModelTransformerHub"); iTwinId = HubMock.iTwinId; IModelJsFs.recursiveMkDirSync(outputDir); accessToken = await HubWrappers.getAccessToken(TestUserType.Regular); // initialize logging if (false) { Logger.initializeToConsole(); Logger.setLevelDefault(LogLevel.Error); Logger.setLevel(TransformerLoggerCategory.IModelExporter, LogLevel.Trace); Logger.setLevel(TransformerLoggerCategory.IModelImporter, LogLevel.Trace); Logger.setLevel(TransformerLoggerCategory.IModelTransformer, LogLevel.Trace); Logger.setLevel(NativeLoggerCategory.Changeset, LogLevel.Trace); } }); after(() => 
HubMock.shutdown()); it("Transform source iModel to target iModel", async () => { // Create and push seed of source IModel const sourceIModelName = "TransformerSource"; const sourceSeedFileName = join(outputDir, `${sourceIModelName}.bim`); if (IModelJsFs.existsSync(sourceSeedFileName)) IModelJsFs.removeSync(sourceSeedFileName); const sourceSeedDb = SnapshotDb.createEmpty(sourceSeedFileName, { rootSubject: { name: "TransformerSource" } }); assert.isTrue(IModelJsFs.existsSync(sourceSeedFileName)); await ExtensiveTestScenario.prepareDb(sourceSeedDb); sourceSeedDb.saveChanges(); sourceSeedDb.close(); const sourceIModelId = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: sourceIModelName, description: "source", revision0: sourceSeedFileName, noLocks: true }); // Create and push seed of target IModel const targetIModelName = "TransformerTarget"; const targetSeedFileName = join(outputDir, `${targetIModelName}.bim`); if (IModelJsFs.existsSync(targetSeedFileName)) { IModelJsFs.removeSync(targetSeedFileName); } const targetSeedDb = SnapshotDb.createEmpty(targetSeedFileName, { rootSubject: { name: "TransformerTarget" } }); assert.isTrue(IModelJsFs.existsSync(targetSeedFileName)); await TransformerExtensiveTestScenario.prepareTargetDb(targetSeedDb); assert.isTrue(targetSeedDb.codeSpecs.hasName("TargetCodeSpec")); // inserted by prepareTargetDb targetSeedDb.saveChanges(); targetSeedDb.close(); const targetIModelId = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: targetIModelName, description: "target", revision0: targetSeedFileName, noLocks: true }); try { const sourceDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: sourceIModelId }); const targetDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: targetIModelId }); assert.isTrue(sourceDb.isBriefcaseDb()); assert.isTrue(targetDb.isBriefcaseDb()); assert.isFalse(sourceDb.isSnapshot); assert.isFalse(targetDb.isSnapshot); 
assert.isTrue(targetDb.codeSpecs.hasName("TargetCodeSpec")); // make sure prepareTargetDb changes were saved and pushed to iModelHub if (true) { // initial import ExtensiveTestScenario.populateDb(sourceDb); sourceDb.saveChanges(); await sourceDb.pushChanges({ accessToken, description: "Populate source" }); // Use IModelExporter.exportChanges to verify the changes to the sourceDb const sourceExportFileName: string = IModelTestUtils.prepareOutputFile("IModelTransformer", "TransformerSource-ExportChanges-1.txt"); assert.isFalse(IModelJsFs.existsSync(sourceExportFileName)); const sourceExporter = new IModelToTextFileExporter(sourceDb, sourceExportFileName); await sourceExporter.exportChanges(accessToken); assert.isTrue(IModelJsFs.existsSync(sourceExportFileName)); const sourceDbChanges: any = (sourceExporter.exporter as any)._sourceDbChanges; // access private member for testing purposes assert.exists(sourceDbChanges); // expect inserts and 1 update from populateSourceDb assert.isAtLeast(sourceDbChanges.codeSpec.insertIds.size, 1); assert.isAtLeast(sourceDbChanges.element.insertIds.size, 1); assert.isAtLeast(sourceDbChanges.aspect.insertIds.size, 1); assert.isAtLeast(sourceDbChanges.model.insertIds.size, 1); assert.equal(sourceDbChanges.model.updateIds.size, 1, "Expect the RepositoryModel to be updated"); assert.isTrue(sourceDbChanges.model.updateIds.has(IModel.repositoryModelId)); assert.isAtLeast(sourceDbChanges.relationship.insertIds.size, 1); // expect no other updates nor deletes from populateSourceDb assert.equal(sourceDbChanges.codeSpec.updateIds.size, 0); assert.equal(sourceDbChanges.codeSpec.deleteIds.size, 0); assert.equal(sourceDbChanges.element.updateIds.size, 0); assert.equal(sourceDbChanges.element.deleteIds.size, 0); assert.equal(sourceDbChanges.aspect.updateIds.size, 0); assert.equal(sourceDbChanges.aspect.deleteIds.size, 0); assert.equal(sourceDbChanges.model.deleteIds.size, 0); assert.equal(sourceDbChanges.relationship.updateIds.size, 0); 
assert.equal(sourceDbChanges.relationship.deleteIds.size, 0); const transformer = new TestIModelTransformer(sourceDb, targetDb); await transformer.processChanges(accessToken); transformer.dispose(); targetDb.saveChanges(); await targetDb.pushChanges({ accessToken, description: "Import #1" }); TransformerExtensiveTestScenario.assertTargetDbContents(sourceDb, targetDb); // Use IModelExporter.exportChanges to verify the changes to the targetDb const targetExportFileName: string = IModelTestUtils.prepareOutputFile("IModelTransformer", "TransformerTarget-ExportChanges-1.txt"); assert.isFalse(IModelJsFs.existsSync(targetExportFileName)); const targetExporter = new IModelToTextFileExporter(targetDb, targetExportFileName); await targetExporter.exportChanges(accessToken); assert.isTrue(IModelJsFs.existsSync(targetExportFileName)); const targetDbChanges: any = (targetExporter.exporter as any)._sourceDbChanges; // access private member for testing purposes assert.exists(targetDbChanges); // expect inserts and a few updates from transforming the result of populateSourceDb assert.isAtLeast(targetDbChanges.codeSpec.insertIds.size, 1); assert.isAtLeast(targetDbChanges.element.insertIds.size, 1); assert.isAtMost(targetDbChanges.element.updateIds.size, 1, "Expect the root Subject to be updated"); assert.isAtLeast(targetDbChanges.aspect.insertIds.size, 1); assert.isAtLeast(targetDbChanges.model.insertIds.size, 1); assert.isAtMost(targetDbChanges.model.updateIds.size, 1, "Expect the RepositoryModel to be updated"); assert.isTrue(targetDbChanges.model.updateIds.has(IModel.repositoryModelId)); assert.isAtLeast(targetDbChanges.relationship.insertIds.size, 1); // expect no other changes from transforming the result of populateSourceDb assert.equal(targetDbChanges.codeSpec.updateIds.size, 0); assert.equal(targetDbChanges.codeSpec.deleteIds.size, 0); assert.equal(targetDbChanges.element.deleteIds.size, 0); assert.equal(targetDbChanges.aspect.updateIds.size, 0); 
assert.equal(targetDbChanges.aspect.deleteIds.size, 0); assert.equal(targetDbChanges.model.deleteIds.size, 0); assert.equal(targetDbChanges.relationship.updateIds.size, 0); assert.equal(targetDbChanges.relationship.deleteIds.size, 0); } if (true) { // second import with no changes to source, should be a no-op const numTargetElements: number = count(targetDb, Element.classFullName); const numTargetExternalSourceAspects: number = count(targetDb, ExternalSourceAspect.classFullName); const numTargetRelationships: number = count(targetDb, ElementRefersToElements.classFullName); const targetImporter = new CountingIModelImporter(targetDb); const transformer = new TestIModelTransformer(sourceDb, targetImporter); await transformer.processChanges(accessToken); assert.equal(targetImporter.numModelsInserted, 0); assert.equal(targetImporter.numModelsUpdated, 0); assert.equal(targetImporter.numElementsInserted, 0); assert.equal(targetImporter.numElementsUpdated, 0); assert.equal(targetImporter.numElementsDeleted, 0); assert.equal(targetImporter.numElementAspectsInserted, 0); assert.equal(targetImporter.numElementAspectsUpdated, 0); assert.equal(targetImporter.numRelationshipsInserted, 0); assert.equal(targetImporter.numRelationshipsUpdated, 0); assert.equal(numTargetElements, count(targetDb, Element.classFullName), "Second import should not add elements"); assert.equal(numTargetExternalSourceAspects, count(targetDb, ExternalSourceAspect.classFullName), "Second import should not add aspects"); assert.equal(numTargetRelationships, count(targetDb, ElementRefersToElements.classFullName), "Second import should not add relationships"); transformer.dispose(); targetDb.saveChanges(); assert.isFalse(targetDb.nativeDb.hasPendingTxns()); await targetDb.pushChanges({ accessToken, description: "Should not actually push because there are no changes" }); } if (true) { // update source db, then import again ExtensiveTestScenario.updateDb(sourceDb); sourceDb.saveChanges(); await 
sourceDb.pushChanges({ accessToken, description: "Update source" }); // Use IModelExporter.exportChanges to verify the changes to the sourceDb const sourceExportFileName: string = IModelTestUtils.prepareOutputFile("IModelTransformer", "TransformerSource-ExportChanges-2.txt"); assert.isFalse(IModelJsFs.existsSync(sourceExportFileName)); const sourceExporter = new IModelToTextFileExporter(sourceDb, sourceExportFileName); await sourceExporter.exportChanges(accessToken); assert.isTrue(IModelJsFs.existsSync(sourceExportFileName)); const sourceDbChanges: any = (sourceExporter.exporter as any)._sourceDbChanges; // access private member for testing purposes assert.exists(sourceDbChanges); // expect some inserts from updateDb assert.equal(sourceDbChanges.codeSpec.insertIds.size, 0); assert.equal(sourceDbChanges.element.insertIds.size, 1); assert.equal(sourceDbChanges.aspect.insertIds.size, 0); assert.equal(sourceDbChanges.model.insertIds.size, 0); assert.equal(sourceDbChanges.relationship.insertIds.size, 2); // expect some updates from updateDb assert.isAtLeast(sourceDbChanges.element.updateIds.size, 1); assert.isAtLeast(sourceDbChanges.aspect.updateIds.size, 1); assert.isAtLeast(sourceDbChanges.model.updateIds.size, 1); assert.isAtLeast(sourceDbChanges.relationship.updateIds.size, 1); // expect some deletes from updateDb assert.isAtLeast(sourceDbChanges.element.deleteIds.size, 1); assert.equal(sourceDbChanges.relationship.deleteIds.size, 1); // don't expect other changes from updateDb assert.equal(sourceDbChanges.codeSpec.updateIds.size, 0); assert.equal(sourceDbChanges.codeSpec.deleteIds.size, 0); assert.equal(sourceDbChanges.aspect.deleteIds.size, 0); assert.equal(sourceDbChanges.model.deleteIds.size, 0); const transformer = new TestIModelTransformer(sourceDb, targetDb); await transformer.processChanges(accessToken); transformer.dispose(); targetDb.saveChanges(); await targetDb.pushChanges({ accessToken, description: "Import #2" }); 
ExtensiveTestScenario.assertUpdatesInDb(targetDb); // Use IModelExporter.exportChanges to verify the changes to the targetDb const targetExportFileName: string = IModelTestUtils.prepareOutputFile("IModelTransformer", "TransformerTarget-ExportChanges-2.txt"); assert.isFalse(IModelJsFs.existsSync(targetExportFileName)); const targetExporter = new IModelToTextFileExporter(targetDb, targetExportFileName); await targetExporter.exportChanges(accessToken); assert.isTrue(IModelJsFs.existsSync(targetExportFileName)); const targetDbChanges: any = (targetExporter.exporter as any)._sourceDbChanges; // access private member for testing purposes assert.exists(targetDbChanges); // expect some inserts from transforming the result of updateDb assert.equal(targetDbChanges.codeSpec.insertIds.size, 0); assert.equal(targetDbChanges.element.insertIds.size, 1); assert.equal(targetDbChanges.aspect.insertIds.size, 3); assert.equal(targetDbChanges.model.insertIds.size, 0); assert.equal(targetDbChanges.relationship.insertIds.size, 2); // expect some updates from transforming the result of updateDb assert.isAtLeast(targetDbChanges.element.updateIds.size, 1); assert.isAtLeast(targetDbChanges.aspect.updateIds.size, 1); assert.isAtLeast(targetDbChanges.model.updateIds.size, 1); assert.isAtLeast(targetDbChanges.relationship.updateIds.size, 1); // expect some deletes from transforming the result of updateDb assert.isAtLeast(targetDbChanges.element.deleteIds.size, 1); assert.isAtLeast(targetDbChanges.aspect.deleteIds.size, 1); assert.equal(targetDbChanges.relationship.deleteIds.size, 1); // don't expect other changes from transforming the result of updateDb assert.equal(targetDbChanges.codeSpec.updateIds.size, 0); assert.equal(targetDbChanges.codeSpec.deleteIds.size, 0); assert.equal(targetDbChanges.model.deleteIds.size, 0); } const sourceIModelChangeSets = await IModelHost.hubAccess.queryChangesets({ accessToken, iModelId: sourceIModelId }); const targetIModelChangeSets = await 
IModelHost.hubAccess.queryChangesets({ accessToken, iModelId: targetIModelId }); assert.equal(sourceIModelChangeSets.length, 2); assert.equal(targetIModelChangeSets.length, 2); await HubWrappers.closeAndDeleteBriefcaseDb(accessToken, sourceDb); await HubWrappers.closeAndDeleteBriefcaseDb(accessToken, targetDb); } finally { try { await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: sourceIModelId }); await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: targetIModelId }); } catch (err) { // eslint-disable-next-line no-console console.log("can't destroy", err); } } }); it("Clone/upgrade test", async () => { const sourceIModelName: string = IModelTestUtils.generateUniqueName("CloneSource"); const sourceIModelId = await HubWrappers.recreateIModel({ accessToken, iTwinId, iModelName: sourceIModelName, noLocks: true }); assert.isTrue(Guid.isGuid(sourceIModelId)); const targetIModelName: string = IModelTestUtils.generateUniqueName("CloneTarget"); const targetIModelId = await HubWrappers.recreateIModel({ accessToken, iTwinId, iModelName: targetIModelName, noLocks: true }); assert.isTrue(Guid.isGuid(targetIModelId)); try { // open/upgrade sourceDb const sourceDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: sourceIModelId }); const seedBisCoreVersion = sourceDb.querySchemaVersion(BisCoreSchema.schemaName)!; assert.isTrue(semver.satisfies(seedBisCoreVersion, ">= 1.0.1")); await sourceDb.importSchemas([BisCoreSchema.schemaFilePath, GenericSchema.schemaFilePath]); const updatedBisCoreVersion = sourceDb.querySchemaVersion(BisCoreSchema.schemaName)!; assert.isTrue(semver.satisfies(updatedBisCoreVersion, ">= 1.0.10")); assert.isTrue(sourceDb.containsClass(ExternalSourceAspect.classFullName), "Expect BisCore to be updated and contain ExternalSourceAspect"); const expectedHasPendingTxns: boolean = seedBisCoreVersion !== updatedBisCoreVersion; // push sourceDb schema changes assert.equal(sourceDb.nativeDb.hasPendingTxns(), 
expectedHasPendingTxns, "Expect importSchemas to have saved changes"); assert.isFalse(sourceDb.nativeDb.hasUnsavedChanges(), "Expect no unsaved changes after importSchemas"); await sourceDb.pushChanges({ accessToken, description: "Import schemas to upgrade BisCore" }); // may push schema changes // import schemas again to test common scenario of not knowing whether schemas are up-to-date or not.. await sourceDb.importSchemas([BisCoreSchema.schemaFilePath, GenericSchema.schemaFilePath]); assert.isFalse(sourceDb.nativeDb.hasPendingTxns(), "Expect importSchemas to be a no-op"); assert.isFalse(sourceDb.nativeDb.hasUnsavedChanges(), "Expect importSchemas to be a no-op"); sourceDb.saveChanges(); // will be no changes to save in this case await sourceDb.pushChanges({ accessToken, description: "Import schemas again" }); // will be no changes to push in this case // populate sourceDb IModelTransformerTestUtils.populateTeamIModel(sourceDb, "Test", Point3d.createZero(), ColorDef.green); IModelTransformerTestUtils.assertTeamIModelContents(sourceDb, "Test"); sourceDb.saveChanges(); await sourceDb.pushChanges({ accessToken, description: "Populate Source" }); // open/upgrade targetDb const targetDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: targetIModelId }); await targetDb.importSchemas([BisCoreSchema.schemaFilePath, GenericSchema.schemaFilePath]); assert.isTrue(targetDb.containsClass(ExternalSourceAspect.classFullName), "Expect BisCore to be updated and contain ExternalSourceAspect"); // push targetDb schema changes targetDb.saveChanges(); await targetDb.pushChanges({ accessToken, description: "Upgrade BisCore" }); // import sourceDb changes into targetDb const transformer = new IModelTransformer(new IModelExporter(sourceDb), targetDb); await transformer.processAll(); transformer.dispose(); IModelTransformerTestUtils.assertTeamIModelContents(targetDb, "Test"); targetDb.saveChanges(); await targetDb.pushChanges({ accessToken, description: 
"Import changes from sourceDb" }); // close iModel briefcases await HubWrappers.closeAndDeleteBriefcaseDb(accessToken, sourceDb); await HubWrappers.closeAndDeleteBriefcaseDb(accessToken, targetDb); } finally { try { // delete iModel briefcases await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: sourceIModelId }); await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: targetIModelId }); } catch (err) { // eslint-disable-next-line no-console console.log("can't destroy", err); } } }); it("should merge changes made on a branch back to master", async () => { // create and push master IModel const masterIModelName = "Master"; const masterSeedFileName = join(outputDir, `${masterIModelName}.bim`); if (IModelJsFs.existsSync(masterSeedFileName)) IModelJsFs.removeSync(masterSeedFileName); // make sure file from last run does not exist const state0 = [1, 2]; const masterSeedDb = SnapshotDb.createEmpty(masterSeedFileName, { rootSubject: { name: "Master" } }); populateMaster(masterSeedDb, state0); assert.isTrue(IModelJsFs.existsSync(masterSeedFileName)); masterSeedDb.nativeDb.setITwinId(iTwinId); // WIP: attempting a workaround for "ContextId was not properly setup in the checkpoint" issue masterSeedDb.saveChanges(); masterSeedDb.close(); const masterIModelId = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: masterIModelName, description: "master", revision0: masterSeedFileName, noLocks: true }); assert.isTrue(Guid.isGuid(masterIModelId)); IModelJsFs.removeSync(masterSeedFileName); // now that iModel is pushed, can delete local copy of the seed const masterDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: masterIModelId }); assert.isTrue(masterDb.isBriefcaseDb()); assert.equal(masterDb.iTwinId, iTwinId); assert.equal(masterDb.iModelId, masterIModelId); assertPhysicalObjects(masterDb, state0); const changesetMasterState0 = masterDb.changeset.id; // create Branch1 iModel using Master as a template const 
branchIModelName1 = "Branch1"; const branchIModelId1 = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: branchIModelName1, description: `Branch1 of ${masterIModelName}`, revision0: masterDb.pathName, noLocks: true }); const branchDb1 = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: branchIModelId1 }); assert.isTrue(branchDb1.isBriefcaseDb()); assert.equal(branchDb1.iTwinId, iTwinId); assertPhysicalObjects(branchDb1, state0); const changesetBranch1First = branchDb1.changeset.id; // create Branch2 iModel using Master as a template const branchIModelName2 = "Branch2"; const branchIModelId2 = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: branchIModelName2, description: `Branch2 of ${masterIModelName}`, revision0: masterDb.pathName, noLocks: true }); const branchDb2 = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: branchIModelId2 }); assert.isTrue(branchDb2.isBriefcaseDb()); assert.equal(branchDb2.iTwinId, iTwinId); assertPhysicalObjects(branchDb2, state0); const changesetBranch2First = branchDb2.changeset.id; // create empty iModel meant to contain replayed master history const replayedIModelName = "Replayed"; const replayedIModelId = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: replayedIModelName, description: "blank", noLocks: true }); const replayedDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: replayedIModelId }); assert.isTrue(replayedDb.isBriefcaseDb()); assert.equal(replayedDb.iTwinId, iTwinId); try { // record provenance in Branch1 and Branch2 iModels const provenanceInserterB1 = new IModelTransformer(masterDb, branchDb1, { wasSourceIModelCopiedToTarget: true, }); const provenanceInserterB2 = new IModelTransformer(masterDb, branchDb2, { wasSourceIModelCopiedToTarget: true, }); await provenanceInserterB1.processAll(); await provenanceInserterB2.processAll(); provenanceInserterB1.dispose(); 
// NOTE(review): this is the tail of a larger master/branch round-trip test whose
// setup (iModel creation, provenance inserters, opening `try`) precedes this chunk.
// Line breaks were reconstructed from a whitespace-mangled extraction; code tokens unchanged.
provenanceInserterB2.dispose();
assert.equal(count(masterDb, ExternalSourceAspect.classFullName), 0);
assert.isAbove(count(branchDb1, ExternalSourceAspect.classFullName), state0.length);
assert.isAbove(count(branchDb2, ExternalSourceAspect.classFullName), state0.length);
// push Branch1 and Branch2 provenance changes
await saveAndPushChanges(branchDb1, "State0");
await saveAndPushChanges(branchDb2, "State0");
const changesetBranch1State0 = branchDb1.changeset.id;
const changesetBranch2State0 = branchDb2.changeset.id;
assert.notEqual(changesetBranch1State0, changesetBranch1First);
assert.notEqual(changesetBranch2State0, changesetBranch2First);
// push Branch1 State1
const delta01 = [2, 3, 4]; // update 2, insert 3 and 4
const state1 = [1, 2, 3, 4];
maintainPhysicalObjects(branchDb1, delta01);
assertPhysicalObjects(branchDb1, state1);
await saveAndPushChanges(branchDb1, "State0 -> State1");
const changesetBranch1State1 = branchDb1.changeset.id;
assert.notEqual(changesetBranch1State1, changesetBranch1State0);
// push Branch1 State2
const delta12 = [1, -3, 5, 6]; // update 1, delete 3, insert 5 and 6
const state2 = [1, 2, -3, 4, 5, 6];
maintainPhysicalObjects(branchDb1, delta12);
assertPhysicalObjects(branchDb1, state2);
await saveAndPushChanges(branchDb1, "State1 -> State2");
const changesetBranch1State2 = branchDb1.changeset.id;
assert.notEqual(changesetBranch1State2, changesetBranch1State1);
// merge changes made on Branch1 back to Master
const branch1ToMaster = new IModelTransformer(branchDb1, masterDb, {
  isReverseSynchronization: true, // provenance stored in source/branch
});
await branch1ToMaster.processChanges(accessToken, changesetBranch1State1);
branch1ToMaster.dispose();
assertPhysicalObjects(masterDb, state2);
assertPhysicalObjectUpdated(masterDb, 1);
assertPhysicalObjectUpdated(masterDb, 2);
assert.equal(count(masterDb, ExternalSourceAspect.classFullName), 0);
await saveAndPushChanges(masterDb, "State0 -> State2"); // a squash of 2 branch changes into 1 in the masterDb change ledger
const changesetMasterState2 = masterDb.changeset.id;
assert.notEqual(changesetMasterState2, changesetMasterState0);
branchDb1.saveChanges(); // saves provenance locally in case of re-merge
// merge changes from Master to Branch2
const masterToBranch2 = new IModelTransformer(masterDb, branchDb2);
await masterToBranch2.processChanges(accessToken, changesetMasterState2);
masterToBranch2.dispose();
assertPhysicalObjects(branchDb2, state2);
await saveAndPushChanges(branchDb2, "State0 -> State2");
const changesetBranch2State2 = branchDb2.changeset.id;
assert.notEqual(changesetBranch2State2, changesetBranch2State0);
// make changes to Branch2
const delta23 = [7, 8];
const state3 = [1, 2, -3, 4, 5, 6, 7, 8];
maintainPhysicalObjects(branchDb2, delta23);
assertPhysicalObjects(branchDb2, state3);
await saveAndPushChanges(branchDb2, "State2 -> State3");
const changesetBranch2State3 = branchDb2.changeset.id;
assert.notEqual(changesetBranch2State3, changesetBranch2State2);
// merge changes made on Branch2 back to Master
const branch2ToMaster = new IModelTransformer(branchDb2, masterDb, {
  isReverseSynchronization: true, // provenance stored in source/branch
});
await branch2ToMaster.processChanges(accessToken, changesetBranch2State3);
branch2ToMaster.dispose();
assertPhysicalObjects(masterDb, state3);
assert.equal(count(masterDb, ExternalSourceAspect.classFullName), 0);
await saveAndPushChanges(masterDb, "State2 -> State3");
const changesetMasterState3 = masterDb.changeset.id;
assert.notEqual(changesetMasterState3, changesetMasterState2);
branchDb2.saveChanges(); // saves provenance locally in case of re-merge
// make change directly on Master
const delta34 = [6, -7]; // update 6, delete 7
const state4 = [1, 2, -3, 4, 5, 6, -7, 8];
maintainPhysicalObjects(masterDb, delta34);
assertPhysicalObjects(masterDb, state4);
await saveAndPushChanges(masterDb, "State3 -> State4");
const changesetMasterState4 = masterDb.changeset.id;
assert.notEqual(changesetMasterState4, changesetMasterState3);
// merge Master to Branch1
const masterToBranch1 = new IModelTransformer(masterDb, branchDb1);
await masterToBranch1.processChanges(accessToken, changesetMasterState3);
masterToBranch1.dispose();
assertPhysicalObjects(branchDb1, state4);
assertPhysicalObjectUpdated(branchDb1, 6);
await saveAndPushChanges(branchDb1, "State2 -> State4");
const changesetBranch1State4 = branchDb1.changeset.id;
assert.notEqual(changesetBranch1State4, changesetBranch1State2);
// download master's full changeset history to inspect which elements each changeset deleted
const masterDbChangesets = await IModelHost.hubAccess.downloadChangesets({ accessToken, iModelId: masterIModelId, targetDir: BriefcaseManager.getChangeSetsPath(masterIModelId) });
assert.equal(masterDbChangesets.length, 3);
const masterDeletedElementIds = new Set<Id64String>();
for (const masterDbChangeset of masterDbChangesets) {
  assert.isDefined(masterDbChangeset.id);
  assert.isDefined(masterDbChangeset.description); // test code above always included a change description when pushChanges was called
  const changesetPath = masterDbChangeset.pathname;
  assert.isTrue(IModelJsFs.existsSync(changesetPath));
  // below is one way of determining the set of elements that were deleted in a specific changeset
  const statusOrResult: IModelJsNative.ErrorStatusOrResult<IModelStatus, any> = masterDb.nativeDb.extractChangedInstanceIdsFromChangeSet(changesetPath);
  assert.isUndefined(statusOrResult.error);
  const result: IModelJsNative.ChangedInstanceIdsProps = JSON.parse(statusOrResult.result);
  assert.isDefined(result.element);
  if (result.element?.delete) {
    result.element.delete.forEach((id: Id64String) => masterDeletedElementIds.add(id));
  }
}
assert.isAtLeast(masterDeletedElementIds.size, 1);
// replay master history to create replayed iModel
const sourceDb = await HubWrappers.downloadAndOpenBriefcase({ accessToken, iTwinId, iModelId: masterIModelId, asOf: IModelVersion.first().toJSON() });
const replayTransformer = new IModelTransformer(sourceDb, replayedDb);
// this replay strategy pretends that deleted elements never existed
for (const elementId of masterDeletedElementIds) {
  replayTransformer.exporter.excludeElement(elementId);
}
// note: this test knows that there were no schema changes, so does not call `processSchemas`
await replayTransformer.processAll(); // process any elements that were part of the "seed"
await saveAndPushChanges(replayedDb, "changes from source seed");
// replay each master changeset, one replayed changeset per master changeset
for (const masterDbChangeset of masterDbChangesets) {
  await sourceDb.pullChanges({ accessToken, toIndex: masterDbChangeset.index });
  await replayTransformer.processChanges(accessToken, sourceDb.changeset.id);
  await saveAndPushChanges(replayedDb, masterDbChangeset.description ?? "");
}
replayTransformer.dispose();
sourceDb.close();
assertPhysicalObjects(replayedDb, state4); // should have same ending state as masterDb
// make sure there are no deletes in the replay history (all elements that were eventually deleted from masterDb were excluded)
const replayedDbChangesets = await IModelHost.hubAccess.downloadChangesets({ accessToken, iModelId: replayedIModelId, targetDir: BriefcaseManager.getChangeSetsPath(replayedIModelId) });
assert.isAtLeast(replayedDbChangesets.length, masterDbChangesets.length); // replayedDb will have more changesets when seed contains elements
const replayedDeletedElementIds = new Set<Id64String>();
for (const replayedDbChangeset of replayedDbChangesets) {
  assert.isDefined(replayedDbChangeset.id);
  const changesetPath = replayedDbChangeset.pathname;
  assert.isTrue(IModelJsFs.existsSync(changesetPath));
  // below is one way of determining the set of elements that were deleted in a specific changeset
  const statusOrResult: IModelJsNative.ErrorStatusOrResult<IModelStatus, any> = replayedDb.nativeDb.extractChangedInstanceIdsFromChangeSet(changesetPath);
  assert.isUndefined(statusOrResult.error);
  const result: IModelJsNative.ChangedInstanceIdsProps = JSON.parse(statusOrResult.result);
  assert.isDefined(result.element);
  if (result.element?.delete) {
    result.element.delete.forEach((id: Id64String) => replayedDeletedElementIds.add(id));
  }
}
assert.equal(replayedDeletedElementIds.size, 0);
masterDb.close();
branchDb1.close();
branchDb2.close();
replayedDb.close();
} finally {
  // cleanup: always delete the hub iModels created by this test
  await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: masterIModelId });
  await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: branchIModelId1 });
  await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: branchIModelId2 });
  await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId: replayedIModelId });
}
});

/** Counts rows of the given EC class in the iModel via an ECSQL COUNT(*) query. */
function count(iModelDb: IModelDb, classFullName: string): number {
  return iModelDb.withPreparedStatement(`SELECT COUNT(*) FROM ${classFullName}`, (statement: ECSqlStatement): number => {
    return DbResult.BE_SQLITE_ROW === statement.step() ? statement.getValue(0).getInteger() : 0;
  });
}

/** Saves local briefcase changes, then pushes them to the hub as one changeset with the given description. */
async function saveAndPushChanges(briefcaseDb: BriefcaseDb, description: string): Promise<void> {
  briefcaseDb.saveChanges(description);
  await briefcaseDb.pushChanges({ accessToken, description });
}

/** Seeds an iModel with the SpatialCategory/PhysicalModel used by the test, then applies `numbers`. */
function populateMaster(iModelDb: IModelDb, numbers: number[]): void {
  SpatialCategory.insert(iModelDb, IModel.dictionaryId, "SpatialCategory", new SubCategoryAppearance());
  PhysicalModel.insert(iModelDb, IModel.rootSubjectId, "PhysicalModel");
  maintainPhysicalObjects(iModelDb, numbers);
}

/** Asserts existence (positive n) / absence (negative n) for each entry, and the total PhysicalObject row count. */
function assertPhysicalObjects(iModelDb: IModelDb, numbers: number[]): void {
  let numPhysicalObjects = 0;
  for (const n of numbers) {
    if (n > 0) { // negative "n" value means element was deleted
      ++numPhysicalObjects;
    }
    assertPhysicalObject(iModelDb, n);
  }
  assert.equal(numPhysicalObjects, count(iModelDb, PhysicalObject.classFullName));
}

/** Asserts a single PhysicalObject exists when n > 0 and does not exist otherwise. */
function assertPhysicalObject(iModelDb: IModelDb, n: number): void {
  const physicalObjectId = getPhysicalObjectId(iModelDb, n);
  if (n > 0) {
    assert.isTrue(Id64.isValidId64(physicalObjectId), "Expected element to exist");
  } else {
    assert.equal(physicalObjectId, Id64.invalid, "Expected element to not exist"); // negative "n" means element was deleted
  }
}

/** Asserts that the PhysicalObject labeled `n` has been updated at least once (jsonProperties.updated >= 1). */
function assertPhysicalObjectUpdated(iModelDb: IModelDb, n: number): void {
  assert.isTrue(n > 0);
  const physicalObjectId = getPhysicalObjectId(iModelDb, n);
  const physicalObject = iModelDb.elements.getElement(physicalObjectId, PhysicalObject);
  assert.isAtLeast(physicalObject.jsonProperties.updated, 1);
}

/** Looks up a PhysicalObject by UserLabel; returns Id64.invalid when not found. */
function getPhysicalObjectId(iModelDb: IModelDb, n: number): Id64String {
  const sql = `SELECT ECInstanceId FROM ${PhysicalObject.classFullName} WHERE UserLabel=:userLabel`;
  return iModelDb.withPreparedStatement(sql, (statement: ECSqlStatement): Id64String => {
    statement.bindString("userLabel", n.toString());
    return DbResult.BE_SQLITE_ROW === statement.step() ? statement.getValue(0).getId() : Id64.invalid;
  });
}

/** Applies a list of insert/update (positive n) and delete (negative n) operations to the test model. */
function maintainPhysicalObjects(iModelDb: IModelDb, numbers: number[]): void {
  const modelId = iModelDb.elements.queryElementIdByCode(PhysicalPartition.createCode(iModelDb, IModel.rootSubjectId, "PhysicalModel"))!;
  const categoryId = iModelDb.elements.queryElementIdByCode(SpatialCategory.createCode(iModelDb, IModel.dictionaryId, "SpatialCategory"))!;
  for (const n of numbers) {
    maintainPhysicalObject(iModelDb, modelId, categoryId, n);
  }
}

/** Inserts/updates (positive n, bumping jsonProperties.updated) or deletes (negative n) one PhysicalObject. */
function maintainPhysicalObject(iModelDb: IModelDb, modelId: Id64String, categoryId: Id64String, n: number): Id64String {
  if (n > 0) { // positive "n" value means insert or update
    const physicalObjectId = getPhysicalObjectId(iModelDb, n);
    if (Id64.isValidId64(physicalObjectId)) { // if element exists, update it
      const physicalObject = iModelDb.elements.getElement(physicalObjectId, PhysicalObject);
      const numTimesUpdated: number = physicalObject.jsonProperties?.updated ?? 0;
      physicalObject.jsonProperties.updated = 1 + numTimesUpdated;
      physicalObject.update();
      return physicalObjectId;
    } else { // if element does not exist, insert it
      const physicalObjectProps: PhysicalElementProps = {
        classFullName: PhysicalObject.classFullName,
        model: modelId,
        category: categoryId,
        code: Code.createEmpty(),
        userLabel: n.toString(),
        geom: IModelTestUtils.createBox(Point3d.create(1, 1, 1)),
        placement: {
          origin: Point3d.create(n, n, 0),
          angles: YawPitchRollAngles.createDegrees(0, 0, 0),
        },
      };
      return iModelDb.elements.insertElement(physicalObjectProps);
    }
  } else { // negative "n" value means delete
    const physicalObjectId = getPhysicalObjectId(iModelDb, -n);
    iModelDb.elements.deleteElement(physicalObjectId);
    return physicalObjectId;
  }
}
});
the_stack
// Ambient type declarations for the St (Shell Toolkit) library, built on the
// @gi-types GObject-introspection typings imported below.
// NOTE(review): line breaks reconstructed from a whitespace-mangled extraction;
// all declaration tokens are unchanged.
import * as Cally from "@gi-types/cally";
import * as Clutter from "@gi-types/clutter";
import * as Gtk from "@gi-types/gtk";
import * as Meta from "@gi-types/meta";
import * as Atk from "@gi-types/atk";
import * as Gio from "@gi-types/gio";
import * as GObject from "@gi-types/gobject";
import * as GLib from "@gi-types/glib";
import * as Json from "@gi-types/json";
import * as cairo from "@gi-types/cairo";
import * as Pango from "@gi-types/pango";
import * as Cogl from "@gi-types/cogl";

// Free function returning a textual description of an actor.
export function describe_actor(actor: Clutter.Actor): string;

// Callback signatures used by the Clipboard and Entry APIs below.
export type ClipboardCallbackFunc = (clipboard: Clipboard, text: string) => void;
export type ClipboardContentCallbackFunc = (clipboard: Clipboard, bytes: GLib.Bytes | Uint8Array) => void;
export type EntryCursorFunc = (entry: Entry, use_ibeam: boolean, data?: any | null) => void;

// Enumerations. Each enum is paired with a same-named namespace exposing its GType.
export namespace Align {
    export const $gtype: GObject.GType<Align>;
}
export enum Align {
    START = 0,
    MIDDLE = 1,
    END = 2,
}
export namespace BackgroundSize {
    export const $gtype: GObject.GType<BackgroundSize>;
}
export enum BackgroundSize {
    AUTO = 0,
    CONTAIN = 1,
    COVER = 2,
    FIXED = 3,
}
export namespace ClipboardType {
    export const $gtype: GObject.GType<ClipboardType>;
}
export enum ClipboardType {
    PRIMARY = 0,
    CLIPBOARD = 1,
}
export namespace Corner {
    export const $gtype: GObject.GType<Corner>;
}
export enum Corner {
    TOPLEFT = 0,
    TOPRIGHT = 1,
    BOTTOMRIGHT = 2,
    BOTTOMLEFT = 3,
}
export namespace DirectionType {
    export const $gtype: GObject.GType<DirectionType>;
}
export enum DirectionType {
    TAB_FORWARD = 0,
    TAB_BACKWARD = 1,
    UP = 2,
    DOWN = 3,
    LEFT = 4,
    RIGHT = 5,
}
export namespace GradientType {
    export const $gtype: GObject.GType<GradientType>;
}
export enum GradientType {
    NONE = 0,
    VERTICAL = 1,
    HORIZONTAL = 2,
    RADIAL = 3,
}
export namespace IconStyle {
    export const $gtype: GObject.GType<IconStyle>;
}
export enum IconStyle {
    REQUESTED = 0,
    REGULAR = 1,
    SYMBOLIC = 2,
}
export namespace PolicyType {
    export const $gtype: GObject.GType<PolicyType>;
}
export enum PolicyType {
    ALWAYS = 0,
    AUTOMATIC = 1,
    NEVER = 2,
    EXTERNAL = 3,
}
export namespace Side {
    export const $gtype: GObject.GType<Side>;
}
export enum Side {
    TOP = 0,
    RIGHT = 1,
    BOTTOM = 2,
    LEFT = 3,
}
export namespace TextAlign {
    export const $gtype: GObject.GType<TextAlign>;
}
export enum TextAlign {
    LEFT = 0,
    CENTER = 1,
    RIGHT = 2,
    JUSTIFY = 3,
}
export namespace TextureCachePolicy {
    export const $gtype: GObject.GType<TextureCachePolicy>;
}
export enum TextureCachePolicy {
    NONE = 0,
    FOREVER = 1,
}
// Flag-style enums: values are bit masks and may be combined.
export namespace ButtonMask {
    export const $gtype: GObject.GType<ButtonMask>;
}
export enum ButtonMask {
    ONE = 1,
    TWO = 2,
    THREE = 4,
}
export namespace TextDecoration {
    export const $gtype: GObject.GType<TextDecoration>;
}
export enum TextDecoration {
    UNDERLINE = 1,
    OVERLINE = 2,
    LINE_THROUGH = 4,
    BLINK = 8,
}

// St.Adjustment: a bounded value (lower/upper with step/page increments) that
// can be animated via the Clutter.Animatable interface.
export module Adjustment {
    export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
        [key: string]: any;
        actor: Clutter.Actor;
        lower: number;
        page_increment: number;
        pageIncrement: number;
        page_size: number;
        pageSize: number;
        step_increment: number;
        stepIncrement: number;
        upper: number;
        value: number;
    }
}
export class Adjustment extends GObject.Object implements Clutter.Animatable {
    static $gtype: GObject.GType<Adjustment>;

    constructor(properties?: Partial<Adjustment.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<Adjustment.ConstructorProperties>, ...args: any[]): void;

    // Properties (snake_case and camelCase aliases refer to the same GObject property)
    actor: Clutter.Actor;
    lower: number;
    page_increment: number;
    pageIncrement: number;
    page_size: number;
    pageSize: number;
    step_increment: number;
    stepIncrement: number;
    upper: number;
    value: number;

    // Signals
    connect(id: string, callback: (...args: any[]) => any): number;
    connect_after(id: string, callback: (...args: any[]) => any): number;
    emit(id: string, ...args: any[]): void;
    connect(signal: "changed", callback: (_source: this) => void): number;
    connect_after(signal: "changed", callback: (_source: this) => void): number;
    emit(signal: "changed"): void;

    // Constructors
    static ["new"](
        actor: Clutter.Actor | null,
        value: number,
        lower: number,
        upper: number,
        step_increment: number,
        page_increment: number,
        page_size: number
    ): Adjustment;

    // Members
    add_transition(name: string, transition: Clutter.Transition): void;
    adjust_for_scroll_event(delta: number): void;
    clamp_page(lower: number, upper: number): void;
    get_transition(name: string): Clutter.Transition | null;
    get_value(): number;
    get_values(): [number | null, number | null, number | null, number | null, number | null, number | null];
    remove_transition(name: string): void;
    set_value(value: number): void;
    set_values(
        value: number,
        lower: number,
        upper: number,
        step_increment: number,
        page_increment: number,
        page_size: number
    ): void;
    vfunc_changed(): void;

    // Implemented Members (from Clutter.Animatable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
}

// St.Bin: a container holding a single child actor of type A.
export module Bin {
    export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Widget.ConstructorProperties {
        [key: string]: any;
        child: A;
    }
}
export class Bin<A extends Clutter.Actor = Clutter.Actor> extends Widget implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable {
    static $gtype: GObject.GType<Bin>;

    constructor(properties?: Partial<Bin.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?:
// (continuation of Bin._init — the signature is split at an extraction line boundary)
    Partial<Bin.ConstructorProperties<A>>, ...args: any[]): void;

    // Properties
    child: A;

    // Constructors
    static ["new"](): Bin;

    // Members
    get_child(): A | null;
    set_child(child?: A | null): void;

    // Implemented Members (Clutter.Animatable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
    // Implemented Members (Clutter.Container<A>)
    add_actor(actor: A): void;
    child_get_property(child: A, property: string, value: any): void;
    child_notify(child: A, pspec: GObject.ParamSpec): void;
    child_set_property(child: A, property: string, value: any): void;
    create_child_meta(actor: A): void;
    destroy_child_meta(actor: A): void;
    find_child_by_name(child_name: string): A;
    get_child_meta(actor: A): Clutter.ChildMeta;
    get_children(): A[];
    get_children(...args: never[]): never; // overload guard: blocks calls with arguments
    lower_child(actor: A, sibling?: A | null): void;
    raise_child(actor: A, sibling?: A | null): void;
    remove_actor(actor: A): void;
    sort_depth_order(): void;
    vfunc_actor_added(actor: A): void;
    vfunc_actor_removed(actor: A): void;
    vfunc_add(actor: A): void;
    vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void;
    vfunc_create_child_meta(actor: A): void;
    vfunc_destroy_child_meta(actor: A): void;
    vfunc_get_child_meta(actor: A): Clutter.ChildMeta;
    vfunc_lower(actor: A, sibling?: A | null): void;
    vfunc_raise(actor: A, sibling?: A | null): void;
    vfunc_remove(actor: A): void;
    vfunc_sort_depth_order(): void;
    // Implemented Members (Clutter.Scriptable)
    get_id(): string;
    parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    set_custom_property(script: Clutter.Script, name: string, value: any): void;
    set_id(id_: string): void;
    vfunc_get_id(): string;
    vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void;
    vfunc_set_id(id_: string): void;
}

// St.BorderImage: an image sliced into nine regions by the given border widths.
export module BorderImage {
    export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
        [key: string]: any;
    }
}
export class BorderImage extends GObject.Object {
    static $gtype: GObject.GType<BorderImage>;

    constructor(properties?: Partial<BorderImage.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<BorderImage.ConstructorProperties>, ...args: any[]): void;

    // Constructors
    static ["new"](
        file: Gio.File,
        border_top: number,
        border_right: number,
        border_bottom: number,
        border_left: number,
        scale_factor: number
    ): BorderImage;

    // Members
    equal(other: BorderImage): boolean;
    get_borders(border_top: number, border_right: number, border_bottom: number, border_left: number): void;
    get_file(): Gio.File;
}

// St.BoxLayout: a scrollable container that lays children out in a row or column.
export module BoxLayout {
    // NOTE(review): type parameter A is declared but unused here — generated-binding quirk, kept as-is.
    export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Viewport.ConstructorProperties<Clutter.BoxLayout> {
        [key: string]: any;
        pack_start: boolean;
        packStart: boolean;
        vertical: boolean;
    }
}
export class BoxLayout<A extends Clutter.Actor = Clutter.Actor> extends Viewport<Clutter.BoxLayout> implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable, Scrollable {
    static $gtype: GObject.GType<BoxLayout>;

    constructor(properties?: Partial<BoxLayout.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?: Partial<BoxLayout.ConstructorProperties<A>>, ...args: any[]): void;

    // Properties
    pack_start: boolean;
    packStart: boolean;
    vertical: boolean;
    // Implemented Properties (Scrollable)
    hadjustment: Adjustment;
    vadjustment: Adjustment;

    // Constructors
    static ["new"](): BoxLayout;

    // Members
    get_pack_start(): boolean;
    get_vertical(): boolean;
    set_pack_start(pack_start: boolean): void;
    set_vertical(vertical: boolean): void;

    // Implemented Members (Clutter.Animatable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
    // Implemented Members (Clutter.Container<A>)
    add_actor(actor: A): void;
    child_get_property(child: A, property: string, value: any): void;
    child_notify(child: A, pspec: GObject.ParamSpec): void;
    child_set_property(child: A, property: string, value: any): void;
    create_child_meta(actor: A): void;
    destroy_child_meta(actor: A): void;
    find_child_by_name(child_name: string): A;
    get_child_meta(actor: A): Clutter.ChildMeta;
    get_children(): A[];
    get_children(...args: never[]): never; // overload guard: blocks calls with arguments
    lower_child(actor: A, sibling?: A | null): void;
    raise_child(actor: A, sibling?: A | null): void;
    remove_actor(actor: A): void;
    sort_depth_order(): void;
    vfunc_actor_added(actor: A): void;
    vfunc_actor_removed(actor: A): void;
    vfunc_add(actor: A): void;
    vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void;
    vfunc_create_child_meta(actor: A): void;
    vfunc_destroy_child_meta(actor: A): void;
    vfunc_get_child_meta(actor: A): Clutter.ChildMeta;
    vfunc_lower(actor: A, sibling?: A | null): void;
    vfunc_raise(actor: A, sibling?: A | null): void;
    vfunc_remove(actor: A): void;
    vfunc_sort_depth_order(): void;
    // Implemented Members (Clutter.Scriptable)
    get_id(): string;
    parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
// (continuation of BoxLayout's Clutter.Scriptable members — split at an extraction line boundary)
    set_custom_property(script: Clutter.Script, name: string, value: any): void;
    set_id(id_: string): void;
    vfunc_get_id(): string;
    vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void;
    vfunc_set_id(id_: string): void;
    // Implemented Members (Scrollable)
    get_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void;
    set_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void;
    vfunc_get_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void;
    vfunc_set_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void;
}

// St.Button: a single-child actor with pressed/checked state and a "clicked" signal.
export module Button {
    export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Bin.ConstructorProperties<A> {
        [key: string]: any;
        button_mask: ButtonMask;
        buttonMask: ButtonMask;
        checked: boolean;
        label: string;
        pressed: boolean;
        toggle_mode: boolean;
        toggleMode: boolean;
    }
}
export class Button<A extends Clutter.Actor = Clutter.Actor> extends Bin<A> implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable {
    static $gtype: GObject.GType<Button>;

    constructor(properties?: Partial<Button.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?: Partial<Button.ConstructorProperties<A>>, ...args: any[]): void;

    // Properties
    button_mask: ButtonMask;
    buttonMask: ButtonMask;
    checked: boolean;
    label: string;
    pressed: boolean;
    toggle_mode: boolean;
    toggleMode: boolean;

    // Signals
    connect(id: string, callback: (...args: any[]) => any): number;
    connect_after(id: string, callback: (...args: any[]) => any): number;
    emit(id: string, ...args: any[]): void;
    connect(signal: "clicked", callback: (_source: this, clicked_button: number) => void): number;
    connect_after(signal: "clicked", callback: (_source: this, clicked_button: number) => void): number;
    emit(signal: "clicked", clicked_button: number): void;

    // Constructors
    static ["new"](): Button;
    static new_with_label(text: string): Button;

    // Members
    fake_release(): void;
    get_button_mask(): ButtonMask;
    get_checked(): boolean;
    get_label(): string;
    get_toggle_mode(): boolean;
    set_button_mask(mask: ButtonMask): void;
    set_checked(checked: boolean): void;
    set_label(text?: string | null): void;
    set_toggle_mode(toggle: boolean): void;
    vfunc_clicked(clicked_button: number): void;
    vfunc_transition(): void;

    // Implemented Members (Clutter.Animatable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
    // Implemented Members (Clutter.Container<A>)
    add_actor(actor: A): void;
    child_get_property(child: A, property: string, value: any): void;
    child_notify(child: A, pspec: GObject.ParamSpec): void;
    child_set_property(child: A, property: string, value: any): void;
    create_child_meta(actor: A): void;
    destroy_child_meta(actor: A): void;
    find_child_by_name(child_name: string): A;
    get_child_meta(actor: A): Clutter.ChildMeta;
    get_children(): A[];
    get_children(...args: never[]): never; // overload guard: blocks calls with arguments
    lower_child(actor: A, sibling?: A | null): void;
    raise_child(actor: A, sibling?: A | null): void;
    remove_actor(actor: A): void;
    sort_depth_order(): void;
    vfunc_actor_added(actor: A): void;
    vfunc_actor_removed(actor: A): void;
    vfunc_add(actor: A): void;
    vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void;
    vfunc_create_child_meta(actor: A): void;
    vfunc_destroy_child_meta(actor: A): void;
    vfunc_get_child_meta(actor: A): Clutter.ChildMeta;
    vfunc_lower(actor: A, sibling?: A | null): void;
    vfunc_raise(actor: A, sibling?: A | null): void;
    vfunc_remove(actor: A): void;
    vfunc_sort_depth_order(): void;
    // Implemented Members (Clutter.Scriptable)
    get_id(): string;
    parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    set_custom_property(script: Clutter.Script, name: string, value: any): void;
    set_id(id_: string): void;
    vfunc_get_id(): string;
    vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void;
    vfunc_set_id(id_: string): void;
}

// St.Clipboard: access to the PRIMARY selection and CLIPBOARD (see ClipboardType).
export module Clipboard {
    export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
        [key: string]: any;
    }
}
export class Clipboard extends GObject.Object {
    static $gtype: GObject.GType<Clipboard>;

    constructor(properties?: Partial<Clipboard.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<Clipboard.ConstructorProperties>, ...args: any[]): void;

    // Members
    get_content(type: ClipboardType, mimetype: string, callback: ClipboardContentCallbackFunc): void;
    get_mimetypes(type: ClipboardType): string[];
    get_text(type: ClipboardType, callback: ClipboardCallbackFunc): void;
    set_content(type: ClipboardType, mimetype: string, bytes: GLib.Bytes | Uint8Array): void;
    set_text(type: ClipboardType, text: string): void;
    static get_default(): Clipboard;
}

// St.DrawingArea: a widget drawn with cairo via the "repaint" signal.
export module DrawingArea {
    // NOTE(review): type parameter A is declared but unused here — generated-binding quirk, kept as-is.
    export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Widget.ConstructorProperties {
        [key: string]: any;
    }
}
export class DrawingArea<A extends Clutter.Actor = Clutter.Actor> extends Widget implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable {
    static $gtype: GObject.GType<DrawingArea>;

    constructor(properties?: Partial<DrawingArea.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?: Partial<DrawingArea.ConstructorProperties<A>>, ...args: any[]): void;

    // Signals
    connect(id: string, callback: (...args:
// (continuation of DrawingArea.connect — the signature is split at an extraction line boundary)
    any[]) => any): number;
    connect_after(id: string, callback: (...args: any[]) => any): number;
    emit(id: string, ...args: any[]): void;
    connect(signal: "repaint", callback: (_source: this) => void): number;
    connect_after(signal: "repaint", callback: (_source: this) => void): number;
    emit(signal: "repaint"): void;

    // Members
    get_context(): cairo.Context;
    get_surface_size(): [number | null, number | null];
    queue_repaint(): void;
    vfunc_repaint(): void;

    // Implemented Members (Clutter.Animatable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
    // Implemented Members (Clutter.Container<A>)
    add_actor(actor: A): void;
    child_get_property(child: A, property: string, value: any): void;
    child_notify(child: A, pspec: GObject.ParamSpec): void;
    child_set_property(child: A, property: string, value: any): void;
    create_child_meta(actor: A): void;
    destroy_child_meta(actor: A): void;
    find_child_by_name(child_name: string): A;
    get_child_meta(actor: A): Clutter.ChildMeta;
    get_children(): A[];
    get_children(...args: never[]): never; // overload guard: blocks calls with arguments
    lower_child(actor: A, sibling?: A | null): void;
    raise_child(actor: A, sibling?: A | null): void;
    remove_actor(actor: A): void;
    sort_depth_order(): void;
    vfunc_actor_added(actor: A): void;
    vfunc_actor_removed(actor: A): void;
    vfunc_add(actor: A): void;
    vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void;
    vfunc_create_child_meta(actor: A): void;
    vfunc_destroy_child_meta(actor: A): void;
    vfunc_get_child_meta(actor: A): Clutter.ChildMeta;
    vfunc_lower(actor: A, sibling?: A | null): void;
    vfunc_raise(actor: A, sibling?: A | null): void;
    vfunc_remove(actor: A): void;
    vfunc_sort_depth_order(): void;
    // Implemented Members (Clutter.Scriptable)
    get_id(): string;
    parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    set_custom_property(script: Clutter.Script, name: string, value: any): void;
    set_id(id_: string): void;
    vfunc_get_id(): string;
    vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void;
    vfunc_set_id(id_: string): void;
}

// St.Entry: a single-line text entry wrapping a Clutter.Text, with optional
// hint text/actor and primary/secondary icons.
export module Entry {
    export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Widget.ConstructorProperties {
        [key: string]: any;
        clutter_text: Clutter.Text;
        clutterText: Clutter.Text;
        hint_actor: Clutter.Actor;
        hintActor: Clutter.Actor;
        hint_text: string;
        hintText: string;
        input_hints: Clutter.InputContentHintFlags;
        inputHints: Clutter.InputContentHintFlags;
        input_purpose: Clutter.InputContentPurpose;
        inputPurpose: Clutter.InputContentPurpose;
        primary_icon: Clutter.Actor;
        primaryIcon: Clutter.Actor;
        secondary_icon: Clutter.Actor;
        secondaryIcon: Clutter.Actor;
        text: string;
    }
}
export class Entry<A extends Clutter.Actor = Clutter.Actor> extends Widget implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable {
    static $gtype: GObject.GType<Entry>;

    constructor(properties?: Partial<Entry.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?: Partial<Entry.ConstructorProperties<A>>, ...args: any[]): void;

    // Properties
    clutter_text: Clutter.Text;
    clutterText: Clutter.Text;
    hint_actor: Clutter.Actor;
    hintActor: Clutter.Actor;
    hint_text: string;
    hintText: string;
    input_hints: Clutter.InputContentHintFlags;
    inputHints: Clutter.InputContentHintFlags;
    input_purpose: Clutter.InputContentPurpose;
    inputPurpose: Clutter.InputContentPurpose;
    primary_icon: Clutter.Actor;
    primaryIcon: Clutter.Actor;
    secondary_icon: Clutter.Actor;
    secondaryIcon: Clutter.Actor;
    text: string;

    // Signals
    connect(id: string, callback: (...args: any[]) => any): number;
    connect_after(id: string, callback: (...args: any[]) => any): number;
    emit(id: string, ...args: any[]): void;
    connect(signal: "primary-icon-clicked", callback: (_source: this) => void): number;
    connect_after(signal: "primary-icon-clicked", callback: (_source: this) => void): number;
    emit(signal: "primary-icon-clicked"): void;
    connect(signal: "secondary-icon-clicked", callback: (_source: this) => void): number;
    connect_after(signal: "secondary-icon-clicked", callback: (_source: this) => void): number;
    emit(signal: "secondary-icon-clicked"): void;

    // Constructors
    static ["new"](text?: string | null): Entry;
    static ["new"](...args: never[]): never; // overload guard: blocks other argument lists

    // Members
    get_clutter_text(): Clutter.Actor;
    get_hint_actor(): Clutter.Actor | null;
    get_hint_text(): string | null;
    get_input_hints(): Clutter.InputContentHintFlags;
    get_input_purpose(): Clutter.InputContentPurpose;
    get_primary_icon(): Clutter.Actor | null;
    get_secondary_icon(): Clutter.Actor | null;
    get_text(): string;
    set_hint_actor(hint_actor?: Clutter.Actor | null): void;
    set_hint_text(text?: string | null): void;
    set_input_hints(hints: Clutter.InputContentHintFlags): void;
    set_input_purpose(purpose: Clutter.InputContentPurpose): void;
    set_primary_icon(icon?: Clutter.Actor | null): void;
    set_secondary_icon(icon?: Clutter.Actor | null): void;
    set_text(text?: string | null): void;
    vfunc_primary_icon_clicked(): void;
    vfunc_secondary_icon_clicked(): void;

    // Implemented Members (Clutter.Animatable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
vfunc_get_actor(): Clutter.Actor; vfunc_get_initial_state(property_name: string, value: any): void; vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; vfunc_set_final_state(property_name: string, value: any): void; add_actor(actor: A): void; child_get_property(child: A, property: string, value: any): void; child_notify(child: A, pspec: GObject.ParamSpec): void; child_set_property(child: A, property: string, value: any): void; create_child_meta(actor: A): void; destroy_child_meta(actor: A): void; find_child_by_name(child_name: string): A; get_child_meta(actor: A): Clutter.ChildMeta; get_children(): A[]; get_children(...args: never[]): never; lower_child(actor: A, sibling?: A | null): void; raise_child(actor: A, sibling?: A | null): void; remove_actor(actor: A): void; sort_depth_order(): void; vfunc_actor_added(actor: A): void; vfunc_actor_removed(actor: A): void; vfunc_add(actor: A): void; vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void; vfunc_create_child_meta(actor: A): void; vfunc_destroy_child_meta(actor: A): void; vfunc_get_child_meta(actor: A): Clutter.ChildMeta; vfunc_lower(actor: A, sibling?: A | null): void; vfunc_raise(actor: A, sibling?: A | null): void; vfunc_remove(actor: A): void; vfunc_sort_depth_order(): void; get_id(): string; parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; set_custom_property(script: Clutter.Script, name: string, value: any): void; set_id(id_: string): void; vfunc_get_id(): string; vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void; vfunc_set_id(id_: string): void; } export module FocusManager { export interface ConstructorProperties extends GObject.Object.ConstructorProperties { [key: string]: any; } } export class FocusManager extends GObject.Object { static $gtype: 
GObject.GType<FocusManager>;

    constructor(properties?: Partial<FocusManager.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<FocusManager.ConstructorProperties>, ...args: any[]): void;

    // Members
    add_group(root: Widget): void;
    get_group(widget: Widget): Widget;
    navigate_from_event(event: Clutter.Event): boolean;
    remove_group(root: Widget): void;
    static get_for_stage(stage: Clutter.Stage): FocusManager;
}

export module GenericAccessible {
    export interface ConstructorProperties extends WidgetAccessible.ConstructorProperties {
        [key: string]: any;
    }
}

// St.GenericAccessible: accessible object whose ATK value/action hooks are driven
// through signals ("get-current-value", "set-current-value", ...); implements
// Atk.Action, Atk.Component and Atk.Value.
export class GenericAccessible extends WidgetAccessible implements Atk.Action, Atk.Component, Atk.Value {
    static $gtype: GObject.GType<GenericAccessible>;

    constructor(properties?: Partial<GenericAccessible.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<GenericAccessible.ConstructorProperties>, ...args: any[]): void;

    // Fields
    priv: GenericAccessiblePrivate;

    // Signals
    connect(id: string, callback: (...args: any[]) => any): number;
    connect_after(id: string, callback: (...args: any[]) => any): number;
    emit(id: string, ...args: any[]): void;
    connect(signal: "get-current-value", callback: (_source: this) => number): number;
    connect_after(signal: "get-current-value", callback: (_source: this) => number): number;
    emit(signal: "get-current-value"): void;
    connect(signal: "get-maximum-value", callback: (_source: this) => number): number;
    connect_after(signal: "get-maximum-value", callback: (_source: this) => number): number;
    emit(signal: "get-maximum-value"): void;
    connect(signal: "get-minimum-increment", callback: (_source: this) => number): number;
    connect_after(signal: "get-minimum-increment", callback: (_source: this) => number): number;
    emit(signal: "get-minimum-increment"): void;
    connect(signal: "get-minimum-value", callback: (_source: this) => number): number;
    connect_after(signal: "get-minimum-value", callback: (_source: this) => number): number;
    emit(signal: "get-minimum-value"): void;
    connect(signal: "set-current-value", callback: (_source: this, new_value: number) => void): number;
    connect_after(signal: "set-current-value", callback: (_source: this, new_value: number) => void): number;
    emit(signal: "set-current-value", new_value: number): void;

    // Constructors
    static new_for_actor(actor: Clutter.Actor): GenericAccessible;

    // Implemented Members (Atk.Action / Atk.Component / Atk.Value)
    do_action(i: number): boolean;
    get_description(i: number): string | null;
    get_description(...args: never[]): never;
    get_keybinding(i: number): string | null;
    get_localized_name(i: number): string | null;
    get_n_actions(): number;
    get_name(i: number): string | null;
    get_name(...args: never[]): never;
    set_description(i: number, desc: string): boolean;
    set_description(...args: never[]): never;
    vfunc_do_action(i: number): boolean;
    vfunc_get_description(i: number): string | null;
    vfunc_get_description(...args: never[]): never;
    vfunc_get_keybinding(i: number): string | null;
    vfunc_get_localized_name(i: number): string | null;
    vfunc_get_n_actions(): number;
    vfunc_get_name(i: number): string | null;
    vfunc_get_name(...args: never[]): never;
    vfunc_set_description(i: number, desc: string): boolean;
    vfunc_set_description(...args: never[]): never;
    contains(x: number, y: number, coord_type: Atk.CoordType): boolean;
    get_alpha(): number;
    get_extents(coord_type: Atk.CoordType): [number | null, number | null, number | null, number | null];
    get_layer(): Atk.Layer;
    get_mdi_zorder(): number;
    get_position(coord_type: Atk.CoordType): [number | null, number | null];
    get_size(): [number | null, number | null];
    grab_focus(): boolean;
    ref_accessible_at_point(x: number, y: number, coord_type: Atk.CoordType): Atk.Object | null;
    remove_focus_handler(handler_id: number): void;
    scroll_to(type: Atk.ScrollType): boolean;
    scroll_to_point(coords: Atk.CoordType, x: number, y: number): boolean;
    set_extents(x: number, y: number, width: number, height: number, coord_type: Atk.CoordType): boolean;
    set_position(x: number, y: number, coord_type: Atk.CoordType): boolean;
    set_size(width: number, height: number): boolean;
    vfunc_bounds_changed(bounds: Atk.Rectangle): void;
    vfunc_contains(x: number, y: number, coord_type: Atk.CoordType): boolean;
    vfunc_get_alpha(): number;
    vfunc_get_extents(coord_type: Atk.CoordType): [number | null, number | null, number | null, number | null];
    vfunc_get_layer(): Atk.Layer;
    vfunc_get_mdi_zorder(): number;
    vfunc_get_position(coord_type: Atk.CoordType): [number | null, number | null];
    vfunc_get_size(): [number | null, number | null];
    vfunc_grab_focus(): boolean;
    vfunc_ref_accessible_at_point(x: number, y: number, coord_type: Atk.CoordType): Atk.Object | null;
    vfunc_remove_focus_handler(handler_id: number): void;
    vfunc_scroll_to(type: Atk.ScrollType): boolean;
    vfunc_scroll_to_point(coords: Atk.CoordType, x: number, y: number): boolean;
    vfunc_set_extents(x: number, y: number, width: number, height: number, coord_type: Atk.CoordType): boolean;
    vfunc_set_position(x: number, y: number, coord_type: Atk.CoordType): boolean;
    vfunc_set_size(width: number, height: number): boolean;
    get_current_value(): unknown;
    get_increment(): number;
    get_maximum_value(): unknown;
    get_minimum_increment(): unknown;
    get_minimum_value(): unknown;
    get_range(): Atk.Range | null;
    get_sub_ranges(): Atk.Range[];
    get_value_and_text(): [number, string | null];
    set_current_value(value: any): boolean;
    set_value(new_value: number): void;
    vfunc_get_current_value(): unknown;
    vfunc_get_increment(): number;
    vfunc_get_maximum_value(): unknown;
    vfunc_get_minimum_increment(): unknown;
    vfunc_get_minimum_value(): unknown;
    vfunc_get_range(): Atk.Range | null;
    vfunc_get_sub_ranges(): Atk.Range[];
    vfunc_get_value_and_text(): [number, string | null];
    vfunc_set_current_value(value: any): boolean;
    vfunc_set_value(new_value: number): void;
}

export module Icon {
    export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Widget.ConstructorProperties {
        [key: string]: any;
        fallback_gicon:
Gio.Icon;
        fallbackGicon: Gio.Icon;
        fallback_icon_name: string;
        fallbackIconName: string;
        gicon: Gio.Icon;
        icon_name: string;
        iconName: string;
        icon_size: number;
        iconSize: number;
    }
}

// St.Icon: widget displaying a Gio.Icon (or named icon) with a fallback icon and
// a pixel size; snake_case and camelCase property aliases are both declared.
export class Icon<A extends Clutter.Actor = Clutter.Actor> extends Widget implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable {
    static $gtype: GObject.GType<Icon>;

    constructor(properties?: Partial<Icon.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?: Partial<Icon.ConstructorProperties<A>>, ...args: any[]): void;

    // Properties
    fallback_gicon: Gio.Icon;
    fallbackGicon: Gio.Icon;
    fallback_icon_name: string;
    fallbackIconName: string;
    gicon: Gio.Icon;
    icon_name: string;
    iconName: string;
    icon_size: number;
    iconSize: number;

    // Constructors
    static ["new"](): Icon;

    // Members
    get_fallback_gicon(): Gio.Icon;
    get_fallback_icon_name(): string;
    get_gicon(): Gio.Icon | null;
    get_icon_name(): string | null;
    get_icon_size(): number;
    set_fallback_gicon(fallback_gicon?: Gio.Icon | null): void;
    set_fallback_icon_name(fallback_icon_name?: string | null): void;
    set_gicon(gicon?: Gio.Icon | null): void;
    set_icon_name(icon_name?: string | null): void;
    set_icon_size(size: number): void;

    // Implemented Members (Clutter.Animatable / Clutter.Container / Clutter.Scriptable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
    add_actor(actor: A): void;
    child_get_property(child: A, property: string, value: any): void;
    child_notify(child: A, pspec: GObject.ParamSpec): void;
    child_set_property(child: A, property: string, value: any): void;
    create_child_meta(actor: A): void;
    destroy_child_meta(actor: A): void;
    find_child_by_name(child_name: string): A;
    get_child_meta(actor: A): Clutter.ChildMeta;
    get_children(): A[];
    get_children(...args: never[]): never;
    lower_child(actor: A, sibling?: A | null): void;
    raise_child(actor: A, sibling?: A | null): void;
    remove_actor(actor: A): void;
    sort_depth_order(): void;
    vfunc_actor_added(actor: A): void;
    vfunc_actor_removed(actor: A): void;
    vfunc_add(actor: A): void;
    vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void;
    vfunc_create_child_meta(actor: A): void;
    vfunc_destroy_child_meta(actor: A): void;
    vfunc_get_child_meta(actor: A): Clutter.ChildMeta;
    vfunc_lower(actor: A, sibling?: A | null): void;
    vfunc_raise(actor: A, sibling?: A | null): void;
    vfunc_remove(actor: A): void;
    vfunc_sort_depth_order(): void;
    get_id(): string;
    parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    set_custom_property(script: Clutter.Script, name: string, value: any): void;
    set_id(id_: string): void;
    vfunc_get_id(): string;
    vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void;
    vfunc_set_id(id_: string): void;
}

export module ImageContent {
    export interface ConstructorProperties extends Clutter.Image.ConstructorProperties {
        [key: string]: any;
        preferred_height: number;
        preferredHeight: number;
        preferred_width: number;
        preferredWidth: number;
    }
}

// St.ImageContent: Clutter.Image subclass with a preferred size; also implements
// Gio.Icon and Gio.LoadableIcon (sync/async load variants are declared as overloads).
export class ImageContent extends Clutter.Image implements Clutter.Content, Gio.Icon, Gio.LoadableIcon {
    static $gtype: GObject.GType<ImageContent>;

    constructor(properties?: Partial<ImageContent.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<ImageContent.ConstructorProperties>, ...args: any[]): void;

    // Properties
    preferred_height: number;
    preferredHeight: number;
    preferred_width: number;
    preferredWidth: number;

    // Members
    static new_with_preferred_size(width: number, height: number): Clutter.Content;

    // Implemented Members (Clutter.Content / Gio.Icon / Gio.LoadableIcon)
    get_preferred_size(): [boolean, number, number];
    invalidate(): void;
    invalidate_size(): void;
    vfunc_attached(actor: Clutter.Actor): void;
    vfunc_detached(actor: Clutter.Actor): void;
    vfunc_get_preferred_size(): [boolean, number, number];
    vfunc_invalidate(): void;
    vfunc_invalidate_size(): void;
    vfunc_paint_content(actor: Clutter.Actor, node: Clutter.PaintNode, paint_context: Clutter.PaintContext): void;
    equal(icon2?: Gio.Icon | null): boolean;
    serialize(): GLib.Variant;
    to_string(): string | null;
    vfunc_equal(icon2?: Gio.Icon | null): boolean;
    vfunc_hash(): number;
    vfunc_serialize(): GLib.Variant;
    load(size: number, cancellable?: Gio.Cancellable | null): [Gio.InputStream, string | null];
    load_async(size: number, cancellable?: Gio.Cancellable | null): Promise<[Gio.InputStream, string | null]>;
    load_async(size: number, cancellable: Gio.Cancellable | null, callback: Gio.AsyncReadyCallback<this> | null): void;
    load_async(
        size: number,
        cancellable?: Gio.Cancellable | null,
        callback?: Gio.AsyncReadyCallback<this> | null
    ): Promise<[Gio.InputStream, string | null]> | void;
    load_finish(res: Gio.AsyncResult): [Gio.InputStream, string | null];
    vfunc_load(size: number, cancellable?: Gio.Cancellable | null): [Gio.InputStream, string | null];
    vfunc_load_async(size: number, cancellable?: Gio.Cancellable | null): Promise<[Gio.InputStream, string | null]>;
    vfunc_load_async(
        size: number,
        cancellable: Gio.Cancellable | null,
        callback: Gio.AsyncReadyCallback<this> | null
    ): void;
    vfunc_load_async(
        size: number,
        cancellable?: Gio.Cancellable | null,
        callback?: Gio.AsyncReadyCallback<this> | null
    ): Promise<[Gio.InputStream, string | null]> | void;
    vfunc_load_finish(res: Gio.AsyncResult): [Gio.InputStream, string | null];
}

export module Label { export interface
ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Widget.ConstructorProperties { [key: string]: any; clutter_text: Clutter.Text; clutterText: Clutter.Text; text: string; } } export class Label<A extends Clutter.Actor = Clutter.Actor> extends Widget implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable { static $gtype: GObject.GType<Label>; constructor(properties?: Partial<Label.ConstructorProperties<A>>, ...args: any[]); _init(properties?: Partial<Label.ConstructorProperties<A>>, ...args: any[]): void; // Properties clutter_text: Clutter.Text; clutterText: Clutter.Text; text: string; // Constructors static ["new"](text?: string | null): Label; static ["new"](...args: never[]): never; // Members get_clutter_text(): Clutter.Actor; get_text(): string; set_text(text?: string | null): void; // Implemented Members find_property(property_name: string): GObject.ParamSpec; get_actor(): Clutter.Actor; get_initial_state(property_name: string, value: any): void; interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; set_final_state(property_name: string, value: any): void; vfunc_find_property(property_name: string): GObject.ParamSpec; vfunc_get_actor(): Clutter.Actor; vfunc_get_initial_state(property_name: string, value: any): void; vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; vfunc_set_final_state(property_name: string, value: any): void; add_actor(actor: A): void; child_get_property(child: A, property: string, value: any): void; child_notify(child: A, pspec: GObject.ParamSpec): void; child_set_property(child: A, property: string, value: any): void; create_child_meta(actor: A): void; destroy_child_meta(actor: A): void; find_child_by_name(child_name: string): A; get_child_meta(actor: A): Clutter.ChildMeta; get_children(): A[]; get_children(...args: never[]): never; lower_child(actor: A, sibling?: 
A | null): void; raise_child(actor: A, sibling?: A | null): void; remove_actor(actor: A): void; sort_depth_order(): void; vfunc_actor_added(actor: A): void; vfunc_actor_removed(actor: A): void; vfunc_add(actor: A): void; vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void; vfunc_create_child_meta(actor: A): void; vfunc_destroy_child_meta(actor: A): void; vfunc_get_child_meta(actor: A): Clutter.ChildMeta; vfunc_lower(actor: A, sibling?: A | null): void; vfunc_raise(actor: A, sibling?: A | null): void; vfunc_remove(actor: A): void; vfunc_sort_depth_order(): void; get_id(): string; parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; set_custom_property(script: Clutter.Script, name: string, value: any): void; set_id(id_: string): void; vfunc_get_id(): string; vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void; vfunc_set_id(id_: string): void; } export module PasswordEntry { export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Entry.ConstructorProperties<A> { [key: string]: any; password_visible: boolean; passwordVisible: boolean; show_peek_icon: boolean; showPeekIcon: boolean; } } export class PasswordEntry<A extends Clutter.Actor = Clutter.Actor> extends Entry<A> implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable { static $gtype: GObject.GType<PasswordEntry>; constructor(properties?: Partial<PasswordEntry.ConstructorProperties<A>>, ...args: any[]); _init(properties?: Partial<PasswordEntry.ConstructorProperties<A>>, ...args: any[]): void; // Properties password_visible: boolean; passwordVisible: boolean; show_peek_icon: boolean; showPeekIcon: boolean; // Constructors static ["new"](): PasswordEntry; static ["new"](...args: never[]): never; // Members get_password_visible(): boolean; get_show_peek_icon(): 
boolean; set_password_visible(value: boolean): void; set_show_peek_icon(value: boolean): void; // Implemented Members find_property(property_name: string): GObject.ParamSpec; get_actor(): Clutter.Actor; get_initial_state(property_name: string, value: any): void; interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; set_final_state(property_name: string, value: any): void; vfunc_find_property(property_name: string): GObject.ParamSpec; vfunc_get_actor(): Clutter.Actor; vfunc_get_initial_state(property_name: string, value: any): void; vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; vfunc_set_final_state(property_name: string, value: any): void; add_actor(actor: A): void; child_get_property(child: A, property: string, value: any): void; child_notify(child: A, pspec: GObject.ParamSpec): void; child_set_property(child: A, property: string, value: any): void; create_child_meta(actor: A): void; destroy_child_meta(actor: A): void; find_child_by_name(child_name: string): A; get_child_meta(actor: A): Clutter.ChildMeta; get_children(): A[]; get_children(...args: never[]): never; lower_child(actor: A, sibling?: A | null): void; raise_child(actor: A, sibling?: A | null): void; remove_actor(actor: A): void; sort_depth_order(): void; vfunc_actor_added(actor: A): void; vfunc_actor_removed(actor: A): void; vfunc_add(actor: A): void; vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void; vfunc_create_child_meta(actor: A): void; vfunc_destroy_child_meta(actor: A): void; vfunc_get_child_meta(actor: A): Clutter.ChildMeta; vfunc_lower(actor: A, sibling?: A | null): void; vfunc_raise(actor: A, sibling?: A | null): void; vfunc_remove(actor: A): void; vfunc_sort_depth_order(): void; get_id(): string; parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; set_custom_property(script: Clutter.Script, name: string, value: any): 
void; set_id(id_: string): void; vfunc_get_id(): string; vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void; vfunc_set_id(id_: string): void; } export module ScrollBar { export interface ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Widget.ConstructorProperties { [key: string]: any; adjustment: Adjustment; vertical: boolean; } } export class ScrollBar<A extends Clutter.Actor = Clutter.Actor> extends Widget implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable { static $gtype: GObject.GType<ScrollBar>; constructor(properties?: Partial<ScrollBar.ConstructorProperties<A>>, ...args: any[]); _init(properties?: Partial<ScrollBar.ConstructorProperties<A>>, ...args: any[]): void; // Properties adjustment: Adjustment; vertical: boolean; // Signals connect(id: string, callback: (...args: any[]) => any): number; connect_after(id: string, callback: (...args: any[]) => any): number; emit(id: string, ...args: any[]): void; connect(signal: "scroll-start", callback: (_source: this) => void): number; connect_after(signal: "scroll-start", callback: (_source: this) => void): number; emit(signal: "scroll-start"): void; connect(signal: "scroll-stop", callback: (_source: this) => void): number; connect_after(signal: "scroll-stop", callback: (_source: this) => void): number; emit(signal: "scroll-stop"): void; // Constructors static ["new"](adjustment: Adjustment): ScrollBar; static ["new"](...args: never[]): never; // Members get_adjustment(): Adjustment; set_adjustment(adjustment: Adjustment): void; vfunc_scroll_start(): void; vfunc_scroll_stop(): void; // Implemented Members find_property(property_name: string): GObject.ParamSpec; get_actor(): Clutter.Actor; get_initial_state(property_name: string, value: any): void; interpolate_value(property_name: string, interval: Clutter.Interval, 
progress: number): [boolean, unknown]; set_final_state(property_name: string, value: any): void; vfunc_find_property(property_name: string): GObject.ParamSpec; vfunc_get_actor(): Clutter.Actor; vfunc_get_initial_state(property_name: string, value: any): void; vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; vfunc_set_final_state(property_name: string, value: any): void; add_actor(actor: A): void; child_get_property(child: A, property: string, value: any): void; child_notify(child: A, pspec: GObject.ParamSpec): void; child_set_property(child: A, property: string, value: any): void; create_child_meta(actor: A): void; destroy_child_meta(actor: A): void; find_child_by_name(child_name: string): A; get_child_meta(actor: A): Clutter.ChildMeta; get_children(): A[]; get_children(...args: never[]): never; lower_child(actor: A, sibling?: A | null): void; raise_child(actor: A, sibling?: A | null): void; remove_actor(actor: A): void; sort_depth_order(): void; vfunc_actor_added(actor: A): void; vfunc_actor_removed(actor: A): void; vfunc_add(actor: A): void; vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void; vfunc_create_child_meta(actor: A): void; vfunc_destroy_child_meta(actor: A): void; vfunc_get_child_meta(actor: A): Clutter.ChildMeta; vfunc_lower(actor: A, sibling?: A | null): void; vfunc_raise(actor: A, sibling?: A | null): void; vfunc_remove(actor: A): void; vfunc_sort_depth_order(): void; get_id(): string; parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; set_custom_property(script: Clutter.Script, name: string, value: any): void; set_id(id_: string): void; vfunc_get_id(): string; vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void; vfunc_set_id(id_: string): void; } export module ScrollView { export interface 
ConstructorProperties<A extends Clutter.Actor = Clutter.Actor> extends Bin.ConstructorProperties<A> {
        [key: string]: any;
        enable_mouse_scrolling: boolean;
        enableMouseScrolling: boolean;
        hscroll: ScrollBar;
        hscrollbar_policy: PolicyType;
        hscrollbarPolicy: PolicyType;
        hscrollbar_visible: boolean;
        hscrollbarVisible: boolean;
        overlay_scrollbars: boolean;
        overlayScrollbars: boolean;
        vscroll: ScrollBar;
        vscrollbar_policy: PolicyType;
        vscrollbarPolicy: PolicyType;
        vscrollbar_visible: boolean;
        vscrollbarVisible: boolean;
    }
}

// St.ScrollView: Bin subclass with horizontal/vertical scrollbars, per-axis
// policies, optional overlay scrollbars and edge-fade support.
export class ScrollView<A extends Clutter.Actor = Clutter.Actor> extends Bin<A> implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<A>, Clutter.Scriptable {
    static $gtype: GObject.GType<ScrollView>;

    constructor(properties?: Partial<ScrollView.ConstructorProperties<A>>, ...args: any[]);
    _init(properties?: Partial<ScrollView.ConstructorProperties<A>>, ...args: any[]): void;

    // Properties
    enable_mouse_scrolling: boolean;
    enableMouseScrolling: boolean;
    hscroll: ScrollBar;
    hscrollbar_policy: PolicyType;
    hscrollbarPolicy: PolicyType;
    hscrollbar_visible: boolean;
    hscrollbarVisible: boolean;
    overlay_scrollbars: boolean;
    overlayScrollbars: boolean;
    vscroll: ScrollBar;
    vscrollbar_policy: PolicyType;
    vscrollbarPolicy: PolicyType;
    vscrollbar_visible: boolean;
    vscrollbarVisible: boolean;

    // Constructors
    static ["new"](): ScrollView;

    // Members
    get_column_size(): number;
    get_hscroll_bar(): A;
    get_mouse_scrolling(): boolean;
    get_overlay_scrollbars(): boolean;
    get_row_size(): number;
    get_vscroll_bar(): A;
    set_column_size(column_size: number): void;
    set_mouse_scrolling(enabled: boolean): void;
    set_overlay_scrollbars(enabled: boolean): void;
    set_policy(hscroll: PolicyType, vscroll: PolicyType): void;
    set_row_size(row_size: number): void;
    update_fade_effect(vfade_offset: number, hfade_offset: number): void;

    // Implemented Members (Clutter.Animatable / Clutter.Container / Clutter.Scriptable)
    find_property(property_name: string): GObject.ParamSpec;
    get_actor(): Clutter.Actor;
    get_initial_state(property_name: string, value: any): void;
    interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    set_final_state(property_name: string, value: any): void;
    vfunc_find_property(property_name: string): GObject.ParamSpec;
    vfunc_get_actor(): Clutter.Actor;
    vfunc_get_initial_state(property_name: string, value: any): void;
    vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown];
    vfunc_set_final_state(property_name: string, value: any): void;
    add_actor(actor: A): void;
    child_get_property(child: A, property: string, value: any): void;
    child_notify(child: A, pspec: GObject.ParamSpec): void;
    child_set_property(child: A, property: string, value: any): void;
    create_child_meta(actor: A): void;
    destroy_child_meta(actor: A): void;
    find_child_by_name(child_name: string): A;
    get_child_meta(actor: A): Clutter.ChildMeta;
    get_children(): A[];
    get_children(...args: never[]): never;
    lower_child(actor: A, sibling?: A | null): void;
    raise_child(actor: A, sibling?: A | null): void;
    remove_actor(actor: A): void;
    sort_depth_order(): void;
    vfunc_actor_added(actor: A): void;
    vfunc_actor_removed(actor: A): void;
    vfunc_add(actor: A): void;
    vfunc_child_notify(child: A, pspec: GObject.ParamSpec): void;
    vfunc_create_child_meta(actor: A): void;
    vfunc_destroy_child_meta(actor: A): void;
    vfunc_get_child_meta(actor: A): Clutter.ChildMeta;
    vfunc_lower(actor: A, sibling?: A | null): void;
    vfunc_raise(actor: A, sibling?: A | null): void;
    vfunc_remove(actor: A): void;
    vfunc_sort_depth_order(): void;
    get_id(): string;
    parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    set_custom_property(script: Clutter.Script, name: string, value: any): void;
    set_id(id_: string): void;
    vfunc_get_id(): string;
    vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean;
    vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void;
    vfunc_set_id(id_: string): void;
}

export module ScrollViewFade {
    export interface ConstructorProperties extends Clutter.ShaderEffect.ConstructorProperties {
        [key: string]: any;
        fade_edges: boolean;
        fadeEdges: boolean;
        hfade_offset: number;
        hfadeOffset: number;
        vfade_offset: number;
        vfadeOffset: number;
    }
}

// St.ScrollViewFade: shader effect providing the edge fade used by ScrollView.
export class ScrollViewFade extends Clutter.ShaderEffect {
    static $gtype: GObject.GType<ScrollViewFade>;

    constructor(properties?: Partial<ScrollViewFade.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<ScrollViewFade.ConstructorProperties>, ...args: any[]): void;

    // Properties
    fade_edges: boolean;
    fadeEdges: boolean;
    hfade_offset: number;
    hfadeOffset: number;
    vfade_offset: number;
    vfadeOffset: number;

    // Constructors
    static ["new"](): ScrollViewFade;
    static ["new"](...args: never[]): never;
}

export module Settings {
    export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
        [key: string]: any;
        drag_threshold: number;
        dragThreshold: number;
        enable_animations: boolean;
        enableAnimations: boolean;
        font_name: string;
        fontName: string;
        gtk_icon_theme: string;
        gtkIconTheme: string;
        gtk_theme: string;
        gtkTheme: string;
        magnifier_active: boolean;
        magnifierActive: boolean;
        primary_paste: boolean;
        primaryPaste: boolean;
        slow_down_factor: number;
        slowDownFactor: number;
    }
}

// St.Settings: singleton (see static get()) exposing toolkit-wide settings such
// as font, themes, animation enablement and drag threshold.
export class Settings extends GObject.Object {
    static $gtype: GObject.GType<Settings>;

    constructor(properties?: Partial<Settings.ConstructorProperties>, ...args: any[]);
    _init(properties?: Partial<Settings.ConstructorProperties>, ...args: any[]): void;

    // Properties
    drag_threshold: number;
    dragThreshold: number;
    enable_animations: boolean;
    enableAnimations: boolean;
    font_name: string;
    fontName: string;
    gtk_icon_theme: string;
    gtkIconTheme: string;
    gtk_theme: string;
    gtkTheme: string;
    magnifier_active: boolean;
    magnifierActive: boolean;
    primary_paste: boolean;
    primaryPaste: boolean;
    slow_down_factor: number;
    slowDownFactor: number;

    // Members
inhibit_animations(): void; uninhibit_animations(): void; static get(): Settings; } export module TextureCache { export interface ConstructorProperties extends GObject.Object.ConstructorProperties { [key: string]: any; } } export class TextureCache extends GObject.Object { static $gtype: GObject.GType<TextureCache>; constructor(properties?: Partial<TextureCache.ConstructorProperties>, ...args: any[]); _init(properties?: Partial<TextureCache.ConstructorProperties>, ...args: any[]): void; // Fields priv: TextureCachePrivate; // Signals connect(id: string, callback: (...args: any[]) => any): number; connect_after(id: string, callback: (...args: any[]) => any): number; emit(id: string, ...args: any[]): void; connect(signal: "icon-theme-changed", callback: (_source: this) => void): number; connect_after(signal: "icon-theme-changed", callback: (_source: this) => void): number; emit(signal: "icon-theme-changed"): void; connect(signal: "texture-file-changed", callback: (_source: this, file: Gio.File) => void): number; connect_after(signal: "texture-file-changed", callback: (_source: this, file: Gio.File) => void): number; emit(signal: "texture-file-changed", file: Gio.File): void; // Members bind_cairo_surface_property(object: GObject.Object, property_name: string): Gio.Icon; load_file_async( file: Gio.File, available_width: number, available_height: number, paint_scale: number, resource_scale: number ): Clutter.Actor; load_file_to_cairo_surface(file: Gio.File, paint_scale: number, resource_scale: number): cairo.Surface; load_gicon( theme_node: ThemeNode | null, icon: Gio.Icon, size: number, paint_scale: number, resource_scale: number ): Clutter.Actor | null; load_sliced_image( file: Gio.File, grid_width: number, grid_height: number, paint_scale: number, resource_scale: number, load_callback?: GLib.Func | null ): Clutter.Actor; rescan_icon_theme(): boolean; static get_default(): TextureCache; } export module Theme { export interface ConstructorProperties extends 
GObject.Object.ConstructorProperties { [key: string]: any; application_stylesheet: Gio.File; applicationStylesheet: Gio.File; default_stylesheet: Gio.File; defaultStylesheet: Gio.File; theme_stylesheet: Gio.File; themeStylesheet: Gio.File; } } export class Theme extends GObject.Object { static $gtype: GObject.GType<Theme>; constructor(properties?: Partial<Theme.ConstructorProperties>, ...args: any[]); _init(properties?: Partial<Theme.ConstructorProperties>, ...args: any[]): void; // Properties application_stylesheet: Gio.File; applicationStylesheet: Gio.File; default_stylesheet: Gio.File; defaultStylesheet: Gio.File; theme_stylesheet: Gio.File; themeStylesheet: Gio.File; // Signals connect(id: string, callback: (...args: any[]) => any): number; connect_after(id: string, callback: (...args: any[]) => any): number; emit(id: string, ...args: any[]): void; connect(signal: "custom-stylesheets-changed", callback: (_source: this) => void): number; connect_after(signal: "custom-stylesheets-changed", callback: (_source: this) => void): number; emit(signal: "custom-stylesheets-changed"): void; // Constructors static ["new"](application_stylesheet: Gio.File, theme_stylesheet: Gio.File, default_stylesheet: Gio.File): Theme; // Members get_custom_stylesheets(): Gio.File[]; load_stylesheet(file: Gio.File): boolean; unload_stylesheet(file: Gio.File): void; } export module ThemeContext { export interface ConstructorProperties extends GObject.Object.ConstructorProperties { [key: string]: any; scale_factor: number; scaleFactor: number; } } export class ThemeContext extends GObject.Object { static $gtype: GObject.GType<ThemeContext>; constructor(properties?: Partial<ThemeContext.ConstructorProperties>, ...args: any[]); _init(properties?: Partial<ThemeContext.ConstructorProperties>, ...args: any[]): void; // Properties scale_factor: number; scaleFactor: number; // Signals connect(id: string, callback: (...args: any[]) => any): number; connect_after(id: string, callback: (...args: 
any[]) => any): number; emit(id: string, ...args: any[]): void; connect(signal: "changed", callback: (_source: this) => void): number; connect_after(signal: "changed", callback: (_source: this) => void): number; emit(signal: "changed"): void; // Constructors static ["new"](): ThemeContext; // Members get_font(): Pango.FontDescription; get_root_node(): ThemeNode; get_scale_factor(): number; get_theme(): Theme; intern_node(node: ThemeNode): ThemeNode; set_font(font: Pango.FontDescription): void; set_theme(theme: Theme): void; static get_for_stage(stage: Clutter.Stage): ThemeContext; } export module ThemeNode { export interface ConstructorProperties extends GObject.Object.ConstructorProperties { [key: string]: any; } } export class ThemeNode extends GObject.Object { static $gtype: GObject.GType<ThemeNode>; constructor(properties?: Partial<ThemeNode.ConstructorProperties>, ...args: any[]); _init(properties?: Partial<ThemeNode.ConstructorProperties>, ...args: any[]): void; // Constructors static ["new"]( context: ThemeContext, parent_node: ThemeNode | null, theme: Theme | null, element_type: GObject.GType, element_id: string | null, element_class: string | null, pseudo_class: string | null, inline_style: string ): ThemeNode; // Members adjust_for_height(for_height: number): number; adjust_for_width(for_width: number): number; adjust_preferred_height(min_height_p: number | null, natural_height_p: number): [number | null, number]; adjust_preferred_width(min_width_p: number | null, natural_width_p: number): [number | null, number]; equal(node_b: ThemeNode): boolean; geometry_equal(other: ThemeNode): boolean; get_background_color(): Clutter.Color; get_background_gradient(): [GradientType, Clutter.Color, Clutter.Color]; get_background_image(): Gio.File; get_background_image_shadow(): Shadow | null; get_background_paint_box(allocation: Clutter.ActorBox): Clutter.ActorBox; get_border_color(side: Side): Clutter.Color; get_border_image(): BorderImage; get_border_radius(corner: 
Corner): number; get_border_width(side: Side): number; get_box_shadow(): Shadow | null; get_color(property_name: string): Clutter.Color; get_content_box(allocation: Clutter.ActorBox): Clutter.ActorBox; get_double(property_name: string): number; get_element_classes(): string[]; get_element_id(): string; get_element_type(): GObject.GType; get_font(): Pango.FontDescription; get_font_features(): string; get_foreground_color(): Clutter.Color; get_height(): number; get_horizontal_padding(): number; get_icon_colors(): IconColors; get_icon_style(): IconStyle; get_length(property_name: string): number; get_letter_spacing(): number; get_margin(side: Side): number; get_max_height(): number; get_max_width(): number; get_min_height(): number; get_min_width(): number; get_outline_color(): Clutter.Color; get_outline_width(): number; get_padding(side: Side): number; get_paint_box(allocation: Clutter.ActorBox): Clutter.ActorBox; get_parent(): ThemeNode | null; get_pseudo_classes(): string[]; get_shadow(property_name: string): Shadow | null; get_text_align(): TextAlign; get_text_decoration(): TextDecoration; get_text_shadow(): Shadow | null; get_theme(): Theme; get_transition_duration(): number; get_url(property_name: string): Gio.File | null; get_vertical_padding(): number; get_width(): number; hash(): number; invalidate_background_image(): void; invalidate_border_image(): void; lookup_color(property_name: string, inherit: boolean): [boolean, Clutter.Color]; lookup_double(property_name: string, inherit: boolean): [boolean, number]; lookup_length(property_name: string, inherit: boolean): [boolean, number]; lookup_shadow(property_name: string, inherit: boolean): [boolean, Shadow]; lookup_time(property_name: string, inherit: boolean): [boolean, number]; lookup_url(property_name: string, inherit: boolean): [boolean, Gio.File]; paint_equal(other?: ThemeNode | null): boolean; to_string(): string; } export module Viewport { export interface ConstructorProperties< A = 
Clutter.LayoutManager, B = Clutter.Content, C extends Clutter.Actor = Clutter.Actor > extends Widget.ConstructorProperties<A, B> { [key: string]: any; } } export class Viewport<A = Clutter.LayoutManager, B = Clutter.Content, C extends Clutter.Actor = Clutter.Actor> extends Widget<A, B> implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<C>, Clutter.Scriptable, Scrollable { static $gtype: GObject.GType<Viewport>; constructor(properties?: Partial<Viewport.ConstructorProperties<A, B, C>>, ...args: any[]); _init(properties?: Partial<Viewport.ConstructorProperties<A, B, C>>, ...args: any[]): void; // Implemented Properties hadjustment: Adjustment; vadjustment: Adjustment; // Implemented Members find_property(property_name: string): GObject.ParamSpec; get_actor(): Clutter.Actor; get_initial_state(property_name: string, value: any): void; interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; set_final_state(property_name: string, value: any): void; vfunc_find_property(property_name: string): GObject.ParamSpec; vfunc_get_actor(): Clutter.Actor; vfunc_get_initial_state(property_name: string, value: any): void; vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; vfunc_set_final_state(property_name: string, value: any): void; add_actor(actor: C): void; child_get_property(child: C, property: string, value: any): void; child_notify(child: C, pspec: GObject.ParamSpec): void; child_set_property(child: C, property: string, value: any): void; create_child_meta(actor: C): void; destroy_child_meta(actor: C): void; find_child_by_name(child_name: string): C; get_child_meta(actor: C): Clutter.ChildMeta; get_children(): C[]; get_children(...args: never[]): never; lower_child(actor: C, sibling?: C | null): void; raise_child(actor: C, sibling?: C | null): void; remove_actor(actor: C): void; sort_depth_order(): void; vfunc_actor_added(actor: C): void; 
vfunc_actor_removed(actor: C): void; vfunc_add(actor: C): void; vfunc_child_notify(child: C, pspec: GObject.ParamSpec): void; vfunc_create_child_meta(actor: C): void; vfunc_destroy_child_meta(actor: C): void; vfunc_get_child_meta(actor: C): Clutter.ChildMeta; vfunc_lower(actor: C, sibling?: C | null): void; vfunc_raise(actor: C, sibling?: C | null): void; vfunc_remove(actor: C): void; vfunc_sort_depth_order(): void; get_id(): string; parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; set_custom_property(script: Clutter.Script, name: string, value: any): void; set_id(id_: string): void; vfunc_get_id(): string; vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void; vfunc_set_id(id_: string): void; get_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; set_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; vfunc_get_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; vfunc_set_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; } export module Widget { export interface ConstructorProperties< A = Clutter.LayoutManager, B = Clutter.Content, C extends Clutter.Actor = Clutter.Actor > extends Clutter.Actor.ConstructorProperties<A, B> { [key: string]: any; accessible_name: string; accessibleName: string; accessible_role: Atk.Role; accessibleRole: Atk.Role; can_focus: boolean; canFocus: boolean; hover: boolean; label_actor: Clutter.Actor; labelActor: Clutter.Actor; pseudo_class: string; pseudoClass: string; style: string; style_class: string; styleClass: string; track_hover: boolean; trackHover: boolean; } } export class Widget<A = Clutter.LayoutManager, B = Clutter.Content, C extends Clutter.Actor = Clutter.Actor> extends Clutter.Actor<A, B> implements Atk.ImplementorIface, Clutter.Animatable, Clutter.Container<C>, Clutter.Scriptable { 
static $gtype: GObject.GType<Widget>; constructor(properties?: Partial<Widget.ConstructorProperties<A, B, C>>, ...args: any[]); _init(properties?: Partial<Widget.ConstructorProperties<A, B, C>>, ...args: any[]): void; // Properties accessible_name: string; accessibleName: string; accessible_role: Atk.Role; accessibleRole: Atk.Role; can_focus: boolean; canFocus: boolean; hover: boolean; label_actor: Clutter.Actor; labelActor: Clutter.Actor; pseudo_class: string; pseudoClass: string; style: string; style_class: string; styleClass: string; track_hover: boolean; trackHover: boolean; // Signals connect(id: string, callback: (...args: any[]) => any): number; connect_after(id: string, callback: (...args: any[]) => any): number; emit(id: string, ...args: any[]): void; connect(signal: "popup-menu", callback: (_source: this) => void): number; connect_after(signal: "popup-menu", callback: (_source: this) => void): number; emit(signal: "popup-menu"): void; connect(signal: "style-changed", callback: (_source: this) => void): number; connect_after(signal: "style-changed", callback: (_source: this) => void): number; emit(signal: "style-changed"): void; // Members add_accessible_state(state: Atk.StateType): void; add_style_class_name(style_class: string): void; add_style_pseudo_class(pseudo_class: string): void; ensure_style(): void; get_accessible_name(): string; get_accessible_role(): Atk.Role; get_can_focus(): boolean; get_focus_chain(): Clutter.Actor[]; get_hover(): boolean; get_label_actor(): Clutter.Actor; get_style(): string | null; get_style_class_name(): string; get_style_pseudo_class(): string; get_theme_node(): ThemeNode; get_track_hover(): boolean; has_style_class_name(style_class: string): boolean; has_style_pseudo_class(pseudo_class: string): boolean; navigate_focus(from: Clutter.Actor | null, direction: DirectionType, wrap_around: boolean): boolean; paint_background(paint_context: Clutter.PaintContext): void; peek_theme_node(): ThemeNode; popup_menu(): void; 
remove_accessible_state(state: Atk.StateType): void; remove_style_class_name(style_class: string): void; remove_style_pseudo_class(pseudo_class: string): void; set_accessible(accessible: Atk.Object): void; set_accessible_name(name?: string | null): void; set_accessible_role(role: Atk.Role): void; set_can_focus(can_focus: boolean): void; set_hover(hover: boolean): void; set_label_actor(label: Clutter.Actor): void; set_style(style?: string | null): void; set_style_class_name(style_class_list?: string | null): void; set_style_pseudo_class(pseudo_class_list?: string | null): void; set_track_hover(track_hover: boolean): void; style_changed(): void; sync_hover(): void; vfunc_get_focus_chain(): Clutter.Actor[]; vfunc_navigate_focus(from: Clutter.Actor | null, direction: DirectionType): boolean; vfunc_popup_menu(): void; vfunc_style_changed(): void; // Implemented Members find_property(property_name: string): GObject.ParamSpec; get_actor(): Clutter.Actor; get_initial_state(property_name: string, value: any): void; interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; set_final_state(property_name: string, value: any): void; vfunc_find_property(property_name: string): GObject.ParamSpec; vfunc_get_actor(): Clutter.Actor; vfunc_get_initial_state(property_name: string, value: any): void; vfunc_interpolate_value(property_name: string, interval: Clutter.Interval, progress: number): [boolean, unknown]; vfunc_set_final_state(property_name: string, value: any): void; add_actor(actor: C): void; child_get_property(child: C, property: string, value: any): void; child_notify(child: C, pspec: GObject.ParamSpec): void; child_set_property(child: C, property: string, value: any): void; create_child_meta(actor: C): void; destroy_child_meta(actor: C): void; find_child_by_name(child_name: string): C; get_child_meta(actor: C): Clutter.ChildMeta; get_children(): C[]; get_children(...args: never[]): never; lower_child(actor: C, sibling?: C | 
null): void; raise_child(actor: C, sibling?: C | null): void; remove_actor(actor: C): void; sort_depth_order(): void; vfunc_actor_added(actor: C): void; vfunc_actor_removed(actor: C): void; vfunc_add(actor: C): void; vfunc_child_notify(child: C, pspec: GObject.ParamSpec): void; vfunc_create_child_meta(actor: C): void; vfunc_destroy_child_meta(actor: C): void; vfunc_get_child_meta(actor: C): Clutter.ChildMeta; vfunc_lower(actor: C, sibling?: C | null): void; vfunc_raise(actor: C, sibling?: C | null): void; vfunc_remove(actor: C): void; vfunc_sort_depth_order(): void; get_id(): string; parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; set_custom_property(script: Clutter.Script, name: string, value: any): void; set_id(id_: string): void; vfunc_get_id(): string; vfunc_parse_custom_node(script: Clutter.Script, value: any, name: string, node: Json.Node): boolean; vfunc_set_custom_property(script: Clutter.Script, name: string, value: any): void; vfunc_set_id(id_: string): void; } export module WidgetAccessible { export interface ConstructorProperties extends Cally.Actor.ConstructorProperties { [key: string]: any; } } export class WidgetAccessible extends Cally.Actor implements Atk.Action, Atk.Component { static $gtype: GObject.GType<WidgetAccessible>; constructor(properties?: Partial<WidgetAccessible.ConstructorProperties>, ...args: any[]); _init(properties?: Partial<WidgetAccessible.ConstructorProperties>, ...args: any[]): void; // Implemented Members do_action(i: number): boolean; get_description(i: number): string | null; get_description(...args: never[]): never; get_keybinding(i: number): string | null; get_localized_name(i: number): string | null; get_n_actions(): number; get_name(i: number): string | null; get_name(...args: never[]): never; set_description(i: number, desc: string): boolean; set_description(...args: never[]): never; vfunc_do_action(i: number): boolean; vfunc_get_description(i: number): string | null; 
vfunc_get_description(...args: never[]): never; vfunc_get_keybinding(i: number): string | null; vfunc_get_localized_name(i: number): string | null; vfunc_get_n_actions(): number; vfunc_get_name(i: number): string | null; vfunc_get_name(...args: never[]): never; vfunc_set_description(i: number, desc: string): boolean; vfunc_set_description(...args: never[]): never; contains(x: number, y: number, coord_type: Atk.CoordType): boolean; get_alpha(): number; get_extents(coord_type: Atk.CoordType): [number | null, number | null, number | null, number | null]; get_layer(): Atk.Layer; get_mdi_zorder(): number; get_position(coord_type: Atk.CoordType): [number | null, number | null]; get_size(): [number | null, number | null]; grab_focus(): boolean; ref_accessible_at_point(x: number, y: number, coord_type: Atk.CoordType): Atk.Object | null; remove_focus_handler(handler_id: number): void; scroll_to(type: Atk.ScrollType): boolean; scroll_to_point(coords: Atk.CoordType, x: number, y: number): boolean; set_extents(x: number, y: number, width: number, height: number, coord_type: Atk.CoordType): boolean; set_position(x: number, y: number, coord_type: Atk.CoordType): boolean; set_size(width: number, height: number): boolean; vfunc_bounds_changed(bounds: Atk.Rectangle): void; vfunc_contains(x: number, y: number, coord_type: Atk.CoordType): boolean; vfunc_get_alpha(): number; vfunc_get_extents(coord_type: Atk.CoordType): [number | null, number | null, number | null, number | null]; vfunc_get_layer(): Atk.Layer; vfunc_get_mdi_zorder(): number; vfunc_get_position(coord_type: Atk.CoordType): [number | null, number | null]; vfunc_get_size(): [number | null, number | null]; vfunc_grab_focus(): boolean; vfunc_ref_accessible_at_point(x: number, y: number, coord_type: Atk.CoordType): Atk.Object | null; vfunc_remove_focus_handler(handler_id: number): void; vfunc_scroll_to(type: Atk.ScrollType): boolean; vfunc_scroll_to_point(coords: Atk.CoordType, x: number, y: number): boolean; 
vfunc_set_extents(x: number, y: number, width: number, height: number, coord_type: Atk.CoordType): boolean; vfunc_set_position(x: number, y: number, coord_type: Atk.CoordType): boolean; vfunc_set_size(width: number, height: number): boolean; } export class BoxLayoutPrivate { static $gtype: GObject.GType<BoxLayoutPrivate>; constructor(copy: BoxLayoutPrivate); } export class FocusManagerPrivate { static $gtype: GObject.GType<FocusManagerPrivate>; constructor(copy: FocusManagerPrivate); } export class GenericAccessiblePrivate { static $gtype: GObject.GType<GenericAccessiblePrivate>; constructor(copy: GenericAccessiblePrivate); } export class IconColors { static $gtype: GObject.GType<IconColors>; constructor(); constructor( properties?: Partial<{ ref_count?: number; foreground?: Clutter.Color; warning?: Clutter.Color; error?: Clutter.Color; success?: Clutter.Color; }> ); constructor(copy: IconColors); // Fields ref_count: number; foreground: Clutter.Color; warning: Clutter.Color; error: Clutter.Color; success: Clutter.Color; // Constructors static ["new"](): IconColors; // Members copy(): IconColors; equal(other: IconColors): boolean; ref(): IconColors; unref(): void; } export class IconPrivate { static $gtype: GObject.GType<IconPrivate>; constructor(copy: IconPrivate); } export class LabelPrivate { static $gtype: GObject.GType<LabelPrivate>; constructor(copy: LabelPrivate); } export class ScrollViewPrivate { static $gtype: GObject.GType<ScrollViewPrivate>; constructor(copy: ScrollViewPrivate); } export class Shadow { static $gtype: GObject.GType<Shadow>; constructor(color: Clutter.Color, xoffset: number, yoffset: number, blur: number, spread: number, inset: boolean); constructor( properties?: Partial<{ color?: Clutter.Color; xoffset?: number; yoffset?: number; blur?: number; spread?: number; inset?: boolean; ref_count?: number; }> ); constructor(copy: Shadow); // Fields color: Clutter.Color; xoffset: number; yoffset: number; blur: number; spread: number; inset: 
boolean; ref_count: number; // Constructors static ["new"]( color: Clutter.Color, xoffset: number, yoffset: number, blur: number, spread: number, inset: boolean ): Shadow; // Members equal(other: Shadow): boolean; get_box(actor_box: Clutter.ActorBox, shadow_box: Clutter.ActorBox): void; ref(): Shadow; unref(): void; } export class ShadowHelper { static $gtype: GObject.GType<ShadowHelper>; constructor(shadow: Shadow); constructor(copy: ShadowHelper); // Constructors static ["new"](shadow: Shadow): ShadowHelper; // Members copy(): ShadowHelper; free(): void; paint(framebuffer: Cogl.Framebuffer, actor_box: Clutter.ActorBox, paint_opacity: number): void; update(source: Clutter.Actor): void; } export class TextureCachePrivate { static $gtype: GObject.GType<TextureCachePrivate>; constructor(copy: TextureCachePrivate); } export class ThemeNodePaintState { static $gtype: GObject.GType<ThemeNodePaintState>; constructor(copy: ThemeNodePaintState); // Fields node: ThemeNode; alloc_width: number; alloc_height: number; box_shadow_width: number; box_shadow_height: number; resource_scale: number; box_shadow_pipeline: Cogl.Pipeline; prerendered_texture: Cogl.Pipeline; prerendered_pipeline: Cogl.Pipeline; corner_material: Cogl.Pipeline[]; // Members copy(other: ThemeNodePaintState): void; free(): void; init(): void; invalidate(): void; invalidate_for_file(file: Gio.File): boolean; set_node(node: ThemeNode): void; } export class WidgetAccessiblePrivate { static $gtype: GObject.GType<WidgetAccessiblePrivate>; constructor(copy: WidgetAccessiblePrivate); } export interface ScrollableNamespace { $gtype: GObject.GType<Scrollable>; prototype: ScrollablePrototype; } export type Scrollable = ScrollablePrototype; export interface ScrollablePrototype extends GObject.Object { // Properties hadjustment: Adjustment; vadjustment: Adjustment; // Members get_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; set_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; 
vfunc_get_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; vfunc_set_adjustments(hadjustment: Adjustment, vadjustment: Adjustment): void; } export const Scrollable: ScrollableNamespace;
the_stack
import StrictEventEmitter from "strict-event-emitter-types"; import { EventEmitter } from "events"; import fillInPrediction from "./imageRecognition/tensorflow"; import SudokuSolver from "./solver/sudokuSolver"; import getLargestConnectedComponent, { Point, } from "./imageProcessing/getLargestConnectedComponent"; import findHomographicTransform, { Transform, transformPoint, } from "./imageProcessing/findHomographicTransform"; import captureImage from "./imageProcessing/captureImage"; import adaptiveThreshold from "./imageProcessing/adaptiveThreshold"; import getCornerPoints from "./imageProcessing/getCornerPoints"; import extractSquareFromRegion from "./imageProcessing/applyHomographicTransform"; import extractBoxes from "./imageProcessing/extractBoxes"; // minimum number of boxes we want before trying to solve the puzzle const MIN_BOXES = 15; // size of image to use for processing const PROCESSING_SIZE = 900; export type VideoReadyPayload = { width: number; height: number }; interface ProcessorEvents { videoReady: VideoReadyPayload; } type ProcessorEventEmitter = StrictEventEmitter<EventEmitter, ProcessorEvents>; type SolvedBox = { // was this a known digit? isKnown: boolean; // the digit for this box digit: number; // a guess at how tall it should be drawn digitHeight: number; // a guess at the rotation to draw it at digitRotation: number; // where to draw it position: Point; }; export default class Processor extends (EventEmitter as { new (): ProcessorEventEmitter; }) { // the source for our video video: HTMLVideoElement; // is the video actually running? isVideoRunning: boolean = false; // are we in the middle of processing a frame? 
isProcessing: boolean = false; // the detected corners of the puzzle in video space corners: { topLeft: Point; topRight: Point; bottomLeft: Point; bottomRight: Point; }; // the calculated grid lines in the video space gridLines: { p1: Point; p2: Point }[]; // completely solved puzzle solvedPuzzle: SolvedBox[][]; // performance stats captureTime: number = 0; thresholdTime: number = 0; connectedComponentTime: number = 0; cornerPointTime: number = 0; extractPuzzleTime: number = 0; extractBoxesTime: number = 0; neuralNetTime: number = 0; solveTime: number = 0; /** * Start streaming video from the back camera of a phone (or webcam on a computer) * @param video A video element - needs to be on the page for iOS to work */ async startVideo(video: HTMLVideoElement) { this.video = video; // start up the video feed const stream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: "environment", width: 640 }, audio: false, }); // grab the video dimensions once it has started up const canPlayListener = () => { this.video.removeEventListener("canplay", canPlayListener); this.emit("videoReady", { width: this.video.videoWidth, height: this.video.videoHeight, }); this.isVideoRunning = true; // start processing this.processFrame(); }; this.video.addEventListener("canplay", canPlayListener); this.video.srcObject = stream; this.video.play(); } /** * Creates a set of grid lines mapped onto video space * @param transform The homographic transform to video space */ createGridLines(transform: Transform) { const boxSize = PROCESSING_SIZE / 9; const gridLines = []; for (let l = 1; l < 9; l++) { // horizonal line gridLines.push({ p1: transformPoint({ x: 0, y: l * boxSize }, transform), p2: transformPoint({ x: PROCESSING_SIZE, y: l * boxSize }, transform), }); // vertical line gridLines.push({ p1: transformPoint({ y: 0, x: l * boxSize }, transform), p2: transformPoint({ y: PROCESSING_SIZE, x: l * boxSize }, transform), }); } return gridLines; } /** * Create a set of cells with 
coordinates in video space for drawing digits * @param x Cell X * @param y Cell Y * @param digit The digit * @param isKnown Is it a known digit? * @param transform The homographic transform to video space */ getTextDetailsForBox( x: number, y: number, digit: number, isKnown: boolean, transform: Transform ): SolvedBox { const boxSize = PROCESSING_SIZE / 9; // work out the line that runs vertically through the box in the original image space const p1 = transformPoint( { x: (x + 0.5) * boxSize, y: y * boxSize }, transform ); const p2 = transformPoint( { x: (x + 0.5) * boxSize, y: (y + 1) * boxSize }, transform ); // the center of the box const textPosition = transformPoint( { x: (x + 0.5) * boxSize, y: (y + 0.5) * boxSize }, transform ); // approximate angle of the text in the box const dx = p1.x - p2.x; const dy = p1.y - p2.y; const digitRotation = Math.atan2(dx, dy); // appriximate height of the text in the box const digitHeight = 0.8 * Math.sqrt(dx * dx + dy * dy); return { digit, digitHeight, digitRotation, isKnown: isKnown, position: textPosition, }; } /** * Map from the found solution to something that can be displayed in video space * @param solver The solver with the solution * @param transform The transform to video space */ createSolvedPuzzle(solver: SudokuSolver, transform: Transform) { const results: SolvedBox[][] = new Array(9); for (let y = 0; y < 9; y++) { results[y] = new Array(9); } solver.solution.forEach((sol) => { const { x, y, entry, isKnown } = sol.guess; results[y][x] = this.getTextDetailsForBox( x, y, entry, isKnown, transform ); }); return results; } sanityCheckCorners({ topLeft, topRight, bottomLeft, bottomRight, }: { topLeft: Point; topRight: Point; bottomLeft: Point; bottomRight: Point; }) { function length(p1: Point, p2: Point) { const dx = p1.x - p2.x; const dy = p1.y - p2.y; return Math.sqrt(dx * dx + dy * dy); } const topLineLength = length(topLeft, topRight); const leftLineLength = length(topLeft, bottomLeft); const rightLineLength = 
length(topRight, bottomRight); const bottomLineLength = length(bottomLeft, bottomRight); if ( topLineLength < 0.5 * bottomLineLength || topLineLength > 1.5 * bottomLineLength ) return false; if ( leftLineLength < 0.7 * rightLineLength || leftLineLength > 1.3 * rightLineLength ) return false; if ( leftLineLength < 0.5 * bottomLineLength || leftLineLength > 1.5 * bottomLineLength ) return false; return true; } /** * Process a frame of video */ async processFrame() { if (!this.isVideoRunning) { // no video stream so give up immediately return; } if (this.isProcessing) { // we're already processing a frame. Don't kill the computer! return; } try { // grab an image from the video camera let startTime = performance.now(); const image = captureImage(this.video); this.captureTime = 0.1 * (performance.now() - startTime) + this.captureTime * 0.9; // apply adaptive thresholding to the image startTime = performance.now(); const thresholded = adaptiveThreshold(image.clone(), 20, 20); this.thresholdTime = 0.1 * (performance.now() - startTime) + this.thresholdTime * 0.9; // extract the most likely candidate connected region from the image startTime = performance.now(); const largestConnectedComponent = getLargestConnectedComponent( thresholded, { minAspectRatio: 0.5, maxAspectRatio: 1.5, minSize: Math.min(this.video.videoWidth, this.video.videoHeight) * 0.3, maxSize: Math.min(this.video.videoWidth, this.video.videoHeight) * 0.9, } ); this.connectedComponentTime = 0.1 * (performance.now() - startTime) + this.connectedComponentTime * 0.9; // if we actually found something if (largestConnectedComponent) { // make a guess at where the corner points are using manhattan distance startTime = performance.now(); const potentialCorners = getCornerPoints(largestConnectedComponent); this.cornerPointTime = 0.1 * (performance.now() - startTime) + this.cornerPointTime * 0.9; if (this.sanityCheckCorners(potentialCorners)) { this.corners = potentialCorners; // compute the transform to go from a 
square puzzle of size PROCESSING_SIZE to the detected corner points startTime = performance.now(); const transform = findHomographicTransform( PROCESSING_SIZE, this.corners ); // we've got the transform so we can show where the gridlines are this.gridLines = this.createGridLines(transform); // extract the square puzzle from the original grey image const extractedImageGreyScale = extractSquareFromRegion( image, PROCESSING_SIZE, transform ); // extract the square puzzle from the thresholded image - we'll use the thresholded image for determining where the digits are in the cells const extractedImageThresholded = extractSquareFromRegion( thresholded, PROCESSING_SIZE, transform ); this.extractPuzzleTime = 0.1 * (performance.now() - startTime) + this.extractPuzzleTime * 0.9; // extract the boxes that should contain the numbers startTime = performance.now(); const boxes = extractBoxes( extractedImageGreyScale, extractedImageThresholded ); this.extractBoxesTime = 0.1 * (performance.now() - startTime) + this.extractBoxesTime * 0.9; // did we find sufficient boxes for a potentially valid sudoku puzzle? 
if (boxes.length > MIN_BOXES) { // apply the neural network to the found boxes and work out what the digits are startTime = performance.now(); await fillInPrediction(boxes); this.neuralNetTime = 0.1 * (performance.now() - startTime) + this.neuralNetTime * 0.9; // solve the suoku puzzle using the dancing links and algorithm X - https://en.wikipedia.org/wiki/Knuth%27s_Algorithm_X startTime = performance.now(); const solver = new SudokuSolver(); // set the known values boxes.forEach((box) => { if (box.contents !== 0) { solver.setNumber(box.x, box.y, box.contents - 1); } }); // search for a solution if (solver.search(0)) { this.solvedPuzzle = this.createSolvedPuzzle(solver, transform); } else { this.solvedPuzzle = null; } this.solveTime = 0.1 * (performance.now() - startTime) + this.solveTime * 0.9; } } else { this.corners = null; this.gridLines = null; this.solvedPuzzle = null; } } else { this.corners = null; this.gridLines = null; this.solvedPuzzle = null; } } catch (error) { console.error(error); } this.isProcessing = false; // process again setTimeout(() => this.processFrame(), 20); } }
the_stack
import type { Body } from 'p2-es' import { Ray, RaycastResult, vec2 } from 'p2-es' import type { Duplet } from './' import type KinematicCharacterController from './KinematicCharacterController' import type { RaycastControllerOptns } from './RaycastController' import RaycastController from './RaycastController' interface BodyWithUuid extends Body { uuid: string } // math helpers function sign(x: number) { return x >= 0 ? 1 : -1 } function clamp(value: number, min: number, max: number) { return Math.min(max, Math.max(min, value)) } // constants const ZERO = vec2.create() interface PlatformControllerOptns extends RaycastControllerOptns { controllers: { [uuid: string]: { controller: KinematicCharacterController } } dstBetweenRays?: number localWaypoints: Duplet[] passengerMask: number skinWidth?: number speed?: number } export default class PlatformController extends RaycastController { cyclic: boolean //[Range(0,2)] easeAmount: number fromWaypointIndex: number globalWaypoints: Duplet[] localWaypoints: Duplet[] nextMoveTime: number passengerDictionary: { [key: string]: KinematicCharacterController } passengerMask: number passengerMovement: PassengerMovement[] percentBetweenWaypoints: number ray: Ray raycastResult: RaycastResult raysData: [from: Duplet, to: Duplet, hitDistance?: number][] speed: number time: number waitTime: number constructor(options: PlatformControllerOptns) { super(options) this.passengerMask = options.passengerMask || -1 this.localWaypoints = options.localWaypoints this.globalWaypoints = [] this.speed = options.speed || 5 this.cyclic = false this.waitTime = 0 // Range(0,2) this.easeAmount = 0 this.fromWaypointIndex = 0 this.percentBetweenWaypoints = 0 this.nextMoveTime = 0 this.passengerMovement = [] this.passengerDictionary = {} this.time = 0 this.ray = new Ray({ from: [0, 0], mode: Ray.CLOSEST, skipBackfaces: true, to: [0, -1], }) this.raycastResult = new RaycastResult() this.raysData = [] this.globalWaypoints = new 
Array(this.localWaypoints.length) for (let i = 0; i < this.localWaypoints.length; i++) { const temp = vec2.create() vec2.add(temp, this.localWaypoints[i], this.body.position) this.globalWaypoints[i] = temp } Object.values(options.controllers).map((c) => { const body = c.controller.body as unknown as BodyWithUuid if (c.controller.constructor.name === 'KinematicCharacterController') this.passengerDictionary[body.uuid] = c.controller }) this.world.on('postStep', () => this.update(1 / 60)) } calculatePassengerMovement(velocity: Duplet) { const movedPassengers = new Set() this.passengerMovement = [] const directionX = sign(velocity[0]) const directionY = sign(velocity[1]) // Vertically moving platform if (velocity[1] !== 0) { const rayLength = Math.abs(velocity[1]) + this.skinWidth for (let i = 0; i < this.verticalRayCount; i++) { const ray = this.ray ray.collisionMask = this.passengerMask vec2.copy(ray.from, directionY === -1 ? this.raycastOrigins.bottomLeft : this.raycastOrigins.topLeft) vec2.set(ray.from, ray.from[0] + this.verticalRaySpacing * i, ray.from[1]) vec2.set(ray.to, ray.from[0], ray.from[1] + directionY * rayLength) ray.update() this.world.raycast(this.raycastResult, ray) this.raysData[i] = [[...ray.from], [...ray.to], undefined] if (this.raycastResult.body) { const distance = this.raycastResult.getHitDistance(ray) if (distance === 0) continue const body = this.raycastResult.body as BodyWithUuid if (!movedPassengers.has(body.uuid)) { movedPassengers.add(body.uuid) const pushX = directionY === 1 ? 
velocity[0] : 0 const pushY = velocity[1] - (distance - this.skinWidth) * directionY this.passengerMovement.push( new PassengerMovement({ moveBeforePlatform: true, standingOnPlatform: directionY === 1, uuid: body.uuid, velocity: vec2.fromValues(pushX, pushY), }), ) } } this.raycastResult.reset() } } // Horizontally moving platform if (velocity[0] !== 0) { const rayLength = Math.abs(velocity[0]) + this.skinWidth for (let i = 0; i < this.horizontalRayCount; i++) { const ray = this.ray ray.collisionMask = this.passengerMask vec2.copy( ray.from, directionX === -1 ? this.raycastOrigins.bottomLeft : this.raycastOrigins.bottomRight, ) ray.from[1] += this.horizontalRaySpacing * i vec2.copy(ray.to, ray.from) ray.to[0] += directionX * rayLength ray.update() this.world.raycast(this.raycastResult, ray) this.raysData[this.verticalRayCount + i] = [[...ray.from], [...ray.to], undefined] if (this.raycastResult.body) { const body = this.raycastResult.body as BodyWithUuid const distance = this.raycastResult.getHitDistance(ray) if (distance === 0) { continue } if (!movedPassengers.has(body.uuid)) { movedPassengers.add(body.uuid) const pushX = velocity[0] - (distance - this.skinWidth) * directionX const pushY = -this.skinWidth this.passengerMovement.push( new PassengerMovement({ moveBeforePlatform: true, standingOnPlatform: false, uuid: body.uuid, velocity: vec2.fromValues(pushX, pushY), }), ) } } this.raycastResult.reset() } } // Passenger on top of a horizontally or downward moving platform if (directionY === -1 || (velocity[1] === 0 && velocity[0] !== 0)) { const rayLength = this.skinWidth * 2 for (let i = 0; i < this.verticalRayCount; i++) { const ray = this.ray ray.collisionMask = this.passengerMask vec2.set( ray.from, this.raycastOrigins.topLeft[0] + this.verticalRaySpacing * i, this.raycastOrigins.topLeft[1], ) vec2.set(ray.to, ray.from[0], ray.from[1] + rayLength) ray.update() this.world.raycast(this.raycastResult, ray) this.raysData[this.verticalRayCount + 
this.horizontalRayCount + i] = [ [...ray.from], [...ray.to], undefined, ] if (this.raycastResult.body) { const distance = this.raycastResult.getHitDistance(ray) if (distance === 0) { continue } const body = this.raycastResult.body as BodyWithUuid if (!movedPassengers.has(body.uuid)) { movedPassengers.add(body.uuid) const pushX = velocity[0] const pushY = velocity[1] this.passengerMovement.push( new PassengerMovement({ moveBeforePlatform: false, standingOnPlatform: true, uuid: body.uuid, velocity: vec2.fromValues(pushX, pushY), }), ) } } this.raycastResult.reset() } } } calculatePlatformMovement(deltaTime: number): Duplet { if (this.time < this.nextMoveTime) { return ZERO } const { globalWaypoints, speed } = this this.fromWaypointIndex %= globalWaypoints.length const toWaypointIndex = (this.fromWaypointIndex + 1) % globalWaypoints.length const distanceBetweenWaypoints = vec2.distance( globalWaypoints[this.fromWaypointIndex], globalWaypoints[toWaypointIndex], ) this.percentBetweenWaypoints += (deltaTime * speed) / distanceBetweenWaypoints this.percentBetweenWaypoints = clamp(this.percentBetweenWaypoints, 0, 1) const easedPercentBetweenWaypoints = this.ease(this.percentBetweenWaypoints) const newPos = vec2.create() vec2.lerp( newPos, globalWaypoints[this.fromWaypointIndex], globalWaypoints[toWaypointIndex], easedPercentBetweenWaypoints, ) if (this.percentBetweenWaypoints >= 1) { this.percentBetweenWaypoints = 0 this.fromWaypointIndex++ if (!this.cyclic) { if (this.fromWaypointIndex >= globalWaypoints.length - 1) { this.fromWaypointIndex = 0 globalWaypoints.reverse() } } this.nextMoveTime = this.time + this.waitTime } const result = vec2.create() vec2.subtract(result, newPos, this.body.position) return result } ease(x: number) { const a = this.easeAmount + 1 return Math.pow(x, a) / (Math.pow(x, a) + Math.pow(1 - x, a)) } movePassengers(beforeMovePlatform: boolean) { this.passengerMovement.map((passenger) => { if (!(passenger.uuid in this.passengerDictionary)) { return 
console.error('passenger uuid not in passengerDictionary') } if (passenger.moveBeforePlatform === beforeMovePlatform) { this.passengerDictionary[passenger.uuid].moveWithZeroInput( passenger.velocity, passenger.standingOnPlatform, ) } }) } update(deltaTime: number) { this.time += deltaTime super.updateRaycastOrigins() const velocity = this.calculatePlatformMovement(deltaTime) this.updateRaycastOrigins() this.calculatePassengerMovement(velocity) this.movePassengers(true) vec2.set(this.body.position, this.body.position[0] + velocity[0], this.body.position[1] + velocity[1]) this.movePassengers(false) } } type PassengerMovementOptns = { moveBeforePlatform: boolean standingOnPlatform: boolean uuid: string velocity: Duplet } class PassengerMovement { moveBeforePlatform: boolean standingOnPlatform: boolean uuid: string velocity: Duplet constructor(options: PassengerMovementOptns) { this.velocity = options.velocity || [0, 0] this.standingOnPlatform = options.standingOnPlatform || false this.moveBeforePlatform = options.moveBeforePlatform || false this.uuid = options.uuid || '' } }
the_stack
declare module 'nact' { // Time export type Milliseconds = number; export const hour: Milliseconds; export const hours: Milliseconds; export const message: Milliseconds; export const messages: Milliseconds; export const millisecond: Milliseconds; export const milliseconds: Milliseconds; export const minute: Milliseconds; export const minutes: Milliseconds; export const second: Milliseconds; export const seconds: Milliseconds; // References export abstract class Ref<T> { } export type ActorSystemRef = Ref<never>; export class Nobody extends Ref<any> { constructor(); } // Actor Path export abstract class ActorPath { parts: [string, ...string[]]; system: string; toString(): string; } // Actor Facing Logger export interface Logger { trace(message: string): void; debug(message: string): void; info(message: string): void; warn(message: string): void; critical(message: string): void; error(message: string): void; event(name: string, eventProperties: any): void; exception(exception: Error): void; metric(name: string, values: any): void; } export type ActorName = string; export type ActorSystemName = string; // Contexts export type ActorContext<Msg, ParentRef extends Ref<any>> = { parent: ParentRef, path: ActorPath, self: Ref<Msg>, name: ActorName, children: Map<ActorName, Ref<unknown>>, log: Logger }; export type PersistentActorContext<Msg, ParentRef extends Ref<any>> = ActorContext<MSGesture, ParentRef> & { persist: (msg: Msg) => Promise<void> }; export type ActorContextWithMailbox<Msg, ParentRef extends Ref<any>> = ActorContext<Msg, ParentRef> & { mailbox: Msg[] }; export type SupervisionContext<Msg, ParentRef extends Ref<any>> = ActorContextWithMailbox<Msg, ParentRef> & { stop: Symbol, stopAll: Symbol, stopChild: Symbol, stopAllChildren: Symbol, escalate: Symbol, resume: Symbol, reset: Symbol, resetAll: Symbol, resetChild: Symbol, resetAllChildren: Symbol, mailbox: Msg[] }; // Actor Functions export type ActorFunc<State, Msg, ParentRef extends Ref<any>> = (state: State, 
msg: Msg, ctx: ActorContext<Msg, ParentRef>) => State | Promise<State>; export type StatelessActorFunc<Msg, ParentRef extends Ref<any>> = (msg: Msg, ctx: ActorContext<Msg, ParentRef>) => void | Promise<void>; export type PersistentActorFunc<State, Msg, ParentRef extends Ref<any>> = (state: State, msg: Msg, ctx: ActorContext<Msg, ParentRef>) => State | Promise<State>; export type SupervisionActorFunc<Msg, ParentRef extends Ref<any>, ChildRef extends Ref<any>> = (msg: Msg | undefined, err: Error | undefined, ctx: SupervisionContext<Msg, ParentRef>, child: ChildRef | undefined) => Symbol | Promise<Symbol>; export type PersistentQueryFunc<State, Msg> = (state: State, msg: Msg) => State | Promise<State>; // Actor configuration export type ActorProps<State, Msg, ParentRef extends Ref<any>> = { shutdownAfter?: Milliseconds, onCrash?: SupervisionActorFunc<Msg, ParentRef, Ref<any>>, initialState?: State, initialStateFunc?: (ctx: ActorContext<Msg, ParentRef>) => State, afterStop?: (state: State, ctx: ActorContextWithMailbox<Msg, ParentRef>) => void | Promise<void> }; export type StatelessActorProps<Msg, ParentRef extends Ref<any>> = Omit<ActorProps<any, Msg, ParentRef>, 'initialState' | 'initialStateFunc' | 'afterStop'>; export type NumberOfMessages = number; export type Json = unknown; export type PersistentActorProps<State, Msg, ParentRef extends Ref<any>> = ActorProps<State, Msg, ParentRef> & { snapshotEvery?: NumberOfMessages, snapshotEncoder?: (state: State) => Json, snapshotDecoder?: (state: Json) => State encoder?: (msg: Msg) => Json, decoder?: (msg: Json) => Msg }; export type PersistentQueryProps<State, Msg> = { snapshotKey?: string, snapshotEvery?: NumberOfMessages, cacheDuration?: Milliseconds, snapshotEncoder?: (state: State) => Json, snapshotDecoder?: (state: Json) => State, encoder?: (msg: Msg) => Json, decoder?: (msg: Json) => Msg, initialState?: State }; // Type helpers type InferMsgFromFunc<T extends ActorFunc<any, any, any>> = T extends ActorFunc<any, infer 
Msg, any> ? Msg : never; type InferStateFromFunc<T extends ActorFunc<any, any, any>> = T extends ActorFunc<infer State, any, any> ? State : never; type InferMsgFromPersistentFunc<T extends PersistentActorFunc<any, any, any>> = T extends PersistentActorFunc<any, infer Msg, any> ? Msg : never; type InferStateFromPersistentFunc<T extends PersistentActorFunc<any, any, any>> = T extends PersistentActorFunc<infer State, any, any> ? State : never; type InferMsgFromStatelessFunc<T extends StatelessActorFunc<any, any>> = T extends StatelessActorFunc<infer Msg, any> ? Msg : never; type InferMsgFromPersistentQuery<T extends PersistentQueryFunc<any, any>> = T extends PersistentQueryFunc<any, infer Msg> ? Msg : never; type InferStateFromPersistentQuery<T extends PersistentQueryFunc<any, any>> = T extends PersistentQueryFunc<infer State, any> ? State : never; // Main actor functions export function spawn<ParentRef extends Ref<any>, Func extends ActorFunc<any, any, ParentRef>>( parent: ParentRef, f: Func, name?: string, properties?: ActorProps<InferStateFromFunc<Func>, InferMsgFromFunc<Func>, ParentRef> ): Ref<InferMsgFromFunc<Func>>; export function spawnPersistent<ParentRef extends Ref<any>, Func extends PersistentActorFunc<any, any, ParentRef>>( parent: ParentRef, f: Func, key: string, name?: string, properties?: PersistentActorProps<InferStateFromPersistentFunc<Func>, InferMsgFromPersistentFunc<Func>, ParentRef> ): Ref<InferMsgFromPersistentFunc<Func>>; export function spawnStateless<ParentRef extends Ref<any>, Func extends StatelessActorFunc<any, ParentRef>>( parent: ParentRef, f: Func, name?: any, properties?: StatelessActorProps<InferMsgFromStatelessFunc<Func>, ParentRef> ): Ref<InferMsgFromStatelessFunc<Func>>; export function stop(actor: Ref<any>): void; /** Note: Sender when using dispatch has been intentionally omitted from the typescript bindings. * Sender simply cannot be strongly typed. A safer alternative is to include the sender * as part of the message protocol. 
For example: * ``` * dispatch(pizzaActor, { sender: deliveryActor, order: ['ONE_LARGE_PEPPERONI'] }); * ``` */ export function dispatch<T>(actor: Ref<T>, msg: T): void; export type QueryMsgFactory<Req, Res> = (tempRef: Ref<Res>) => Req; export type InferResponseFromMsgFactory<T extends QueryMsgFactory<any, any>> = T extends QueryMsgFactory<any, infer Res> ? Res : never; export function query<Msg, MsgCreator extends QueryMsgFactory<Msg, any>>(actor: Ref<Msg>, queryFactory: MsgCreator, timeout: Milliseconds): Promise<InferResponseFromMsgFactory<MsgCreator>>; export function persistentQuery<Func extends PersistentQueryFunc<any, any>>( parent: Ref<any>, f: Func, key: string, properties?: PersistentQueryProps<InferStateFromPersistentQuery<Func>, InferMsgFromPersistentQuery<Func>> ): () => Promise<InferStateFromPersistentFunc<Func>>; export abstract class ActorSystem { } export type Plugin = (system: ActorSystem) => void; export function start(fst?: Plugin | ActorSystemName, ...args: Plugin[]): ActorSystemRef; // Logging export type LogLevel = | 0 // OFF | 1 // TRACE | 2 // DEBUG | 3 // INFO | 4 // WARN | 5 // ERROR | 6 // CRITICAL ; export type LogTrace = { type: 'trace', level: LogLevel, actor: Ref<unknown>, message: string, createdAt: Date }; export type LogEvent = { type: 'event', name: string; properties: Json, actor: Ref<unknown>, createdAt: Date }; export type LogMetric = { type: 'metric', name: string; values: Json, actor: Ref<unknown>, createdAt: Date }; export type LogException = { type: 'exception', exception: Error | undefined, actor: Ref<unknown>; createdAt: Date; } export type LogMsg = | LogTrace | LogEvent | LogMetric | LogException; export type LoggingEngine = (systemRef: Ref<never>) => Ref<LogMsg>; export function logNothing(): LoggingEngine; export type ConsoleLoggerProperties = { consoleProxy?: Console, formatter?: (msg: LogMsg) => string, name?: string // actor name under system actor }; export function logToConsole(properties?: 
ConsoleLoggerProperties): LoggingEngine; export function configureLogging(engine: LoggingEngine): Plugin; // Persistence export type PersistedSnapshot = { data: Json, sequenceNumber: number, key: string, createdAt: Milliseconds }; export type PersistedEvent = { data: Json, sequenceNumber: number, key: string, createdAt: Milliseconds, isDeleted: boolean, tags: string[] // sorted alphabetically }; export type EventStream = PromiseLike<Event[]> & { /** * Calls the specified callback function for all the elements in an array. The return value of the callback function is the accumulated result, and is provided as an argument in the next call to the callback function. * @param callbackfn A function that accepts up to four arguments. The reduce method calls the callbackfn function one time for each element in the array. * @param initialValue If initialValue is specified, it is used as the initial value to start the accumulation. The first call to the callbackfn function provides this value as an argument instead of an array value. */ reduce<U>(callbackfn: (previousValue: U, currentValue: PersistedEvent, currentIndex: number) => U, initialValue: U): Promise<U>; }; export interface PersistenceEngine { events(persistenceKey: string, offset: number, limit: number, tags: string[]): EventStream; latestSnapshot(persistenceKey: string): Promise<PersistedSnapshot>; takeSnapshot(persistedSnapshot: PersistedSnapshot): Promise<void>; persist(persistedEvent: PersistedEvent): Promise<void>; } export function configurePersistence(engine: PersistenceEngine): Plugin; } // declare module 'nact/monad' { // export abstract class Effect<> { }; // export function* start(program: funct): void; // }
the_stack
export interface FTCreateParameters {
    /**
     * The expression of the 'FILTER' parameter. is a filter expression with the full RediSearch aggregation expression language.
     */
    filter?: string,
    /**
     * The field of the 'PAYLOAD' parameter. If set indicates the document field that should be used as a binary safe payload string to the document, that can be evaluated at query time by a custom scoring function, or retrieved to the client.
     */
    payloadField?: string,
    /**
     * The 'MAXTEXTFIELDS' parameter. For efficiency, RediSearch encodes indexes differently if they are created with less than 32 text fields.
     */
    maxTextFields?: number,
    /**
     * The 'NOFFSETS' parameter. If set, we do not store term offsets for documents (saves memory, does not allow exact searches or highlighting).
     */
    noOffsets?: boolean,
    /**
     * The 'TEMPORARY' parameter. Create a lightweight temporary index which will expire after the specified period of inactivity.
     */
    temporary?: number,
    /**
     * The 'NOHL' parameter. Conserves storage space and memory by disabling highlighting support. If set, we do not store corresponding byte offsets for term positions.
     */
    nohl?: boolean,
    /**
     * The 'NOFIELDS' parameter. If set, we do not store field bits for each term.
     */
    noFields?: boolean,
    /**
     * The 'NOFREQS' parameter. If set, we avoid saving the term frequencies in the index.
     */
    noFreqs?: boolean,
    /**
     * The 'SKIPINITIALSCAN' parameter. If set, we do not scan and index.
     */
    skipInitialScan?: boolean
    /**
     * The 'PREFIX' parameter. tells the index which keys it should index.
     */
    prefix?: {
        num?: number,
        prefixes: string | string[],
    },
    /**
     * The 'LANGUAGE' parameter. If set indicates the default language for documents in the index.
     */
    language?: string,
    /**
     * The 'LANGUAGE_FIELD' parameter. If set indicates the document field that should be used as the document language.
     */
    languageField?: string,
    /**
     * The 'SCORE' parameter. If set indicates the default score for documents in the index.
     */
    score?: string,
    /**
     * The 'SCORE_FIELD' parameter. If set indicates the document field that should be used as the document's rank based on the user's ranking.
     */
    scoreField?: string
    /**
     * The 'STOPWORDS' parameter. If set, we set the index with a custom stopword list, to be ignored during indexing and search time.
     */
    stopwords?: {
        num?: number,
        stopwords: string | string[],
    },
}

/**
 * The field parameter
 */
export interface FTFieldOptions {
    /**
     * The 'SORTABLE' parameter. Numeric, tag or text field can have the optional SORTABLE argument that allows the user to later sort the results by the value of this field (this adds memory overhead so do not declare it on large text fields).
     */
    sortable?: boolean,
    /**
     * The 'NOINDEX' parameter. Fields can have the NOINDEX option, which means they will not be indexed. This is useful in conjunction with SORTABLE , to create fields whose update using PARTIAL will not cause full reindexing of the document. If a field has NOINDEX and doesn't have SORTABLE, it will just be ignored by the index.
     */
    noindex?: boolean,
    /**
     * The 'NOSTEM' parameter. Text fields can have the NOSTEM argument which will disable stemming when indexing its values. This may be ideal for things like proper names.
     */
    nostem?: boolean,
    /**
     * The 'PHONETIC' parameter. Declaring a text field as PHONETIC will perform phonetic matching on it in searches by default. The obligatory {matcher} argument specifies the phonetic algorithm and language used.
     */
    phonetic?: string,
    /**
     * The 'WEIGHT' parameter. For TEXT fields, declares the importance of this field when calculating result accuracy. This is a multiplication factor, and defaults to 1 if not specified.
     */
    weight?: number,
    /**
     * The 'SEPARATOR' parameter. For TAG fields, indicates how the text contained in the field is to be split into individual tags. The default is , . The value must be a single character.
     * (Note: the property name keeps the historical 'seperator' spelling for API compatibility.)
     */
    seperator?: string
    /**
     * The 'UNF' parameter. By default, SORTABLE applies a normalization to the indexed value (characters set to lowercase, removal of diacritics). When using UNF (un-normalized form) it is possible to disable the normalization and keep the original form of the value.
     */
    unf?: boolean,
    /**
     * For `TAG` attributes, keeps the original letter cases of the tags. If not specified, the characters are converted to lowercase.
     */
    caseSensitive?: boolean,
}

/**
 * The parameters of the 'FT.CREATE' command, schema fields (fields coming after the 'SCHEMA' command)
 */
export interface FTSchemaField extends FTFieldOptions {
    /**
     * The name of the field
     */
    name: string,
    /**
     * The type of the field
     */
    type: FTFieldType,
    /**
     * The 'AS' parameter. Used when creating an index on 'JSON'.
     */
    as?: string
}

/**
 * The parameter of the 'FT.SEARCH' command
 */
export interface FTSearchParameters {
    /**
     * The 'NOCONTENT' parameter. If it appears after the query, we only return the document ids and not the content.
     */
    noContent?: boolean,
    /**
     * The 'VERBATIM' parameter. if set, we do not try to use stemming for query expansion but search the query terms verbatim.
     */
    verbatim?: boolean,
    /**
     * The 'NOSTOPWORDS' parameter. If set, we do not filter stopwords from the query.
     */
    noStopWords?: boolean,
    /**
     * The 'WITHSCORES' parameter. If set, we also return the relative internal score of each document.
     */
    withScores?: boolean,
    /**
     * The 'WITHPAYLOADS' parameter. If set, we retrieve optional document payloads (see FT.ADD).
     */
    withPayloads?: boolean,
    /**
     * The 'WITHSORTKEYS' parameter. Only relevant in conjunction with SORTBY . Returns the value of the sorting key, right after the id and score and /or payload if requested.
     */
    withSortKeys?: boolean,
    /**
     * The 'FILTER' parameter. If set, and numeric_field is defined as a numeric field in FT.CREATE, we will limit results to those having numeric values ranging between min and max. min and max follow ZRANGE syntax, and can be -inf , +inf and use ( for exclusive ranges.
     */
    filter?: {
        /**
         * The numeric_field argument of the 'FILTER' parameter
         */
        field: string,
        /**
         * The min argument of the 'FILTER' parameter
         */
        min: number,
        /**
         * The max argument of the 'FILTER' parameter
         */
        max: number
    }[],
    /**
     * The 'GEOFILTER' parameter. If set, we filter the results to a given radius from lon and lat. Radius is given as a number and units.
     */
    geoFilter?: {
        /**
         * The field of the 'GEOFILTER' parameter
         */
        field: string,
        /**
         * The lon argument of the 'GEOFILTER' parameter
         */
        lon: number,
        /**
         * The lat argument of the 'GEOFILTER' parameter
         */
        lat: number,
        /**
         * The radius argument of the 'GEOFILTER' parameter
         */
        radius: number,
        /**
         * The measurement argument of the 'GEOFILTER' parameter
         */
        measurement: 'm' | 'km' | 'mi' | 'ft'
    },
    /**
     * The 'INKEYS' parameter. If set, we limit the result to a given set of keys specified in the list. the first argument must be the length of the list, and greater than zero.
     */
    inKeys?: {
        num?: number,
        keys?: string | string[],
    },
    /**
     * The 'INFIELDS' parameter. If set, filter the results to ones appearing only in specific fields of the document, like title or URL.
     */
    inFields?: {
        num?: number,
        fields?: string | string[],
    },
    /**
     * The 'RETURN' parameter. Use this keyword to limit which fields from the document are returned.
     */
    return?: {
        num?: number,
        fields: {
            /**
             * The name of the field.
             */
            field: string,
            /**
             * The 'AS' parameter following a "field" name, used by index type "JSON".
             */
            as?: string,
        }[],
    },
    /**
     * The 'SUMMARIZE' parameter. Use this option to return only the sections of the field which contain the matched text.
     */
    summarize?: {
        /**
         * The fields argument of the 'SUMMARIZE' parameter
         */
        fields?: {
            num?: number,
            fields: string | string[],
        },
        /**
         * The frags argument of the 'SUMMARIZE' parameter
         */
        frags?: number,
        /**
         * The len argument of the 'SUMMARIZE' parameter
         */
        len?: number,
        /**
         * The separator argument of the 'SUMMARIZE' parameter
         */
        seperator?: string
    },
    /**
     * The 'HIGHLIGHT' parameter. Use this option to format occurrences of matched text.
     */
    highlight?: {
        /**
         * The fields argument of the 'HIGHLIGHT' parameter
         */
        fields?: {
            num?: number,
            fields: string | string[],
        },
        /**
         * The tags argument of the 'HIGHLIGHT' parameter
         */
        tags?: {
            /**
             * The open argument of the tags argument
             */
            open: string,
            /**
             * The close argument of the tags argument
             */
            close: string
        },
    },
    /**
     * The 'SLOP' parameter. If set, we allow a maximum of N intervening number of unmatched offsets between phrase terms.
     */
    slop?: number,
    /**
     * The 'INORDER' parameter. If set, and usually used in conjunction with SLOP, we make sure the query terms appear in the same order in the document as in the query, regardless of the offsets between them.
     */
    inOrder?: boolean,
    /**
     * The 'LANGUAGE' parameter. If set, we use a stemmer for the supplied language during search for query expansion.
     */
    language?: string,
    /**
     * The 'EXPANDER' parameter. If set, we will use a custom query expander instead of the stemmer.
     */
    expander?: string,
    /**
     * The 'SCORER' parameter. If set, we will use a custom scoring function defined by the user.
     */
    scorer?: string,
    /**
     * The 'EXPLAINSCORE' parameter. If set, will return a textual description of how the scores were calculated.
     */
    explainScore?: boolean,
    /**
     * The 'PAYLOAD' parameter. Add an arbitrary, binary safe payload that will be exposed to custom scoring functions.
     */
    payload?: string,
    /**
     * The 'SORTBY' parameter. If specified, the results are ordered by the value of this field. This applies to both text and numeric fields.
     */
    sortBy?: {
        /**
         * The field argument of the 'SORTBY' parameter
         */
        field: string,
        /**
         * The sort argument of the 'SORTBY' parameter
         */
        sort: FTSort
    },
    /**
     * The 'LIMIT' parameter. If the parameters appear after the query, we limit the results to the offset and number of results given.
     */
    limit?: {
        /**
         * The first argument of the 'LIMIT' parameter
         */
        first: number,
        /**
         * The num argument of the 'LIMIT' parameter
         */
        num: number
    },
    /**
     * If to parse search results to objects or leave them in their array form
     * @default true
     */
    parseSearchQueries?: boolean
}

/**
 * The additional parameter of 'FT.AGGREGATE' command
 */
export interface FTAggregateParameters {
    /**
     * The 'LOAD' parameter.
     */
    load?: {
        /**
         * The number of arguments
         */
        nargs: string,
        /**
         * The property name
         */
        properties: string[]
    },
    /**
     * Create new fields using 'APPLY' keyword for aggregations
     */
    apply?: FTExpression[],
    /**
     * The 'GROUPBY' parameter.
     */
    groupby?: {
        /**
         * The number of arguments of the 'GROUPBY' parameter
         */
        nargs: string,
        /**
         * The property name of the 'GROUPBY' parameter
         */
        properties: string[]
    },
    /**
     * The 'REDUCE' parameter.
     */
    reduce?: FTReduce[],
    /**
     * The 'SORTBY' parameter.
     */
    sortby?: {
        /**
         * The number of arguments of the 'SORTBY' parameter
         */
        nargs: string,
        /**
         * A list of property names of the 'SORTBY' parameter
         */
        properties: FTSortByProperty[],
        /**
         * The 'MAX' argument of the 'SORTBY' parameter, limiting the number of sorted results
         */
        max: number
    },
    /**
     * Given expressions starting by the 'APPLY' keyword
     */
    expressions?: FTExpression[],
    /**
     * The 'LIMIT' parameter.
     */
    limit?: {
        /**
         * The offset of the 'LIMIT' parameter
         */
        offset: string,
        /**
         * The number of results of the 'LIMIT' parameter
         */
        numberOfResults: number
    },
    /**
     * The expression of the 'FILTER' parameter.
     */
    filter?: string
}

/**
 * The expressions given to the 'APPLY' key word
 */
export interface FTExpression {
    /**
     * The expression given
     */
    expression: string,
    /**
     * The value of 'AS' of the expression determining the name of it.
     */
    as: string
}

/**
 * The 'REDUCE' parameter.
 */
export interface FTReduce {
    /**
     * A function of the 'REDUCE' parameter
     */
    function: string,
    /**
     * The number of arguments of the 'REDUCE' parameter
     */
    nargs: string,
    /**
     * The argument of the 'REDUCE' parameter
     */
    args: string[],
    /**
     * The 'AS' alias under which the reduced result is returned
     */
    as?: string
}

/**
 * The 'SORT BY' property object
 */
export interface FTSortByProperty {
    /**
     * The value of the property
     */
    property: string,
    /**
     * The 'SORT' value of the property
     */
    sort: FTSort
}

/**
 * The available 'SORT' methods
 * @param ASC The ascending sort
 * @param DESC The descending sort
 */
export type FTSort = 'ASC' | 'DESC';

/**
 * The additional parameters of 'FT.SUGADD' command
 */
export interface FTSugAddParameters {
    /**
     * The 'INCR' parameter. if set, we increment the existing entry of the suggestion by the given score, instead of replacing the score. This is useful for updating the dictionary based on user queries in real time
     */
    incr: boolean,
    /**
     * The 'PAYLOAD' parameter. If set, we save an extra payload with the suggestion, that can be fetched by adding the WITHPAYLOADS argument to FT.SUGGET
     */
    payload: string
}

/**
 * The additional parameters of 'FT.SUGGET' command
 */
export interface FTSugGetParameters {
    /**
     * The 'FUZZY' parameter. if set, we do a fuzzy prefix search, including prefixes at Levenshtein distance of 1 from the prefix sent
     */
    fuzzy: boolean,
    /**
     * The 'MAX' parameter. If set, we limit the results to a maximum of num (default: 5).
     */
    max: number,
    /**
     * The 'WITHSCORES' parameter. If set, we also return the score of each suggestion. this can be used to merge results from multiple instances
     */
    withScores: boolean,
    /**
     * The 'WITHPAYLOADS' parameter. If set, we return optional payloads saved along with the suggestions. If no payload is present for an entry, we return a Null Reply.
     */
    withPayloads: boolean
}

/**
 * The additional parameters of 'FT.SPELLCHECK' command
 */
export interface FTSpellCheck {
    /**
     * A list of terms
     */
    terms?: {
        /**
         * The type of the term
         */
        type: 'INCLUDE' | 'EXCLUDE',
        /**
         * The dict of the term
         */
        dict?: string
    }[],
    /**
     * The maximal Levenshtein distance for spelling suggestions (default: 1, max: 4)
     */
    distance?: number | string,
}

/**
 * The available field types
 * @param TEXT The text type
 * @param NUMERIC The number type
 * @param TAG The tag type
 */
export type FTFieldType = 'TEXT' | 'NUMERIC' | 'TAG' | string;

/**
 * The available index types
 * @param HASH The hash index type
 * @param JSON The JSON index type
 */
export type FTIndexType = 'HASH' | 'JSON';

/**
 * The config response
 */
export interface FTConfig {
    EXTLOAD?: string | null,
    SAFEMODE?: string,
    CONCURRENT_WRITE_MODE?: string,
    NOGC?: string,
    MINPREFIX?: string,
    FORKGC_SLEEP_BEFORE_EXIT?: string,
    MAXDOCTABLESIZE?: string,
    MAXSEARCHRESULTS?: string,
    MAXAGGREGATERESULTS?: string,
    MAXEXPANSIONS?: string,
    MAXPREFIXEXPANSIONS?: string,
    TIMEOUT?: string,
    INDEX_THREADS?: string,
    SEARCH_THREADS?: string,
    FRISOINI?: string | null,
    ON_TIMEOUT?: string,
    GCSCANSIZE?: string,
    MIN_PHONETIC_TERM_LEN?: string,
    GC_POLICY?: string,
    FORK_GC_RUN_INTERVAL?: string,
    FORK_GC_CLEAN_THRESHOLD?: string,
    FORK_GC_RETRY_INTERVAL?: string,
    _MAX_RESULTS_TO_UNSORTED_MODE?: string,
    CURSOR_MAX_IDLE?: string,
    NO_MEM_POOLS?: string,
    PARTIAL_INDEXED_DOCS?: string,
    UPGRADE_INDEX?: string
}

/**
 * The info response
 */
export interface FTInfo {
    index_name?: string,
    index_options?: string[],
    index_definition?: {
        key_type?: string,
        prefixes?: string,
        language_field?: string,
        default_score?: string,
        score_field?: string,
        payload_field?: string
    },
    fields?: string[],
    num_docs?: string,
    max_doc_id?: string,
    num_terms?: string,
    num_records?: string,
    inverted_sz_mb?: string,
    total_inverted_index_blocks?: string,
    offset_vectors_sz_mb?: string,
    doc_table_size_mb?: string,
    sortable_values_size_mb?: string,
    key_table_size_mb?: string,
    records_per_doc_avg?: string,
    bytes_per_record_avg?: string,
    offsets_per_term_avg?: string,
    offset_bits_per_record_avg?: string,
    hash_indexing_failures?: string,
    indexing?: string,
    percent_indexed?: string,
    gc_stats?: {
        bytes_collected?: string,
        total_ms_run?: string,
        total_cycles?: string,
        average_cycle_time_ms?: string,
        last_run_time_ms?: string,
        gc_numeric_trees_missed?: string,
        gc_blocks_denied?: string
    },
    cursor_stats?: {
        global_idle?: number,
        global_total?: number,
        index_capacity?: number,
        index_total?: number
    }
}

/**
 * The FT.SPELLCHECK response object
 */
export interface FTSpellCheckResponse {
    /**
     * The term that was spellchecked
     */
    term: string,
    /**
     * Suggested corrections
     */
    suggestions: {
        /**
         * Score of the suggestion
         */
        score: string,
        /**
         * The suggested correction text
         */
        suggestion: string
    }[],
}
the_stack
// NOTE(review): Ambient declarations for android.support.v7.util (AsyncListUtil,
// MessageThreadUtil, SortedList, ThreadUtil, TileList). The `native.Array` and
// `java.lang.*` import aliases suggest these are machine-generated NativeScript
// android platform typings — do not hand-edit; regenerate instead (TODO confirm
// the generator). Line structure is mangled (inline /// comments swallow the rest
// of their line), so the code below is kept byte-identical.
declare module android { export module support { export module v4 { export module animation { export type ValueAnimatorCompat = any; } } } } import javalangClass = java.lang.Class; import javalangObject = java.lang.Object; /// <reference path="./java.lang.Class.d.ts" /> /// <reference path="./java.lang.Object.d.ts" /> declare module android { export module support { export module v7 { export module util { export class AsyncListUtil<T> { public refresh(): void; public constructor(param0: javalangClass<T>, param1: number, param2: android.support.v7.util.AsyncListUtil.DataCallback, param3: android.support.v7.util.AsyncListUtil.ViewCallback); public getItemCount(): number; public onRangeChanged(): void; public getItem(param0: number): javalangObject; } export module AsyncListUtil { export abstract class DataCallback { public recycleData(param0: native.Array<javalangObject>, param1: number): void; public constructor(); public fillData(param0: native.Array<javalangObject>, param1: number, param2: number): void; public refreshData(): number; public getMaxCachedTiles(): number; } export abstract class ViewCallback { public static HINT_SCROLL_NONE: number; public static HINT_SCROLL_DESC: number; public static HINT_SCROLL_ASC: number; public constructor(); public onDataRefresh(): void; public extendRangeInto(param0: native.Array<number>, param1: native.Array<number>, param2: number): void; public getItemRangeInto(param0: native.Array<number>): void; public onItemLoaded(param0: number): void; } } } } } } /// <reference path="./java.lang.Object.d.ts" /> declare module android { export module support { export module v7 { export module util { export class MessageThreadUtil { public getMainThreadProxy(param0: android.support.v7.util.ThreadUtil.MainThreadCallback): android.support.v7.util.ThreadUtil.MainThreadCallback; public getBackgroundProxy(param0: android.support.v7.util.ThreadUtil.BackgroundCallback): android.support.v7.util.ThreadUtil.BackgroundCallback; } export module
MessageThreadUtil { export class MessageQueue { } export class SyncQueueItem { public what: number; public arg1: number; public arg2: number; public arg3: number; public arg4: number; public arg5: number; public data: javalangObject; } } } } } } import javautilCollection = java.util.Collection; /// <reference path="./java.lang.Class.d.ts" /> /// <reference path="./java.lang.Object.d.ts" /> /// <reference path="./java.util.Collection.d.ts" /> declare module android { export module support { export module v7 { export module util { export class SortedList<T> { public static INVALID_POSITION: number; public beginBatchedUpdates(): void; public constructor(param0: javalangClass<T>, param1: android.support.v7.util.SortedList.Callback, param2: number); public updateItemAt(param0: number, param1: javalangObject): void; public size(): number; public add(param0: javalangObject): number; public remove(param0: javalangObject): boolean; public addAll(param0: javautilCollection<T>): void; public addAll(param0: native.Array<javalangObject>): void; public recalculatePositionOfItemAt(param0: number): void; public indexOf(param0: javalangObject): number; public clear(): void; public constructor(param0: javalangClass<T>, param1: android.support.v7.util.SortedList.Callback); public addAll(param0: native.Array<javalangObject>, param1: boolean): void; public get(param0: number): javalangObject; public endBatchedUpdates(): void; public removeItemAt(param0: number): javalangObject; } export module SortedList { export class BatchedCallback extends android.support.v7.util.SortedList.Callback { public constructor(); public areContentsTheSame(param0: javalangObject, param1: javalangObject): boolean; public areItemsTheSame(param0: javalangObject, param1: javalangObject): boolean; public onChanged(param0: number, param1: number): void; public compare(param0: javalangObject, param1: javalangObject): number; public onRemoved(param0: number, param1: number): void; public constructor(param0:
android.support.v7.util.SortedList.Callback); public onInserted(param0: number, param1: number): void; public dispatchLastEvent(): void; public onMoved(param0: number, param1: number): void; } export abstract class Callback { public constructor(); public areContentsTheSame(param0: javalangObject, param1: javalangObject): boolean; public areItemsTheSame(param0: javalangObject, param1: javalangObject): boolean; public onChanged(param0: number, param1: number): void; public compare(param0: javalangObject, param1: javalangObject): number; public onRemoved(param0: number, param1: number): void; public onInserted(param0: number, param1: number): void; public onMoved(param0: number, param1: number): void; } } } } } } /* ThreadUtil: interface contract implemented via a NativeScript `implementation` object */ declare module android { export module support { export module v7 { export module util { export class ThreadUtil { /** * Constructs a new instance of the android.support.v7.util.ThreadUtil interface with the provided implementation. */ public constructor(implementation: { getMainThreadProxy(param0: android.support.v7.util.ThreadUtil.MainThreadCallback): android.support.v7.util.ThreadUtil.MainThreadCallback; getBackgroundProxy(param0: android.support.v7.util.ThreadUtil.BackgroundCallback): android.support.v7.util.ThreadUtil.BackgroundCallback; }); public getMainThreadProxy(param0: android.support.v7.util.ThreadUtil.MainThreadCallback): android.support.v7.util.ThreadUtil.MainThreadCallback; public getBackgroundProxy(param0: android.support.v7.util.ThreadUtil.BackgroundCallback): android.support.v7.util.ThreadUtil.BackgroundCallback; } export module ThreadUtil { export class BackgroundCallback { /** * Constructs a new instance of the android.support.v7.util.ThreadUtil$BackgroundCallback interface with the provided implementation.
*/ public constructor(implementation: { refresh(param0: number): void; updateRange(param0: number, param1: number, param2: number, param3: number, param4: number): void; loadTile(param0: number, param1: number): void; recycleTile(param0: android.support.v7.util.TileList.Tile): void; }); public recycleTile(param0: android.support.v7.util.TileList.Tile): void; public refresh(param0: number): void; public updateRange(param0: number, param1: number, param2: number, param3: number, param4: number): void; public loadTile(param0: number, param1: number): void; } export class MainThreadCallback { /** * Constructs a new instance of the android.support.v7.util.ThreadUtil$MainThreadCallback interface with the provided implementation. */ public constructor(implementation: { updateItemCount(param0: number, param1: number): void; addTile(param0: number, param1: android.support.v7.util.TileList.Tile): void; removeTile(param0: number, param1: number): void; }); public updateItemCount(param0: number, param1: number): void; public addTile(param0: number, param1: android.support.v7.util.TileList.Tile): void; public removeTile(param0: number, param1: number): void; } } } } } } /// <reference path="./java.lang.Class.d.ts" /> /// <reference path="./java.lang.Object.d.ts" /> declare module android { export module support { export module v7 { export module util { export class TileList { public getItemAt(param0: number): javalangObject; public addOrReplace(param0: android.support.v7.util.TileList.Tile): android.support.v7.util.TileList.Tile; public getAtIndex(param0: number): android.support.v7.util.TileList.Tile; public clear(): void; public removeAtPos(param0: number): android.support.v7.util.TileList.Tile; public size(): number; public constructor(param0: number); } export module TileList { export class Tile { public mItems: native.Array<javalangObject>; public mStartPosition: number; public mItemCount: number; public constructor(param0: javalangClass<java.lang.Object>, param1: number);
// NOTE(review): continuation of the generated android.support.v7 typings —
// closes util.TileList, then declares widget.AdapterHelper, widget.ChildHelper,
// widget.DefaultItemAnimator and the head of widget.GridLayoutManager.
// Code kept byte-identical; appears machine-generated — TODO confirm generator.
} } } } } } /// <reference path="./java.lang.Object.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class AdapterHelper { public obtainUpdateOp(param0: number, param1: number, param2: number, param3: javalangObject): android.support.v7.widget.AdapterHelper.UpdateOp; public recycleUpdateOp(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; public applyPendingUpdatesToPosition(param0: number): number; } export module AdapterHelper { export class Callback { /** * Constructs a new instance of the android.support.v7.widget.AdapterHelper$Callback interface with the provided implementation. */ public constructor(implementation: { findViewHolder(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; offsetPositionsForRemovingInvisible(param0: number, param1: number): void; offsetPositionsForRemovingLaidOutOrNewView(param0: number, param1: number): void; markViewHoldersUpdated(param0: number, param1: number, param2: javalangObject): void; onDispatchFirstPass(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; onDispatchSecondPass(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; offsetPositionsForAdd(param0: number, param1: number): void; offsetPositionsForMove(param0: number, param1: number): void; }); public offsetPositionsForRemovingLaidOutOrNewView(param0: number, param1: number): void; public onDispatchFirstPass(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; public offsetPositionsForAdd(param0: number, param1: number): void; public findViewHolder(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; public markViewHoldersUpdated(param0: number, param1: number, param2: javalangObject): void; public offsetPositionsForMove(param0: number, param1: number): void; public onDispatchSecondPass(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; public offsetPositionsForRemovingInvisible(param0: number, param1: number):
void; } export class UpdateOp { public equals(param0: javalangObject): boolean; public hashCode(): number; public toString(): string; } } } } } } /// <reference path="./android.view.View.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class ChildHelper { public toString(): string; } export module ChildHelper { export class Bucket { public toString(): string; } export class Callback { /** * Constructs a new instance of the android.support.v7.widget.ChildHelper$Callback interface with the provided implementation. */ public constructor(implementation: { getChildCount(): number; addView(param0: android.view.View, param1: number): void; indexOfChild(param0: android.view.View): number; removeViewAt(param0: number): void; getChildAt(param0: number): android.view.View; removeAllViews(): void; getChildViewHolder(param0: android.view.View): android.support.v7.widget.RecyclerView.ViewHolder; attachViewToParent(param0: android.view.View, param1: number, param2: android.view.ViewGroup.LayoutParams): void; detachViewFromParent(param0: number): void; onEnteredHiddenState(param0: android.view.View): void; onLeftHiddenState(param0: android.view.View): void; }); public onLeftHiddenState(param0: android.view.View): void; public removeAllViews(): void; public onEnteredHiddenState(param0: android.view.View): void; public getChildCount(): number; public addView(param0: android.view.View, param1: number): void; public attachViewToParent(param0: android.view.View, param1: number, param2: android.view.ViewGroup.LayoutParams): void; public removeViewAt(param0: number): void; public getChildAt(param0: number): android.view.View; public indexOfChild(param0: android.view.View): number; public getChildViewHolder(param0: android.view.View): android.support.v7.widget.RecyclerView.ViewHolder; public detachViewFromParent(param0: number): void; } } } } } } import javautilList = java.util.List; /// <reference path="./android.view.View.d.ts"
/> /// <reference path="./java.util.List.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class DefaultItemAnimator extends android.support.v7.widget.SimpleItemAnimator { public isRunning(param0: android.support.v7.widget.RecyclerView.ItemAnimator.ItemAnimatorFinishedListener): boolean; public animateChange(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param3: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public endAnimations(): void; public runPendingAnimations(): void; public endAnimation(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public canReuseUpdatedViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public animateRemove(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public animateChange(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: number, param3: number, param4: number, param5: number): boolean; public constructor(); public animateAdd(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public isRunning(): boolean; public canReuseUpdatedViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: javautilList<java.lang.Object>): boolean; public animateMove(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number, param2: number, param3: number, param4: number): boolean; } export module DefaultItemAnimator { export class ChangeInfo { public oldHolder: android.support.v7.widget.RecyclerView.ViewHolder; public newHolder: android.support.v7.widget.RecyclerView.ViewHolder; public fromX: number; public fromY: number; public toX: number; public toY: number; public toString(): string; } export class MoveInfo { public holder:
android.support.v7.widget.RecyclerView.ViewHolder; public fromX: number; public fromY: number; public toX: number; public toY: number; } export class VpaListenerAdapter { public onAnimationCancel(param0: android.view.View): void; public onAnimationEnd(param0: android.view.View): void; public onAnimationStart(param0: android.view.View): void; } } } } } } /// <reference path="./android.content.Context.d.ts" /> /// <reference path="./android.graphics.Rect.d.ts" /> /// <reference path="./android.support.v4.view.accessibility.AccessibilityNodeInfoCompat.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.util.AttributeSet.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./java.lang.Object.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class GridLayoutManager extends android.support.v7.widget.LinearLayoutManager { public static DEFAULT_SPAN_COUNT: number; public setMeasuredDimension(param0: number, param1: number): void; public constructor(context: android.content.Context, spanCount: number); public onItemsChanged(param0: android.support.v7.widget.RecyclerView): void; public onItemsUpdated(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public onItemsMoved(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: number): void; public setSpanSizeLookup(param0: android.support.v7.widget.GridLayoutManager.SpanSizeLookup): void; public supportsPredictiveItemAnimations(): boolean; public checkLayoutParams(param0: android.support.v7.widget.RecyclerView.LayoutParams): boolean; public constructor(context: android.content.Context, spanCount: number, orientation: number, reverseLayout: boolean); public onItemsAdded(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public scrollVerticallyBy(param0: number, param1:
// NOTE(review): continuation of the generated android.support.v7.widget typings —
// tail of GridLayoutManager (plus its SpanSizeLookup/LayoutParams inner types),
// LayoutState, and the head of LinearLayoutManager. Code kept byte-identical;
// appears machine-generated — TODO confirm generator.
android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public onInitializeAccessibilityNodeInfoForItem(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.View, param3: android.support.v4.view.accessibility.AccessibilityNodeInfoCompat): void; public scrollHorizontallyBy(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public constructor(); public getRowCountForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; public prepareForDrop(param0: android.view.View, param1: android.view.View, param2: number, param3: number): void; public setSpanCount(param0: number): void; public setStackFromEnd(param0: boolean): void; public getSpanCount(): number; public onFocusSearchFailed(param0: android.view.View, param1: number, param2: android.support.v7.widget.RecyclerView.Recycler, param3: android.support.v7.widget.RecyclerView.State): android.view.View; public constructor(param0: android.content.Context, param1: number, param2: boolean); public generateDefaultLayoutParams(): android.support.v7.widget.RecyclerView.LayoutParams; public constructor(param0: android.content.Context, param1: android.util.AttributeSet, param2: number, param3: number); public generateLayoutParams(param0: android.content.Context, param1: android.util.AttributeSet): android.support.v7.widget.RecyclerView.LayoutParams; public generateLayoutParams(param0: android.view.ViewGroup.LayoutParams): android.support.v7.widget.RecyclerView.LayoutParams; public setMeasuredDimension(param0: android.graphics.Rect, param1: number, param2: number): void; public onItemsRemoved(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public constructor(param0: android.content.Context); public
getSpanSizeLookup(): android.support.v7.widget.GridLayoutManager.SpanSizeLookup; public getColumnCountForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; public onItemsUpdated(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: javalangObject): void; public onLayoutChildren(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): void; } export module GridLayoutManager { export class DefaultSpanSizeLookup extends android.support.v7.widget.GridLayoutManager.SpanSizeLookup { public getSpanIndex(param0: number, param1: number): number; public getSpanSize(param0: number): number; public constructor(); } export class LayoutParams extends android.support.v7.widget.RecyclerView.LayoutParams { public static INVALID_SPAN_ID: number; public constructor(param0: android.view.ViewGroup.MarginLayoutParams); public constructor(param0: android.view.ViewGroup.LayoutParams); public constructor(param0: android.content.Context, param1: android.util.AttributeSet); public getSpanIndex(): number; public getSpanSize(): number; public constructor(param0: android.support.v7.widget.RecyclerView.LayoutParams); public constructor(param0: number, param1: number); } export abstract class SpanSizeLookup { public getSpanIndex(param0: number, param1: number): number; public getSpanSize(param0: number): number; public constructor(); public invalidateSpanIndexCache(): void; public setSpanIndexCacheEnabled(param0: boolean): void; public getSpanGroupIndex(param0: number, param1: number): number; public isSpanIndexCacheEnabled(): boolean; } } } } } } /* LayoutState: internal helper type */ declare module android { export module support { export module v7 { export module widget { export class LayoutState { public toString(): string; } } } } } /// <reference path="./android.content.Context.d.ts" /> /// <reference path="./android.graphics.PointF.d.ts" /> /// <reference
path="./android.os.Parcel.d.ts" /> /// <reference path="./android.os.Parcelable.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.util.AttributeSet.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./android.view.accessibility.AccessibilityEvent.d.ts" /> /// <reference path="./java.lang.String.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class LinearLayoutManager extends android.support.v7.widget.RecyclerView.LayoutManager implements android.support.v7.widget.helper.ItemTouchHelper.ViewDropHandler { public static HORIZONTAL: number; public static VERTICAL: number; public static INVALID_OFFSET: number; public onInitializeAccessibilityEvent(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.accessibility.AccessibilityEvent): void; public computeVerticalScrollOffset(param0: android.support.v7.widget.RecyclerView.State): number; public computeHorizontalScrollExtent(param0: android.support.v7.widget.RecyclerView.State): number; public computeScrollVectorForPosition(param0: number): android.graphics.PointF; public findLastCompletelyVisibleItemPosition(): number; public onInitializeAccessibilityEvent(param0: android.view.accessibility.AccessibilityEvent): void; public computeHorizontalScrollRange(param0: android.support.v7.widget.RecyclerView.State): number; public assertNotInLayoutOrScroll(param0: string): void; public getReverseLayout(): boolean; public supportsPredictiveItemAnimations(): boolean; public setReverseLayout(param0: boolean): void; public scrollToPositionWithOffset(param0: number, param1: number): void; public setRecycleChildrenOnDetach(param0: boolean): void; public scrollToPosition(param0: number): void; public setSmoothScrollbarEnabled(param0: boolean): void; public scrollVerticallyBy(param0: number, param1:
android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public scrollHorizontallyBy(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public findLastVisibleItemPosition(): number; public getStackFromEnd(): boolean; public computeVerticalScrollRange(param0: android.support.v7.widget.RecyclerView.State): number; public constructor(); public findFirstCompletelyVisibleItemPosition(): number; public prepareForDrop(param0: android.view.View, param1: android.view.View, param2: number, param3: number): void; public getRecycleChildrenOnDetach(): boolean; public findViewByPosition(param0: number): android.view.View; public computeHorizontalScrollOffset(param0: android.support.v7.widget.RecyclerView.State): number; public setStackFromEnd(param0: boolean): void; public smoothScrollToPosition(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.State, param2: number): void; public getOrientation(): number; public onDetachedFromWindow(param0: android.support.v7.widget.RecyclerView): void; public canScrollVertically(): boolean; public isLayoutRTL(): boolean; public getExtraLayoutSpace(param0: android.support.v7.widget.RecyclerView.State): number; public onFocusSearchFailed(param0: android.view.View, param1: number, param2: android.support.v7.widget.RecyclerView.Recycler, param3: android.support.v7.widget.RecyclerView.State): android.view.View; public constructor(param0: android.content.Context, param1: number, param2: boolean); public generateDefaultLayoutParams(): android.support.v7.widget.RecyclerView.LayoutParams; public onRestoreInstanceState(param0: android.os.Parcelable): void; public setOrientation(param0: number): void; public constructor(param0: android.content.Context, param1: android.util.AttributeSet, param2: number, param3: number); public isSmoothScrollbarEnabled(): boolean; public
// NOTE(review): continuation of the generated android.support.v7.widget typings —
// tail of LinearLayoutManager (with AnchorInfo/LayoutChunkResult/LayoutState/
// SavedState inner types), then LinearSmoothScroller, OpReorderer,
// OrientationHelper and PositionMap. Code kept byte-identical; appears
// machine-generated — TODO confirm generator.
findFirstVisibleItemPosition(): number; public canScrollHorizontally(): boolean; public constructor(param0: android.content.Context); public computeVerticalScrollExtent(param0: android.support.v7.widget.RecyclerView.State): number; public onDetachedFromWindow(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.Recycler): void; public onLayoutChildren(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): void; public onSaveInstanceState(): android.os.Parcelable; } export module LinearLayoutManager { export class AnchorInfo { public assignFromViewAndKeepVisibleRect(param0: android.view.View): void; public assignFromView(param0: android.view.View): void; public toString(): string; } export class LayoutChunkResult { public mConsumed: number; public mFinished: boolean; public mIgnoreConsumed: boolean; public mFocusable: boolean; public constructor(); } export class LayoutState { public nextViewInLimitedList(param0: android.view.View): android.view.View; public assignPositionFromScrapList(): void; public assignPositionFromScrapList(param0: android.view.View): void; } export class SavedState { public static CREATOR: android.os.Parcelable.Creator<SavedState>; public constructor(); public describeContents(): number; public writeToParcel(param0: android.os.Parcel, param1: number): void; public constructor(param0: android.support.v7.widget.LinearLayoutManager.SavedState); } } } } } } /// <reference path="./android.content.Context.d.ts" /> /// <reference path="./android.graphics.PointF.d.ts" /> /// <reference path="./android.util.DisplayMetrics.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./android.view.animation.DecelerateInterpolator.d.ts" /> /// <reference path="./android.view.animation.LinearInterpolator.d.ts" /> declare module android { export module support { export module v7 { export module widget { export abstract class
LinearSmoothScroller extends android.support.v7.widget.RecyclerView.SmoothScroller { public static SNAP_TO_START: number; public static SNAP_TO_END: number; public static SNAP_TO_ANY: number; public mLinearInterpolator: android.view.animation.LinearInterpolator; public mDecelerateInterpolator: android.view.animation.DecelerateInterpolator; public mTargetVector: android.graphics.PointF; public mInterimTargetDx: number; public mInterimTargetDy: number; public getHorizontalSnapPreference(): number; public calculateSpeedPerPixel(param0: android.util.DisplayMetrics): number; public calculateTimeForScrolling(param0: number): number; public computeScrollVectorForPosition(param0: number): android.graphics.PointF; public calculateDxToMakeVisible(param0: android.view.View, param1: number): number; public onTargetFound(param0: android.view.View, param1: android.support.v7.widget.RecyclerView.State, param2: android.support.v7.widget.RecyclerView.SmoothScroller.Action): void; public onStart(): void; public onSeekTargetStep(param0: number, param1: number, param2: android.support.v7.widget.RecyclerView.State, param3: android.support.v7.widget.RecyclerView.SmoothScroller.Action): void; public updateActionForInterimTarget(param0: android.support.v7.widget.RecyclerView.SmoothScroller.Action): void; public constructor(param0: android.content.Context); public constructor(); public onStop(): void; public calculateDyToMakeVisible(param0: android.view.View, param1: number): number; public calculateDtToFit(param0: number, param1: number, param2: number, param3: number, param4: number): number; public calculateTimeForDeceleration(param0: number): number; public getVerticalSnapPreference(): number; } } } } } /// <reference path="./java.lang.Object.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class OpReorderer { public constructor(param0: android.support.v7.widget.OpReorderer.Callback); } export module OpReorderer { export class
Callback { /** * Constructs a new instance of the android.support.v7.widget.OpReorderer$Callback interface with the provided implementation. */ public constructor(implementation: { obtainUpdateOp(param0: number, param1: number, param2: number, param3: javalangObject): android.support.v7.widget.AdapterHelper.UpdateOp; recycleUpdateOp(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; }); public obtainUpdateOp(param0: number, param1: number, param2: number, param3: javalangObject): android.support.v7.widget.AdapterHelper.UpdateOp; public recycleUpdateOp(param0: android.support.v7.widget.AdapterHelper.UpdateOp): void; } } } } } } /// <reference path="./android.view.View.d.ts" /> declare module android { export module support { export module v7 { export module widget { export abstract class OrientationHelper { public mLayoutManager: android.support.v7.widget.RecyclerView.LayoutManager; public static HORIZONTAL: number; public static VERTICAL: number; public getTotalSpace(): number; public getEnd(): number; public getTotalSpaceChange(): number; public getEndAfterPadding(): number; public getDecoratedEnd(param0: android.view.View): number; public getDecoratedMeasurementInOther(param0: android.view.View): number; public getEndPadding(): number; public getDecoratedMeasurement(param0: android.view.View): number; public getStartAfterPadding(): number; public offsetChildren(param0: number): void; public static createVerticalHelper(param0: android.support.v7.widget.RecyclerView.LayoutManager): android.support.v7.widget.OrientationHelper; public static createHorizontalHelper(param0: android.support.v7.widget.RecyclerView.LayoutManager): android.support.v7.widget.OrientationHelper; public offsetChild(param0: android.view.View, param1: number): void; public getDecoratedStart(param0: android.view.View): number; public onLayoutComplete(): void; public getMode(): number; public static createOrientationHelper(param0: android.support.v7.widget.RecyclerView.LayoutManager,
param1: number): android.support.v7.widget.OrientationHelper; public getModeInOther(): number; } } } } } import javautilArrayList = java.util.ArrayList; /// <reference path="./java.lang.Object.d.ts" /> /// <reference path="./java.util.ArrayList.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class PositionMap { public valueAt(param0: number): javalangObject; public keyAt(param0: number): number; public insertKeyRange(param0: number, param1: number): void; public indexOfValue(param0: javalangObject): number; public append(param0: number, param1: javalangObject): void; public size(): number; public removeAtRange(param0: number, param1: number): void; public clone(): android.support.v7.widget.PositionMap; public removeKeyRange(param0: javautilArrayList<java.lang.Object>, param1: number, param2: number): void; public setValueAt(param0: number, param1: javalangObject): void; public toString(): string; public put(param0: number, param1: javalangObject): void; public get(param0: number, param1: javalangObject): javalangObject; public constructor(); public delete(param0: number): void; public clear(): void; public get(param0: number): javalangObject; public remove(param0: number): void; public removeAt(param0: number): void; public constructor(param0: number); public indexOfKey(param0: number): number; } export module PositionMap { export class ContainerHelpers { } } } } } } import javalangRunnable = java.lang.Runnable; /// <reference path="./android.content.Context.d.ts" /> /// <reference path="./android.graphics.Canvas.d.ts" /> /// <reference path="./android.graphics.PointF.d.ts" /> /// <reference path="./android.graphics.Rect.d.ts" /> /// <reference path="./android.os.Bundle.d.ts" /> /// <reference path="./android.os.Parcel.d.ts" /> /// <reference path="./android.os.Parcelable.d.ts" /> /// <reference path="./android.support.v4.view.accessibility.AccessibilityNodeInfoCompat.d.ts" /> /// <reference
path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerViewAccessibilityDelegate.d.ts" /> /// <reference path="./android.util.AttributeSet.d.ts" /> /// <reference path="./android.util.SparseArray.d.ts" /> /// <reference path="./android.view.MotionEvent.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./android.view.ViewGroup.d.ts" /> /// <reference path="./android.view.accessibility.AccessibilityEvent.d.ts" /> /// <reference path="./android.view.animation.Interpolator.d.ts" /> /// <reference path="./java.lang.Object.d.ts" /> /// <reference path="./java.lang.Runnable.d.ts" /> /// <reference path="./java.lang.String.d.ts" /> /// <reference path="./java.util.ArrayList.d.ts" /> /// <reference path="./java.util.List.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class RecyclerView { public static HORIZONTAL: number; public static VERTICAL: number; public static NO_POSITION: number; public static NO_ID: number; public static INVALID_TYPE: number; public static TOUCH_SLOP_DEFAULT: number; public static TOUCH_SLOP_PAGING: number; public static SCROLL_STATE_IDLE: number; public static SCROLL_STATE_DRAGGING: number; public static SCROLL_STATE_SETTLING: number; public onDraw(param0: android.graphics.Canvas): void; public getChildDrawingOrder(param0: number, param1: number): number; public addItemDecoration(param0: android.support.v7.widget.RecyclerView.ItemDecoration): void; public getChildAdapterPosition(param0: android.view.View): number; public scrollToPosition(param0: number): void; public removeItemDecoration(param0: android.support.v7.widget.RecyclerView.ItemDecoration): void; public onChildDetachedFromWindow(param0: android.view.View): void; public stopNestedScroll(): void; public computeHorizontalScrollExtent(): number; public getMinFlingVelocity(): number; public setAdapter(param0: 
android.support.v7.widget.RecyclerView.Adapter): void; public getAdapter(): android.support.v7.widget.RecyclerView.Adapter; public removeOnChildAttachStateChangeListener(param0: android.support.v7.widget.RecyclerView.OnChildAttachStateChangeListener): void; public onTouchEvent(param0: android.view.MotionEvent): boolean; public setRecycledViewPool(param0: android.support.v7.widget.RecyclerView.RecycledViewPool): void; public offsetChildrenHorizontal(param0: number): void; public isComputingLayout(): boolean; public addItemDecoration(param0: android.support.v7.widget.RecyclerView.ItemDecoration, param1: number): void; public onChildAttachedToWindow(param0: android.view.View): void; public removeDetachedView(param0: android.view.View, param1: boolean): void; public offsetChildrenVertical(param0: number): void; public dispatchNestedFling(param0: number, param1: number, param2: boolean): boolean; public setScrollingTouchSlop(param0: number): void; public scrollTo(param0: number, param1: number): void; public onGenericMotionEvent(param0: android.view.MotionEvent): boolean; public getRecycledViewPool(): android.support.v7.widget.RecyclerView.RecycledViewPool; public onInterceptTouchEvent(param0: android.view.MotionEvent): boolean; public removeOnItemTouchListener(param0: android.support.v7.widget.RecyclerView.OnItemTouchListener): void; public invalidateItemDecorations(): void; public isAnimating(): boolean; public getChildItemId(param0: android.view.View): number; public findViewHolderForLayoutPosition(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; public requestChildRectangleOnScreen(param0: android.view.View, param1: android.graphics.Rect, param2: boolean): boolean; public getChildPosition(param0: android.view.View): number; public getChildViewHolder(param0: android.view.View): android.support.v7.widget.RecyclerView.ViewHolder; public addFocusables(param0: javautilArrayList<android.view.View>, param1: number, param2: number): void; public 
setNestedScrollingEnabled(param0: boolean): void; public generateDefaultLayoutParams(): android.view.ViewGroup.LayoutParams; public stopScroll(): void; public requestLayout(): void; public hasPendingAdapterUpdates(): boolean; public onDetachedFromWindow(): void; public onMeasure(param0: number, param1: number): void; public constructor(param0: android.content.Context, param1: android.util.AttributeSet, param2: number); public getItemAnimator(): android.support.v7.widget.RecyclerView.ItemAnimator; public computeVerticalScrollRange(): number; public onRestoreInstanceState(param0: android.os.Parcelable): void; public onScrolled(param0: number, param1: number): void; public setItemAnimator(param0: android.support.v7.widget.RecyclerView.ItemAnimator): void; public smoothScrollToPosition(param0: number): void; public constructor(param0: android.content.Context); public dispatchSaveInstanceState(param0: android.util.SparseArray<android.os.Parcelable>): void; public isNestedScrollingEnabled(): boolean; public focusSearch(focused: android.view.View, direction: number): android.view.View; public checkLayoutParams(param0: android.view.ViewGroup.LayoutParams): boolean; public getChildLayoutPosition(param0: android.view.View): number; public dispatchNestedPreFling(param0: number, param1: number): boolean; public computeHorizontalScrollOffset(): number; public setHasFixedSize(param0: boolean): void; public findContainingViewHolder(param0: android.view.View): android.support.v7.widget.RecyclerView.ViewHolder; public sendAccessibilityEventUnchecked(param0: android.view.accessibility.AccessibilityEvent): void; public getScrollState(): number; public dispatchNestedPreScroll(param0: number, param1: number, param2: native.Array<number>, param3: native.Array<number>): boolean; public computeHorizontalScrollRange(): number; public onAttachedToWindow(): void; public setAccessibilityDelegateCompat(param0: android.support.v7.widget.RecyclerViewAccessibilityDelegate): void; public 
findViewHolderForItemId(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; public swapAdapter(param0: android.support.v7.widget.RecyclerView.Adapter, param1: boolean): void; public generateLayoutParams(param0: android.util.AttributeSet): android.view.ViewGroup.LayoutParams; public setItemViewCacheSize(param0: number): void; public computeVerticalScrollExtent(): number; public setChildDrawingOrderCallback(param0: android.support.v7.widget.RecyclerView.ChildDrawingOrderCallback): void; public addOnChildAttachStateChangeListener(param0: android.support.v7.widget.RecyclerView.OnChildAttachStateChangeListener): void; public requestChildFocus(param0: android.view.View, param1: android.view.View): void; public onScrollStateChanged(param0: number): void; public getCompatAccessibilityDelegate(): android.support.v7.widget.RecyclerViewAccessibilityDelegate; public getLayoutManager(): android.support.v7.widget.RecyclerView.LayoutManager; public computeVerticalScrollOffset(): number; public dispatchNestedScroll(param0: number, param1: number, param2: number, param3: number, param4: native.Array<number>): boolean; public findContainingItemView(param0: android.view.View): android.view.View; public setLayoutFrozen(param0: boolean): void; public dispatchRestoreInstanceState(param0: android.util.SparseArray<android.os.Parcelable>): void; public clearOnChildAttachStateChangeListeners(): void; public setLayoutManager(param0: android.support.v7.widget.RecyclerView.LayoutManager): void; public findViewHolderForAdapterPosition(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; public onSaveInstanceState(): android.os.Parcelable; public isAttachedToWindow(): boolean; public scrollBy(param0: number, param1: number): void; public getBaseline(): number; public findChildViewUnder(param0: number, param1: number): android.view.View; public clearOnScrollListeners(): void; public addOnScrollListener(param0: 
android.support.v7.widget.RecyclerView.OnScrollListener): void; public requestDisallowInterceptTouchEvent(param0: boolean): void; public setClipToPadding(param0: boolean): void; public fling(param0: number, param1: number): boolean; public draw(param0: android.graphics.Canvas): void; public findViewHolderForPosition(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; public hasFixedSize(): boolean; public isLayoutFrozen(): boolean; public generateLayoutParams(param0: android.view.ViewGroup.LayoutParams): android.view.ViewGroup.LayoutParams; public setOnScrollListener(param0: android.support.v7.widget.RecyclerView.OnScrollListener): void; public onSizeChanged(param0: number, param1: number, param2: number, param3: number): void; public getMaxFlingVelocity(): number; public setRecyclerListener(param0: android.support.v7.widget.RecyclerView.RecyclerListener): void; public constructor(param0: android.content.Context, param1: android.util.AttributeSet); public smoothScrollBy(param0: number, param1: number): void; public drawChild(param0: android.graphics.Canvas, param1: android.view.View, param2: number): boolean; public onLayout(param0: boolean, param1: number, param2: number, param3: number, param4: number): void; public setViewCacheExtension(param0: android.support.v7.widget.RecyclerView.ViewCacheExtension): void; public addOnItemTouchListener(param0: android.support.v7.widget.RecyclerView.OnItemTouchListener): void; public startNestedScroll(param0: number): boolean; public hasNestedScrollingParent(): boolean; public removeOnScrollListener(param0: android.support.v7.widget.RecyclerView.OnScrollListener): void; } export module RecyclerView { export abstract class Adapter { public notifyItemMoved(param0: number, param1: number): void; public setHasStableIds(param0: boolean): void; public unregisterAdapterDataObserver(param0: android.support.v7.widget.RecyclerView.AdapterDataObserver): void; public createViewHolder(param0: android.view.ViewGroup, param1: 
number): android.support.v7.widget.RecyclerView.ViewHolder; public hasStableIds(): boolean; public onCreateViewHolder(param0: android.view.ViewGroup, param1: number): android.support.v7.widget.RecyclerView.ViewHolder; public bindViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number): void; public onBindViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number, param2: javautilList<java.lang.Object>): void; public onFailedToRecycleView(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public getItemId(param0: number): number; public onViewRecycled(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public notifyItemChanged(param0: number): void; public notifyItemInserted(param0: number): void; public onAttachedToRecyclerView(param0: android.support.v7.widget.RecyclerView): void; public constructor(); public hasObservers(): boolean; public onViewAttachedToWindow(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onViewDetachedFromWindow(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onBindViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number): void; public getItemViewType(param0: number): number; public notifyItemRangeChanged(param0: number, param1: number): void; public notifyItemRangeChanged(param0: number, param1: number, param2: javalangObject): void; public notifyItemRangeRemoved(param0: number, param1: number): void; public onDetachedFromRecyclerView(param0: android.support.v7.widget.RecyclerView): void; public notifyDataSetChanged(): void; public notifyItemRangeInserted(param0: number, param1: number): void; public getItemCount(): number; public notifyItemChanged(param0: number, param1: javalangObject): void; public notifyItemRemoved(param0: number): void; public registerAdapterDataObserver(param0: android.support.v7.widget.RecyclerView.AdapterDataObserver): void; } export class 
AdapterDataObservable { public notifyItemMoved(param0: number, param1: number): void; public hasObservers(): boolean; public notifyItemRangeInserted(param0: number, param1: number): void; public notifyItemRangeChanged(param0: number, param1: number): void; public notifyItemRangeChanged(param0: number, param1: number, param2: javalangObject): void; public notifyChanged(): void; public notifyItemRangeRemoved(param0: number, param1: number): void; } export abstract class AdapterDataObserver { public onItemRangeChanged(param0: number, param1: number, param2: javalangObject): void; public constructor(); public onChanged(): void; public onItemRangeMoved(param0: number, param1: number, param2: number): void; public onItemRangeChanged(param0: number, param1: number): void; public onItemRangeInserted(param0: number, param1: number): void; public onItemRangeRemoved(param0: number, param1: number): void; } export class ChildDrawingOrderCallback { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$ChildDrawingOrderCallback interface with the provided implementation. 
*/ public constructor(implementation: { onGetChildDrawingOrder(param0: number, param1: number): number; }); public onGetChildDrawingOrder(param0: number, param1: number): number; } export abstract class ItemAnimator { public static FLAG_CHANGED: number; public static FLAG_REMOVED: number; public static FLAG_INVALIDATED: number; public static FLAG_MOVED: number; public static FLAG_APPEARED_IN_PRE_LAYOUT: number; public setMoveDuration(param0: number): void; public animatePersistence(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public dispatchAnimationStarted(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public getMoveDuration(): number; public endAnimations(): void; public animateAppearance(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public isRunning(): boolean; public runPendingAnimations(): void; public animateDisappearance(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public onAnimationFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public getAddDuration(): number; public setAddDuration(param0: number): void; public canReuseUpdatedViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: javautilList<java.lang.Object>): boolean; public setChangeDuration(param0: number): void; public recordPostLayoutInformation(param0: android.support.v7.widget.RecyclerView.State, param1: android.support.v7.widget.RecyclerView.ViewHolder): 
android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo; public recordPreLayoutInformation(param0: android.support.v7.widget.RecyclerView.State, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: number, param3: javautilList<java.lang.Object>): android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo; public dispatchAnimationsFinished(): void; public getChangeDuration(): number; public constructor(); public obtainHolderInfo(): android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo; public dispatchAnimationFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public canReuseUpdatedViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public endAnimation(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public setRemoveDuration(param0: number): void; public animateChange(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param3: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public onAnimationStarted(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public isRunning(param0: android.support.v7.widget.RecyclerView.ItemAnimator.ItemAnimatorFinishedListener): boolean; public getRemoveDuration(): number; } export module ItemAnimator { export class AdapterChanges { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$ItemAnimator$AdapterChanges interface with the provided implementation. */ public constructor(implementation: { }); } export class ItemAnimatorFinishedListener { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$ItemAnimator$ItemAnimatorFinishedListener interface with the provided implementation. 
*/ public constructor(implementation: { onAnimationsFinished(): void; }); public onAnimationsFinished(): void; } export class ItemAnimatorListener { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$ItemAnimator$ItemAnimatorListener interface with the provided implementation. */ public constructor(implementation: { onAnimationFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; }); public onAnimationFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; } export class ItemHolderInfo { public left: number; public top: number; public right: number; public bottom: number; public changeFlags: number; public constructor(); public setFrom(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number): android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo; public setFrom(param0: android.support.v7.widget.RecyclerView.ViewHolder): android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo; } } export class ItemAnimatorRestoreListener { public onAnimationFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; } export abstract class ItemDecoration { public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.support.v7.widget.RecyclerView.State): void; public constructor(); public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView): void; public getItemOffsets(param0: android.graphics.Rect, param1: android.view.View, param2: android.support.v7.widget.RecyclerView, param3: android.support.v7.widget.RecyclerView.State): void; public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.support.v7.widget.RecyclerView.State): void; public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView): void; public getItemOffsets(param0: android.graphics.Rect, param1: number, param2: 
android.support.v7.widget.RecyclerView): void; } export abstract class LayoutManager { public onMeasure(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: number, param3: number): void; public isLayoutHierarchical(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): boolean; public measureChildWithMargins(param0: android.view.View, param1: number, param2: number): void; public onItemsRemoved(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public getPaddingBottom(): number; public onSaveInstanceState(): android.os.Parcelable; public onInitializeAccessibilityNodeInfoForItem(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.View, param3: android.support.v4.view.accessibility.AccessibilityNodeInfoCompat): void; public addView(param0: android.view.View): void; public computeVerticalScrollExtent(param0: android.support.v7.widget.RecyclerView.State): number; public getRowCountForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; public layoutDecorated(param0: android.view.View, param1: number, param2: number, param3: number, param4: number): void; public generateDefaultLayoutParams(): android.support.v7.widget.RecyclerView.LayoutParams; public constructor(); public onDetachedFromWindow(param0: android.support.v7.widget.RecyclerView): void; public postOnAnimation(param0: javalangRunnable): void; public isAutoMeasureEnabled(): boolean; public onItemsAdded(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public getDecoratedBottom(param0: android.view.View): number; public isSmoothScrolling(): boolean; public detachAndScrapAttachedViews(param0: android.support.v7.widget.RecyclerView.Recycler): void; public 
getFocusedChild(): android.view.View; public requestLayout(): void; public onItemsMoved(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: number): void; public attachView(param0: android.view.View, param1: number): void; public removeAndRecycleAllViews(param0: android.support.v7.widget.RecyclerView.Recycler): void; public static getChildMeasureSpec(param0: number, param1: number, param2: number, param3: boolean): number; public generateLayoutParams(param0: android.content.Context, param1: android.util.AttributeSet): android.support.v7.widget.RecyclerView.LayoutParams; public getPaddingTop(): number; public computeHorizontalScrollOffset(param0: android.support.v7.widget.RecyclerView.State): number; public onAddFocusables(param0: android.support.v7.widget.RecyclerView, param1: javautilArrayList<android.view.View>, param2: number, param3: number): boolean; public getMinimumWidth(): number; public removeViewAt(param0: number): void; public getPaddingLeft(): number; public setMeasuredDimension(param0: number, param1: number): void; public isFocused(): boolean; public onItemsUpdated(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: javalangObject): void; public getDecoratedLeft(param0: android.view.View): number; public scrollHorizontallyBy(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public isMeasurementCacheEnabled(): boolean; public removeAndRecycleViewAt(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler): void; public assertInLayoutOrScroll(param0: string): void; public performAccessibilityAction(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: number, param3: android.os.Bundle): boolean; public onItemsChanged(param0: android.support.v7.widget.RecyclerView): void; public canScrollVertically(): boolean; public 
onDetachedFromWindow(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.Recycler): void; public getChildCount(): number; public getRightDecorationWidth(param0: android.view.View): number; public getWidthMode(): number; public getHeight(): number; public calculateItemDecorationsForChild(param0: android.view.View, param1: android.graphics.Rect): void; public supportsPredictiveItemAnimations(): boolean; public removeAllViews(): void; public onScrollStateChanged(param0: number): void; public getItemCount(): number; public getColumnCountForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; public findContainingItemView(param0: android.view.View): android.view.View; public removeAndRecycleView(param0: android.view.View, param1: android.support.v7.widget.RecyclerView.Recycler): void; public requestChildRectangleOnScreen(param0: android.support.v7.widget.RecyclerView, param1: android.view.View, param2: android.graphics.Rect, param3: boolean): boolean; public startSmoothScroll(param0: android.support.v7.widget.RecyclerView.SmoothScroller): void; public getLayoutDirection(): number; public getPosition(param0: android.view.View): number; public checkLayoutParams(param0: android.support.v7.widget.RecyclerView.LayoutParams): boolean; public detachAndScrapViewAt(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler): void; public setMeasurementCacheEnabled(param0: boolean): void; public computeHorizontalScrollExtent(param0: android.support.v7.widget.RecyclerView.State): number; public getItemViewType(param0: android.view.View): number; public generateLayoutParams(param0: android.view.ViewGroup.LayoutParams): android.support.v7.widget.RecyclerView.LayoutParams; public getBaseline(): number; public getTopDecorationHeight(param0: android.view.View): number; public setAutoMeasureEnabled(param0: boolean): void; public removeCallbacks(param0: 
javalangRunnable): boolean; public onRequestChildFocus(param0: android.support.v7.widget.RecyclerView, param1: android.view.View, param2: android.view.View): boolean; public addDisappearingView(param0: android.view.View, param1: number): void; public onInitializeAccessibilityEvent(param0: android.view.accessibility.AccessibilityEvent): void; public getPaddingRight(): number; public getChildAt(param0: number): android.view.View; public ignoreView(param0: android.view.View): void; public onInitializeAccessibilityEvent(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.accessibility.AccessibilityEvent): void; public offsetChildrenHorizontal(param0: number): void; public onFocusSearchFailed(param0: android.view.View, param1: number, param2: android.support.v7.widget.RecyclerView.Recycler, param3: android.support.v7.widget.RecyclerView.State): android.view.View; public onAdapterChanged(param0: android.support.v7.widget.RecyclerView.Adapter, param1: android.support.v7.widget.RecyclerView.Adapter): void; public removeView(param0: android.view.View): void; public detachAndScrapView(param0: android.view.View, param1: android.support.v7.widget.RecyclerView.Recycler): void; public onInitializeAccessibilityNodeInfo(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.support.v4.view.accessibility.AccessibilityNodeInfoCompat): void; public getPaddingEnd(): number; public onLayoutChildren(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): void; public computeVerticalScrollRange(param0: android.support.v7.widget.RecyclerView.State): number; public getClipToPadding(): boolean; public onRequestChildFocus(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.View, param3: android.view.View): boolean; 
public attachView(param0: android.view.View, param1: number, param2: android.support.v7.widget.RecyclerView.LayoutParams): void; public static getChildMeasureSpec(param0: number, param1: number, param2: number, param3: number, param4: boolean): number; public isAttachedToWindow(): boolean; public addView(param0: android.view.View, param1: number): void; public attachView(param0: android.view.View): void; public setMeasuredDimension(param0: android.graphics.Rect, param1: number, param2: number): void; public removeDetachedView(param0: android.view.View): void; public endAnimation(param0: android.view.View): void; public offsetChildrenVertical(param0: number): void; public static getProperties(param0: android.content.Context, param1: android.util.AttributeSet, param2: number, param3: number): android.support.v7.widget.RecyclerView.LayoutManager.Properties; public moveView(param0: number, param1: number): void; public getHeightMode(): number; public computeVerticalScrollOffset(param0: android.support.v7.widget.RecyclerView.State): number; public scrollToPosition(param0: number): void; public scrollVerticallyBy(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public assertNotInLayoutOrScroll(param0: string): void; public getDecoratedTop(param0: android.view.View): number; public onInterceptFocusSearch(param0: android.view.View, param1: number): android.view.View; public requestSimpleAnimationsInNextLayout(): void; public smoothScrollToPosition(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.State, param2: number): void; public computeHorizontalScrollRange(param0: android.support.v7.widget.RecyclerView.State): number; public performAccessibilityActionForItem(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.View, param3: number, param4: android.os.Bundle): 
boolean; public onItemsUpdated(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public getSelectionModeForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; public hasFocus(): boolean; public getMinimumHeight(): number; public detachView(param0: android.view.View): void; public stopIgnoringView(param0: android.view.View): void; public measureChild(param0: android.view.View, param1: number, param2: number): void; public getBottomDecorationHeight(param0: android.view.View): number; public onAttachedToWindow(param0: android.support.v7.widget.RecyclerView): void; public getDecoratedMeasuredHeight(param0: android.view.View): number; public canScrollHorizontally(): boolean; public detachViewAt(param0: number): void; public static chooseSize(param0: number, param1: number, param2: number): number; public getWidth(): number; public addDisappearingView(param0: android.view.View): void; public getPaddingStart(): number; public getDecoratedRight(param0: android.view.View): number; public getLeftDecorationWidth(param0: android.view.View): number; public getDecoratedMeasuredWidth(param0: android.view.View): number; public onRestoreInstanceState(param0: android.os.Parcelable): void; public findViewByPosition(param0: number): android.view.View; } export module LayoutManager { export class Properties { public orientation: number; public spanCount: number; public reverseLayout: boolean; public stackFromEnd: boolean; public constructor(); } } export class LayoutParams { public constructor(param0: android.view.ViewGroup.MarginLayoutParams); public isItemChanged(): boolean; public constructor(param0: android.content.Context, param1: android.util.AttributeSet); public constructor(param0: android.view.ViewGroup.LayoutParams); public isItemRemoved(): boolean; public getViewPosition(): number; public getViewAdapterPosition(): number; public isViewInvalid(): boolean; 
public constructor(param0: android.support.v7.widget.RecyclerView.LayoutParams); public getViewLayoutPosition(): number; public viewNeedsUpdate(): boolean; public constructor(param0: number, param1: number); } export class OnChildAttachStateChangeListener { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$OnChildAttachStateChangeListener interface with the provided implementation. */ public constructor(implementation: { onChildViewAttachedToWindow(param0: android.view.View): void; onChildViewDetachedFromWindow(param0: android.view.View): void; }); public onChildViewAttachedToWindow(param0: android.view.View): void; public onChildViewDetachedFromWindow(param0: android.view.View): void; } export class OnItemTouchListener { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$OnItemTouchListener interface with the provided implementation. */ public constructor(implementation: { onInterceptTouchEvent(param0: android.support.v7.widget.RecyclerView, param1: android.view.MotionEvent): boolean; onTouchEvent(param0: android.support.v7.widget.RecyclerView, param1: android.view.MotionEvent): void; onRequestDisallowInterceptTouchEvent(param0: boolean): void; }); public onInterceptTouchEvent(param0: android.support.v7.widget.RecyclerView, param1: android.view.MotionEvent): boolean; public onTouchEvent(param0: android.support.v7.widget.RecyclerView, param1: android.view.MotionEvent): void; public onRequestDisallowInterceptTouchEvent(param0: boolean): void; } export abstract class OnScrollListener { public constructor(); public onScrolled(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public onScrollStateChanged(param0: android.support.v7.widget.RecyclerView, param1: number): void; } export class RecycledViewPool { public constructor(); public putRecycledView(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public clear(): void; public setMaxRecycledViews(param0: number, 
param1: number): void; public getRecycledView(param0: number): android.support.v7.widget.RecyclerView.ViewHolder; } export class Recycler { public getViewForPosition(param0: number): android.view.View; public recycleView(param0: android.view.View): void; public constructor(param0: android.support.v7.widget.RecyclerView); public setViewCacheSize(param0: number): void; /* fixed generator-mangled type name: javautilList -> java.util.List (matches the java.util.List.d.ts reference used elsewhere in this file) */ public getScrapList(): java.util.List<android.support.v7.widget.RecyclerView.ViewHolder>; public clear(): void; public bindViewToPosition(param0: android.view.View, param1: number): void; public convertPreLayoutPositionToPostLayout(param0: number): number; } export class RecyclerListener { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$RecyclerListener interface with the provided implementation. */ public constructor(implementation: { onViewRecycled(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; }); public onViewRecycled(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; } export class RecyclerViewDataObserver extends android.support.v7.widget.RecyclerView.AdapterDataObserver { /* fixed generator-mangled type name: javalangObject -> java.lang.Object */ public onItemRangeChanged(param0: number, param1: number, param2: java.lang.Object): void; public onChanged(): void; public onItemRangeMoved(param0: number, param1: number, param2: number): void; public onItemRangeChanged(param0: number, param1: number): void; public onItemRangeInserted(param0: number, param1: number): void; public onItemRangeRemoved(param0: number, param1: number): void; } export class SavedState { public static CREATOR: android.os.Parcelable.Creator<SavedState>; public writeToParcel(param0: android.os.Parcel, param1: number): void; } export class SimpleOnItemTouchListener { public constructor(); public onInterceptTouchEvent(param0: android.support.v7.widget.RecyclerView, param1: android.view.MotionEvent): boolean; public onTouchEvent(param0: android.support.v7.widget.RecyclerView, param1: android.view.MotionEvent): void; public 
onRequestDisallowInterceptTouchEvent(param0: boolean): void; } export abstract class SmoothScroller { public onTargetFound(param0: android.view.View, param1: android.support.v7.widget.RecyclerView.State, param2: android.support.v7.widget.RecyclerView.SmoothScroller.Action): void; public constructor(); public getChildCount(): number; public getLayoutManager(): android.support.v7.widget.RecyclerView.LayoutManager; public onSeekTargetStep(param0: number, param1: number, param2: android.support.v7.widget.RecyclerView.State, param3: android.support.v7.widget.RecyclerView.SmoothScroller.Action): void; public onChildAttachedToWindow(param0: android.view.View): void; public stop(): void; public isRunning(): boolean; public onStop(): void; public setTargetPosition(param0: number): void; public getChildPosition(param0: android.view.View): number; public instantScrollToPosition(param0: number): void; public isPendingInitialRun(): boolean; public normalize(param0: android.graphics.PointF): void; public getTargetPosition(): number; public findViewByPosition(param0: number): android.view.View; public onStart(): void; } export module SmoothScroller { export class Action { public static UNDEFINED_DURATION: number; public constructor(param0: number, param1: number); public setDuration(param0: number): void; public getInterpolator(): android.view.animation.Interpolator; public setDy(param0: number): void; public update(param0: number, param1: number, param2: number, param3: android.view.animation.Interpolator): void; public getDx(): number; public constructor(param0: number, param1: number, param2: number); public constructor(param0: number, param1: number, param2: number, param3: android.view.animation.Interpolator); public setDx(param0: number): void; public getDy(): number; public setInterpolator(param0: android.view.animation.Interpolator): void; public getDuration(): number; public jumpTo(param0: number): void; } } export class State { public constructor(); public 
isMeasuring(): boolean; public toString(): string; public getTargetScrollPosition(): number; public willRunPredictiveAnimations(): boolean; /* fixed generator-mangled type name: javalangObject -> java.lang.Object (the dotted form appears elsewhere in this file, e.g. java.lang.Object in SimpleItemAnimator) */ public get(param0: number): java.lang.Object; public hasTargetScrollPosition(): boolean; public willRunSimpleAnimations(): boolean; public getItemCount(): number; public didStructureChange(): boolean; public isPreLayout(): boolean; public remove(param0: number): void; /* fixed generator-mangled type name: javalangObject -> java.lang.Object */ public put(param0: number, param1: java.lang.Object): void; } export module State { export class LayoutState { /** * Constructs a new instance of the android.support.v7.widget.RecyclerView$State$LayoutState interface with the provided implementation. */ public constructor(implementation: { }); } } export abstract class ViewCacheExtension { public constructor(); public getViewForPositionAndType(param0: android.support.v7.widget.RecyclerView.Recycler, param1: number, param2: number): android.view.View; } export class ViewFlinger { public smoothScrollBy(param0: number, param1: number, param2: number): void; public run(): void; public constructor(param0: android.support.v7.widget.RecyclerView); public smoothScrollBy(param0: number, param1: number, param2: number, param3: number): void; public smoothScrollBy(param0: number, param1: number): void; public smoothScrollBy(param0: number, param1: number, param2: number, param3: android.view.animation.Interpolator): void; public fling(param0: number, param1: number): void; public stop(): void; } export abstract class ViewHolder { public itemView: android.view.View; public isRecyclable(): boolean; public getAdapterPosition(): number; public constructor(param0: android.view.View); public getItemViewType(): number; public setIsRecyclable(param0: boolean): void; public getPosition(): number; public getLayoutPosition(): number; public getOldPosition(): number; public toString(): string; public getItemId(): number; } } } } } } /// <reference path="./android.os.Bundle.d.ts" /> /// <reference 
path="./android.support.v4.view.accessibility.AccessibilityNodeInfoCompat.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./android.view.accessibility.AccessibilityEvent.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class RecyclerViewAccessibilityDelegate { public performAccessibilityAction(param0: android.view.View, param1: number, param2: android.os.Bundle): boolean; public onInitializeAccessibilityNodeInfo(param0: android.view.View, param1: android.support.v4.view.accessibility.AccessibilityNodeInfoCompat): void; public constructor(param0: android.support.v7.widget.RecyclerView); public onInitializeAccessibilityEvent(param0: android.view.View, param1: android.view.accessibility.AccessibilityEvent): void; } } } } } declare module android { export module support { export module v7 { export module widget { export class ScrollbarHelper { } } } } } /// <reference path="./java.util.List.d.ts" /> declare module android { export module support { export module v7 { export module widget { export abstract class SimpleItemAnimator extends android.support.v7.widget.RecyclerView.ItemAnimator { public dispatchRemoveFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onMoveFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onChangeStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: boolean): void; public dispatchRemoveStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public canReuseUpdatedViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public dispatchChangeFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: boolean): void; public animateDisappearance(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: 
android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public animateRemove(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public onMoveStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onRemoveStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public constructor(); public onAddFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public animatePersistence(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; /* fixed generator-mangled type name: javautilList -> java.util.List; the type argument already used the dotted java.lang.Object form, confirming the intended spelling */ public canReuseUpdatedViewHolder(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: java.util.List<java.lang.Object>): boolean; public dispatchAddStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public setSupportsChangeAnimations(param0: boolean): void; public animateChange(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param3: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public dispatchMoveFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public dispatchMoveStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onChangeFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: boolean): void; public animateChange(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: number, param3: number, param4: number, param5: number): boolean; public animateAppearance(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: 
android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): boolean; public onRemoveFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public dispatchChangeStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: boolean): void; public dispatchAddFinished(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onAddStarting(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public getSupportsChangeAnimations(): boolean; public animateAdd(param0: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public animateMove(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number, param2: number, param3: number, param4: number): boolean; } } } } } /// <reference path="./android.content.Context.d.ts" /> /// <reference path="./android.graphics.Rect.d.ts" /> /// <reference path="./android.os.Parcel.d.ts" /> /// <reference path="./android.os.Parcelable.d.ts" /> /// <reference path="./android.support.v4.view.accessibility.AccessibilityNodeInfoCompat.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.util.AttributeSet.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./android.view.accessibility.AccessibilityEvent.d.ts" /> /// <reference path="./java.lang.Object.d.ts" /> /// <reference path="./java.lang.String.d.ts" /> declare module android { export module support { export module v7 { export module widget { export class StaggeredGridLayoutManager extends android.support.v7.widget.RecyclerView.LayoutManager { public static TAG: string; public static HORIZONTAL: number; public static VERTICAL: number; public static GAP_HANDLING_NONE: number; public static GAP_HANDLING_LAZY: number; public static GAP_HANDLING_MOVE_ITEMS_BETWEEN_SPANS: number; public setMeasuredDimension(param0: number, param1: number): void; 
public findLastCompletelyVisibleItemPositions(param0: native.Array<number>): native.Array<number>; public computeVerticalScrollOffset(param0: android.support.v7.widget.RecyclerView.State): number; public onItemsChanged(param0: android.support.v7.widget.RecyclerView): void; public onItemsUpdated(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public invalidateSpanAssignments(): void; public computeHorizontalScrollRange(param0: android.support.v7.widget.RecyclerView.State): number; public supportsPredictiveItemAnimations(): boolean; public checkLayoutParams(param0: android.support.v7.widget.RecyclerView.LayoutParams): boolean; public setReverseLayout(param0: boolean): void; public scrollToPositionWithOffset(param0: number, param1: number): void; public findFirstCompletelyVisibleItemPositions(param0: native.Array<number>): native.Array<number>; public scrollToPosition(param0: number): void; public onItemsAdded(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public scrollVerticallyBy(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public constructor(); public getRowCountForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; public computeHorizontalScrollOffset(param0: android.support.v7.widget.RecyclerView.State): number; public getOrientation(): number; public constructor(param0: number, param1: number); public canScrollVertically(): boolean; public onScrollStateChanged(param0: number): void; public offsetChildrenHorizontal(param0: number): void; public getSpanCount(): number; public onFocusSearchFailed(param0: android.view.View, param1: number, param2: android.support.v7.widget.RecyclerView.Recycler, param3: android.support.v7.widget.RecyclerView.State): android.view.View; public generateDefaultLayoutParams(): 
android.support.v7.widget.RecyclerView.LayoutParams; public setOrientation(param0: number): void; public findFirstVisibleItemPositions(param0: native.Array<number>): native.Array<number>; public constructor(param0: android.content.Context, param1: android.util.AttributeSet, param2: number, param3: number); public setGapStrategy(param0: number): void; public getGapStrategy(): number; public generateLayoutParams(param0: android.view.ViewGroup.LayoutParams): android.support.v7.widget.RecyclerView.LayoutParams; public onItemsRemoved(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number): void; public canScrollHorizontally(): boolean; public offsetChildrenVertical(param0: number): void; public onDetachedFromWindow(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.Recycler): void; public onSaveInstanceState(): android.os.Parcelable; public onInitializeAccessibilityEvent(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.accessibility.AccessibilityEvent): void; public computeHorizontalScrollExtent(param0: android.support.v7.widget.RecyclerView.State): number; public onItemsMoved(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: number): void; public onInitializeAccessibilityEvent(param0: android.view.accessibility.AccessibilityEvent): void; public assertNotInLayoutOrScroll(param0: string): void; public getReverseLayout(): boolean; public onInitializeAccessibilityNodeInfoForItem(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State, param2: android.view.View, param3: android.support.v4.view.accessibility.AccessibilityNodeInfoCompat): void; public scrollHorizontallyBy(param0: number, param1: android.support.v7.widget.RecyclerView.Recycler, param2: android.support.v7.widget.RecyclerView.State): number; public 
computeVerticalScrollRange(param0: android.support.v7.widget.RecyclerView.State): number; public setSpanCount(param0: number): void; public smoothScrollToPosition(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.State, param2: number): void; public onDetachedFromWindow(param0: android.support.v7.widget.RecyclerView): void; public onRestoreInstanceState(param0: android.os.Parcelable): void; public generateLayoutParams(param0: android.content.Context, param1: android.util.AttributeSet): android.support.v7.widget.RecyclerView.LayoutParams; public setMeasuredDimension(param0: android.graphics.Rect, param1: number, param2: number): void; public computeVerticalScrollExtent(param0: android.support.v7.widget.RecyclerView.State): number; public getColumnCountForAccessibility(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): number; /* fixed generator-mangled type name: javalangObject -> java.lang.Object */ public onItemsUpdated(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: java.lang.Object): void; public onLayoutChildren(param0: android.support.v7.widget.RecyclerView.Recycler, param1: android.support.v7.widget.RecyclerView.State): void; public findLastVisibleItemPositions(param0: native.Array<number>): native.Array<number>; } export module StaggeredGridLayoutManager { export class AnchorInfo { } export class LayoutParams extends android.support.v7.widget.RecyclerView.LayoutParams { public static INVALID_SPAN_ID: number; public constructor(param0: android.view.ViewGroup.MarginLayoutParams); public constructor(param0: android.view.ViewGroup.LayoutParams); public constructor(param0: android.content.Context, param1: android.util.AttributeSet); public isFullSpan(): boolean; public getSpanIndex(): number; public constructor(param0: android.support.v7.widget.RecyclerView.LayoutParams); public constructor(param0: number, param1: number); public setFullSpan(param0: boolean): void; } export class LazySpanLookup { 
public addFullSpanItem(param0: android.support.v7.widget.StaggeredGridLayoutManager.LazySpanLookup.FullSpanItem): void; public getFirstFullSpanItemInRange(param0: number, param1: number, param2: number, param3: boolean): android.support.v7.widget.StaggeredGridLayoutManager.LazySpanLookup.FullSpanItem; public getFullSpanItem(param0: number): android.support.v7.widget.StaggeredGridLayoutManager.LazySpanLookup.FullSpanItem; } export module LazySpanLookup { export class FullSpanItem { /* NOTE(review): CREATOR here is typed Parcelable.Creator<LazySpanLookup>, unlike the sibling SavedState classes whose CREATOR is parameterized with their own class — presumably this should be Creator<FullSpanItem>; confirm against the typings generator output before changing */ public static CREATOR: android.os.Parcelable.Creator<LazySpanLookup>; public constructor(); public describeContents(): number; public toString(): string; public writeToParcel(param0: android.os.Parcel, param1: number): void; public constructor(param0: android.os.Parcel); } } export class SavedState { public static CREATOR: android.os.Parcelable.Creator<SavedState>; public constructor(); public describeContents(): number; public writeToParcel(param0: android.os.Parcel, param1: number): void; public constructor(param0: android.support.v7.widget.StaggeredGridLayoutManager.SavedState); } export class Span { public getFocusableViewAfter(param0: number, param1: number): android.view.View; public findLastCompletelyVisibleItemPosition(): number; public findFirstCompletelyVisibleItemPosition(): number; public getDeletedSize(): number; public findFirstVisibleItemPosition(): number; public findLastVisibleItemPosition(): number; } } } } } } declare module android { export module support { export module v7 { export module widget { export class ViewInfoStore { public onViewDetached(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; } export module ViewInfoStore { export class InfoRecord { } export class ProcessCallback { /** * Constructs a new instance of the android.support.v7.widget.ViewInfoStore$ProcessCallback interface with the provided implementation. 
*/ public constructor(implementation: { processDisappeared(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): void; processAppeared(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): void; processPersistent(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): void; unused(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; }); public unused(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public processDisappeared(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): void; public processAppeared(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): void; public processPersistent(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo, param2: android.support.v7.widget.RecyclerView.ItemAnimator.ItemHolderInfo): void; } } } } } } /// <reference path="./android.graphics.Canvas.d.ts" /> /// <reference path="./android.graphics.Rect.d.ts" /> /// <reference path="./android.support.v4.animation.ValueAnimatorCompat.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.support.v7.widget.helper.ItemTouchHelper.d.ts" /> /// 
<reference path="./android.support.v7.widget.helper.ItemTouchUIUtil.d.ts" /> /// <reference path="./android.view.MotionEvent.d.ts" /> /// <reference path="./android.view.View.d.ts" /> /// <reference path="./java.util.List.d.ts" /> declare module android { export module support { export module v7 { export module widget { export module helper { export class ItemTouchHelper extends android.support.v7.widget.RecyclerView.ItemDecoration implements android.support.v7.widget.RecyclerView.OnChildAttachStateChangeListener { public static UP: number; public static DOWN: number; public static LEFT: number; public static RIGHT: number; public static START: number; public static END: number; public static ACTION_STATE_IDLE: number; public static ACTION_STATE_SWIPE: number; public static ACTION_STATE_DRAG: number; public static ANIMATION_TYPE_SWIPE_SUCCESS: number; public static ANIMATION_TYPE_SWIPE_CANCEL: number; public static ANIMATION_TYPE_DRAG: number; public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.support.v7.widget.RecyclerView.State): void; public constructor(); public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView): void; public getItemOffsets(param0: android.graphics.Rect, param1: android.view.View, param2: android.support.v7.widget.RecyclerView, param3: android.support.v7.widget.RecyclerView.State): void; public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView): void; public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.support.v7.widget.RecyclerView.State): void; public startDrag(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public getItemOffsets(param0: android.graphics.Rect, param1: number, param2: android.support.v7.widget.RecyclerView): void; public constructor(param0: android.support.v7.widget.helper.ItemTouchHelper.Callback); public 
startSwipe(param0: android.support.v7.widget.RecyclerView.ViewHolder): void; public onChildViewAttachedToWindow(param0: android.view.View): void; public attachToRecyclerView(param0: android.support.v7.widget.RecyclerView): void; public onChildViewDetachedFromWindow(param0: android.view.View): void; } export module ItemTouchHelper { export abstract class Callback { public static DEFAULT_DRAG_ANIMATION_DURATION: number; public static DEFAULT_SWIPE_ANIMATION_DURATION: number; public getAnimationDuration(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: number): number; public interpolateOutOfBoundsScroll(param0: android.support.v7.widget.RecyclerView, param1: number, param2: number, param3: number, param4: number): number; public static makeMovementFlags(param0: number, param1: number): number; public getSwipeVelocityThreshold(param0: number): number; public getMovementFlags(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder): number; public onChildDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.support.v7.widget.RecyclerView.ViewHolder, param3: number, param4: number, param5: number, param6: boolean): void; /* fixed generator-mangled type name: javautilList -> java.util.List (matches the java.util.List.d.ts reference preceding this module) */ public chooseDropTarget(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: java.util.List<android.support.v7.widget.RecyclerView.ViewHolder>, param2: number, param3: number): android.support.v7.widget.RecyclerView.ViewHolder; public getSwipeEscapeVelocity(param0: number): number; public isItemViewSwipeEnabled(): boolean; public clearView(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder): void; public getMoveThreshold(param0: android.support.v7.widget.RecyclerView.ViewHolder): number; public constructor(); public onSelectedChanged(param0: android.support.v7.widget.RecyclerView.ViewHolder, param1: number): void; public onSwiped(param0: 
android.support.v7.widget.RecyclerView.ViewHolder, param1: number): void; public static getDefaultUIUtil(): android.support.v7.widget.helper.ItemTouchUIUtil; public static makeFlag(param0: number, param1: number): number; public getBoundingBoxMargin(): number; public canDropOver(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public convertToAbsoluteDirection(param0: number, param1: number): number; public onMoved(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: number, param3: android.support.v7.widget.RecyclerView.ViewHolder, param4: number, param5: number, param6: number): void; public onChildDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.support.v7.widget.RecyclerView.ViewHolder, param3: number, param4: number, param5: number, param6: boolean): void; public onMove(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: android.support.v7.widget.RecyclerView.ViewHolder): boolean; public getSwipeThreshold(param0: android.support.v7.widget.RecyclerView.ViewHolder): number; public static convertToRelativeDirection(param0: number, param1: number): number; public isLongPressDragEnabled(): boolean; } export class ItemTouchHelperGestureListener { public onLongPress(param0: android.view.MotionEvent): void; public onDown(param0: android.view.MotionEvent): boolean; } export class RecoverAnimation { public mIsPendingCleanup: boolean; public cancel(): void; public onAnimationCancel(param0: android.support.v4.animation.ValueAnimatorCompat): void; public setFraction(param0: number): void; public setDuration(param0: number): void; public onAnimationStart(param0: android.support.v4.animation.ValueAnimatorCompat): void; public start(): void; public constructor(param0: 
android.support.v7.widget.helper.ItemTouchHelper, param1: android.support.v7.widget.RecyclerView.ViewHolder, param2: number, param3: number, param4: number, param5: number, param6: number, param7: number); public onAnimationRepeat(param0: android.support.v4.animation.ValueAnimatorCompat): void; public onAnimationEnd(param0: android.support.v4.animation.ValueAnimatorCompat): void; public update(): void; } export abstract class SimpleCallback extends android.support.v7.widget.helper.ItemTouchHelper.Callback { public constructor(); public constructor(param0: number, param1: number); public getSwipeDirs(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder): number; public getMovementFlags(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder): number; public getDragDirs(param0: android.support.v7.widget.RecyclerView, param1: android.support.v7.widget.RecyclerView.ViewHolder): number; public setDefaultSwipeDirs(param0: number): void; public setDefaultDragDirs(param0: number): void; } export class ViewDropHandler { /** * Constructs a new instance of the android.support.v7.widget.helper.ItemTouchHelper$ViewDropHandler interface with the provided implementation. 
*/ public constructor(implementation: { prepareForDrop(param0: android.view.View, param1: android.view.View, param2: number, param3: number): void; }); public prepareForDrop(param0: android.view.View, param1: android.view.View, param2: number, param3: number): void; } } } } } } } /// <reference path="./android.graphics.Canvas.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.view.View.d.ts" /> declare module android { export module support { export module v7 { export module widget { export module helper { export class ItemTouchUIUtil { /** * Constructs a new instance of the android.support.v7.widget.helper.ItemTouchUIUtil interface with the provided implementation. */ public constructor(implementation: { onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; clearView(param0: android.view.View): void; onSelected(param0: android.view.View): void; }); public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; public clearView(param0: android.view.View): void; public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; public onSelected(param0: android.view.View): void; } } } } } } /// <reference path="./android.graphics.Canvas.d.ts" /> /// <reference path="./android.support.v7.widget.RecyclerView.d.ts" /> /// <reference path="./android.view.View.d.ts" /> declare module android { export module support { export module v7 { 
export module widget { export module helper { export class ItemTouchUIUtilImpl { } export module ItemTouchUIUtilImpl { export class Gingerbread { public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; public clearView(param0: android.view.View): void; public onSelected(param0: android.view.View): void; public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; } export class Honeycomb { public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; public clearView(param0: android.view.View): void; public onSelected(param0: android.view.View): void; public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; } export class Lollipop extends android.support.v7.widget.helper.ItemTouchUIUtilImpl.Honeycomb { public onDrawOver(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; public clearView(param0: android.view.View): void; public onSelected(param0: android.view.View): void; public onDraw(param0: android.graphics.Canvas, param1: android.support.v7.widget.RecyclerView, param2: android.view.View, param3: number, param4: number, param5: number, param6: boolean): void; } } } } } } } declare module android { export module support { export module v7 { export module widget { export module util { export abstract class SortedListAdapterCallback extends android.support.v7.util.SortedList.Callback { public 
constructor(); public constructor(param0: android.support.v7.widget.RecyclerView.Adapter); public onChanged(param0: number, param1: number): void; public onRemoved(param0: number, param1: number): void; public onInserted(param0: number, param1: number): void; public onMoved(param0: number, param1: number): void; } } } } } }
the_stack
import { AfterContentInit, ContentChildren, Directive, ElementRef, forwardRef, HostBinding, HostListener, Input, OnDestroy, OnInit, Optional, QueryList, Renderer2, Self, Output, EventEmitter, Inject } from '@angular/core'; import { DOCUMENT } from '@angular/common'; import { NgControl } from '@angular/forms'; import { MDCTextFieldFoundation, MDCTextFieldAdapter } from '@material/textfield'; import { MDCLineRippleFoundation, MDCLineRippleAdapter } from '@material/line-ripple'; import { MDCTextFieldHelperTextFoundation, MDCTextFieldHelperTextAdapter } from '@material/textfield'; import { MDCTextFieldIconFoundation, MDCTextFieldIconAdapter } from '@material/textfield'; import { MdcFloatingLabelDirective } from '../floating-label/mdc.floating-label.directive'; import { AbstractMdcInput } from '../abstract/abstract.mdc.input'; import { asBoolean, asNumberOrNull } from '../../utils/value.utils'; import { AbstractMdcRipple } from '../ripple/abstract.mdc.ripple'; import { MdcNotchedOutlineDirective } from '../notched-outline/mdc.notched-outline.directive'; import { MdcEventRegistry } from '../../utils/mdc.event.registry'; import { Subject, merge } from 'rxjs'; import { takeUntil, debounceTime } from 'rxjs/operators'; import { HasId } from '../abstract/mixin.mdc.hasid'; import { applyMixins } from '../../utils/mixins'; let nextId = 1; /** * Directive for the native input of an `mdcTextField`. 
*/ @Directive({ selector: 'input[mdcTextFieldInput], textarea[mdcTextFieldInput]', providers: [{provide: AbstractMdcInput, useExisting: forwardRef(() => MdcTextFieldInputDirective) }] }) export class MdcTextFieldInputDirective extends AbstractMdcInput implements OnInit, OnDestroy { /** @internal */ @HostBinding('class.mdc-text-field__input') readonly _cls = true; /** @internal */ @HostBinding('attr.aria-labelledby') _labeledBy: string | null = null; /** @internal */ @HostBinding('attr.aria-controls') _controls: string | null = null; /** @internal */ @HostBinding('attr.aria-describedby') _describedBy: string | null = null; /** @internal */ @Output() readonly _valueChange: EventEmitter<string | null> = new EventEmitter<string | null>(); private onDestroy$: Subject<any> = new Subject(); private _id: string | null = null; private _type = 'text'; private _value = ''; private _disabled = false; private cachedId: string | null = null; constructor(public _elm: ElementRef, private renderer: Renderer2, @Optional() @Self() public _cntr: NgControl) { super(); } ngOnInit() { // Force setter to be called in case id was not specified. this.id = this.id; this._cntr?.valueChanges!.pipe(takeUntil(this.onDestroy$)).subscribe(() => { // (using the value of the elemnt, because the value of the control might be of another type, // e.g. the ngModel for type=number inputs is a number) this.updateValue(this._elm.nativeElement.value, true); }); } ngOnDestroy() { this.onDestroy$.next(); this.onDestroy$.complete(); } /** * Mirrors the <code>id</code> attribute. If no id is assigned, this directive will * assign a unique id by itself. If an <code>mdcFloatingLabel</code> for this text-field * is available, the <code>mdcFloatingLabel</code> will automatically be associated * (either by a `for` attribute on the label, or by an `aria-labelledby` attribute * on this input element). 
*/ @HostBinding() @Input() get id() { return this._id; } set id(value: string | null) { this._id = value || this._newId(); } /** * If set to a value other than false, the text-field will be in disabled state. */ @HostBinding() @Input() get disabled() { return this._cntr ? !!this._cntr.disabled : this._disabled; } set disabled(value: boolean) { this._disabled = asBoolean(value); } static ngAcceptInputType_disabled: boolean | ''; /** @internal */ @Input() get type() { return this._type; } set type(value: string) { this._type = value || 'text'; // Angular Input is not automatically set on the native input element: if (!this._isTextarea()) { try { this.renderer.setProperty(this._elm.nativeElement, 'type', this._type); } catch (e) { this.renderer.setAttribute(this._elm.nativeElement, 'type', this._type); } } } /** @internal */ @Input() get value() { return this._value; } /** @internal */ set value(value: string) { this.updateValue(value, false); } private updateValue(value: any, fromControl: boolean) { const newVal = (value ? `${value}` : ''); if (newVal !== this._value) { this._value = this._elm.nativeElement.value = newVal; this._valueChange.emit(this._elm.nativeElement.value); } if (!fromControl && this._cntr && newVal !== this._cntr.value) { this._cntr.control!.setValue(newVal); // TODO how to convert to the type of value the controlpects? } } /** @internal */ @HostListener('input') _onInput() { if (!this._cntr) this.updateValue(this._elm.nativeElement.value, false); } /** @internal */ get valid(): boolean { return this._cntr ? 
!!this._cntr.valid : (this._elm.nativeElement as HTMLInputElement).validity.valid; } /** @internal */ _isBadInput() { return (this._elm.nativeElement as HTMLInputElement).validity.badInput; } /** @internal */ _isTextarea() { return this._elm.nativeElement.nodeName.toLowerCase() === 'textarea'; } /** @internal */ _newId(): string { this.cachedId = this.cachedId || `mdc-input-${nextId++}`; return this.cachedId; } } /** * Directive for an optional leading or trailing icon on the text-field (see * `MdcTextFieldDirective`). An icon before the `mdcTextFieldInput` will be styled * as a leading icon. An icon after the `mdcTextFieldInput` will be styles as a * trailing icon. */ @Directive({ selector: '[mdcTextFieldIcon]' }) export class MdcTextFieldIconDirective implements AfterContentInit, OnDestroy { /** @internal */ @HostBinding('class.mdc-text-field__icon') readonly _cls = true; /** * Event emitted for icon interactions (a click or an 'enter' keypress). When this output is assigned, * the icon will also set the `role=button` and `tabindex=0` attributes, unless you give them another * explicit value. */ @Output() readonly interact: EventEmitter<void> = new EventEmitter(); /** @internal */ @HostBinding('class.mdc-text-field__icon--leading') _leading = false; /** @internal */ @HostBinding('class.mdc-text-field__icon--trailing') _trailing = false; private _tabIndex: number | null = null; private _role: string | null = null; /** @internal */ _textField: MdcTextFieldDirective | null = null; /** @internal */ _mdcAdapter: MDCTextFieldIconAdapter = { // by returning null for 'tabindex', the foundation will not set tabindex/role attributes when // disabled state changes. We want that, because we handle tabindex/role ourselves: getAttr: (name: string) => name === 'tabindex' ? 
null : this._el.nativeElement.getAttribute(name), setAttr: (name: string, value: string) => this._rndr.setAttribute(this._el.nativeElement, name, value), removeAttr: (name: string) => this._rndr.removeAttribute(this._el.nativeElement, name), setContent: (content: string) => this._el.nativeElement.textContent = content, registerInteractionHandler: (evtType, handler) => this._reg.listen(this._rndr, evtType, handler, this._el), deregisterInteractionHandler: (evtType, handler) => this._reg.unlisten(evtType, handler), notifyIconAction: () => !this._textField?._disabled && this.interact.emit() }; /** @internal */ _foundation: MDCTextFieldIconFoundation | null = new MDCTextFieldIconFoundation(this._mdcAdapter); constructor(private _rndr: Renderer2, public _el: ElementRef, private _reg: MdcEventRegistry) { } ngAfterContentInit() { this._foundation!.init(); } ngOnDestroy() { this._foundation?.destroy(); this._foundation = null; } /** * The `tabindex` for icons defaults to `null` (no tabindex set) for icons without * subscriptions to the `interact` output, and to `0` for icons that have an `interact` * binding. You can override this default, by setting a non-null value for this property. */ @HostBinding('attr.tabindex') @Input() get tabindex() { if (this.interact.observers.length > 0 && this._tabIndex == null && !this._textField?._disabled) return 0; return this._tabIndex; } set tabindex(value) { this._tabIndex = asNumberOrNull(value); } static ngAcceptInputType_tabindex: string | number | null; /** * The `role` attribute for icons defaults to `null` (no role set) for icons without * subscriptions to the `interact` output, and to `button` for icons that have an `interact` * binding. You can override this default, by setting a non-null value for this property. 
*/ @HostBinding('attr.role') @Input() get role() { if (this.interact.observers.length > 0 && this._role == null && !this._textField?._disabled) return 'button'; return this._role; } set role(value) { this._role = value; } } /** * This directive wraps an optional `mdcTextFieldHelperText`. It should be the next sibling of the * associated `mdcTextField` if used. See `mdcTextFieldHelperText` for more info. */ @Directive({ selector: '[mdcTextFieldHelperLine]', }) export class MdcTextFieldHelperLineDirective { /** @internal */ @HostBinding('class.mdc-text-field-helper-line') readonly _cls = true; } @Directive() class MdcTextFieldHelperTextDirectiveBase {} interface MdcTextFieldHelperTextDirectiveBase extends HasId {} applyMixins(MdcTextFieldHelperTextDirectiveBase, [HasId]); /** * Directive for an optional helper-text to show supplemental information or validation * messages for an <code>mdcTextField</code>. This directive should be wrapped inside an * `mdcTextFieldHelperLine` that comes directly after the `mdcTextField` it belongs to. * Additionally, you must export it as an <code>mdcHelperText</code>, and * assign the exported object to the <code>helperText</code> property of the * <code>mdcHelperText</code>. See the examples for hints on how to do this. * * The `mdcTextFieldInput` of the textfield will get `aria-controls` and `aria-describedby` * accessibility attributes that point to the `id` of this helpertext element. If no `id` has * been assigned, a unique `id` attribute will automatically be assigned. If the `id` attribute * is changed, the aria attributes on the `mdcTextFieldInput` will be updated accordingly. 
*/ @Directive({ selector: '[mdcTextFieldHelperText]', exportAs: 'mdcHelperText' }) export class MdcTextFieldHelperTextDirective extends MdcTextFieldHelperTextDirectiveBase implements OnInit, AfterContentInit, OnDestroy { /** @internal */ @HostBinding('class.mdc-text-field-helper-text') readonly _cls = true; private _validation = false; private _persistent = false; /** @internal */ _mdcAdapter: MDCTextFieldHelperTextAdapter = { addClass: (className: string) => this._rndr.addClass(this._elm.nativeElement, className), removeClass: (className: string) => this._rndr.removeClass(this._elm.nativeElement, className), hasClass: (className) => this._elm.nativeElement.classList.contains(className), setAttr: (name: string, value: string) => this._rndr.setAttribute(this._elm.nativeElement, name, value), removeAttr: (name: string) => this._rndr.removeAttribute(this._elm.nativeElement, name), setContent: () => { // helperText content can be set by simply wrapping (dynamic) content in the directive. // this is much more powerful than setContent, because it can also include html markup // therefore there is no reason to do anything with setContent throw new Error("MdcTextFieldHelperTextAdapter.setContent not supported"); } }; /** @internal */ _foundation: MDCTextFieldHelperTextFoundation | null = null; constructor(private _rndr: Renderer2, public _elm: ElementRef) { super(); } ngOnInit() { this.initId(); } ngAfterContentInit() { this._foundation = new MDCTextFieldHelperTextFoundation(this._mdcAdapter); this._foundation.setPersistent(this._persistent); this._foundation.setValidation(this._validation); } ngOnDestroy() { this._foundation?.destroy(); this._foundation = null; } /** * If set to a value other than false, the helper text is treated as a * validation message, and only shown when the input is invalid. 
*/ @Input() set validation(value: boolean) { this._validation = asBoolean(value); if (this._foundation) this._foundation.setValidation(this._validation); } static ngAcceptInputType_validation: boolean | ''; /** * If set to a value other than false, the helper text is always visible. * Otherwise the helper text will only be shown when the input has focus * (or if `validation` is set, when the input is invalid). */ @Input() set persistent(value: boolean) { this._persistent = asBoolean(value); if (this._foundation) this._foundation.setPersistent(this._persistent); } static ngAcceptInputType_persistent: boolean | ''; } /** * Material design text-field. Text fields can be filled or outlined. * * Filled text-fields should have the following child directives: * * `mdcTextFieldIcon` (optional leading icon) * * `mdcTextFieldInput` (required, the native input) * * `mdcTextFieldIcon` (optional trailing icon) * * `mdcFloatingLabel` (optional floating label) * * Outlined text-fields should have the following child directives: * * `mdcTextFieldIcon` (optional leading icon) * * `mdcTextFieldInput` (required, the native input) * * `mdcTextFieldIcon` (optional trailing icon) * * `mdcNotchedOutline` (the outline, which can also contain an optional `mdcFloatingLabel`) * * Addditionally the text-field can be followed by an `mdcTextFieldHelperLine` containing an * `mdcHelperText`. 
*/ @Directive({ selector: '[mdcTextField]', providers: [{provide: AbstractMdcRipple, useExisting: forwardRef(() => MdcTextFieldDirective) }] }) export class MdcTextFieldDirective extends AbstractMdcRipple implements AfterContentInit, OnDestroy { private onDestroy$: Subject<any> = new Subject(); private onInputChange$: Subject<any> = new Subject(); private onHelperTextChange$: Subject<any> = new Subject(); /** @internal */ @HostBinding('class.mdc-text-field') readonly _cls = true; /** @internal */ @ContentChildren(MdcTextFieldIconDirective) _icons?: QueryList<MdcTextFieldIconDirective>; /** @internal */ @ContentChildren(MdcTextFieldInputDirective) _inputs?: QueryList<MdcTextFieldInputDirective>; /** @internal */ @ContentChildren(MdcFloatingLabelDirective, {descendants: true}) _floatingLabels?: QueryList<MdcFloatingLabelDirective>; /** @internal */ @ContentChildren(MdcNotchedOutlineDirective) _outlines?: QueryList<MdcNotchedOutlineDirective>; private _leadingIcon: MdcTextFieldIconDirective | null = null; private _trailingIcon: MdcTextFieldIconDirective | null = null; private _helperText: MdcTextFieldHelperTextDirective | null = null; private _bottomLineElm: HTMLElement | null = null; private _valid: boolean | null = null; private mdcLineRippleAdapter: MDCLineRippleAdapter = { addClass: (className: string) => this.renderer.addClass(this._bottomLineElm, className), removeClass: (className: string) => this.renderer.removeClass(this._bottomLineElm, className), hasClass: (className) => this._bottomLineElm!.classList.contains(className), setStyle: (name: string, value: string) => this.renderer.setStyle(this._bottomLineElm, name, value), registerEventHandler: (evtType, handler) => this.registry.listenElm(this.renderer, evtType, handler, this._bottomLineElm!), deregisterEventHandler: (evtType, handler) => this.registry.unlisten(evtType, handler) }; private mdcAdapter: MDCTextFieldAdapter = { addClass: (className) => this.renderer.addClass(this.root.nativeElement, className), 
removeClass: (className) => this.renderer.removeClass(this.root.nativeElement, className), hasClass: (className) => this.root.nativeElement.classList.contains(className), registerTextFieldInteractionHandler: (evtType, handler) => { this.registry.listen(this.renderer, evtType, handler, this.root); }, deregisterTextFieldInteractionHandler: (evtType, handler) => { this.registry.unlisten(evtType, handler); }, registerInputInteractionHandler: (evtType, handler) => this._input && this.registry.listen(this.renderer, evtType, handler, this._input._elm), deregisterInputInteractionHandler: (evtType, handler) => this.registry.unlisten(evtType, handler), registerValidationAttributeChangeHandler: (handler: (arg: Array<any>) => void) => { const getAttributesList = (mutationsList: any) => mutationsList .map((mutation: any) => mutation.attributeName) .filter((attrName: string) => attrName); const observer = new MutationObserver((mutationsList) => handler(getAttributesList(mutationsList))); observer.observe(this._input!._elm.nativeElement, {attributes: true}); return observer; }, deregisterValidationAttributeChangeHandler: (observer: MutationObserver) => observer.disconnect(), getNativeInput: () => ({ value: this._input!.value, disabled: this._input!.disabled, maxLength: this._input!._elm.nativeElement.maxLength, type: this._input!.type, validity: { valid: this._valid == null ? this._input!.valid : !!this._valid, badInput: this._input!._isBadInput() } }), isFocused: () => !!this._input && document.activeElement === this._input._elm.nativeElement, shakeLabel: (shouldShake: boolean) => this._floatingLabel?.shake(shouldShake), floatLabel: (shouldFloat: boolean) => this._floatingLabel?.float(shouldFloat), hasLabel: () => !!this._floatingLabel, getLabelWidth: () => this._floatingLabel ? 
this._floatingLabel.getWidth() : 0, activateLineRipple: () => this.bottomLineFoundation?.activate(), deactivateLineRipple: () => this.bottomLineFoundation?.deactivate(), setLineRippleTransformOrigin: (normalizedX: number) => this.bottomLineFoundation?.setRippleCenter(normalizedX), hasOutline: () => !!this._outline, notchOutline: (labelWidth: number) => this._outline?.open(labelWidth), closeOutline: () => this._outline?.close() }; private bottomLineFoundation: MDCLineRippleFoundation | null = null; private foundation: MDCTextFieldFoundation | null = null; constructor(private renderer: Renderer2, private root: ElementRef, private registry: MdcEventRegistry, @Inject(DOCUMENT) doc: any) { super(root, renderer, registry, doc as Document); } ngAfterContentInit() { merge( this._floatingLabels!.changes, this._icons!.changes, this._inputs!.changes, this._outlines!.changes, this.onHelperTextChange$ ).pipe( takeUntil(this.onDestroy$), debounceTime(1) ).subscribe(() => { this.reconstructComponent(); }); this.initComponent(); } ngOnDestroy() { this.onInputChange$.next(); this.onInputChange$.complete(); this.onDestroy$.next(); this.onDestroy$.complete(); this.onHelperTextChange$.complete(); this.destroyComponent(); } private initComponent() { if (this._input && !this._outline && !this._input._isTextarea()) { this.addRippleSurface('mdc-text-field__ripple', true); this.initRipple(); this.initLineRipple(); } this.attachLabelToInput(); this.attachHelperTextToInput(); this.initIcons(); this.foundation = new MDCTextFieldFoundation(this.mdcAdapter, { helperText: this.helperText?._foundation ? this.helperText._foundation : undefined, leadingIcon: this._leadingIcon?._foundation ? this._leadingIcon._foundation : undefined, trailingIcon: this._trailingIcon?._foundation ? 
this._trailingIcon._foundation : undefined }); this.foundation.init(); this.subscribeInputChanges(); if (this._helperText) { this._helperText.idChange().pipe( takeUntil(this.onDestroy$), takeUntil(this.onHelperTextChange$) ).subscribe(() => this.attachHelperTextToInput()); } } private destroyComponent() { this.destroyRippleSurface(); this.destroyRipple(); this.destroyLineRipple(); this.foundation?.destroy(); this.foundation = null; } private reconstructComponent() { this.destroyComponent(); this.initComponent(); this.recomputeOutline(); // TODO check if we still need this with latest material-components-web } private initLineRipple() { if (!this._outline) { this._bottomLineElm = this.renderer.createElement('div'); this.renderer.addClass(this._bottomLineElm, 'mdc-line-ripple'); this.renderer.appendChild(this.root.nativeElement, this._bottomLineElm); this.bottomLineFoundation = new MDCLineRippleFoundation(this.mdcLineRippleAdapter); this.bottomLineFoundation.init(); } } private destroyLineRipple() { if (this._bottomLineElm) { this.bottomLineFoundation?.destroy(); this.bottomLineFoundation = null; this.renderer.removeChild(this.root.nativeElement, this._bottomLineElm); this._bottomLineElm = null; } } private recomputeOutline() { if (this._outline) { // the outline may not be valid after re-initialisation, recompute outline when all // style/structural changes have been employed: setTimeout(() => {this.foundation?.notchOutline(this.foundation.shouldFloat); }, 0); } } private initIcons() { this._leadingIcon = this.computeLeadingIcon(); this._trailingIcon = this.computeTrailingIcon(this._leadingIcon); this._icons!.forEach(icon => { icon._textField = this; icon._leading = icon === this._leadingIcon; icon._trailing = icon === this._trailingIcon; }); } private computeLeadingIcon() { if (this._icons!.length > 0) { let icon = this._icons!.first; let prev = this.previousElement(icon._el.nativeElement); let last = icon._el.nativeElement; while (true) { // if it is contained in 
another element, check the siblings of the container too: if (prev == null && last != null && last.parentElement !== this.root.nativeElement) prev = last.parentElement; // no more elements before, must be the leading icon: if (prev == null) return icon; // comes after the text, so it's not the leading icon: if (this._input && (prev === this._input._elm.nativeElement || prev.contains(this._input._elm.nativeElement))) return null; last = prev; prev = this.previousElement(prev); } } return null; } private computeTrailingIcon(leading: MdcTextFieldIconDirective | null) { if (this._icons!.length > 0) { let icon = this._icons!.last; if (icon === leading) return null; // if not the leading icon, it must be the trailing icon: return icon; } return null; } private previousElement(el: Element): Element { let result = el.previousSibling; while (result != null && !(result instanceof Element)) result = result.previousSibling; return <Element>result; } private attachLabelToInput() { // if the mdcTextField is a LABEL element wrapping the input OR the floatingLabel is NOT a LABEL element, // the input gets an aria-labelledby attaching it to the floatingLabel; // otherwise the floatingLabel gets a 'for' attribute, attaching it to the input: let first = true; const needLabeledBy = this.root.nativeElement.nodeName.toLowerCase() === 'label' || !this._floatingLabel?.isLabelElement(); this._inputs!.forEach(input => { input._labeledBy = (first && needLabeledBy) ? this._floatingLabel?.id || null : null; first = false; }); first = true; this._floatingLabels?.forEach(label => { label.for = (first && !needLabeledBy && this._floatingLabel?.isLabelElement()) ? this._input?.id || null : null; first = false; }); } private attachHelperTextToInput() { let first = true; this._inputs!.forEach(input => { const assign = first ? 
this._helperText?.id || null : null; input._controls = assign; input._describedBy = assign; first = false; }); } private subscribeInputChanges() { this.onInputChange$.next(); this._input?._valueChange.asObservable().pipe(takeUntil(this.onInputChange$)).subscribe((value) => { this.foundation?.setValue(value!); // value can be null, but null should be accepted by foundation }); } /** @internal */ protected getRippleInteractionElement() { return this._input!._elm; } /** * The <code>valid</code> property provides a way to override the validity checking of the * underlying angular form control or native input. A value of true or false will make the * text-field validity styling based on this value. A value of <code>null</code>, or * <code>undefined</code> will reset the validity styling to the state of the underlying * angular form control or native input. * * For most use cases messing with this input is not be needed. * When the input/textarea is an ngControl, the mdcTextField is already aware of that, * and is already using the 'valid' property of that control. * However, in some specific cases, binding to <code>valid</code> can help. Example: * When you want the mdcTextField to go to 'invalid' state only when the underlying * control is invalid AND that control's value is changed, you can bind as follows: * <code>valid="myControl.valid || !myControl.dirty"</code>. */ @Input() set valid(value: boolean) { if (value == null) { this._valid = null; // reset to null, validity now managed by the input control. 
this.foundation?.setUseNativeValidation(true); // foundation doesn't change style when we switch to native validation; // trigger possible new style: this.foundation && (<any>this.foundation)['styleValidity_'](this.mdcAdapter.getNativeInput()?.validity.valid); } else if (value !== this._valid) { this._valid = asBoolean(value); this.foundation?.setValid(this._valid); } } static ngAcceptInputType_valid: boolean | ''; /** @internal */ @HostBinding('class.mdc-text-field--textarea') get _textArea(): boolean { return this._input!._isTextarea(); } /** @internal */ @HostBinding('class.mdc-text-field--outlined') get outlined() { return !!this._outline; } /** @internal */ @HostBinding('class.mdc-text-field--no-label') get noLabel() { return !this._floatingLabel; } /** @internal */ @HostBinding('class.mdc-text-field--with-leading-icon') get _leading(): boolean { return !!this._leadingIcon; } /** @internal */ @HostBinding('class.mdc-text-field--with-trailing-icon') get _trailing(): boolean { return !!this._trailingIcon; } /** * Assign an <code>mdcTextFieldHelperText</code> (exported as <code>mdcHelperText</code>) to this * input to add a helper-text or validation message to the text-field. See the examples for hints * on how to do this. */ @Input() get helperText(): MdcTextFieldHelperTextDirective | null { return this._helperText; } set helperText(helperText: MdcTextFieldHelperTextDirective | null) { this._helperText = helperText; this.onHelperTextChange$.next(); } /** @internal */ @HostBinding('class.mdc-text-field--disabled') get _disabled() { // TODO: this mirrors what the text-field can update itself from adapter.getNativeInput // is there a way to trigger the textfield to re-read that when the disabled state of // the input changes? return this._input ? 
this._input.disabled : false; } private get _input() { return this._inputs?.first; } private get _floatingLabel() { return this._floatingLabels?.first; } private get _outline() { return this._outlines?.first; } } export const TEXT_FIELD_DIRECTIVES = [ MdcTextFieldInputDirective, MdcTextFieldIconDirective, MdcTextFieldHelperLineDirective, MdcTextFieldHelperTextDirective, MdcTextFieldDirective ];
the_stack
import * as ESTree from "estree"; import { Server } from "../tern"; export { }; // #### Context #### interface ContextConstructor { new(defs: any[], parent: Server): Context; } export const Context: ContextConstructor; export interface Context { parent?: Server; topScope: Scope; /** The primitive number type. */ num: Prim & { name: "number" }; /** The primitive string type. */ str: Prim & { name: "string" }; /** The primitive boolean type. */ bool: Prim & { name: "bool" }; } /** Returns the current context object. */ export function cx(): Context; /** Calls f with the current context bound to context. Basically, all code that does something with the inference engine should be wrapped in such a call. */ export function withContext<R>(context: Context, f: () => R): R; // #### Analysis #### /** Parse a piece of code for use by Tern. Will automatically fall back to the error-tolerant parser if the regular parser can’t parse the code. */ export function parse(text: string, options?: {}): ESTree.Program; /** * Analyze a syntax tree. `name` will be used to set the origin of types, properties, and variables produced by this code. * The optional `scope` argument can be used to specify a scope in which the code should be analyzed. * It will default to the top-level scope. */ export function analyze(ast: ESTree.Program, name: string, scope?: Scope): void; /** * Purges the types that have one of the origins given from the context. `start` and `end` can be given to only purge * types that occurred in the source code between those offsets. This is not entirely precise — the state of the * context won’t be back where it was before the file was analyzed — but it prevents most of the * noticeable inaccuracies that re-analysis tends to produce. */ export function purgeTypes(origins: string[], start?: number, end?: number): void; /** * Cleaning up variables is slightly trickier than cleaning up types. 
This does a first pass over the given scope, * and marks variables defined by the given origins. This is indended to be followed by a call to `analyze` and then a call to `purgeMarkedVariables`. */ export function markVariablesDefinedBy(scope: Scope, origins: string[], start?: number, end?: number): void; /** Purges variables that were marked by a call to markVariablesDefinedBy and not re-defined in the meantime. */ export function purgeMarkedVariables(): void; // #### Types #### interface ObjConstructor { new(proto: object | true | null, name?: string): Obj; } /** Constructor for the type that represents JavaScript objects. `proto` may be another object, or `true` as a short-hand for `Object.prototype`, or `null` for prototype-less objects. */ export const Obj: ObjConstructor; export interface Obj extends IType { /** The name of the type, if any. */ name: string | undefined; /** The prototype of the object, or null. */ proto: (Obj & { name: string }) | null; /** An object mapping the object’s known properties to AVals. Don’t manipulate this directly (ever), only use it if you have to iterate over the properties. */ props: Readonly<{ [key: string]: AVal; }>; /** Looks up the AVal associated with the given property, or returns null if it doesn’t exist. */ hasProp(prop: string): AVal | null; /** Looks up the given property, or defines it if it did not yet exist (in which case it will be associated with the given AST node). */ defProp(prop: string, originNode?: ESTree.Node): AVal; /** * Asks the AVal if it contains an Object type. Useful when * you aren’t interested in other kinds of types. */ getObjType(): Obj; getType(): Obj; /** Get an `AVal` that represents the named property of this type. */ getProp(prop: string): AVal; /** Call the given function for all properties of the object, including properties that are added in the future. 
*/ forAllProps(f: (prop: string, val: AVal, local: boolean) => void): void; gatherProperties(f: (...args: any[]) => void, depth: number): void; } interface FnConstructor { new(name: string | undefined, self: AVal, args: AVal[], argNames: string[], retval: AVal): Fn; } /** Constructor for the type that implements functions. Inherits from `Obj`. The `AVal` types are used to track the input and output types of the function. */ export const Fn: FnConstructor; export interface Fn extends Obj { readonly args?: AVal[]; readonly argNames?: string[]; self?: Type; readonly retval: AVal; /** * Asks the AVal if it contains a function type. Useful when * you aren’t interested in other kinds of types. */ getFunctionType(): Fn; isArrowFn(): boolean; getType(): Fn; } interface PrimConstructor { new(proto: object | null, name?: string): Prim; } export const Prim: PrimConstructor; export interface Prim extends IType { /** The name of the type, if any. */ name: "string" | "bool" | "number"; /** The prototype of the object, or null. */ proto: Obj & { name: string }; /** Get an `AVal` that represents the named property of this type. */ getProp(prop: string): AVal; getType(): Prim; gatherProperties(f: (...args: any[]) => void, depth: number): void; } interface ArrConstructor { /** Constructor that creates an array type with the given content type. */ new(contentType?: AVal): Arr; } export const Arr: ArrConstructor; export interface Arr extends Obj { name: "Array"; getType(): Arr; } interface TypeConstructor { new(): Type; } export const Type: TypeConstructor; export type Type = Obj | Prim; // tslint:disable-next-line: interface-name export interface IType extends ANull { /** The origin file of the type. */ origin: string; /** * The syntax node that defined the type. Only present for object and function types, * and even for those it may be missing (if the type was created by a type definition file, * or synthesized in some other way). 
*/ originNode?: ESTree.Node; /** Return a string that describes the type. maxDepth indicates the depth to which inner types should be shown. */ toString(maxDepth: number): string; /** Queries whether the AVal _currently_ holds the given type. */ hasType(type: Type): boolean; getType(): Type; } // #### Abstract Values #### interface AValConstructor { new(): AVal; } export const AVal: AValConstructor; /** * Abstract values are objects used to represent sets of types. Each variable * and property has an abstract value associated with it, but they are also * used for other purposes, such as tracking the return type of a function, * or building up the type for some kinds of expressions. * * In a cleanly typed program where each thing has only a single type, * abstract values will all have one type associated with them. When, * for example, a variable can hold two different types of values, * the associated abstract value will hold both these types. In some cases, * no type can be assigned to something at all, * in which case the abstract value remains empty. */ export interface AVal extends ANull { /** * Add a type to this abstract value. If the type is already in there, * this is a no-op. weight can be given to give this type a non-default * weight, which is mostly useful when adding a provisionary type that * should be overridden later if a real type is found. The default weight * is 100, and passing a weight lower than that will make the type * assignment “weak”. */ addType(type: Type, weight?: number): void; /** * Sets this AVal to propagate all types it receives to the given * constraint. This is the mechanism by which types are propagated * through the type graph. */ propagate(target: Constraint): void; /** Queries whether the AVal _currently_ holds the given type. */ hasType(type: Type): boolean; /** Queries whether the AVal is empty. */ isEmpty(): boolean; /** * Asks the abstract value for its current type. 
May return `null` * when there is no type, or conflicting types are present. When * `guess` is true or not given, an empty AVal will try to use * heuristics based on its propagation edges to guess a type. */ getType(guess?: boolean): Type | null; /** * Asks the AVal if it contains a function type. Useful when * you aren’t interested in other kinds of types. */ getFunctionType(): Fn | undefined; /** Get an `AVal` that represents the named property of this type. */ getProp(prop: string): AVal; /** Call the given function for all properties of the object, including properties that are added in the future. */ forAllProps(f: (prop: string, val: AVal, local: boolean) => void): void; /** * Asks the AVal if it contains an Object type. Useful when * you aren’t interested in other kinds of types. */ getObjType(): Obj | null; /** * Abstract values that are used to represent variables * or properties will have, when possible, an `originNode` * property pointing to an AST node. */ gatherProperties(f: (...args: any[]) => void, depth: number): void; originNode?: ESTree.Node; /** An object mapping the object’s known properties to AVals. Don’t manipulate this directly (ever), only use it if you have to iterate over the properties. */ props: Partial<Readonly<{ [key: string]: AVal; }>>; readonly types: Type[]; readonly propertyOf?: Obj; } /** * A variant of AVal used for unknown, dead-end values. Also serves * a prototype for AVals, Types, and Constraints because it * implements 'empty' versions of all the methods that the code expects. 
*/ export const ANull: ANull; export interface ANull { addType(...args: any[]): void; propagate(...args: any[]): void; getProp(...args: any[]): ANull; forAllProps(...args: any[]): void; hasType(...args: any[]): boolean; isEmpty(...args: any[]): boolean; getFunctionType(...args: any[]): ANull | undefined; getObjType(...args: any[]): ANull | undefined | null; getSymbolType(...args: any[]): ANull | undefined; getType(...args: any[]): ANull | undefined | null; gatherProperties(...args: any[]): void; propagatesTo(): any; typeHint(...args: any[]): ANull | undefined | null; propHint(...args: any[]): string | undefined; toString(...args: any[]): string; } // #### Constraints #### interface ConstraintConstructor { new(methods: { [key: string]: any }): { new(): Constraint }; } /** * This is a constructor-constructor for constraints. It’ll create a * constructor with all the given methods copied into its prototype, * which will run its construct method on its arguments when instantiated. */ export const constraint: ConstraintConstructor; export interface Constraint extends ANull { /** May return a type that `getType` can use to “guess” its type based on the fact that it propagates to this constraint. */ typeHint(): Type | undefined; /** May return a string when this constraint is indicative of the presence of a specific property in the source AVal. */ propHint(): string | undefined; } // #### Scopes #### interface ScopeConstructor { new(): Scope; new(parent: Scope, originNode: ESTree.Node): Scope; } export const Scope: ScopeConstructor; export interface Scope extends Obj { /** * Ensures that this scope or some scope above it has a property by the given name * (defining it in the top scope if it is missing), and, if the property doesn’t * already have an `originNode`, assigns the given node to it. 
*/ defVar(name: string, originNode: ESTree.Node): AVal; } // #### Utilities #### /** * Searches the given syntax tree for an expression that ends at the given `end` offset and, * if `start` is given, starts at the given start offset. `scope` can be given to override the * outer scope, which defaults to the context’s top scope. Will return a `{node, state}` * object if successful, where `node` is AST node, and `state` is the scope at that point. * Returns `null` if unsuccessful. */ export function findExpressionAt(ast: ESTree.Program, start: number | undefined, end: number, scope?: Scope): { node: ESTree.Node, state: Scope } | null; /** * Similar to `findExpressionAt`, except that it will return the innermost expression * node that spans the given range, rather than only exact matches. */ export function findExpressionAround(ast: ESTree.Program, start: number | undefined, end: number, scope?: Scope): { node: ESTree.Node, state: Scope } | null; /** Similar to `findExpressionAround`, except that it use the same AST walker as `findExpressionAt`. */ export function findClosestExpression(ast: ESTree.Program, start: number | undefined, end: number, scope?: Scope): { node: ESTree.Node, state: Scope } | null; /** Determine an expression for the given node and scope (as returned by the functions above). Will return an `AVal` or plain `Type`. */ export function expressionType(expr: { node: ESTree.Node, state: Scope | null }): AVal | Type; /** Find the scope at a given position in the syntax tree. The `scope` parameter can be used to override the scope used for code that isn’t wrapped in any function. */ export function scopeAt(ast: ESTree.Program, pos: number, scope?: Scope): Scope; /** * Will traverse the given syntax tree, using `scope` as the starting scope, looking for references to variable `name` that * resolve to scope `refScope`, and call `f` with the node of the reference and its local scope for each of them. 
*/ export function findRefs(ast: ESTree.Program, scope: Scope, name: string, refScope: Scope, f: (Node: ESTree.Node, Scope: Scope) => void): void; /** * Analogous to `findRefs`, but used to look for references to a specific property instead. Whereas `findRefs` * is precise, this is dependent on type inference, and thus can not be relied on to be precise. */ export function findPropRefs(ast: ESTree.Program, scope: Scope, objType: Obj, propName: string, f: (Node: ESTree.Node) => void): void; /** Whenever infer guesses a type through fuzzy heuristics (through `getType` or `expressionType`), it sets a flag. `didGuess` tests whether the guessing flag is set. */ export function didGuess(): boolean; /** Whenever infer guesses a type through fuzzy heuristics (through `getType` or `expressionType`), it sets a flag. `resetGuessing` resets the guessing flag. */ export function resetGuessing(val?: boolean): void;
the_stack
import * as core from '@actions/core' import * as fs from 'fs' import * as gitDirectoryHelper from '../lib/git-directory-helper' import * as io from '@actions/io' import * as path from 'path' import {IGitCommandManager} from '../lib/git-command-manager' const testWorkspace = path.join(__dirname, '_temp', 'git-directory-helper') let repositoryPath: string let repositoryUrl: string let clean: boolean let ref: string let git: IGitCommandManager describe('git-directory-helper tests', () => { beforeAll(async () => { // Clear test workspace await io.rmRF(testWorkspace) }) beforeEach(() => { // Mock error/warning/info/debug jest.spyOn(core, 'error').mockImplementation(jest.fn()) jest.spyOn(core, 'warning').mockImplementation(jest.fn()) jest.spyOn(core, 'info').mockImplementation(jest.fn()) jest.spyOn(core, 'debug').mockImplementation(jest.fn()) }) afterEach(() => { // Unregister mocks jest.restoreAllMocks() }) const cleansWhenCleanTrue = 'cleans when clean true' it(cleansWhenCleanTrue, async () => { // Arrange await setup(cleansWhenCleanTrue) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.tryClean).toHaveBeenCalled() expect(git.tryReset).toHaveBeenCalled() expect(core.warning).not.toHaveBeenCalled() }) const checkoutDetachWhenNotDetached = 'checkout detach when not detached' it(checkoutDetachWhenNotDetached, async () => { // Arrange await setup(checkoutDetachWhenNotDetached) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.checkoutDetach).toHaveBeenCalled() }) const 
doesNotCheckoutDetachWhenNotAlreadyDetached = 'does not checkout detach when already detached' it(doesNotCheckoutDetachWhenNotAlreadyDetached, async () => { // Arrange await setup(doesNotCheckoutDetachWhenNotAlreadyDetached) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') const mockIsDetached = git.isDetached as jest.Mock<any, any> mockIsDetached.mockImplementation(async () => { return true }) // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.checkoutDetach).not.toHaveBeenCalled() }) const doesNotCleanWhenCleanFalse = 'does not clean when clean false' it(doesNotCleanWhenCleanFalse, async () => { // Arrange await setup(doesNotCleanWhenCleanFalse) clean = false await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.isDetached).toHaveBeenCalled() expect(git.branchList).toHaveBeenCalled() expect(core.warning).not.toHaveBeenCalled() expect(git.tryClean).not.toHaveBeenCalled() expect(git.tryReset).not.toHaveBeenCalled() }) const removesContentsWhenCleanFails = 'removes contents when clean fails' it(removesContentsWhenCleanFails, async () => { // Arrange await setup(removesContentsWhenCleanFails) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') let mockTryClean = git.tryClean as jest.Mock<any, any> mockTryClean.mockImplementation(async () => { return false }) // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files).toHaveLength(0) expect(git.tryClean).toHaveBeenCalled() 
expect(core.warning).toHaveBeenCalled() expect(git.tryReset).not.toHaveBeenCalled() }) const removesContentsWhenDifferentRepositoryUrl = 'removes contents when different repository url' it(removesContentsWhenDifferentRepositoryUrl, async () => { // Arrange await setup(removesContentsWhenDifferentRepositoryUrl) clean = false await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') const differentRepositoryUrl = 'https://github.com/my-different-org/my-different-repo' // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, differentRepositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files).toHaveLength(0) expect(core.warning).not.toHaveBeenCalled() expect(git.isDetached).not.toHaveBeenCalled() }) const removesContentsWhenNoGitDirectory = 'removes contents when no git directory' it(removesContentsWhenNoGitDirectory, async () => { // Arrange await setup(removesContentsWhenNoGitDirectory) clean = false await io.rmRF(path.join(repositoryPath, '.git')) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files).toHaveLength(0) expect(core.warning).not.toHaveBeenCalled() expect(git.isDetached).not.toHaveBeenCalled() }) const removesContentsWhenResetFails = 'removes contents when reset fails' it(removesContentsWhenResetFails, async () => { // Arrange await setup(removesContentsWhenResetFails) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') let mockTryReset = git.tryReset as jest.Mock<any, any> mockTryReset.mockImplementation(async () => { return false }) // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files).toHaveLength(0) 
expect(git.tryClean).toHaveBeenCalled() expect(git.tryReset).toHaveBeenCalled() expect(core.warning).toHaveBeenCalled() }) const removesContentsWhenUndefinedGitCommandManager = 'removes contents when undefined git command manager' it(removesContentsWhenUndefinedGitCommandManager, async () => { // Arrange await setup(removesContentsWhenUndefinedGitCommandManager) clean = false await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') // Act await gitDirectoryHelper.prepareExistingDirectory( undefined, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files).toHaveLength(0) expect(core.warning).not.toHaveBeenCalled() }) const removesLocalBranches = 'removes local branches' it(removesLocalBranches, async () => { // Arrange await setup(removesLocalBranches) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') const mockBranchList = git.branchList as jest.Mock<any, any> mockBranchList.mockImplementation(async (remote: boolean) => { return remote ? 
[] : ['local-branch-1', 'local-branch-2'] }) // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.branchDelete).toHaveBeenCalledWith(false, 'local-branch-1') expect(git.branchDelete).toHaveBeenCalledWith(false, 'local-branch-2') }) const removesLockFiles = 'removes lock files' it(removesLockFiles, async () => { // Arrange await setup(removesLockFiles) clean = false await fs.promises.writeFile( path.join(repositoryPath, '.git', 'index.lock'), '' ) await fs.promises.writeFile( path.join(repositoryPath, '.git', 'shallow.lock'), '' ) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert let files = await fs.promises.readdir(path.join(repositoryPath, '.git')) expect(files).toHaveLength(0) files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.isDetached).toHaveBeenCalled() expect(git.branchList).toHaveBeenCalled() expect(core.warning).not.toHaveBeenCalled() expect(git.tryClean).not.toHaveBeenCalled() expect(git.tryReset).not.toHaveBeenCalled() }) const removesAncestorRemoteBranch = 'removes ancestor remote branch' it(removesAncestorRemoteBranch, async () => { // Arrange await setup(removesAncestorRemoteBranch) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') const mockBranchList = git.branchList as jest.Mock<any, any> mockBranchList.mockImplementation(async (remote: boolean) => { return remote ? 
['origin/remote-branch-1', 'origin/remote-branch-2'] : [] }) ref = 'remote-branch-1/conflict' // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.branchDelete).toHaveBeenCalledTimes(1) expect(git.branchDelete).toHaveBeenCalledWith( true, 'origin/remote-branch-1' ) }) const removesDescendantRemoteBranches = 'removes descendant remote branch' it(removesDescendantRemoteBranches, async () => { // Arrange await setup(removesDescendantRemoteBranches) await fs.promises.writeFile(path.join(repositoryPath, 'my-file'), '') const mockBranchList = git.branchList as jest.Mock<any, any> mockBranchList.mockImplementation(async (remote: boolean) => { return remote ? ['origin/remote-branch-1/conflict', 'origin/remote-branch-2'] : [] }) ref = 'remote-branch-1' // Act await gitDirectoryHelper.prepareExistingDirectory( git, repositoryPath, repositoryUrl, clean, ref ) // Assert const files = await fs.promises.readdir(repositoryPath) expect(files.sort()).toEqual(['.git', 'my-file']) expect(git.branchDelete).toHaveBeenCalledTimes(1) expect(git.branchDelete).toHaveBeenCalledWith( true, 'origin/remote-branch-1/conflict' ) }) }) async function setup(testName: string): Promise<void> { testName = testName.replace(/[^a-zA-Z0-9_]+/g, '-') // Repository directory repositoryPath = path.join(testWorkspace, testName) await fs.promises.mkdir(path.join(repositoryPath, '.git'), {recursive: true}) // Repository URL repositoryUrl = 'https://github.com/my-org/my-repo' // Clean clean = true // Ref ref = '' // Git command manager git = { branchDelete: jest.fn(), branchExists: jest.fn(), branchList: jest.fn(async () => { return [] }), checkout: jest.fn(), checkoutDetach: jest.fn(), config: jest.fn(), configExists: jest.fn(), fetch: jest.fn(), getDefaultBranch: jest.fn(), getWorkingDirectory: jest.fn(() => repositoryPath), init: 
jest.fn(), isDetached: jest.fn(), lfsFetch: jest.fn(), lfsInstall: jest.fn(), log1: jest.fn(), remoteAdd: jest.fn(), removeEnvironmentVariable: jest.fn(), revParse: jest.fn(), setEnvironmentVariable: jest.fn(), shaExists: jest.fn(), submoduleForeach: jest.fn(), submoduleSync: jest.fn(), submoduleUpdate: jest.fn(), tagExists: jest.fn(), tryClean: jest.fn(async () => { return true }), tryConfigUnset: jest.fn(), tryDisableAutomaticGarbageCollection: jest.fn(), tryGetFetchUrl: jest.fn(async () => { // Sanity check - this function shouldn't be called when the .git directory doesn't exist await fs.promises.stat(path.join(repositoryPath, '.git')) return repositoryUrl }), tryReset: jest.fn(async () => { return true }) } }
the_stack
import { getInternalReactConstants } from "./utils/getInternalReactConstants"; import { getFiberFlags } from "./utils/getFiberFlags"; import { CONCURRENT_MODE_NUMBER, CONCURRENT_MODE_SYMBOL_STRING, CONTEXT_NUMBER, CONTEXT_SYMBOL_STRING, DEPRECATED_ASYNC_MODE_SYMBOL_STRING, PROFILER_NUMBER, PROFILER_SYMBOL_STRING, PROVIDER_NUMBER, PROVIDER_SYMBOL_STRING, STRICT_MODE_NUMBER, STRICT_MODE_SYMBOL_STRING, } from "./utils/constants.js"; import { ReactInternals, Fiber, NativeType } from "../types"; import { ElementTypeClass, ElementTypeFunction, ElementTypeForwardRef, ElementTypeMemo, ElementTypeProvider, ElementTypeConsumer, ElementTypeHostRoot, ElementTypeHostComponent, ElementTypeSuspense, ElementTypeSuspenseList, ElementTypeProfiler, ElementTypeOtherOrUnknown, } from "../../common/constants"; export type CoreApi = ReturnType<typeof createIntegrationCore>; export function createIntegrationCore(renderer: ReactInternals) { // Newer versions of the reconciler package also specific reconciler version. // If that version number is present, use it. // Third party renderer versions may not match the reconciler version, // and the latter is what's important in terms of tags and symbols. const version = renderer.reconcilerVersion || renderer.version || ""; const { getDisplayNameForFiber, getTypeSymbol, ReactPriorityLevels, ReactTypeOfWork, ReactTypeOfSideEffect, } = getInternalReactConstants(version); const { PerformedWork } = ReactTypeOfSideEffect; const { ClassComponent, DehydratedSuspenseComponent, ForwardRef, Fragment, FunctionComponent, HostRoot, HostPortal, HostComponent, HostText, IncompleteClassComponent, IndeterminateComponent, LegacyHiddenComponent, MemoComponent, OffscreenComponent, SimpleMemoComponent, SuspenseComponent, SuspenseListComponent, ContextConsumer, LazyComponent, } = ReactTypeOfWork; // Map of one or more Fibers in a pair to their unique id number. 
// We track both Fibers to support Fast Refresh, // which may forcefully replace one of the pair as part of hot reloading. // In that case it's still important to be able to locate the previous ID during subsequent renders. const fiberToId = new Map<Fiber, number>(); let fiberIdSeed = 0; // Map of id to one (arbitrary) Fiber in a pair. // This Map is used to e.g. get the display name for a Fiber or schedule an update, // operations that should be the same whether the current and work-in-progress Fiber is used. const idToArbitraryFiber = new Map<number, Fiber>(); // Map for fiber type let typeIdSeed = 1; const fiberTypeId = new WeakMap<any, number>(); const fiberTypeIdNonWeakRef = new Map<symbol, Record<string, any>>(); // Roots don't have a real persistent identity. // A root's "pseudo key" is "childDisplayName:indexWithThatName". // For example, "App:0" or, in case of similar roots, "Story:0", "Story:1", etc. // We will use this to try to disambiguate roots when restoring selection between reloads. const rootPseudoKeys = new Map(); const rootDisplayNameCounter = new Map(); // NOTICE Keep in sync with get*ForFiber methods function shouldFilterFiber(fiber: Fiber) { switch (fiber.tag) { case DehydratedSuspenseComponent: // TODO: ideally we would show dehydrated Suspense immediately. // However, it has some special behavior (like disconnecting // an alternate and turning into real Suspense) which breaks DevTools. // For now, ignore it, and only show it once it gets hydrated. // https://github.com/bvaughn/react-devtools-experimental/issues/197 return true; case HostPortal: case HostComponent: case HostText: case Fragment: case LegacyHiddenComponent: case OffscreenComponent: return true; case HostRoot: // It is never valid to filter the root element. 
      // Tail of an enclosing filter function whose head is above this excerpt:
      // Mode-style fibers (Concurrent/Strict/deprecated Async mode) are filtered out.
      return false;
    default:
      switch (getTypeSymbol(fiber.type)) {
        case CONCURRENT_MODE_NUMBER:
        case CONCURRENT_MODE_SYMBOL_STRING:
        case DEPRECATED_ASYNC_MODE_SYMBOL_STRING:
        case STRICT_MODE_NUMBER:
        case STRICT_MODE_SYMBOL_STRING:
          return true;
        default:
          return false;
      }
  }
}

// NOTICE Keep in sync with shouldFilterFiber() and other get*ForFiber methods
// Maps a Fiber's work tag (and, for symbol-typed fibers, its type symbol) to a
// DevTools ElementType* constant. Unknown tags fall through to OtherOrUnknown.
function getElementTypeForFiber(fiber: Fiber) {
  switch (fiber.tag) {
    case ClassComponent:
    case IncompleteClassComponent:
      return ElementTypeClass;
    case FunctionComponent:
    case IndeterminateComponent:
      return ElementTypeFunction;
    case MemoComponent:
    case SimpleMemoComponent:
      return ElementTypeMemo;
    case ForwardRef:
      return ElementTypeForwardRef;
    case HostRoot:
      return ElementTypeHostRoot;
    case HostComponent:
      return ElementTypeHostComponent;
    case SuspenseComponent:
      return ElementTypeSuspense;
    case SuspenseListComponent:
      return ElementTypeSuspenseList;
    case HostPortal:
    case HostText:
    case Fragment:
      return ElementTypeOtherOrUnknown;
    default:
      switch (getTypeSymbol(fiber.type)) {
        case PROVIDER_NUMBER:
        case PROVIDER_SYMBOL_STRING:
          return ElementTypeProvider;
        case CONTEXT_NUMBER:
        case CONTEXT_SYMBOL_STRING:
          return ElementTypeConsumer;
        case PROFILER_NUMBER:
        case PROFILER_SYMBOL_STRING:
          return ElementTypeProfiler;
        case CONCURRENT_MODE_NUMBER:
        case CONCURRENT_MODE_SYMBOL_STRING:
        case DEPRECATED_ASYNC_MODE_SYMBOL_STRING:
        case STRICT_MODE_NUMBER:
        case STRICT_MODE_SYMBOL_STRING:
        default:
          return ElementTypeOtherOrUnknown;
      }
  }
}

// Returns a stable numeric id for a fiber's `type` value. null maps to 0.
// Primitive types (which can't be WeakMap keys) are first swapped for a
// surrogate object via fiberTypeIdNonWeakRef; new types get the next seed id.
function getFiberTypeId(type: any): number {
  if (type === null) {
    return 0;
  }
  if (typeof type !== "object" && typeof type !== "function") {
    const replacement = fiberTypeIdNonWeakRef.get(type);
    if (replacement === undefined) {
      fiberTypeIdNonWeakRef.set(type, (type = {}));
    } else {
      type = replacement;
    }
  }
  let typeId = fiberTypeId.get(type);
  if (typeId === undefined) {
    fiberTypeId.set(type, (typeId = typeIdSeed++));
  }
  return typeId;
}

// Picks a human-readable name for a root by scanning its first children.
// Falls back to "Anonymous" when no child within 3 levels has a display name.
function getDisplayNameForRoot(fiber: Fiber) {
  let preferredDisplayName = null;
  let fallbackDisplayName = null;
  let child = fiber.child;
  // Go at most three levels deep into direct children
  // while searching for a child that has a displayName.
  for (let i = 0; i < 3; i++) {
    if (child === null) {
      break;
    }
    const displayName = getDisplayNameForFiber(child);
    if (displayName !== null) {
      // Prefer display names that we get from user-defined components.
      // We want to avoid using e.g. 'Suspense' unless we find nothing else.
      if (typeof child.type === "function") {
        // There's a few user-defined tags, but we'll prefer the ones
        // that are usually explicitly named (function or class components).
        preferredDisplayName = displayName;
      } else if (fallbackDisplayName === null) {
        fallbackDisplayName = displayName;
      }
    }
    if (preferredDisplayName !== null) {
      break;
    }
    child = child.child;
  }
  return preferredDisplayName || fallbackDisplayName || "Anonymous";
}

// Returns the unique ID for a Fiber or generates and caches a new one if the Fiber hasn't been seen before.
// Once this method has been called for a Fiber, untrackFiber() should always be called later to avoid leaking.
// The id is shared between a fiber and its alternate so both halves of the
// double-buffered pair resolve to the same DevTools element.
function getOrGenerateFiberId(fiber: Fiber) {
  let id: number | undefined;
  const { alternate } = fiber;
  if (fiberToId.has(fiber)) {
    id = fiberToId.get(fiber);
  } else {
    if (alternate !== null && fiberToId.has(alternate)) {
      id = fiberToId.get(alternate);
    }
  }
  if (typeof id === "undefined") {
    id = ++fiberIdSeed;
  }
  // Make sure we're tracking this Fiber
  // e.g. if it just mounted or an error was logged during initial render.
  if (!fiberToId.has(fiber)) {
    fiberToId.set(fiber, id);
    idToArbitraryFiber.set(id, fiber);
  }
  // Also make sure we're tracking its alternate,
  // e.g. in case this is the first update after mount.
  if (alternate !== null && !fiberToId.has(alternate)) {
    fiberToId.set(alternate, id);
  }
  return id;
}

// Returns an ID if one has already been generated for the Fiber or null if one has not been generated.
// Use this method while e.g. logging to avoid over-retaining Fibers.
function getFiberIdUnsafe(fiber: Fiber) {
  if (fiberToId.has(fiber)) {
    return fiberToId.get(fiber) || null;
  }
  const { alternate } = fiber;
  if (alternate !== null && fiberToId.has(alternate)) {
    return fiberToId.get(alternate) || null;
  }
  return null;
}

// Returns an ID if one has already been generated for the Fiber or throws.
function getFiberIdThrows(fiber: Fiber) {
  const id = getFiberIdUnsafe(fiber);
  if (id === null) {
    throw Error(
      `Could not find ID for Fiber "${getDisplayNameForFiber(fiber) || ""}"`
    );
  }
  return id;
}

// Resolves the id of the fiber's owner (DEV-only _debugOwner), looking one
// level through wrapper parents (ForwardRef/Memo/Lazy). Returns -1 if unknown.
function getFiberOwnerId(fiber: Fiber): number {
  const { _debugOwner = null } = fiber;
  if (_debugOwner !== null) {
    // Ideally we should call getFiberIDThrows() for _debugOwner,
    // since owners are almost always higher in the tree (and so have already been processed),
    // but in some (rare) instances reported in open source, a descendant mounts before an owner.
    // Since this is a DEV only field it's probably okay to also just lazily generate and ID here if needed.
    // See https://github.com/facebook/react/issues/21445
    return getOrGenerateFiberId(_debugOwner);
  }
  const { return: parentFiber = null } = fiber;
  if (parentFiber?._debugOwner) {
    if (
      parentFiber.tag === ForwardRef ||
      parentFiber.tag === MemoComponent ||
      parentFiber.tag === LazyComponent
    ) {
      return getFiberOwnerId(parentFiber);
    }
  }
  return -1;
}

// Reverse lookup: id -> some tracked fiber of the pair, or null.
function getFiberById(id: number) {
  return idToArbitraryFiber.get(id) || null;
}

// Delegates host-instance lookup to the renderer's own implementation.
function findFiberByHostInstance(hostInstance: NativeType) {
  return renderer.findFiberByHostInstance(hostInstance);
}

// Drops all tracking state for a fiber pair (id map and both fiber entries).
function removeFiber(fiber: Fiber) {
  idToArbitraryFiber.delete(getFiberIdUnsafe(fiber) as number);
  fiberToId.delete(fiber);
  fiberToId.delete(fiber.alternate as Fiber);
}

function isFiberRoot(fiber: Fiber) {
  return fiber.tag === HostRoot;
}

// Assigns a "name:counter" pseudo key to a root so multiple roots with the
// same display name remain distinguishable; bumps the per-name counter.
function setRootPseudoKey(id: number, fiber: Fiber) {
  const name = getDisplayNameForRoot(fiber);
  const counter = rootDisplayNameCounter.get(name) || 0;
  const pseudoKey = `${name}:${counter}`;
  rootDisplayNameCounter.set(name, counter + 1);
  rootPseudoKeys.set(id, pseudoKey);
}

function getRootPseudoKey(id: number) {
  return rootPseudoKeys.get(id) || null;
}

// Reverses setRootPseudoKey: decrements (or removes) the per-name counter
// and forgets the root's pseudo key. Throws if the root was never registered.
function removeRootPseudoKey(id: number) {
  const pseudoKey = rootPseudoKeys.get(id);
  if (pseudoKey === undefined) {
    throw new Error("Expected root pseudo key to be known.");
  }
  const name = pseudoKey.substring(0, pseudoKey.lastIndexOf(":"));
  const counter = rootDisplayNameCounter.get(name);
  if (counter === undefined) {
    throw new Error("Expected counter to be known.");
  }
  if (counter > 1) {
    rootDisplayNameCounter.set(name, counter - 1);
  } else {
    rootDisplayNameCounter.delete(name);
  }
  rootPseudoKeys.delete(id);
}

// Heuristic for "did this fiber actually render between these two versions".
function didFiberRender(prevFiber: Fiber, nextFiber: Fiber) {
  switch (nextFiber.tag) {
    case ClassComponent:
    case FunctionComponent:
    case ContextConsumer:
    case MemoComponent:
    case SimpleMemoComponent:
      // For types that execute user code, we check PerformedWork effect.
      // We don't reflect bailouts (either referential or sCU) in DevTools.
      return (getFiberFlags(nextFiber) & PerformedWork) === PerformedWork;
    // Note: ContextConsumer only gets PerformedWork effect in 16.3.3+
    // so it won't get highlighted with React 16.3.0 to 16.3.2.
    default:
      // For host components and other types, we compare inputs
      // to determine whether something is an update.
      return (
        prevFiber.memoizedProps !== nextFiber.memoizedProps ||
        prevFiber.memoizedState !== nextFiber.memoizedState ||
        prevFiber.ref !== nextFiber.ref
      );
  }
}

// Public surface of the enclosing attach/factory function.
return {
  ReactTypeOfSideEffect,
  ReactTypeOfWork,
  ReactPriorityLevels,
  getElementTypeForFiber,
  getFiberTypeId,
  getOrGenerateFiberId,
  getFiberIdThrows,
  getFiberIdUnsafe,
  getFiberById,
  getFiberOwnerId,
  removeFiber,
  getDisplayNameForFiber,
  getDisplayNameForRoot,
  isFiberRoot,
  setRootPseudoKey,
  getRootPseudoKey,
  removeRootPseudoKey,
  didFiberRender,
  shouldFilterFiber,
  findFiberByHostInstance,
};
}
the_stack
import * as Sequelize from 'sequelize';
import * as Bluebird from 'bluebird';
import { AbstractDialect } from './abstract';
import { MateriaError } from '../../error';

/**
 * SQLite-specific dialect. Because SQLite cannot ALTER most column properties,
 * schema changes (change/remove/rename column, constraints) are implemented by
 * rebuilding the table: back it up, drop it, recreate it, reimport the rows.
 */
export class SqliteDialect extends AbstractDialect {
	constructor(sequelize) {
		super(sequelize);
	}

	/**
	 * Define an entity, mapping a `Sequelize.NOW` default on date columns to
	 * SQLite's `CURRENT_TIMESTAMP` literal before delegating to the base class.
	 */
	define(entityName, cols, defOptions) {
		for (const colName in cols) {
			if (cols[colName]) {
				const col = cols[colName];
				if (col && col.defaultValue && col.defaultValue === Sequelize.NOW && col.type === 'date') {
					col.defaultValue = Sequelize.literal('CURRENT_TIMESTAMP');
				}
			}
		}
		return super.define(entityName, cols, defOptions);
	}

	/**
	 * Scans every table and returns `{ [tableName]: field[] }` where each field
	 * carries name/type plus primaryKey, unique, autoIncrement, fk, onUpdate
	 * and onDelete flags derived from describeTable, showIndex, the foreign-key
	 * pragma query and an AUTOINCREMENT probe of sqlite_master.
	 * Four queries are pushed per table, so results are read back at i*4 offsets.
	 */
	showTables(): Bluebird<any> {
		const promises = [];
		return this.sequelize.getQueryInterface().showAllTables().then((tables: Array<string>) => {
			tables.forEach(table => {
				const queryInterface = this.sequelize.getQueryInterface();
				const qg: any = this.sequelize.getQueryInterface().QueryGenerator;
				const infoQuery = queryInterface.describeTable(table);
				const indexQuery = queryInterface.showIndex(table);
				const fkQuery = qg.getForeignKeysQuery(table, 'public');
				// const fkQuery = queryInterface.getForeignKeysForTables([table]);
				// getForeignKeysForTables not working:
				// https://github.com/sequelize/sequelize/issues/5748
				const aiQuery = this.sequelize.query(
					`SELECT 1 as name FROM sqlite_master WHERE type = 'table' AND name = ?
				AND sql LIKE '%AUTOINCREMENT%'`,
					{ replacements: [table], raw: true, plain: true });
				promises.push(infoQuery);
				promises.push(indexQuery);
				// promises.push(fkQuery);
				promises.push(this.sequelize.query(fkQuery));
				promises.push(aiQuery);
			});
			return Promise.all(promises).then((result) => {
				const res = {};
				tables.forEach((table, i) => {
					// Results come back in groups of 4 per table (see pushes above).
					const info = result[i * 4];
					const indexes = result[i * 4 + 1];
					const fks = result[i * 4 + 2];
					const hasAi = result[i * 4 + 3];
					const fields = [];
					for (const name in info) {
						if (info[name]) {
							info[name].name = name;
							fields.push(info[name]);
						}
					}
					for (const field of fields) {
						// Merge index info: primary-key origin, unique single-column
						// indexes, and named unique groups (multi-column or custom 'c').
						for (const index of indexes) {
							for (const ind of index.fields) {
								if (ind.attribute == field.name) {
									field.primaryKey = field.primaryKey || index.primary || index.origin == 'pk';
									if (index.fields.length > 1 || index.origin == 'c') {
										field.unique = index.name;
									} else {
										field.unique = field.unique || index.unique;
									}
								}
							}
						}
						if (field.primaryKey) {
							if (hasAi) {
								field.autoIncrement = true;
							}
							if (field.type == 'INTEGER') {
								field.allowNull = false;
							}
						}
						// SQLite stores booleans as TINYINT(1); surface both spellings.
						if (field.type == 'TINYINT(1)') {
							field.type = ['BOOLEAN', 'TINYINT(1)'];
							if (field.defaultValue === 0) {
								field.defaultValue = [0, 'false'];
							}
							if (field.defaultValue === 1) {
								field.defaultValue = [1, 'true'];
							}
						}
						for (const fk of fks) {
							if (field.name == fk.from) {
								// Strip surrounding double quotes from pragma output.
								if (fk.table.substr(0, 1) == '"') {
									fk.table = fk.table.substr(1, fk.table.length - 2);
								}
								if (fk.to.substr(0, 1) == '"') {
									fk.to = fk.to.substr(1, fk.to.length - 2);
								}
								field.fk = {
									entity: fk.table,
									field: fk.to
								};
								field.onUpdate = fk.on_update && fk.on_update.toUpperCase();
								field.onDelete = fk.on_delete && fk.on_delete.toUpperCase();
							}
						}
						field.autoIncrement = field.autoIncrement || false; // not undefined
					}
					res[table] = fields;
				});
				return res;
			}).catch((e) => {
				const err = new MateriaError('Error when scanning database');
				err['originalError'] = e;
				throw err;
			});
		});
	}

	/**
	 * Captures a table's full definition (attributes, unique keys, FKs,
	 * AUTOINCREMENT) and returns a `tableData` handle whose `done()` rebuilds
	 * the table inside a transaction: copy rows to a temp backup table, drop,
	 * recreate from the (possibly mutated) attributes, reimport, drop backup.
	 * Callers mutate `tableData.attributes` / `options` / `rename` first.
	 */
	private _backupTmpTable(table) {
		const queryInterface = this.sequelize.getQueryInterface();
		const qg: any = this.sequelize.getQueryInterface().QueryGenerator;
		const infoQuery = queryInterface.describeTable(table);
		const indexQuery = queryInterface.showIndex(table);
		const fkQuery = this.sequelize.query(qg.getForeignKeysQuery(table, 'public'));
		const aiQuery = this.sequelize.query(
			`SELECT 1 as name FROM sqlite_master WHERE type = 'table' AND name = ? AND sql LIKE '%AUTOINCREMENT%'`,
			{ replacements: [table], raw: true, plain: true }
		);
		const promises = [infoQuery, indexQuery, fkQuery, aiQuery];
		return Promise.all(promises).then((results) => {
			const fields = results[0];
			const indexes: any = results[1];
			const fks: any = results[2];
			const hasAi = results[3];
			const uniqueKeys = [];
			for (const field_name in fields) {
				if (fields[field_name]) {
					const field = fields[field_name];
					for (const index of indexes) {
						for (const ind of index.fields) {
							if (ind.attribute == field_name) {
								field.primaryKey = field.primaryKey || index.primary || index.origin == 'pk';
								if (index.fields.length == 1) {
									field.unique = field.unique || index.unique;
								}
							}
						}
					}
					if (field.primaryKey) {
						if (hasAi) {
							field.autoIncrement = true;
						}
						if (field.type == 'INTEGER') {
							field.allowNull = false;
						}
					}
					for (const fk of fks) {
						if (field_name == fk.from) {
							if (fk.table.substr(0, 1) == '"') {
								fk.table = fk.table.substr(1, fk.table.length - 2);
							}
							if (fk.to.substr(0, 1) == '"') {
								fk.to = fk.to.substr(1, fk.to.length - 2);
							}
							// Sequelize-style reference used by createTableQuery.
							field.references = {
								model: fk.table,
								key: fk.to
							};
							field.onUpdate = fk.on_update && fk.on_update.toUpperCase();
							field.onDelete = fk.on_delete && fk.on_delete.toUpperCase();
						}
					}
				}
			}
			// Multi-column indexes become composite UNIQUE keys on recreation.
			for (const index of indexes) {
				if (index.fields.length > 1) {
					const uniqueFields = [];
					for (const ind of index.fields) {
						uniqueFields.push(ind.attribute);
					}
					uniqueKeys.push({fields: uniqueFields, name: index.name, origin: index.origin, type: 'UNIQUE'});
				}
			}
			// keep uniqueKeys / fields
			const tableData = {
				attributes: fields,
				options: {uniqueKeys: uniqueKeys},
				done: null,
				rename: null
			};
			const quotedTableName = queryInterface.quoteTable(table);
			const quotedBackupTableName = queryInterface.quoteTable(table + '_materia_backup');
			// done(): executes the actual rebuild using the (mutated) tableData.
			tableData.done = () => {
				const attributeNames = Object.keys(tableData.attributes).map(attr => qg.quoteIdentifier(attr)).join(', ');
				let attributesNameImport;
				if (tableData.rename) {
					// On rename, select the old column name aliased to the new one.
					attributesNameImport = Object.keys(tableData.attributes).map((attr) => {
						if (attr == tableData.rename.after) {
							return queryInterface.quoteIdentifier(tableData.rename.before, false) + ' AS ' + queryInterface.quoteIdentifier(attr, false);
						} else {
							return queryInterface.quoteIdentifier(attr, false);
						}
					}).join(', ');
				} else {
					attributesNameImport = attributeNames;
				}
				const attributesSql = qg.attributesToSQL(tableData.attributes);
				const subQueries = [
					() => this.sequelize.transaction(t => {
						// Sequential statements; FK enforcement is relaxed while the
						// table is dropped and recreated.
						const transactionQueries = [
							`PRAGMA foreign_keys = 0;`,
							`PRAGMA defer_foreign_keys = 1;`,
							`DROP TABLE IF EXISTS ${quotedBackupTableName};`,
							`CREATE TEMPORARY TABLE ${quotedBackupTableName} AS SELECT * FROM ${quotedTableName};`,
							`DROP TABLE ${quotedTableName};`,
							qg.createTableQuery(table, attributesSql, tableData.options),
							`INSERT INTO ${quotedTableName} SELECT ${attributesNameImport} FROM ${quotedBackupTableName};`,
							`DROP TABLE ${quotedBackupTableName};`
						].map(query => {
							return () => this.sequelize.query(query, {raw: true, transaction: t});
						});
						let promise: Promise<any> = Promise.resolve();
						for (const query of transactionQueries) {
							promise = promise.then(() => {
								return query();
							});
						}
						return promise;
					})
				];
				// Custom ('c') unique groups are recreated as separate indexes.
				for (const uniq of tableData.options.uniqueKeys) {
					if (uniq.origin == 'c') {
						subQueries.push(() => queryInterface.addIndex(table, uniq));
					}
				}
				let p = Promise.resolve();
				for (const query of subQueries) {
					p = p.then(() => {
						return query();
					});
				}
				return p;
			};
			return Promise.resolve(tableData);
		});
	}

	/**
	 * Adds a column. A NOT NULL reference column needs foreign_keys toggled off
	 * around the ALTER (see linked workaround).
	 * NOTE(review): the queries are started eagerly when pushed, so the
	 * reduce(...).then(f) chain does not actually sequence them — verify
	 * whether the PRAGMAs are guaranteed to run before/after the ALTER.
	 */
	addColumn(table, column_name, attributes): any {
		if (attributes.defaultValue === Sequelize.NOW) {
			attributes.defaultValue = new Date();
		}
		if (attributes.references && attributes.allowNull === false) {
			// Adding a not null reference:
			// http://stackoverflow.com/questions/24524153/adding-not-null-column-to-sqlite-table-with-references/24524935#24524935
			const queries = [];
			queries.push(this.sequelize.query('PRAGMA foreign_keys = 0;'));
			queries.push(super.addColumn(table, column_name, attributes));
			queries.push(this.sequelize.query('PRAGMA foreign_keys = 1;'));
			return queries.reduce((p, f) => p.then(f), Promise.resolve());
		}
		return super.addColumn(table, column_name, attributes);
	}

	/**
	 * Changes a column by merging `attributes` into the captured definition
	 * and rebuilding the table.
	 * NOTE(review): the `.default === false` check looks like it was meant to
	 * test `.defaultValue` (or an input flag) — confirm against callers.
	 */
	changeColumn(table, column_name, attributes) {
		return this._backupTmpTable(table).then((tableData) => {
			for (const k in attributes) {
				if (attributes[k]) {
					tableData.attributes[column_name][k] = attributes[k];
				}
			}
			tableData.attributes[column_name].type = (this.sequelize as any).normalizeDataType(tableData.attributes[column_name].type);
			if (tableData.attributes[column_name].default === false) {
				delete tableData.attributes[column_name].defaultValue;
			}
			return tableData.done();
		});
	}

	/** Removes a column by dropping it from the definition and rebuilding. */
	removeColumn(table, column_name): any {
		return this._backupTmpTable(table).then((tableData) => {
			delete tableData.attributes[column_name];
			return tableData.done();
		});
	}

	/** Renames a column; `tableData.rename` drives the aliased reimport. */
	renameColumn(table, column_name, column_new_name): any {
		return this._backupTmpTable(table).then((tableData) => {
			tableData.attributes[column_new_name] = tableData.attributes[column_name];
			delete tableData.attributes[column_name];
			tableData.rename = {before: column_name, after: column_new_name};
			return tableData.done();
		});
	}

	/**
	 * Adds a primary or unique constraint (single-column unique goes on the
	 * field; multi-column unique becomes a composite key) then rebuilds.
	 */
	addConstraint(table, constraint) {
		return this._backupTmpTable(table).then((tableData) => {
			if (constraint.type == 'primary') {
				for (const field of constraint.fields) {
					tableData.attributes[field].primaryKey = true;
				}
			} else if (constraint.type == 'unique') {
				if (constraint.fields.length == 1) {
					tableData.attributes[constraint.fields[0]].unique = true;
				} else {
					tableData.options.uniqueKeys.push({
						fields: constraint.fields,
						name: constraint.name,
						type: 'UNIQUE',
						origin: 'c'
					});
				}
			}
			return tableData.done();
		});
	}

	/**
	 * Drops a constraint by name (custom unique groups) or by field
	 * (primary/unique/references), rebuilding only if something changed.
	 * NOTE(review): arrays are spliced while being iterated (forEach/for..in);
	 * the `changed` guard limits this to one removal, but it is fragile —
	 * confirm behavior if multiple entries could match.
	 */
	dropConstraint(table, constraint) {
		return this._backupTmpTable(table).then((tableData) => {
			let changed = false;
			if (constraint.name) {
				let uniqGroupFields;
				// drop custom index and keep group info
				tableData.options.uniqueKeys.forEach((uniq, j) => {
					if ( ! changed && uniq.indexName == constraint.name && uniq.origin == 'c') {
						tableData.options.uniqueKeys.splice(j, 1);
						uniqGroupFields = uniq.fields.join(',');
						changed = true;
					}
				});
				// drop same index created by unique or primary
				tableData.options.uniqueKeys.forEach((uniq, j) => {
					if ( ! changed && uniqGroupFields == uniq.fields.join(',') && uniq.origin != 'c' ) {
						tableData.options.uniqueKeys.splice(j, 1);
						changed = true;
					}
				});
			} else if (constraint.field) {
				if (constraint.type == 'unique') {
					// drop field constraint from groups
					tableData.options.uniqueKeys.forEach((uniq, j) => {
						for (const i in uniq.fields) {
							if (uniq.fields[i]) {
								const field = uniq.fields[i];
								if (field == constraint.field) {
									uniq.fields.splice(i, 1);
									changed = true;
									break;
								}
							}
						}
						if (uniq.fields.length == 0) {
							tableData.options.uniqueKeys.splice(j, 1);
						}
					});
				}
				// drop constraint in field
				for (const field_name in tableData.attributes) {
					if (tableData.attributes[field_name]) {
						const field = tableData.attributes[field_name];
						if (constraint.type == 'primary') {
							if (field.primaryKey || field.unique) {
								field.primaryKey = false;
								field.unique = false;
								changed = true;
							}
						} else if (constraint.field == field_name) {
							if (constraint.type == 'unique' && field.unique) {
								field.unique = false;
								changed = true;
							} else if (constraint.type == 'references' && field.references) {
								delete field.references;
								changed = true;
							}
						}
					}
				}
			}
			if ( ! changed) {
				return Promise.resolve();
			}
			return tableData.done();
		});
	}

	/**
	 * Authenticates the connection, then enables FK enforcement for the
	 * session (fire-and-forget: the PRAGMA promise is not awaited).
	 */
	authenticate() {
		return this.sequelize.authenticate().then(() => {
			this.sequelize.query('PRAGMA foreign_keys = 1;', {raw: true});
		});
	}
}
the_stack
// NOTE(review): auto-generated Azure Cosmos DB management-plane model
// definitions (swagger/autorest output) — do not hand-edit shapes; regenerate
// instead. Referenced enum/union types (ResourceIdentityType, KeyKind, …) and
// the tail of MetricValue are declared elsewhere in this file.
import * as coreClient from "@azure/core-client";

export type BackupPolicyUnion =
  | BackupPolicy
  | PeriodicModeBackupPolicy
  | ContinuousModeBackupPolicy;

/** Identity for the resource. */
export interface ManagedServiceIdentity {
  /**
   * The principal id of the system assigned identity. This property will only be provided for a system assigned identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly principalId?: string;
  /**
   * The tenant id of the system assigned identity. This property will only be provided for a system assigned identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly tenantId?: string;
  /** The type of identity used for the resource. The type 'SystemAssigned,UserAssigned' includes both an implicitly created identity and a set of user assigned identities. The type 'None' will remove any identities from the service. */
  type?: ResourceIdentityType;
  /** The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. */
  userAssignedIdentities?: {
    [propertyName: string]: Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties;
  };
}

// Name is generated from the swagger schema path; it is the value type of
// ManagedServiceIdentity.userAssignedIdentities.
export interface Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties {
  /**
   * The principal id of user assigned identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly principalId?: string;
  /**
   * The client id of user assigned identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly clientId?: string;
}

/** IpAddressOrRange object */
export interface IpAddressOrRange {
  /** A single IPv4 address or a single IPv4 address range in CIDR format. Provided IPs must be well-formatted and cannot be contained in one of the following ranges: 10.0.0.0/8, 100.64.0.0/10, 172.16.0.0/12, 192.168.0.0/16, since these are not enforceable by the IP address filter. Example of valid inputs: “23.40.210.245” or “23.40.210.0/8”. */
  ipAddressOrRange?: string;
}

/** The consistency policy for the Cosmos DB database account. */
export interface ConsistencyPolicy {
  /** The default consistency level and configuration settings of the Cosmos DB account. */
  defaultConsistencyLevel: DefaultConsistencyLevel;
  /** When used with the Bounded Staleness consistency level, this value represents the number of stale requests tolerated. Accepted range for this value is 1 – 2,147,483,647. Required when defaultConsistencyPolicy is set to 'BoundedStaleness'. */
  maxStalenessPrefix?: number;
  /** When used with the Bounded Staleness consistency level, this value represents the time amount of staleness (in seconds) tolerated. Accepted range for this value is 5 - 86400. Required when defaultConsistencyPolicy is set to 'BoundedStaleness'. */
  maxIntervalInSeconds?: number;
}

/** Cosmos DB capability object */
export interface Capability {
  /** Name of the Cosmos DB capability. For example, "name": "EnableCassandra". Current values also include "EnableTable" and "EnableGremlin". */
  name?: string;
}

/** A region in which the Azure Cosmos DB database account is deployed. */
export interface Location {
  /**
   * The unique identifier of the region within the database account. Example: &lt;accountName&gt;-&lt;locationName&gt;.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /** The name of the region. */
  locationName?: string;
  /**
   * The connection endpoint for the specific region. Example: https://&lt;accountName&gt;-&lt;locationName&gt;.documents.azure.com:443/
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly documentEndpoint?: string;
  /**
   * The status of the Cosmos DB account at the time the operation was called. The status can be one of following. 'Creating' – the Cosmos DB account is being created. When an account is in Creating state, only properties that are specified as input for the Create Cosmos DB account operation are returned. 'Succeeded' – the Cosmos DB account is active for use. 'Updating' – the Cosmos DB account is being updated. 'Deleting' – the Cosmos DB account is being deleted. 'Failed' – the Cosmos DB account failed creation. 'DeletionFailed' – the Cosmos DB account deletion failed.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly provisioningState?: string;
  /** The failover priority of the region. A failover priority of 0 indicates a write region. The maximum value for a failover priority = (total number of regions - 1). Failover priority values must be unique for each of the regions in which the database account exists. */
  failoverPriority?: number;
  /** Flag to indicate whether or not this region is an AvailabilityZone region */
  isZoneRedundant?: boolean;
}

/** The failover policy for a given region of a database account. */
export interface FailoverPolicy {
  /**
   * The unique identifier of the region in which the database account replicates to. Example: &lt;accountName&gt;-&lt;locationName&gt;.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /** The name of the region in which the database account exists. */
  locationName?: string;
  /** The failover priority of the region. A failover priority of 0 indicates a write region. The maximum value for a failover priority = (total number of regions - 1). Failover priority values must be unique for each of the regions in which the database account exists. */
  failoverPriority?: number;
}

/** Virtual Network ACL Rule object */
export interface VirtualNetworkRule {
  /** Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}. */
  id?: string;
  /** Create firewall rule before the virtual network has vnet service endpoint enabled. */
  ignoreMissingVNetServiceEndpoint?: boolean;
}

/** Private endpoint which the connection belongs to. */
export interface PrivateEndpointProperty {
  /** Resource id of the private endpoint. */
  id?: string;
}

/** Connection State of the Private Endpoint Connection. */
export interface PrivateLinkServiceConnectionStateProperty {
  /** The private link service connection status. */
  status?: string;
  /** The private link service connection description. */
  description?: string;
  /**
   * Any action that is required beyond basic workflow (approve/ reject/ disconnect)
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly actionsRequired?: string;
}

/** Common fields that are returned in the response for all Azure Resource Manager resources */
export interface Resource {
  /**
   * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /**
   * The name of the resource
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly name?: string;
  /**
   * The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly type?: string;
}

export interface ApiProperties {
  /** Describes the ServerVersion of an a MongoDB account. */
  serverVersion?: ServerVersion;
}

/** Analytical storage specific properties. */
export interface AnalyticalStorageConfiguration {
  /** Describes the types of schema for analytical storage. */
  schemaType?: AnalyticalStorageSchemaType;
}

/** Parameters to indicate the information about the restore. */
export interface RestoreParameters {
  /** Describes the mode of the restore. */
  restoreMode?: RestoreMode;
  /** The id of the restorable database account from which the restore has to be initiated. For example: /subscriptions/{subscriptionId}/providers/Microsoft.DocumentDB/locations/{location}/restorableDatabaseAccounts/{restorableDatabaseAccountName} */
  restoreSource?: string;
  /** Time to which the account has to be restored (ISO-8601 format). */
  restoreTimestampInUtc?: Date;
  /** List of specific databases available for restore. */
  databasesToRestore?: DatabaseRestoreResource[];
}

/** Specific Databases to restore. */
export interface DatabaseRestoreResource {
  /** The name of the database available for restore. */
  databaseName?: string;
  /** The names of the collections available for restore. */
  collectionNames?: string[];
}

/** The object representing the policy for taking backups on an account. */
export interface BackupPolicy {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Periodic" | "Continuous";
  /** The object representing the state of the migration between the backup policies. */
  migrationState?: BackupPolicyMigrationState;
}

/** The object representing the state of the migration between the backup policies. */
export interface BackupPolicyMigrationState {
  /** Describes the status of migration between backup policy types. */
  status?: BackupPolicyMigrationStatus;
  /** Describes the target backup policy type of the backup policy migration. */
  targetType?: BackupPolicyType;
  /** Time at which the backup policy migration started (ISO-8601 format). */
  startTime?: Date;
}

/** The CORS policy for the Cosmos DB database account. */
export interface CorsPolicy {
  /** The origin domains that are permitted to make a request against the service via CORS. */
  allowedOrigins: string;
  /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. */
  allowedMethods?: string;
  /** The request headers that the origin domain may specify on the CORS request. */
  allowedHeaders?: string;
  /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer. */
  exposedHeaders?: string;
  /** The maximum amount time that a browser should cache the preflight OPTIONS request. */
  maxAgeInSeconds?: number;
}

/** The object that represents all properties related to capacity enforcement on an account. */
export interface Capacity {
  /** The total throughput limit imposed on the account. A totalThroughputLimit of 2000 imposes a strict limit of max throughput that can be provisioned on that account to be 2000. A totalThroughputLimit of -1 indicates no limits on provisioning of throughput. */
  totalThroughputLimit?: number;
}

/** Metadata pertaining to creation and last modification of the resource. */
export interface SystemData {
  /** The identity that created the resource. */
  createdBy?: string;
  /** The type of identity that created the resource. */
  createdByType?: CreatedByType;
  /** The timestamp of resource creation (UTC). */
  createdAt?: Date;
  /** The identity that last modified the resource. */
  lastModifiedBy?: string;
  /** The type of identity that last modified the resource. */
  lastModifiedByType?: CreatedByType;
  /** The timestamp of resource last modification (UTC) */
  lastModifiedAt?: Date;
}

/** The core properties of ARM resources. */
export interface ARMResourceProperties {
  /**
   * The unique resource identifier of the ARM resource.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /**
   * The name of the ARM resource.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly name?: string;
  /**
   * The type of Azure resource.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly type?: string;
  /** The location of the resource group to which the resource belongs. */
  location?: string;
  /** Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB". */
  tags?: { [propertyName: string]: string };
}

/** Parameters for patching Azure Cosmos DB database account properties. */
export interface DatabaseAccountUpdateParameters {
  /** Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB". */
  tags?: { [propertyName: string]: string };
  /** The location of the resource group to which the resource belongs. */
  location?: string;
  /** Identity for the resource. */
  identity?: ManagedServiceIdentity;
  /** The consistency policy for the Cosmos DB account. */
  consistencyPolicy?: ConsistencyPolicy;
  /** An array that contains the georeplication locations enabled for the Cosmos DB account. */
  locations?: Location[];
  /** List of IpRules. */
  ipRules?: IpAddressOrRange[];
  /** Flag to indicate whether to enable/disable Virtual Network ACL rules. */
  isVirtualNetworkFilterEnabled?: boolean;
  /** Enables automatic failover of the write region in the rare event that the region is unavailable due to an outage. Automatic failover will result in a new write region for the account and is chosen based on the failover priorities configured for the account. */
  enableAutomaticFailover?: boolean;
  /** List of Cosmos DB capabilities for the account */
  capabilities?: Capability[];
  /** List of Virtual Network ACL rules configured for the Cosmos DB account. */
  virtualNetworkRules?: VirtualNetworkRule[];
  /** Enables the account to write in multiple locations */
  enableMultipleWriteLocations?: boolean;
  /** Enables the cassandra connector on the Cosmos DB C* account */
  enableCassandraConnector?: boolean;
  /** The cassandra connector offer type for the Cosmos DB database C* account. */
  connectorOffer?: ConnectorOffer;
  /** Disable write operations on metadata resources (databases, containers, throughput) via account keys */
  disableKeyBasedMetadataWriteAccess?: boolean;
  /** The URI of the key vault */
  keyVaultKeyUri?: string;
  /** The default identity for accessing key vault used in features like customer managed keys. The default identity needs to be explicitly set by the users. It can be "FirstPartyIdentity", "SystemAssignedIdentity" and more. */
  defaultIdentity?: string;
  /** Whether requests from Public Network are allowed */
  publicNetworkAccess?: PublicNetworkAccess;
  /** Flag to indicate whether Free Tier is enabled. */
  enableFreeTier?: boolean;
  /** API specific properties. Currently, supported only for MongoDB API. */
  apiProperties?: ApiProperties;
  /** Flag to indicate whether to enable storage analytics. */
  enableAnalyticalStorage?: boolean;
  /** Analytical storage specific properties. */
  analyticalStorageConfiguration?: AnalyticalStorageConfiguration;
  /** The object representing the policy for taking backups on an account. */
  backupPolicy?: BackupPolicyUnion;
  /** The CORS policy for the Cosmos DB database account. */
  cors?: CorsPolicy[];
  /** Indicates what services are allowed to bypass firewall checks. */
  networkAclBypass?: NetworkAclBypass;
  /** An array that contains the Resource Ids for Network Acl Bypass for the Cosmos DB account. */
  networkAclBypassResourceIds?: string[];
  /** Opt-out of local authentication and ensure only MSI and AAD can be used exclusively for authentication. */
  disableLocalAuth?: boolean;
  /** The object that represents all properties related to capacity enforcement on an account. */
  capacity?: Capacity;
}

/** The list of new failover policies for the failover priority change. */
export interface FailoverPolicies {
  /** List of failover policies. */
  failoverPolicies: FailoverPolicy[];
}

/** The List operation response, that contains the database accounts and their properties. */
export interface DatabaseAccountsListResult {
  /**
   * List of database account and their properties.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly value?: DatabaseAccountGetResults[];
}

/** The read-only access keys for the given database account. */
export interface DatabaseAccountListReadOnlyKeysResult {
  /**
   * Base 64 encoded value of the primary read-only key.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly primaryReadonlyMasterKey?: string;
  /**
   * Base 64 encoded value of the secondary read-only key.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly secondaryReadonlyMasterKey?: string;
}

/** The connection strings for the given database account. */
export interface DatabaseAccountListConnectionStringsResult {
  /** An array that contains the connection strings for the Cosmos DB account. */
  connectionStrings?: DatabaseAccountConnectionString[];
}

/** Connection string for the Cosmos DB account */
export interface DatabaseAccountConnectionString {
  /**
   * Value of the connection string
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly connectionString?: string;
  /**
   * Description of the connection string
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly description?: string;
}

/** Cosmos DB region to online or offline. */
export interface RegionForOnlineOffline {
  /** Cosmos DB region, with spaces between words and each word capitalized. */
  region: string;
}

/** Error Response. */
export interface ErrorResponse {
  /** Error code. */
  code?: string;
  /** Error message indicating why the operation failed. */
  message?: string;
}

/** Parameters to regenerate the keys within the database account. */
export interface DatabaseAccountRegenerateKeyParameters {
  /** The access key to regenerate. */
  keyKind: KeyKind;
}

/** Result of the request to list Resource Provider operations. It contains a list of operations and a URL link to get the next set of results. */
export interface OperationListResult {
  /** List of operations supported by the Resource Provider. */
  value?: Operation[];
  /** URL to get the next set of operation list results if there are any. */
  nextLink?: string;
}

/** REST API operation */
export interface Operation {
  /** Operation name: {provider}/{resource}/{operation} */
  name?: string;
  /** The object that represents the operation. */
  display?: OperationDisplay;
}

/** The object that represents the operation. */
export interface OperationDisplay {
  /** Service provider: Microsoft.ResourceProvider */
  provider?: string;
  /** Resource on which the operation is performed: Profile, endpoint, etc. */
  resource?: string;
  /** Operation type: Read, write, delete, etc. */
  operation?: string;
  /** Description of operation */
  description?: string;
}

/** The response to a list metrics request. */
export interface MetricListResult {
  /**
   * The list of metrics for the account.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly value?: Metric[];
}

/** Metric data */
export interface Metric {
  /**
   * The start time for the metric (ISO-8601 format).
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly startTime?: Date;
  /**
   * The end time for the metric (ISO-8601 format).
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly endTime?: Date;
  /**
   * The time grain to be used to summarize the metric values.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly timeGrain?: string;
  /**
   * The unit of the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly unit?: UnitType;
  /**
   * The name information for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly name?: MetricName;
  /**
   * The metric values for the specified time window and timestep.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly metricValues?: MetricValue[];
}

/** A metric name. */
export interface MetricName {
  /**
   * The name of the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly value?: string;
  /**
   * The friendly name of the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly localizedValue?: string;
}

/** Represents metrics values. */
export interface MetricValue {
  /**
   * The number of values for the metric.
   * NOTE: This property will not be serialized.
It can only be populated by the server. */ readonly count?: number; /** * The average value of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly average?: number; /** * The max value of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly maximum?: number; /** * The min value of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly minimum?: number; /** * The metric timestamp (ISO-8601 format). * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly timestamp?: Date; /** * The total value of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly total?: number; } /** The response to a list percentile metrics request. */ export interface PercentileMetricListResult { /** * The list of percentile metrics for the account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: PercentileMetric[]; } /** Percentile Metric data */ export interface PercentileMetric { /** * The start time for the metric (ISO-8601 format). * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly startTime?: Date; /** * The end time for the metric (ISO-8601 format). * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly endTime?: Date; /** * The time grain to be used to summarize the metric values. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly timeGrain?: string; /** * The unit of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly unit?: UnitType; /** * The name information for the metric. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly name?: MetricName; /** * The percentile metric values for the specified time window and timestep. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly metricValues?: PercentileMetricValue[]; } /** The response to a list partition metrics request. */ export interface PartitionMetricListResult { /** * The list of partition-level metrics for the account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: PartitionMetric[]; } /** The response to a list usage request. */ export interface UsagesResult { /** * The list of usages for the database. A usage is a point in time metric * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: Usage[]; } /** The usage data for a usage request. */ export interface Usage { /** * The unit of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly unit?: UnitType; /** * The name information for the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: MetricName; /** * The quota period used to summarize the usage values. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly quotaPeriod?: string; /** * Maximum value for this metric * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly limit?: number; /** * Current value for this metric * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly currentValue?: number; } /** The response to a list partition level usage request. */ export interface PartitionUsagesResult { /** * The list of partition-level usages for the database. A usage is a point in time metric * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: PartitionUsage[]; } /** The response to a list metric definitions request. */ export interface MetricDefinitionsListResult { /** * The list of metric definitions for the account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: MetricDefinition[]; } /** The definition of a metric. */ export interface MetricDefinition { /** * The list of metric availabilities for the account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly metricAvailabilities?: MetricAvailability[]; /** * The primary aggregation type of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly primaryAggregationType?: PrimaryAggregationType; /** * The unit of the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly unit?: UnitType; /** * The resource uri of the database. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly resourceUri?: string; /** * The name information for the metric. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: MetricName; } /** The availability of the metric. */ export interface MetricAvailability { /** * The time grain to be used to summarize the metric values. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly timeGrain?: string; /** * The retention for the metric values. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly retention?: string; } /** The List operation response, that contains the SQL databases and their properties. */ export interface SqlDatabaseListResult { /** * List of SQL databases and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: SqlDatabaseGetResults[]; } /** Cosmos DB SQL database resource object */ export interface SqlDatabaseResource { /** Name of the Cosmos DB SQL database */ id: string; } /** The system generated resource properties associated with SQL databases, SQL containers, Gremlin databases and Gremlin graphs. */ export interface ExtendedResourceProperties { /** * A system generated property. A unique identifier. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly rid?: string; /** * A system generated property that denotes the last updated timestamp of the resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ts?: number; /** * A system generated property representing the resource etag required for optimistic concurrency control. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly etag?: string; } /** Cosmos DB options resource object */ export interface OptionsResource { /** Value of the Cosmos DB resource throughput or autoscaleSettings. Use the ThroughputSetting resource when retrieving offer details. */ throughput?: number; /** Specifies the Autoscale settings. */ autoscaleSettings?: AutoscaleSettings; } export interface AutoscaleSettings { /** Represents maximum throughput, the resource can scale up to. */ maxThroughput?: number; } /** CreateUpdateOptions are a list of key-value pairs that describe the resource. Supported keys are "If-Match", "If-None-Match", "Session-Token" and "Throughput" */ export interface CreateUpdateOptions { /** Request Units per second. For example, "throughput": 10000. */ throughput?: number; /** Specifies the Autoscale settings. */ autoscaleSettings?: AutoscaleSettings; } /** Cosmos DB resource throughput object. Either throughput is required or autoscaleSettings is required, but not both. 
*/ export interface ThroughputSettingsResource { /** Value of the Cosmos DB resource throughput. Either throughput is required or autoscaleSettings is required, but not both. */ throughput?: number; /** Cosmos DB resource for autoscale settings. Either throughput is required or autoscaleSettings is required, but not both. */ autoscaleSettings?: AutoscaleSettingsResource; /** * The minimum throughput of the resource * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly minimumThroughput?: string; /** * The throughput replace is pending * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly offerReplacePending?: string; } /** Cosmos DB provisioned throughput settings object */ export interface AutoscaleSettingsResource { /** Represents maximum throughput container can scale up to. */ maxThroughput: number; /** Cosmos DB resource auto-upgrade policy */ autoUpgradePolicy?: AutoUpgradePolicyResource; /** * Represents target maximum throughput container can scale up to once offer is no longer in pending state. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly targetMaxThroughput?: number; } /** Cosmos DB resource auto-upgrade policy */ export interface AutoUpgradePolicyResource { /** Represents throughput policy which service must adhere to for auto-upgrade */ throughputPolicy?: ThroughputPolicyResource; } /** Cosmos DB resource throughput policy */ export interface ThroughputPolicyResource { /** Determines whether the ThroughputPolicy is active or not */ isEnabled?: boolean; /** Represents the percentage by which throughput can increase every time throughput policy kicks in. */ incrementPercent?: number; } /** An error response from the service. */ export interface CloudError { /** Error Response. */ error?: ErrorResponse; } /** The List operation response, that contains the containers and their properties. 
*/ export interface SqlContainerListResult { /** * List of containers and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: SqlContainerGetResults[]; } /** Cosmos DB SQL container resource object */ export interface SqlContainerResource { /** Name of the Cosmos DB SQL container */ id: string; /** The configuration of the indexing policy. By default, the indexing is automatic for all document paths within the container */ indexingPolicy?: IndexingPolicy; /** The configuration of the partition key to be used for partitioning data into multiple partitions */ partitionKey?: ContainerPartitionKey; /** Default time to live */ defaultTtl?: number; /** The unique key policy configuration for specifying uniqueness constraints on documents in the collection in the Azure Cosmos DB service. */ uniqueKeyPolicy?: UniqueKeyPolicy; /** The conflict resolution policy for the container. */ conflictResolutionPolicy?: ConflictResolutionPolicy; /** Analytical TTL. */ analyticalStorageTtl?: number; } /** Cosmos DB indexing policy */ export interface IndexingPolicy { /** Indicates if the indexing policy is automatic */ automatic?: boolean; /** Indicates the indexing mode. */ indexingMode?: IndexingMode; /** List of paths to include in the indexing */ includedPaths?: IncludedPath[]; /** List of paths to exclude from indexing */ excludedPaths?: ExcludedPath[]; /** List of composite path list */ compositeIndexes?: CompositePath[][]; /** List of spatial specifics */ spatialIndexes?: SpatialSpec[]; } /** The paths that are included in indexing */ export interface IncludedPath { /** The path for which the indexing behavior applies to. Index paths typically start with root and end with wildcard (/path/*) */ path?: string; /** List of indexes for this path */ indexes?: Indexes[]; } /** The indexes for the path. */ export interface Indexes { /** The datatype for which the indexing behavior is applied to. 
*/ dataType?: DataType; /** The precision of the index. -1 is maximum precision. */ precision?: number; /** Indicates the type of index. */ kind?: IndexKind; } export interface ExcludedPath { /** The path to which the indexing behavior applies. Index paths typically start with root and end with wildcard (/path/*) */ path?: string; } export interface CompositePath { /** The path to which the indexing behavior applies. Index paths typically start with root and end with wildcard (/path/*) */ path?: string; /** Sort order for composite paths. */ order?: CompositePathSortOrder; } export interface SpatialSpec { /** The path to which the indexing behavior applies. Index paths typically start with root and end with wildcard (/path/*) */ path?: string; /** List of the path's spatial types */ types?: SpatialType[]; } /** The configuration of the partition key to be used for partitioning data into multiple partitions */ export interface ContainerPartitionKey { /** List of paths using which data within the container can be partitioned */ paths?: string[]; /** Indicates the kind of algorithm used for partitioning. For MultiHash, multiple partition keys (up to three maximum) are supported for container create */ kind?: PartitionKind; /** Indicates the version of the partition key definition */ version?: number; /** * Indicates if the container is using a system generated partition key * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly systemKey?: boolean; } /** The unique key policy configuration for specifying uniqueness constraints on documents in the collection in the Azure Cosmos DB service. */ export interface UniqueKeyPolicy { /** List of unique keys that enforce the uniqueness constraint on documents in the collection in the Azure Cosmos DB service. */ uniqueKeys?: UniqueKey[]; } /** The unique key that enforces the uniqueness constraint on documents in the collection in the Azure Cosmos DB service. 
*/ export interface UniqueKey { /** List of paths that must be unique for each document in the Azure Cosmos DB service */ paths?: string[]; } /** The conflict resolution policy for the container. */ export interface ConflictResolutionPolicy { /** Indicates the conflict resolution mode. */ mode?: ConflictResolutionMode; /** The conflict resolution path in the case of LastWriterWins mode. */ conflictResolutionPath?: string; /** The procedure to resolve conflicts in the case of custom mode. */ conflictResolutionProcedure?: string; } /** The List operation response, that contains the storedProcedures and their properties. */ export interface SqlStoredProcedureListResult { /** * List of storedProcedures and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: SqlStoredProcedureGetResults[]; } /** Cosmos DB SQL storedProcedure resource object */ export interface SqlStoredProcedureResource { /** Name of the Cosmos DB SQL storedProcedure */ id: string; /** Body of the Stored Procedure */ body?: string; } /** The List operation response, that contains the userDefinedFunctions and their properties. */ export interface SqlUserDefinedFunctionListResult { /** * List of userDefinedFunctions and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: SqlUserDefinedFunctionGetResults[]; } /** Cosmos DB SQL userDefinedFunction resource object */ export interface SqlUserDefinedFunctionResource { /** Name of the Cosmos DB SQL userDefinedFunction */ id: string; /** Body of the User Defined Function */ body?: string; } /** The List operation response, that contains the triggers and their properties. */ export interface SqlTriggerListResult { /** * List of triggers and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: SqlTriggerGetResults[]; } /** Cosmos DB SQL trigger resource object */ export interface SqlTriggerResource { /** Name of the Cosmos DB SQL trigger */ id: string; /** Body of the Trigger */ body?: string; /** Type of the Trigger */ triggerType?: TriggerType; /** The operation the trigger is associated with */ triggerOperation?: TriggerOperation; } /** The List operation response, that contains the MongoDB databases and their properties. */ export interface MongoDBDatabaseListResult { /** * List of MongoDB databases and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: MongoDBDatabaseGetResults[]; } /** Cosmos DB MongoDB database resource object */ export interface MongoDBDatabaseResource { /** Name of the Cosmos DB MongoDB database */ id: string; } /** The List operation response, that contains the MongoDB collections and their properties. */ export interface MongoDBCollectionListResult { /** * List of MongoDB collections and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: MongoDBCollectionGetResults[]; } /** Cosmos DB MongoDB collection resource object */ export interface MongoDBCollectionResource { /** Name of the Cosmos DB MongoDB collection */ id: string; /** A key-value pair of shard keys to be applied for the request. */ shardKey?: { [propertyName: string]: string }; /** List of index keys */ indexes?: MongoIndex[]; /** Analytical TTL. 
*/ analyticalStorageTtl?: number; } /** Cosmos DB MongoDB collection index key */ export interface MongoIndex { /** Cosmos DB MongoDB collection index keys */ key?: MongoIndexKeys; /** Cosmos DB MongoDB collection index key options */ options?: MongoIndexOptions; } /** Cosmos DB MongoDB collection resource object */ export interface MongoIndexKeys { /** List of keys for each MongoDB collection in the Azure Cosmos DB service */ keys?: string[]; } /** Cosmos DB MongoDB collection index options */ export interface MongoIndexOptions { /** Expire after seconds */ expireAfterSeconds?: number; /** Is unique or not */ unique?: boolean; } /** The List operation response, that contains the Table and their properties. */ export interface TableListResult { /** * List of Table and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: TableGetResults[]; } /** Cosmos DB table resource object */ export interface TableResource { /** Name of the Cosmos DB table */ id: string; } /** The List operation response, that contains the Cassandra keyspaces and their properties. */ export interface CassandraKeyspaceListResult { /** * List of Cassandra keyspaces and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: CassandraKeyspaceGetResults[]; } /** Cosmos DB Cassandra keyspace resource object */ export interface CassandraKeyspaceResource { /** Name of the Cosmos DB Cassandra keyspace */ id: string; } /** The List operation response, that contains the Cassandra tables and their properties. */ export interface CassandraTableListResult { /** * List of Cassandra tables and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: CassandraTableGetResults[]; } /** Cosmos DB Cassandra table resource object */ export interface CassandraTableResource { /** Name of the Cosmos DB Cassandra table */ id: string; /** Time to live of the Cosmos DB Cassandra table */ defaultTtl?: number; /** Schema of the Cosmos DB Cassandra table */ schema?: CassandraSchema; /** Analytical TTL. */ analyticalStorageTtl?: number; } /** Cosmos DB Cassandra table schema */ export interface CassandraSchema { /** List of Cassandra table columns. */ columns?: Column[]; /** List of partition key. */ partitionKeys?: CassandraPartitionKey[]; /** List of cluster key. */ clusterKeys?: ClusterKey[]; } /** Cosmos DB Cassandra table column */ export interface Column { /** Name of the Cosmos DB Cassandra table column */ name?: string; /** Type of the Cosmos DB Cassandra table column */ type?: string; } /** Cosmos DB Cassandra table partition key */ export interface CassandraPartitionKey { /** Name of the Cosmos DB Cassandra table partition key */ name?: string; } /** Cosmos DB Cassandra table cluster key */ export interface ClusterKey { /** Name of the Cosmos DB Cassandra table cluster key */ name?: string; /** Order of the Cosmos DB Cassandra table cluster key, only support "Asc" and "Desc" */ orderBy?: string; } /** The List operation response, that contains the Gremlin databases and their properties. */ export interface GremlinDatabaseListResult { /** * List of Gremlin databases and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: GremlinDatabaseGetResults[]; } /** Cosmos DB Gremlin database resource object */ export interface GremlinDatabaseResource { /** Name of the Cosmos DB Gremlin database */ id: string; } /** The List operation response, that contains the graphs and their properties. */ export interface GremlinGraphListResult { /** * List of graphs and their properties. * NOTE: This property will not be serialized. 
It can only be populated by the server. */ readonly value?: GremlinGraphGetResults[]; } /** Cosmos DB Gremlin graph resource object */ export interface GremlinGraphResource { /** Name of the Cosmos DB Gremlin graph */ id: string; /** The configuration of the indexing policy. By default, the indexing is automatic for all document paths within the graph */ indexingPolicy?: IndexingPolicy; /** The configuration of the partition key to be used for partitioning data into multiple partitions */ partitionKey?: ContainerPartitionKey; /** Default time to live */ defaultTtl?: number; /** The unique key policy configuration for specifying uniqueness constraints on documents in the collection in the Azure Cosmos DB service. */ uniqueKeyPolicy?: UniqueKeyPolicy; /** The conflict resolution policy for the graph. */ conflictResolutionPolicy?: ConflictResolutionPolicy; } /** The List operation response, that contains Cosmos DB locations and their properties. */ export interface LocationListResult { /** * List of Cosmos DB locations and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: LocationGetResult[]; } /** Cosmos DB location metadata */ export interface LocationProperties { /** * Flag indicating whether the location supports availability zones or not. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly supportsAvailabilityZone?: boolean; /** * Flag indicating whether the location is residency sensitive. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly isResidencyRestricted?: boolean; /** * The properties of available backup storage redundancies. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly backupStorageRedundancies?: BackupStorageRedundancy[]; } /** The resource model definition for an ARM proxy resource. 
It will have everything other than required location and tags */ export interface ARMProxyResource { /** * The unique resource identifier of the database account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly id?: string; /** * The name of the database account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; } /** A list of notebook workspace resources */ export interface NotebookWorkspaceListResult { /** Array of notebook workspace resources */ value?: NotebookWorkspace[]; } /** The connection info for the given notebook workspace */ export interface NotebookWorkspaceConnectionInfoResult { /** * Specifies auth token used for connecting to Notebook server (uses token-based auth). * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly authToken?: string; /** * Specifies the endpoint of Notebook server. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly notebookServerEndpoint?: string; } /** A list of private endpoint connections */ export interface PrivateEndpointConnectionListResult { /** Array of private endpoint connections */ value?: PrivateEndpointConnection[]; } /** A list of private link resources */ export interface PrivateLinkResourceListResult { /** Array of private link resources */ value?: PrivateLinkResource[]; } /** The set of data plane operations permitted through this Role Definition. */ export interface Permission { /** An array of data actions that are allowed. */ dataActions?: string[]; /** An array of data actions that are denied. */ notDataActions?: string[]; } /** Parameters to create and update an Azure Cosmos DB SQL Role Definition. 
*/ export interface SqlRoleDefinitionCreateUpdateParameters { /** A user-friendly name for the Role Definition. Must be unique for the database account. */ roleName?: string; /** Indicates whether the Role Definition was built-in or user created. */ type?: RoleDefinitionType; /** A set of fully qualified Scopes at or below which Role Assignments may be created using this Role Definition. This will allow application of this Role Definition on the entire database account or any underlying Database / Collection. Must have at least one element. Scopes higher than Database account are not enforceable as assignable Scopes. Note that resources referenced in assignable Scopes need not exist. */ assignableScopes?: string[]; /** The set of operations allowed through this Role Definition. */ permissions?: Permission[]; } /** The relevant Role Definitions. */ export interface SqlRoleDefinitionListResult { /** * List of Role Definitions and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: SqlRoleDefinitionGetResults[]; } /** Parameters to create and update an Azure Cosmos DB SQL Role Assignment. */ export interface SqlRoleAssignmentCreateUpdateParameters { /** The unique identifier for the associated Role Definition. */ roleDefinitionId?: string; /** The data plane resource path for which access is being granted through this Role Assignment. */ scope?: string; /** The unique identifier for the associated AAD principal in the AAD graph to which access is being granted through this Role Assignment. Tenant ID for the principal is inferred using the tenant associated with the subscription. */ principalId?: string; } /** The relevant Role Assignments. */ export interface SqlRoleAssignmentListResult { /** * List of Role Assignments and their properties * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: SqlRoleAssignmentGetResults[]; } /** The List operation response, that contains the restorable database accounts and their properties. */ export interface RestorableDatabaseAccountsListResult { /** * List of restorable database accounts and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: RestorableDatabaseAccountGetResult[]; } /** An Azure Cosmos DB restorable database account. */ export interface RestorableDatabaseAccountGetResult { /** * The unique resource identifier of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly id?: string; /** * The name of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; /** The location of the resource group to which the resource belongs. */ location?: string; /** The name of the global database account */ accountName?: string; /** The creation time of the restorable database account (ISO-8601 format). */ creationTime?: Date; /** The time at which the restorable database account has been deleted (ISO-8601 format). */ deletionTime?: Date; /** * The API type of the restorable database account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly apiType?: ApiType; /** * List of regions from which the database account can be restored. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly restorableLocations?: RestorableLocationResource[]; } /** Properties of the regional restorable account. */ export interface RestorableLocationResource { /** * The location of the regional restorable account. * NOTE: This property will not be serialized. 
It can only be populated by the server. */ readonly locationName?: string; /** * The instance id of the regional restorable account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly regionalDatabaseAccountInstanceId?: string; /** * The creation time of the regional restorable database account (ISO-8601 format). * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly creationTime?: Date; /** * The time at which the regional restorable database account has been deleted (ISO-8601 format). * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly deletionTime?: Date; } /** Properties of the regional restorable account. */ export interface ContinuousBackupRestoreLocation { /** The name of the continuous backup restore location. */ location?: string; } /** Backup information of a resource. */ export interface BackupInformation { /** * Information about the status of continuous backups. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly continuousBackupInformation?: ContinuousBackupInformation; } /** Information about the status of continuous backups. */ export interface ContinuousBackupInformation { /** The latest restorable timestamp for a resource. */ latestRestorableTimestamp?: string; } /** The List operation response, that contains the SQL database events and their properties. */ export interface RestorableSqlDatabasesListResult { /** * List of SQL database events and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: RestorableSqlDatabaseGetResult[]; } /** An Azure Cosmos DB SQL database event */ export interface RestorableSqlDatabaseGetResult { /** * The unique resource Identifier of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly id?: string; /** * The name of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; /** The resource of an Azure Cosmos DB SQL database event */ resource?: RestorableSqlDatabasePropertiesResource; } /** The resource of an Azure Cosmos DB SQL database event */ export interface RestorableSqlDatabasePropertiesResource { /** * A system generated property. A unique identifier. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly rid?: string; /** * The operation type of this database event. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly operationType?: OperationType; /** * The time when this database event happened. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly eventTimestamp?: string; /** * The name of the SQL database. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerId?: string; /** * The resource ID of the SQL database. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerResourceId?: string; /** Cosmos DB SQL database resource object */ database?: RestorableSqlDatabasePropertiesResourceDatabase; } /** The List operation response, that contains the SQL container events and their properties. */ export interface RestorableSqlContainersListResult { /** * List of SQL container events and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: RestorableSqlContainerGetResult[]; } /** An Azure Cosmos DB SQL container event */ export interface RestorableSqlContainerGetResult { /** * The unique resource Identifier of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly id?: string; /** * The name of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; /** The resource of an Azure Cosmos DB SQL container event */ resource?: RestorableSqlContainerPropertiesResource; } /** The resource of an Azure Cosmos DB SQL container event */ export interface RestorableSqlContainerPropertiesResource { /** * A system generated property. A unique identifier. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly rid?: string; /** * The operation type of this container event. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly operationType?: OperationType; /** * The when this container event happened. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly eventTimestamp?: string; /** * The name of this SQL container. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerId?: string; /** * The resource ID of this SQL container. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerResourceId?: string; /** Cosmos DB SQL container resource object */ container?: RestorableSqlContainerPropertiesResourceContainer; } /** The List operation response, that contains the restorable SQL resources. 
*/ export interface RestorableSqlResourcesListResult { /** * List of restorable SQL resources, including the database and collection names. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: DatabaseRestoreResource[]; } /** The List operation response, that contains the MongoDB database events and their properties. */ export interface RestorableMongodbDatabasesListResult { /** * List of MongoDB database events and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: RestorableMongodbDatabaseGetResult[]; } /** An Azure Cosmos DB MongoDB database event */ export interface RestorableMongodbDatabaseGetResult { /** * The unique resource Identifier of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly id?: string; /** * The name of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; /** The resource of an Azure Cosmos DB MongoDB database event */ resource?: RestorableMongodbDatabasePropertiesResource; } /** The resource of an Azure Cosmos DB MongoDB database event */ export interface RestorableMongodbDatabasePropertiesResource { /** * A system generated property. A unique identifier. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly rid?: string; /** * The operation type of this database event. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly operationType?: OperationType; /** * The time when this database event happened. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly eventTimestamp?: string; /** * The name of this MongoDB database. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerId?: string; /** * The resource ID of this MongoDB database. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerResourceId?: string; } /** The List operation response, that contains the MongoDB collection events and their properties. */ export interface RestorableMongodbCollectionsListResult { /** * List of MongoDB collection events and their properties. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: RestorableMongodbCollectionGetResult[]; } /** An Azure Cosmos DB MongoDB collection event */ export interface RestorableMongodbCollectionGetResult { /** * The unique resource Identifier of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly id?: string; /** * The name of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; /** The resource of an Azure Cosmos DB MongoDB collection event */ resource?: RestorableMongodbCollectionPropertiesResource; } /** The resource of an Azure Cosmos DB MongoDB collection event */ export interface RestorableMongodbCollectionPropertiesResource { /** * A system generated property. A unique identifier. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly rid?: string; /** * The operation type of this collection event. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly operationType?: OperationType; /** * The time when this collection event happened. 
* NOTE: This property will not be serialized. It can only be populated by the server. */ readonly eventTimestamp?: string; /** * The name of this MongoDB collection. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerId?: string; /** * The resource ID of this MongoDB collection. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly ownerResourceId?: string; } /** The List operation response, that contains the restorable MongoDB resources. */ export interface RestorableMongodbResourcesListResult { /** * List of restorable MongoDB resources, including the database and collection names. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly value?: DatabaseRestoreResource[]; } /** List of managed Cassandra clusters. */ export interface ListClusters { /** Container for the array of clusters. */ value?: ClusterResource[]; } /** Properties of a managed Cassandra cluster. */ export interface ClusterResourceProperties { /** The status of the resource at the time the operation was called. */ provisioningState?: ManagedCassandraProvisioningState; /** To create an empty cluster, omit this field or set it to null. To restore a backup into a new cluster, set this field to the resource id of the backup. */ restoreFromBackupId?: string; /** Resource id of a subnet that this cluster's management service should have its network interface attached to. The subnet must be routable to all subnets that will be delegated to data centers. The resource id must be of the form '/subscriptions/<subscription id>/resourceGroups/<resource group>/providers/Microsoft.Network/virtualNetworks/<virtual network>/subnets/<subnet>' */ delegatedManagementSubnetId?: string; /** Which version of Cassandra should this cluster converge to running (e.g., 3.11). When updated, the cluster may take some time to migrate to the new version. 
*/ cassandraVersion?: string; /** If you need to set the clusterName property in cassandra.yaml to something besides the resource name of the cluster, set the value to use on this property. */ clusterNameOverride?: string; /** Which authentication method Cassandra should use to authenticate clients. 'None' turns off authentication, so should not be used except in emergencies. 'Cassandra' is the default password based authentication. The default is 'Cassandra'. */ authenticationMethod?: AuthenticationMethod; /** Initial password for clients connecting as admin to the cluster. Should be changed after cluster creation. Returns null on GET. This field only applies when the authenticationMethod field is 'Cassandra'. */ initialCassandraAdminPassword?: string; /** Hostname or IP address where the Prometheus endpoint containing data about the managed Cassandra nodes can be reached. */ prometheusEndpoint?: SeedNode; /** Should automatic repairs run on this cluster? If omitted, this is true, and should stay true unless you are running a hybrid cluster where you are already doing your own repairs. */ repairEnabled?: boolean; /** List of TLS certificates used to authorize clients connecting to the cluster. All connections are TLS encrypted whether clientCertificates is set or not, but if clientCertificates is set, the managed Cassandra cluster will reject all connections not bearing a TLS client certificate that can be validated from one or more of the public certificates in this property. */ clientCertificates?: Certificate[]; /** List of TLS certificates used to authorize gossip from unmanaged data centers. The TLS certificates of all nodes in unmanaged data centers must be verifiable using one of the certificates provided in this property. */ externalGossipCertificates?: Certificate[]; /** * List of TLS certificates that unmanaged nodes must trust for gossip with managed nodes. 
All managed nodes will present TLS client certificates that are verifiable using one of the certificates provided in this property. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly gossipCertificates?: Certificate[]; /** List of IP addresses of seed nodes in unmanaged data centers. These will be added to the seed node lists of all managed nodes. */ externalSeedNodes?: SeedNode[]; /** * List of IP addresses of seed nodes in the managed data centers. These should be added to the seed node lists of all unmanaged nodes. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly seedNodes?: SeedNode[]; /** Number of hours to wait between taking a backup of the cluster. To disable backups, set this property to 0. */ hoursBetweenBackups?: number; /** Whether the cluster and associated data centers has been deallocated. */ deallocated?: boolean; /** Whether Cassandra audit logging is enabled */ cassandraAuditLoggingEnabled?: boolean; } export interface SeedNode { /** IP address of this seed node. */ ipAddress?: string; } export interface Certificate { /** PEM formatted public key. */ pem?: string; } /** The core properties of ARM resources. */ export interface ManagedCassandraARMResourceProperties { /** * The unique resource identifier of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly id?: string; /** * The name of the ARM resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly name?: string; /** * The type of Azure resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly type?: string; /** The location of the resource group to which the resource belongs. */ location?: string; /** Tags are a list of key-value pairs that describe the resource. 
These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB". */ tags?: { [propertyName: string]: string }; /** Identity for the resource. */ identity?: ManagedCassandraManagedServiceIdentity; } /** Identity for the resource. */ export interface ManagedCassandraManagedServiceIdentity { /** * The object id of the identity resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly principalId?: string; /** * The tenant id of the resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly tenantId?: string; /** The type of the resource. */ type?: ManagedCassandraResourceIdentityType; } /** Specification of which command to run where */ export interface CommandPostBody { /** The command which should be run */ command: string; /** The arguments for the command to be run */ arguments?: { [propertyName: string]: string }; /** IP address of the cassandra host to run the command on */ host: string; /** If true, stops cassandra before executing the command and then start it again */ cassandraStopStart?: boolean; /** If true, allows the command to *write* to the cassandra directory, otherwise read-only. */ readwrite?: boolean; } /** Response of /command api */ export interface CommandOutput { /** Output of the command. */ commandOutput?: string; } /** List of managed Cassandra data centers and their properties. */ export interface ListDataCenters { /** * Container for array of data centers. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ readonly value?: DataCenterResource[]; } /** Properties of a managed Cassandra data center. */ export interface DataCenterResourceProperties { /** The status of the resource at the time the operation was called. */ provisioningState?: ManagedCassandraProvisioningState; /** The region this data center should be created in. */ dataCenterLocation?: string; /** Resource id of a subnet the nodes in this data center should have their network interfaces connected to. The subnet must be in the same region specified in 'dataCenterLocation' and must be able to route to the subnet specified in the cluster's 'delegatedManagementSubnetId' property. This resource id will be of the form '/subscriptions/<subscription id>/resourceGroups/<resource group>/providers/Microsoft.Network/virtualNetworks/<virtual network>/subnets/<subnet>'. */ delegatedSubnetId?: string; /** The number of nodes the data center should have. This is the desired number. After it is set, it may take some time for the data center to be scaled to match. To monitor the number of nodes and their status, use the fetchNodeStatus method on the cluster. */ nodeCount?: number; /** * IP addresses for seed nodes in this data center. This is for reference. Generally you will want to use the seedNodes property on the cluster, which aggregates the seed nodes from all data centers in the cluster. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly seedNodes?: SeedNode[]; /** A fragment of a cassandra.yaml configuration file to be included in the cassandra.yaml for all nodes in this data center. The fragment should be Base64 encoded, and only a subset of keys are allowed. */ base64EncodedCassandraYamlFragment?: string; /** Key uri to use for encryption of managed disks. Ensure the system assigned identity of the cluster has been assigned appropriate permissions(key get/wrap/unwrap permissions) on the key. 
*/ managedDiskCustomerKeyUri?: string; /** Indicates the Key Uri of the customer key to use for encryption of the backup storage account. */ backupStorageCustomerKeyUri?: string; /** Virtual Machine SKU used for data centers. Default value is Standard_DS14_v2 */ sku?: string; /** Disk SKU used for data centers. Default value is P30. */ diskSku?: string; /** Number of disk used for data centers. Default value is 4. */ diskCapacity?: number; /** If the azure data center has Availability Zone support, apply it to the Virtual Machine ScaleSet that host the cassandra data center virtual machines. */ availabilityZone?: boolean; } /** Properties of a managed Cassandra cluster public status. */ export interface CassandraClusterPublicStatus { eTag?: string; reaperStatus?: ManagedCassandraReaperStatus; /** List relevant information about any connection errors to the Datacenters. */ connectionErrors?: ConnectionError[]; /** List of the status of each datacenter in this cluster. */ dataCenters?: CassandraClusterPublicStatusDataCentersItem[]; } export interface ManagedCassandraReaperStatus { healthy?: boolean; /** Dictionary of <string> */ repairRunIds?: { [propertyName: string]: string }; /** Dictionary of <string> */ repairSchedules?: { [propertyName: string]: string }; } export interface ConnectionError { /** The kind of connection error that occurred. */ connectionState?: ConnectionState; /** The IP of host that originated the failed connection. */ iPFrom?: string; /** The IP that the connection attempted to reach. */ iPTo?: string; /** The TCP port the connection was attempted on. */ port?: number; /** Detailed error message about the failed connection. */ exception?: string; } export interface CassandraClusterPublicStatusDataCentersItem { /** The name of this Datacenter. */ name?: string; /** A list of all seed nodes in the cluster, managed and unmanaged. 
*/ seedNodes?: string[]; nodes?: ComponentsM9L909SchemasCassandraclusterpublicstatusPropertiesDatacentersItemsPropertiesNodesItems[]; } export interface ComponentsM9L909SchemasCassandraclusterpublicstatusPropertiesDatacentersItemsPropertiesNodesItems { /** The node's IP address. */ address?: string; /** The state of the node in Cassandra ring. */ state?: NodeState; status?: string; /** The amount of file system data in the data directory (e.g., 47.66 kB), excluding all content in the snapshots subdirectories. Because all SSTable data files are included, any data that is not cleaned up (such as TTL-expired cells or tombstones) is counted. */ load?: string; /** List of tokens this node covers. */ tokens?: string[]; size?: number; /** The network ID of the node. */ hostID?: string; /** The rack this node is part of. */ rack?: string; /** The timestamp when these statistics were captured. */ timestamp?: string; /** The amount of disk used, in kB, of the directory /var/lib/cassandra. */ diskUsedKB?: number; /** The amount of disk free, in kB, of the directory /var/lib/cassandra. */ diskFreeKB?: number; /** Used memory (calculated as total - free - buffers - cache), in kB. */ memoryUsedKB?: number; /** Memory used by kernel buffers (Buffers in /proc/meminfo) and page cache and slabs (Cached and SReclaimable in /proc/meminfo), in kB. */ memoryBuffersAndCachedKB?: number; /** Unused memory (MemFree and SwapFree in /proc/meminfo), in kB. */ memoryFreeKB?: number; /** Total installed memory (MemTotal and SwapTotal in /proc/meminfo), in kB. */ memoryTotalKB?: number; /** A float representing the current system-wide CPU utilization as a percentage. 
*/ cpuUsage?: number; } /** Configuration values for periodic mode backup */ export interface PeriodicModeProperties { /** An integer representing the interval in minutes between two backups */ backupIntervalInMinutes?: number; /** An integer representing the time (in hours) that each backup is retained */ backupRetentionIntervalInHours?: number; /** Enum to indicate type of backup residency */ backupStorageRedundancy?: BackupStorageRedundancy; } /** The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location */ export type ProxyResource = Resource & {}; /** The object representing periodic mode backup policy. */ export type PeriodicModeBackupPolicy = BackupPolicy & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Periodic"; /** Configuration values for periodic mode backup */ periodicModeProperties?: PeriodicModeProperties; }; /** The object representing continuous mode backup policy. */ export type ContinuousModeBackupPolicy = BackupPolicy & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Continuous"; }; /** An Azure Cosmos DB database account. */ export type DatabaseAccountGetResults = ARMResourceProperties & { /** Indicates the type of database account. This can only be set at database account creation. */ kind?: DatabaseAccountKind; /** Identity for the resource. */ identity?: ManagedServiceIdentity; /** * The system meta data relating to this resource. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly systemData?: SystemData; /** * The status of the Cosmos DB account at the time the operation was called. The status can be one of following. 'Creating' – the Cosmos DB account is being created. When an account is in Creating state, only properties that are specified as input for the Create Cosmos DB account operation are returned. 
'Succeeded' – the Cosmos DB account is active for use. 'Updating' – the Cosmos DB account is being updated. 'Deleting' – the Cosmos DB account is being deleted. 'Failed' – the Cosmos DB account failed creation. 'DeletionFailed' – the Cosmos DB account deletion failed. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly provisioningState?: string; /** * The connection endpoint for the Cosmos DB database account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly documentEndpoint?: string; /** * The offer type for the Cosmos DB database account. Default value: Standard. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly databaseAccountOfferType?: "Standard"; /** List of IpRules. */ ipRules?: IpAddressOrRange[]; /** Flag to indicate whether to enable/disable Virtual Network ACL rules. */ isVirtualNetworkFilterEnabled?: boolean; /** Enables automatic failover of the write region in the rare event that the region is unavailable due to an outage. Automatic failover will result in a new write region for the account and is chosen based on the failover priorities configured for the account. */ enableAutomaticFailover?: boolean; /** The consistency policy for the Cosmos DB database account. */ consistencyPolicy?: ConsistencyPolicy; /** List of Cosmos DB capabilities for the account */ capabilities?: Capability[]; /** * An array that contains the write location for the Cosmos DB account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly writeLocations?: Location[]; /** * An array that contains of the read locations enabled for the Cosmos DB account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly readLocations?: Location[]; /** * An array that contains all of the locations enabled for the Cosmos DB account. 
* NOTE: This property will not be serialized. It can only be populated by the server. */ readonly locations?: Location[]; /** * An array that contains the regions ordered by their failover priorities. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly failoverPolicies?: FailoverPolicy[]; /** List of Virtual Network ACL rules configured for the Cosmos DB account. */ virtualNetworkRules?: VirtualNetworkRule[]; /** * List of Private Endpoint Connections configured for the Cosmos DB account. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly privateEndpointConnections?: PrivateEndpointConnection[]; /** Enables the account to write in multiple locations */ enableMultipleWriteLocations?: boolean; /** Enables the cassandra connector on the Cosmos DB C* account */ enableCassandraConnector?: boolean; /** The cassandra connector offer type for the Cosmos DB database C* account. */ connectorOffer?: ConnectorOffer; /** Disable write operations on metadata resources (databases, containers, throughput) via account keys */ disableKeyBasedMetadataWriteAccess?: boolean; /** The URI of the key vault */ keyVaultKeyUri?: string; /** The default identity for accessing key vault used in features like customer managed keys. The default identity needs to be explicitly set by the users. It can be "FirstPartyIdentity", "SystemAssignedIdentity" and more. */ defaultIdentity?: string; /** Whether requests from Public Network are allowed */ publicNetworkAccess?: PublicNetworkAccess; /** Flag to indicate whether Free Tier is enabled. */ enableFreeTier?: boolean; /** API specific properties. */ apiProperties?: ApiProperties; /** Flag to indicate whether to enable storage analytics. */ enableAnalyticalStorage?: boolean; /** Analytical storage specific properties. 
*/
  analyticalStorageConfiguration?: AnalyticalStorageConfiguration;
  /**
   * A unique identifier assigned to the database account
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly instanceId?: string;
  /** Enum to indicate the mode of account creation. */
  createMode?: CreateMode;
  /** Parameters to indicate the information about the restore. */
  restoreParameters?: RestoreParameters;
  /** The object representing the policy for taking backups on an account. */
  backupPolicy?: BackupPolicyUnion;
  /** The CORS policy for the Cosmos DB database account. */
  cors?: CorsPolicy[];
  /** Indicates what services are allowed to bypass firewall checks. */
  networkAclBypass?: NetworkAclBypass;
  /** An array that contains the Resource Ids for Network Acl Bypass for the Cosmos DB account. */
  networkAclBypassResourceIds?: string[];
  /** Opt-out of local authentication and ensure only MSI and AAD can be used exclusively for authentication. */
  disableLocalAuth?: boolean;
  /** The object that represents all properties related to capacity enforcement on an account. */
  capacity?: Capacity;
};

/** Parameters to create and update Cosmos DB database accounts. */
export type DatabaseAccountCreateUpdateParameters = ARMResourceProperties & {
  /** Indicates the type of database account. This can only be set at database account creation. */
  kind?: DatabaseAccountKind;
  /** Identity for the resource. */
  identity?: ManagedServiceIdentity;
  /** The consistency policy for the Cosmos DB account. */
  consistencyPolicy?: ConsistencyPolicy;
  /** An array that contains the georeplication locations enabled for the Cosmos DB account. */
  locations: Location[];
  /** The offer type for the database */
  databaseAccountOfferType: "Standard";
  /** List of IpRules. */
  ipRules?: IpAddressOrRange[];
  /** Flag to indicate whether to enable/disable Virtual Network ACL rules. */
  isVirtualNetworkFilterEnabled?: boolean;
  /** Enables automatic failover of the write region in the rare event that the region is unavailable due to an outage. Automatic failover will result in a new write region for the account and is chosen based on the failover priorities configured for the account. */
  enableAutomaticFailover?: boolean;
  /** List of Cosmos DB capabilities for the account */
  capabilities?: Capability[];
  /** List of Virtual Network ACL rules configured for the Cosmos DB account. */
  virtualNetworkRules?: VirtualNetworkRule[];
  /** Enables the account to write in multiple locations */
  enableMultipleWriteLocations?: boolean;
  /** Enables the cassandra connector on the Cosmos DB C* account */
  enableCassandraConnector?: boolean;
  /** The cassandra connector offer type for the Cosmos DB database C* account. */
  connectorOffer?: ConnectorOffer;
  /** Disable write operations on metadata resources (databases, containers, throughput) via account keys */
  disableKeyBasedMetadataWriteAccess?: boolean;
  /** The URI of the key vault */
  keyVaultKeyUri?: string;
  /** The default identity for accessing key vault used in features like customer managed keys. The default identity needs to be explicitly set by the users. It can be "FirstPartyIdentity", "SystemAssignedIdentity" and more. */
  defaultIdentity?: string;
  /** Whether requests from Public Network are allowed */
  publicNetworkAccess?: PublicNetworkAccess;
  /** Flag to indicate whether Free Tier is enabled. */
  enableFreeTier?: boolean;
  /** API specific properties. Currently, supported only for MongoDB API. */
  apiProperties?: ApiProperties;
  /** Flag to indicate whether to enable storage analytics. */
  enableAnalyticalStorage?: boolean;
  /** Analytical storage specific properties. */
  analyticalStorageConfiguration?: AnalyticalStorageConfiguration;
  /** Enum to indicate the mode of account creation. */
  createMode?: CreateMode;
  /** The object representing the policy for taking backups on an account. */
  backupPolicy?: BackupPolicyUnion;
  /** The CORS policy for the Cosmos DB database account. */
  cors?: CorsPolicy[];
  /** Indicates what services are allowed to bypass firewall checks. */
  networkAclBypass?: NetworkAclBypass;
  /** An array that contains the Resource Ids for Network Acl Bypass for the Cosmos DB account. */
  networkAclBypassResourceIds?: string[];
  /** Opt-out of local authentication and ensure only MSI and AAD can be used exclusively for authentication. */
  disableLocalAuth?: boolean;
  /** Parameters to indicate the information about the restore. */
  restoreParameters?: RestoreParameters;
  /** The object that represents all properties related to capacity enforcement on an account. */
  capacity?: Capacity;
};

/** An Azure Cosmos DB SQL database. */
export type SqlDatabaseGetResults = ARMResourceProperties & {
  resource?: SqlDatabaseGetPropertiesResource;
  options?: SqlDatabaseGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB SQL database. */
export type SqlDatabaseCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a SQL database */
  resource: SqlDatabaseResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB resource throughput. */
export type ThroughputSettingsGetResults = ARMResourceProperties & {
  resource?: ThroughputSettingsGetPropertiesResource;
};

/** Parameters to update Cosmos DB resource throughput. */
export type ThroughputSettingsUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a resource throughput */
  resource: ThroughputSettingsResource;
};

/** An Azure Cosmos DB container. */
export type SqlContainerGetResults = ARMResourceProperties & {
  resource?: SqlContainerGetPropertiesResource;
  options?: SqlContainerGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB container.
*/
export type SqlContainerCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a container */
  resource: SqlContainerResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB storedProcedure. */
export type SqlStoredProcedureGetResults = ARMResourceProperties & {
  resource?: SqlStoredProcedureGetPropertiesResource;
};

/** Parameters to create and update Cosmos DB storedProcedure. */
export type SqlStoredProcedureCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a storedProcedure */
  resource: SqlStoredProcedureResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB userDefinedFunction. */
export type SqlUserDefinedFunctionGetResults = ARMResourceProperties & {
  resource?: SqlUserDefinedFunctionGetPropertiesResource;
};

/** Parameters to create and update Cosmos DB userDefinedFunction. */
export type SqlUserDefinedFunctionCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a userDefinedFunction */
  resource: SqlUserDefinedFunctionResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB trigger. */
export type SqlTriggerGetResults = ARMResourceProperties & {
  resource?: SqlTriggerGetPropertiesResource;
};

/** Parameters to create and update Cosmos DB trigger. */
export type SqlTriggerCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a trigger */
  resource: SqlTriggerResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB MongoDB database. */
export type MongoDBDatabaseGetResults = ARMResourceProperties & {
  resource?: MongoDBDatabaseGetPropertiesResource;
  options?: MongoDBDatabaseGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB MongoDB database. */
export type MongoDBDatabaseCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a MongoDB database */
  resource: MongoDBDatabaseResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB MongoDB collection. */
export type MongoDBCollectionGetResults = ARMResourceProperties & {
  resource?: MongoDBCollectionGetPropertiesResource;
  options?: MongoDBCollectionGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB MongoDB collection. */
export type MongoDBCollectionCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a MongoDB collection */
  resource: MongoDBCollectionResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB Table. */
export type TableGetResults = ARMResourceProperties & {
  resource?: TableGetPropertiesResource;
  options?: TableGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB Table. */
export type TableCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a Table */
  resource: TableResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB Cassandra keyspace.
*/
export type CassandraKeyspaceGetResults = ARMResourceProperties & {
  resource?: CassandraKeyspaceGetPropertiesResource;
  options?: CassandraKeyspaceGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB Cassandra keyspace. */
export type CassandraKeyspaceCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a Cassandra keyspace */
  resource: CassandraKeyspaceResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB Cassandra table. */
export type CassandraTableGetResults = ARMResourceProperties & {
  resource?: CassandraTableGetPropertiesResource;
  options?: CassandraTableGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB Cassandra table. */
export type CassandraTableCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a Cassandra table */
  resource: CassandraTableResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB Gremlin database. */
export type GremlinDatabaseGetResults = ARMResourceProperties & {
  resource?: GremlinDatabaseGetPropertiesResource;
  options?: GremlinDatabaseGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB Gremlin database. */
export type GremlinDatabaseCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a Gremlin database */
  resource: GremlinDatabaseResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** An Azure Cosmos DB Gremlin graph. */
export type GremlinGraphGetResults = ARMResourceProperties & {
  resource?: GremlinGraphGetPropertiesResource;
  options?: GremlinGraphGetPropertiesOptions;
};

/** Parameters to create and update Cosmos DB Gremlin graph. */
export type GremlinGraphCreateUpdateParameters = ARMResourceProperties & {
  /** The standard JSON format of a Gremlin graph */
  resource: GremlinGraphResource;
  /** A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request. */
  options?: CreateUpdateOptions;
};

/** The access keys for the given database account. */
export type DatabaseAccountListKeysResult = DatabaseAccountListReadOnlyKeysResult & {
  /**
   * Base 64 encoded value of the primary read-write key.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly primaryMasterKey?: string;
  /**
   * Base 64 encoded value of the secondary read-write key.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly secondaryMasterKey?: string;
};

/** The metric values for a single partition. */
export type PartitionMetric = Metric & {
  /**
   * The partition id (GUID identifier) of the metric values.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly partitionId?: string;
  /**
   * The partition key range id (integer identifier) of the metric values.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly partitionKeyRangeId?: string;
};

/** Represents percentile metrics values. */
export type PercentileMetricValue = MetricValue & {
  /**
   * The 10th percentile value for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p10?: number;
  /**
   * The 25th percentile value for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p25?: number;
  /**
   * The 50th percentile value for the metric.
* NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p50?: number;
  /**
   * The 75th percentile value for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p75?: number;
  /**
   * The 90th percentile value for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p90?: number;
  /**
   * The 95th percentile value for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p95?: number;
  /**
   * The 99th percentile value for the metric.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly p99?: number;
};

/** The partition level usage data for a usage request. */
export type PartitionUsage = Usage & {
  /**
   * The partition id (GUID identifier) of the usages.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly partitionId?: string;
  /**
   * The partition key range id (integer identifier) of the usages.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly partitionKeyRangeId?: string;
};

export type SqlDatabaseGetPropertiesResource = SqlDatabaseResource &
  ExtendedResourceProperties & {
    /** A system generated property that specifies the addressable path of the collections resource. */
    colls?: string;
    /** A system generated property that specifies the addressable path of the users resource. */
    users?: string;
  };

/** Cosmos DB SQL database resource object */
export type RestorableSqlDatabasePropertiesResourceDatabase = SqlDatabaseResource &
  ExtendedResourceProperties & {
    /**
     * A system generated property that specifies the addressable path of the collections resource.
     * NOTE: This property will not be serialized. It can only be populated by the server.
     */
    readonly colls?: string;
    /**
     * A system generated property that specifies the addressable path of the users resource.
     * NOTE: This property will not be serialized. It can only be populated by the server.
     */
    readonly users?: string;
    /**
     * A system generated property that specifies the addressable path of the database resource.
     * NOTE: This property will not be serialized. It can only be populated by the server.
     */
    readonly self?: string;
  };

export type ThroughputSettingsGetPropertiesResource = ThroughputSettingsResource &
  ExtendedResourceProperties & {};

export type SqlContainerGetPropertiesResource = SqlContainerResource &
  ExtendedResourceProperties & {};

export type SqlStoredProcedureGetPropertiesResource = SqlStoredProcedureResource &
  ExtendedResourceProperties & {};

export type SqlUserDefinedFunctionGetPropertiesResource = SqlUserDefinedFunctionResource &
  ExtendedResourceProperties & {};

export type SqlTriggerGetPropertiesResource = SqlTriggerResource &
  ExtendedResourceProperties & {};

export type MongoDBDatabaseGetPropertiesResource = MongoDBDatabaseResource &
  ExtendedResourceProperties & {};

export type MongoDBCollectionGetPropertiesResource = MongoDBCollectionResource &
  ExtendedResourceProperties & {};

export type TableGetPropertiesResource = TableResource & ExtendedResourceProperties & {};

export type CassandraKeyspaceGetPropertiesResource = CassandraKeyspaceResource &
  ExtendedResourceProperties & {};

export type CassandraTableGetPropertiesResource = CassandraTableResource &
  ExtendedResourceProperties & {};

export type GremlinDatabaseGetPropertiesResource = GremlinDatabaseResource &
  ExtendedResourceProperties & {};

export type GremlinGraphGetPropertiesResource = GremlinGraphResource &
  ExtendedResourceProperties & {};

/** Cosmos DB SQL container resource object */
export type RestorableSqlContainerPropertiesResourceContainer = SqlContainerResource &
  ExtendedResourceProperties & {
    /**
     * A system generated property that specifies the addressable path of the container resource.
     * NOTE: This property will not be serialized. It can only be populated by the server.
     */
    readonly self?: string;
  };

export type SqlDatabaseGetPropertiesOptions = OptionsResource & {};

export type SqlContainerGetPropertiesOptions = OptionsResource & {};

export type MongoDBDatabaseGetPropertiesOptions = OptionsResource & {};

export type MongoDBCollectionGetPropertiesOptions = OptionsResource & {};

export type TableGetPropertiesOptions = OptionsResource & {};

export type CassandraKeyspaceGetPropertiesOptions = OptionsResource & {};

export type CassandraTableGetPropertiesOptions = OptionsResource & {};

export type GremlinDatabaseGetPropertiesOptions = OptionsResource & {};

export type GremlinGraphGetPropertiesOptions = OptionsResource & {};

/** Cosmos DB location get result */
export type LocationGetResult = ARMProxyResource & {
  /** Cosmos DB location metadata */
  properties?: LocationProperties;
};

/** A notebook workspace resource */
export type NotebookWorkspace = ARMProxyResource & {
  /**
   * Specifies the endpoint of Notebook server.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly notebookServerEndpoint?: string;
  /**
   * Status of the notebook workspace. Possible values are: Creating, Online, Deleting, Failed, Updating.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly status?: string;
};

/** Parameters to create a notebook workspace resource */
export type NotebookWorkspaceCreateUpdateParameters = ARMProxyResource & {};

/** A private link resource */
export type PrivateLinkResource = ARMProxyResource & {
  /**
   * The private link resource group id.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly groupId?: string;
  /**
   * The private link resource required member names.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly requiredMembers?: string[];
  /**
   * The private link resource required zone names.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly requiredZoneNames?: string[];
};

/** An Azure Cosmos DB SQL Role Definition. */
export type SqlRoleDefinitionGetResults = ARMProxyResource & {
  /** A user-friendly name for the Role Definition. Must be unique for the database account. */
  roleName?: string;
  /** Indicates whether the Role Definition was built-in or user created. */
  typePropertiesType?: RoleDefinitionType;
  /** A set of fully qualified Scopes at or below which Role Assignments may be created using this Role Definition. This will allow application of this Role Definition on the entire database account or any underlying Database / Collection. Must have at least one element. Scopes higher than Database account are not enforceable as assignable Scopes. Note that resources referenced in assignable Scopes need not exist. */
  assignableScopes?: string[];
  /** The set of operations allowed through this Role Definition. */
  permissions?: Permission[];
};

/** An Azure Cosmos DB Role Assignment */
export type SqlRoleAssignmentGetResults = ARMProxyResource & {
  /** The unique identifier for the associated Role Definition. */
  roleDefinitionId?: string;
  /** The data plane resource path for which access is being granted through this Role Assignment. */
  scope?: string;
  /** The unique identifier for the associated AAD principal in the AAD graph to which access is being granted through this Role Assignment. Tenant ID for the principal is inferred using the tenant associated with the subscription. */
  principalId?: string;
};

/** A managed Cassandra data center. */
export type DataCenterResource = ARMProxyResource & {
  /** Properties of a managed Cassandra data center. */
  properties?: DataCenterResourceProperties;
};

/** Representation of a managed Cassandra cluster.
*/
export type ClusterResource = ManagedCassandraARMResourceProperties & {
  /** Properties of a managed Cassandra cluster. */
  properties?: ClusterResourceProperties;
};

/** A private endpoint connection */
export type PrivateEndpointConnection = ProxyResource & {
  /** Private endpoint which the connection belongs to. */
  privateEndpoint?: PrivateEndpointProperty;
  /** Connection State of the Private Endpoint Connection. */
  privateLinkServiceConnectionState?: PrivateLinkServiceConnectionStateProperty;
  /** Group id of the private endpoint. */
  groupId?: string;
  /** Provisioning state of the private endpoint. */
  provisioningState?: string;
};

/** Known values of {@link DatabaseAccountKind} that the service accepts. */
export enum KnownDatabaseAccountKind {
  GlobalDocumentDB = "GlobalDocumentDB",
  MongoDB = "MongoDB",
  Parse = "Parse"
}

/**
 * Defines values for DatabaseAccountKind. \
 * {@link KnownDatabaseAccountKind} can be used interchangeably with DatabaseAccountKind,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **GlobalDocumentDB** \
 * **MongoDB** \
 * **Parse**
 */
export type DatabaseAccountKind = string;

/** Known values of {@link ConnectorOffer} that the service accepts. */
export enum KnownConnectorOffer {
  Small = "Small"
}

/**
 * Defines values for ConnectorOffer. \
 * {@link KnownConnectorOffer} can be used interchangeably with ConnectorOffer,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Small**
 */
export type ConnectorOffer = string;

/** Known values of {@link PublicNetworkAccess} that the service accepts. */
export enum KnownPublicNetworkAccess {
  Enabled = "Enabled",
  Disabled = "Disabled"
}

/**
 * Defines values for PublicNetworkAccess. \
 * {@link KnownPublicNetworkAccess} can be used interchangeably with PublicNetworkAccess,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Enabled** \
 * **Disabled**
 */
export type PublicNetworkAccess = string;

/** Known values of {@link ServerVersion} that the service accepts. */
export enum KnownServerVersion {
  Three2 = "3.2",
  Three6 = "3.6",
  Four0 = "4.0"
}

/**
 * Defines values for ServerVersion. \
 * {@link KnownServerVersion} can be used interchangeably with ServerVersion,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **3.2** \
 * **3.6** \
 * **4.0**
 */
export type ServerVersion = string;

/** Known values of {@link AnalyticalStorageSchemaType} that the service accepts. */
export enum KnownAnalyticalStorageSchemaType {
  WellDefined = "WellDefined",
  FullFidelity = "FullFidelity"
}

/**
 * Defines values for AnalyticalStorageSchemaType. \
 * {@link KnownAnalyticalStorageSchemaType} can be used interchangeably with AnalyticalStorageSchemaType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **WellDefined** \
 * **FullFidelity**
 */
export type AnalyticalStorageSchemaType = string;

/** Known values of {@link CreateMode} that the service accepts. */
export enum KnownCreateMode {
  Default = "Default",
  Restore = "Restore"
}

/**
 * Defines values for CreateMode. \
 * {@link KnownCreateMode} can be used interchangeably with CreateMode,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Default** \
 * **Restore**
 */
export type CreateMode = string;

/** Known values of {@link RestoreMode} that the service accepts. */
export enum KnownRestoreMode {
  PointInTime = "PointInTime"
}

/**
 * Defines values for RestoreMode. \
 * {@link KnownRestoreMode} can be used interchangeably with RestoreMode,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **PointInTime**
 */
export type RestoreMode = string;

/** Known values of {@link BackupPolicyType} that the service accepts. */
export enum KnownBackupPolicyType {
  Periodic = "Periodic",
  Continuous = "Continuous"
}

/**
 * Defines values for BackupPolicyType. \
 * {@link KnownBackupPolicyType} can be used interchangeably with BackupPolicyType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Periodic** \
 * **Continuous**
 */
export type BackupPolicyType = string;

/** Known values of {@link BackupPolicyMigrationStatus} that the service accepts. */
export enum KnownBackupPolicyMigrationStatus {
  Invalid = "Invalid",
  InProgress = "InProgress",
  Completed = "Completed",
  Failed = "Failed"
}

/**
 * Defines values for BackupPolicyMigrationStatus. \
 * {@link KnownBackupPolicyMigrationStatus} can be used interchangeably with BackupPolicyMigrationStatus,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Invalid** \
 * **InProgress** \
 * **Completed** \
 * **Failed**
 */
export type BackupPolicyMigrationStatus = string;

/** Known values of {@link CreatedByType} that the service accepts. */
export enum KnownCreatedByType {
  User = "User",
  Application = "Application",
  ManagedIdentity = "ManagedIdentity",
  Key = "Key"
}

/**
 * Defines values for CreatedByType. \
 * {@link KnownCreatedByType} can be used interchangeably with CreatedByType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **User** \
 * **Application** \
 * **ManagedIdentity** \
 * **Key**
 */
export type CreatedByType = string;

/** Known values of {@link KeyKind} that the service accepts. */
export enum KnownKeyKind {
  Primary = "primary",
  Secondary = "secondary",
  PrimaryReadonly = "primaryReadonly",
  SecondaryReadonly = "secondaryReadonly"
}

/**
 * Defines values for KeyKind. \
 * {@link KnownKeyKind} can be used interchangeably with KeyKind,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **primary** \
 * **secondary** \
 * **primaryReadonly** \
 * **secondaryReadonly**
 */
export type KeyKind = string;

/** Known values of {@link UnitType} that the service accepts. */
export enum KnownUnitType {
  Count = "Count",
  Bytes = "Bytes",
  Seconds = "Seconds",
  Percent = "Percent",
  CountPerSecond = "CountPerSecond",
  BytesPerSecond = "BytesPerSecond",
  Milliseconds = "Milliseconds"
}

/**
 * Defines values for UnitType. \
 * {@link KnownUnitType} can be used interchangeably with UnitType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Count** \
 * **Bytes** \
 * **Seconds** \
 * **Percent** \
 * **CountPerSecond** \
 * **BytesPerSecond** \
 * **Milliseconds**
 */
export type UnitType = string;

/** Known values of {@link PrimaryAggregationType} that the service accepts. */
export enum KnownPrimaryAggregationType {
  None = "None",
  Average = "Average",
  Total = "Total",
  Minimum = "Minimum",
  Maximum = "Maximum",
  Last = "Last"
}

/**
 * Defines values for PrimaryAggregationType. \
 * {@link KnownPrimaryAggregationType} can be used interchangeably with PrimaryAggregationType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **None** \
 * **Average** \
 * **Total** \
 * **Minimum** \
 * **Maximum** \
 * **Last**
 */
export type PrimaryAggregationType = string;

/** Known values of {@link IndexingMode} that the service accepts. */
export enum KnownIndexingMode {
  Consistent = "consistent",
  Lazy = "lazy",
  None = "none"
}

/**
 * Defines values for IndexingMode. \
 * {@link KnownIndexingMode} can be used interchangeably with IndexingMode,
 * this enum contains the known values that the service supports.
* ### Known values supported by the service
 * **consistent** \
 * **lazy** \
 * **none**
 */
export type IndexingMode = string;

/** Known values of {@link DataType} that the service accepts. */
export enum KnownDataType {
  String = "String",
  Number = "Number",
  Point = "Point",
  Polygon = "Polygon",
  LineString = "LineString",
  MultiPolygon = "MultiPolygon"
}

/**
 * Defines values for DataType. \
 * {@link KnownDataType} can be used interchangeably with DataType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **String** \
 * **Number** \
 * **Point** \
 * **Polygon** \
 * **LineString** \
 * **MultiPolygon**
 */
export type DataType = string;

/** Known values of {@link IndexKind} that the service accepts. */
export enum KnownIndexKind {
  Hash = "Hash",
  Range = "Range",
  Spatial = "Spatial"
}

/**
 * Defines values for IndexKind. \
 * {@link KnownIndexKind} can be used interchangeably with IndexKind,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Hash** \
 * **Range** \
 * **Spatial**
 */
export type IndexKind = string;

/** Known values of {@link CompositePathSortOrder} that the service accepts. */
export enum KnownCompositePathSortOrder {
  Ascending = "ascending",
  Descending = "descending"
}

/**
 * Defines values for CompositePathSortOrder. \
 * {@link KnownCompositePathSortOrder} can be used interchangeably with CompositePathSortOrder,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **ascending** \
 * **descending**
 */
export type CompositePathSortOrder = string;

/** Known values of {@link SpatialType} that the service accepts. */
export enum KnownSpatialType {
  Point = "Point",
  LineString = "LineString",
  Polygon = "Polygon",
  MultiPolygon = "MultiPolygon"
}

/**
 * Defines values for SpatialType. \
 * {@link KnownSpatialType} can be used interchangeably with SpatialType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Point** \
 * **LineString** \
 * **Polygon** \
 * **MultiPolygon**
 */
export type SpatialType = string;

/** Known values of {@link PartitionKind} that the service accepts. */
export enum KnownPartitionKind {
  Hash = "Hash",
  Range = "Range",
  MultiHash = "MultiHash"
}

/**
 * Defines values for PartitionKind. \
 * {@link KnownPartitionKind} can be used interchangeably with PartitionKind,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Hash** \
 * **Range** \
 * **MultiHash**
 */
export type PartitionKind = string;

/** Known values of {@link ConflictResolutionMode} that the service accepts. */
export enum KnownConflictResolutionMode {
  LastWriterWins = "LastWriterWins",
  Custom = "Custom"
}

/**
 * Defines values for ConflictResolutionMode. \
 * {@link KnownConflictResolutionMode} can be used interchangeably with ConflictResolutionMode,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **LastWriterWins** \
 * **Custom**
 */
export type ConflictResolutionMode = string;

/** Known values of {@link TriggerType} that the service accepts. */
export enum KnownTriggerType {
  Pre = "Pre",
  Post = "Post"
}

/**
 * Defines values for TriggerType. \
 * {@link KnownTriggerType} can be used interchangeably with TriggerType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Pre** \
 * **Post**
 */
export type TriggerType = string;

/** Known values of {@link TriggerOperation} that the service accepts. */
export enum KnownTriggerOperation {
  All = "All",
  Create = "Create",
  Update = "Update",
  Delete = "Delete",
  Replace = "Replace"
}

/**
 * Defines values for TriggerOperation. \
 * {@link KnownTriggerOperation} can be used interchangeably with TriggerOperation,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **All** \
 * **Create** \
 * **Update** \
 * **Delete** \
 * **Replace**
 */
export type TriggerOperation = string;

/** Known values of {@link BackupStorageRedundancy} that the service accepts. */
export enum KnownBackupStorageRedundancy {
  Geo = "Geo",
  Local = "Local",
  Zone = "Zone"
}

/**
 * Defines values for BackupStorageRedundancy. \
 * {@link KnownBackupStorageRedundancy} can be used interchangeably with BackupStorageRedundancy,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Geo** \
 * **Local** \
 * **Zone**
 */
export type BackupStorageRedundancy = string;

/** Known values of {@link NotebookWorkspaceName} that the service accepts. */
export enum KnownNotebookWorkspaceName {
  Default = "default"
}

/**
 * Defines values for NotebookWorkspaceName. \
 * {@link KnownNotebookWorkspaceName} can be used interchangeably with NotebookWorkspaceName,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **default**
 */
export type NotebookWorkspaceName = string;

/** Known values of {@link ApiType} that the service accepts. */
export enum KnownApiType {
  MongoDB = "MongoDB",
  Gremlin = "Gremlin",
  Cassandra = "Cassandra",
  Table = "Table",
  Sql = "Sql",
  GremlinV2 = "GremlinV2"
}

/**
 * Defines values for ApiType. \
 * {@link KnownApiType} can be used interchangeably with ApiType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **MongoDB** \
 * **Gremlin** \
 * **Cassandra** \
 * **Table** \
 * **Sql** \
 * **GremlinV2**
 */
export type ApiType = string;

/** Known values of {@link OperationType} that the service accepts.
*/ export enum KnownOperationType { Create = "Create", Replace = "Replace", Delete = "Delete", SystemOperation = "SystemOperation" } /** * Defines values for OperationType. \ * {@link KnownOperationType} can be used interchangeably with OperationType, * this enum contains the known values that the service supports. * ### Known values supported by the service * **Create** \ * **Replace** \ * **Delete** \ * **SystemOperation** */ export type OperationType = string; /** Known values of {@link ManagedCassandraProvisioningState} that the service accepts. */ export enum KnownManagedCassandraProvisioningState { Creating = "Creating", Updating = "Updating", Deleting = "Deleting", Succeeded = "Succeeded", Failed = "Failed", Canceled = "Canceled" } /** * Defines values for ManagedCassandraProvisioningState. \ * {@link KnownManagedCassandraProvisioningState} can be used interchangeably with ManagedCassandraProvisioningState, * this enum contains the known values that the service supports. * ### Known values supported by the service * **Creating** \ * **Updating** \ * **Deleting** \ * **Succeeded** \ * **Failed** \ * **Canceled** */ export type ManagedCassandraProvisioningState = string; /** Known values of {@link AuthenticationMethod} that the service accepts. */ export enum KnownAuthenticationMethod { None = "None", Cassandra = "Cassandra" } /** * Defines values for AuthenticationMethod. \ * {@link KnownAuthenticationMethod} can be used interchangeably with AuthenticationMethod, * this enum contains the known values that the service supports. * ### Known values supported by the service * **None** \ * **Cassandra** */ export type AuthenticationMethod = string; /** Known values of {@link ManagedCassandraResourceIdentityType} that the service accepts. */ export enum KnownManagedCassandraResourceIdentityType { SystemAssigned = "SystemAssigned", None = "None" } /** * Defines values for ManagedCassandraResourceIdentityType. 
\ * {@link KnownManagedCassandraResourceIdentityType} can be used interchangeably with ManagedCassandraResourceIdentityType, * this enum contains the known values that the service supports. * ### Known values supported by the service * **SystemAssigned** \ * **None** */ export type ManagedCassandraResourceIdentityType = string; /** Known values of {@link ConnectionState} that the service accepts. */ export enum KnownConnectionState { Unknown = "Unknown", OK = "OK", OperatorToDataCenterNetworkError = "OperatorToDataCenterNetworkError", DatacenterToDatacenterNetworkError = "DatacenterToDatacenterNetworkError", InternalOperatorToDataCenterCertificateError = "InternalOperatorToDataCenterCertificateError", InternalError = "InternalError" } /** * Defines values for ConnectionState. \ * {@link KnownConnectionState} can be used interchangeably with ConnectionState, * this enum contains the known values that the service supports. * ### Known values supported by the service * **Unknown** \ * **OK** \ * **OperatorToDataCenterNetworkError** \ * **DatacenterToDatacenterNetworkError** \ * **InternalOperatorToDataCenterCertificateError** \ * **InternalError** */ export type ConnectionState = string; /** Known values of {@link NodeState} that the service accepts. */ export enum KnownNodeState { Normal = "Normal", Leaving = "Leaving", Joining = "Joining", Moving = "Moving", Stopped = "Stopped" } /** * Defines values for NodeState. \ * {@link KnownNodeState} can be used interchangeably with NodeState, * this enum contains the known values that the service supports. * ### Known values supported by the service * **Normal** \ * **Leaving** \ * **Joining** \ * **Moving** \ * **Stopped** */ export type NodeState = string; /** Known values of {@link NodeStatus} that the service accepts. */ export enum KnownNodeStatus { Up = "Up", Down = "Down" } /** * Defines values for NodeStatus. 
 \
 * {@link KnownNodeStatus} can be used interchangeably with NodeStatus,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Up** \
 * **Down**
 */
export type NodeStatus = string;

// Closed (non-extensible) unions: unlike the Known*/string pairs above, these admit
// only the listed literal values.
/** Defines values for ResourceIdentityType. */
export type ResourceIdentityType =
  | "SystemAssigned"
  | "UserAssigned"
  | "SystemAssigned,UserAssigned"
  | "None";
/** Defines values for DefaultConsistencyLevel. */
export type DefaultConsistencyLevel =
  | "Eventual"
  | "Session"
  | "BoundedStaleness"
  | "Strong"
  | "ConsistentPrefix";
/** Defines values for NetworkAclBypass. */
export type NetworkAclBypass = "None" | "AzureServices";
/** Defines values for RoleDefinitionType. */
export type RoleDefinitionType = "BuiltInRole" | "CustomRole";

// Option bags and response aliases for the DatabaseAccounts operation group.
// Empty bags expose only the common coreClient.OperationOptions; long-running
// operations additionally accept polling controls (updateIntervalInMs/resumeFrom).
/** Optional parameters. */
export interface DatabaseAccountsGetOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type DatabaseAccountsGetResponse = DatabaseAccountGetResults;

/** Optional parameters. */
export interface DatabaseAccountsUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the update operation. */
export type DatabaseAccountsUpdateResponse = DatabaseAccountGetResults;

/** Optional parameters. */
export interface DatabaseAccountsCreateOrUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdate operation. */
export type DatabaseAccountsCreateOrUpdateResponse = DatabaseAccountGetResults;

/** Optional parameters. */
export interface DatabaseAccountsDeleteOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatabaseAccountsFailoverPriorityChangeOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatabaseAccountsListOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type DatabaseAccountsListResponse = DatabaseAccountsListResult;

/** Optional parameters. */
export interface DatabaseAccountsListByResourceGroupOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listByResourceGroup operation. */
export type DatabaseAccountsListByResourceGroupResponse = DatabaseAccountsListResult;

/** Optional parameters. */
export interface DatabaseAccountsListKeysOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listKeys operation. */
export type DatabaseAccountsListKeysResponse = DatabaseAccountListKeysResult;

/** Optional parameters. */
export interface DatabaseAccountsListConnectionStringsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listConnectionStrings operation. */
export type DatabaseAccountsListConnectionStringsResponse = DatabaseAccountListConnectionStringsResult;

/** Optional parameters. */
export interface DatabaseAccountsOfflineRegionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatabaseAccountsOnlineRegionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatabaseAccountsGetReadOnlyKeysOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getReadOnlyKeys operation. */
export type DatabaseAccountsGetReadOnlyKeysResponse = DatabaseAccountListReadOnlyKeysResult;

/** Optional parameters. */
export interface DatabaseAccountsListReadOnlyKeysOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listReadOnlyKeys operation. */
export type DatabaseAccountsListReadOnlyKeysResponse = DatabaseAccountListReadOnlyKeysResult;

/** Optional parameters. */
export interface DatabaseAccountsRegenerateKeyOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatabaseAccountsCheckNameExistsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the checkNameExists operation. */
export type DatabaseAccountsCheckNameExistsResponse = {
  /** The parsed response body. */
  body: boolean;
};

/** Optional parameters. */
export interface DatabaseAccountsListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type DatabaseAccountsListMetricsResponse = MetricListResult;

/** Optional parameters.
 */
export interface DatabaseAccountsListUsagesOptionalParams
  extends coreClient.OperationOptions {
  /** An OData filter expression that describes a subset of usages to return. The supported parameter is name.value (name of the metric, can have an or of multiple names). */
  filter?: string;
}

/** Contains response data for the listUsages operation. */
export type DatabaseAccountsListUsagesResponse = UsagesResult;

// Metric/usage/metric-definition option bags and responses for the database-account,
// database, collection, region and percentile scopes.
/** Optional parameters. */
export interface DatabaseAccountsListMetricDefinitionsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetricDefinitions operation. */
export type DatabaseAccountsListMetricDefinitionsResponse = MetricDefinitionsListResult;

/** Optional parameters. */
export interface OperationsListOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type OperationsListResponse = OperationListResult;

/** Optional parameters. */
export interface OperationsListNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listNext operation. */
export type OperationsListNextResponse = OperationListResult;

/** Optional parameters. */
export interface DatabaseListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type DatabaseListMetricsResponse = MetricListResult;

/** Optional parameters. */
export interface DatabaseListUsagesOptionalParams
  extends coreClient.OperationOptions {
  /** An OData filter expression that describes a subset of usages to return. The supported parameter is name.value (name of the metric, can have an or of multiple names). */
  filter?: string;
}

/** Contains response data for the listUsages operation. */
export type DatabaseListUsagesResponse = UsagesResult;

/** Optional parameters. */
export interface DatabaseListMetricDefinitionsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetricDefinitions operation. */
export type DatabaseListMetricDefinitionsResponse = MetricDefinitionsListResult;

/** Optional parameters. */
export interface CollectionListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type CollectionListMetricsResponse = MetricListResult;

/** Optional parameters. */
export interface CollectionListUsagesOptionalParams
  extends coreClient.OperationOptions {
  /** An OData filter expression that describes a subset of usages to return. The supported parameter is name.value (name of the metric, can have an or of multiple names). */
  filter?: string;
}

/** Contains response data for the listUsages operation. */
export type CollectionListUsagesResponse = UsagesResult;

/** Optional parameters. */
export interface CollectionListMetricDefinitionsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetricDefinitions operation. */
export type CollectionListMetricDefinitionsResponse = MetricDefinitionsListResult;

/** Optional parameters. */
export interface CollectionRegionListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type CollectionRegionListMetricsResponse = MetricListResult;

/** Optional parameters. */
export interface DatabaseAccountRegionListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type DatabaseAccountRegionListMetricsResponse = MetricListResult;

/** Optional parameters. */
export interface PercentileSourceTargetListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type PercentileSourceTargetListMetricsResponse = PercentileMetricListResult;

/** Optional parameters. */
export interface PercentileTargetListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type PercentileTargetListMetricsResponse = PercentileMetricListResult;

/** Optional parameters. */
export interface PercentileListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type PercentileListMetricsResponse = PercentileMetricListResult;

/** Optional parameters. */
export interface CollectionPartitionRegionListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type CollectionPartitionRegionListMetricsResponse = PartitionMetricListResult;

/** Optional parameters. */
export interface CollectionPartitionListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type CollectionPartitionListMetricsResponse = PartitionMetricListResult;

/** Optional parameters. */
export interface CollectionPartitionListUsagesOptionalParams
  extends coreClient.OperationOptions {
  /** An OData filter expression that describes a subset of usages to return. The supported parameter is name.value (name of the metric, can have an or of multiple names). */
  filter?: string;
}

/** Contains response data for the listUsages operation. */
export type CollectionPartitionListUsagesResponse = PartitionUsagesResult;

/** Optional parameters. */
export interface PartitionKeyRangeIdListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type PartitionKeyRangeIdListMetricsResponse = PartitionMetricListResult;

/** Optional parameters. */
export interface PartitionKeyRangeIdRegionListMetricsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMetrics operation. */
export type PartitionKeyRangeIdRegionListMetricsResponse = PartitionMetricListResult;

/** Optional parameters. */
export interface SqlResourcesListSqlDatabasesOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlDatabases operation. */
export type SqlResourcesListSqlDatabasesResponse = SqlDatabaseListResult;

/** Optional parameters. */
export interface SqlResourcesGetSqlDatabaseOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlDatabase operation. */
export type SqlResourcesGetSqlDatabaseResponse = SqlDatabaseGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlDatabaseOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlDatabase operation. */
export type SqlResourcesCreateUpdateSqlDatabaseResponse = SqlDatabaseGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlDatabaseOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesGetSqlDatabaseThroughputOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlDatabaseThroughput operation. */
export type SqlResourcesGetSqlDatabaseThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters.
 */
export interface SqlResourcesUpdateSqlDatabaseThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the updateSqlDatabaseThroughput operation. */
export type SqlResourcesUpdateSqlDatabaseThroughputResponse = ThroughputSettingsGetResults;

// SqlResources option bags/responses: database throughput migration, containers,
// stored procedures and user-defined functions. LRO bags carry polling controls.
/** Optional parameters. */
export interface SqlResourcesMigrateSqlDatabaseToAutoscaleOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateSqlDatabaseToAutoscale operation. */
export type SqlResourcesMigrateSqlDatabaseToAutoscaleResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface SqlResourcesMigrateSqlDatabaseToManualThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateSqlDatabaseToManualThroughput operation. */
export type SqlResourcesMigrateSqlDatabaseToManualThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface SqlResourcesListSqlContainersOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlContainers operation. */
export type SqlResourcesListSqlContainersResponse = SqlContainerListResult;

/** Optional parameters. */
export interface SqlResourcesGetSqlContainerOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlContainer operation. */
export type SqlResourcesGetSqlContainerResponse = SqlContainerGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlContainerOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlContainer operation. */
export type SqlResourcesCreateUpdateSqlContainerResponse = SqlContainerGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlContainerOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesGetSqlContainerThroughputOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlContainerThroughput operation. */
export type SqlResourcesGetSqlContainerThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface SqlResourcesUpdateSqlContainerThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the updateSqlContainerThroughput operation. */
export type SqlResourcesUpdateSqlContainerThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface SqlResourcesMigrateSqlContainerToAutoscaleOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateSqlContainerToAutoscale operation. */
export type SqlResourcesMigrateSqlContainerToAutoscaleResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface SqlResourcesMigrateSqlContainerToManualThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateSqlContainerToManualThroughput operation. */
export type SqlResourcesMigrateSqlContainerToManualThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface SqlResourcesListSqlStoredProceduresOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlStoredProcedures operation. */
export type SqlResourcesListSqlStoredProceduresResponse = SqlStoredProcedureListResult;

/** Optional parameters. */
export interface SqlResourcesGetSqlStoredProcedureOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlStoredProcedure operation. */
export type SqlResourcesGetSqlStoredProcedureResponse = SqlStoredProcedureGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlStoredProcedureOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlStoredProcedure operation. */
export type SqlResourcesCreateUpdateSqlStoredProcedureResponse = SqlStoredProcedureGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlStoredProcedureOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesListSqlUserDefinedFunctionsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlUserDefinedFunctions operation. */
export type SqlResourcesListSqlUserDefinedFunctionsResponse = SqlUserDefinedFunctionListResult;

/** Optional parameters. */
export interface SqlResourcesGetSqlUserDefinedFunctionOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlUserDefinedFunction operation. */
export type SqlResourcesGetSqlUserDefinedFunctionResponse = SqlUserDefinedFunctionGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlUserDefinedFunctionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlUserDefinedFunction operation. */
export type SqlResourcesCreateUpdateSqlUserDefinedFunctionResponse = SqlUserDefinedFunctionGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlUserDefinedFunctionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesListSqlTriggersOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlTriggers operation.
 */
export type SqlResourcesListSqlTriggersResponse = SqlTriggerListResult;

// SqlResources trigger and RBAC (role definition/assignment) option bags, plus the
// start of the MongoDBResources group.
/** Optional parameters. */
export interface SqlResourcesGetSqlTriggerOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlTrigger operation. */
export type SqlResourcesGetSqlTriggerResponse = SqlTriggerGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlTriggerOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlTrigger operation. */
export type SqlResourcesCreateUpdateSqlTriggerResponse = SqlTriggerGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlTriggerOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesGetSqlRoleDefinitionOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlRoleDefinition operation. */
export type SqlResourcesGetSqlRoleDefinitionResponse = SqlRoleDefinitionGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlRoleDefinitionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlRoleDefinition operation. */
export type SqlResourcesCreateUpdateSqlRoleDefinitionResponse = SqlRoleDefinitionGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlRoleDefinitionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesListSqlRoleDefinitionsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlRoleDefinitions operation. */
export type SqlResourcesListSqlRoleDefinitionsResponse = SqlRoleDefinitionListResult;

/** Optional parameters. */
export interface SqlResourcesGetSqlRoleAssignmentOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSqlRoleAssignment operation. */
export type SqlResourcesGetSqlRoleAssignmentResponse = SqlRoleAssignmentGetResults;

/** Optional parameters. */
export interface SqlResourcesCreateUpdateSqlRoleAssignmentOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateSqlRoleAssignment operation. */
export type SqlResourcesCreateUpdateSqlRoleAssignmentResponse = SqlRoleAssignmentGetResults;

/** Optional parameters. */
export interface SqlResourcesDeleteSqlRoleAssignmentOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SqlResourcesListSqlRoleAssignmentsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listSqlRoleAssignments operation. */
export type SqlResourcesListSqlRoleAssignmentsResponse = SqlRoleAssignmentListResult;

/** Optional parameters. */
export interface SqlResourcesRetrieveContinuousBackupInformationOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the retrieveContinuousBackupInformation operation. */
export type SqlResourcesRetrieveContinuousBackupInformationResponse = BackupInformation;

/** Optional parameters. */
export interface MongoDBResourcesListMongoDBDatabasesOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMongoDBDatabases operation. */
export type MongoDBResourcesListMongoDBDatabasesResponse = MongoDBDatabaseListResult;

/** Optional parameters. */
export interface MongoDBResourcesGetMongoDBDatabaseOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getMongoDBDatabase operation. */
export type MongoDBResourcesGetMongoDBDatabaseResponse = MongoDBDatabaseGetResults;

/** Optional parameters. */
export interface MongoDBResourcesCreateUpdateMongoDBDatabaseOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateMongoDBDatabase operation. */
export type MongoDBResourcesCreateUpdateMongoDBDatabaseResponse = MongoDBDatabaseGetResults;

/** Optional parameters. */
export interface MongoDBResourcesDeleteMongoDBDatabaseOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds.
 */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

// MongoDBResources database/collection throughput and backup option bags, followed
// by the TableResources operation group.
/** Optional parameters. */
export interface MongoDBResourcesGetMongoDBDatabaseThroughputOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getMongoDBDatabaseThroughput operation. */
export type MongoDBResourcesGetMongoDBDatabaseThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesUpdateMongoDBDatabaseThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the updateMongoDBDatabaseThroughput operation. */
export type MongoDBResourcesUpdateMongoDBDatabaseThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesMigrateMongoDBDatabaseToAutoscaleOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateMongoDBDatabaseToAutoscale operation. */
export type MongoDBResourcesMigrateMongoDBDatabaseToAutoscaleResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesMigrateMongoDBDatabaseToManualThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateMongoDBDatabaseToManualThroughput operation. */
export type MongoDBResourcesMigrateMongoDBDatabaseToManualThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesListMongoDBCollectionsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listMongoDBCollections operation. */
export type MongoDBResourcesListMongoDBCollectionsResponse = MongoDBCollectionListResult;

/** Optional parameters. */
export interface MongoDBResourcesGetMongoDBCollectionOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getMongoDBCollection operation. */
export type MongoDBResourcesGetMongoDBCollectionResponse = MongoDBCollectionGetResults;

/** Optional parameters. */
export interface MongoDBResourcesCreateUpdateMongoDBCollectionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateMongoDBCollection operation. */
export type MongoDBResourcesCreateUpdateMongoDBCollectionResponse = MongoDBCollectionGetResults;

/** Optional parameters. */
export interface MongoDBResourcesDeleteMongoDBCollectionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface MongoDBResourcesGetMongoDBCollectionThroughputOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getMongoDBCollectionThroughput operation. */
export type MongoDBResourcesGetMongoDBCollectionThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesUpdateMongoDBCollectionThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the updateMongoDBCollectionThroughput operation. */
export type MongoDBResourcesUpdateMongoDBCollectionThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesMigrateMongoDBCollectionToAutoscaleOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateMongoDBCollectionToAutoscale operation. */
export type MongoDBResourcesMigrateMongoDBCollectionToAutoscaleResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesMigrateMongoDBCollectionToManualThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the migrateMongoDBCollectionToManualThroughput operation. */
export type MongoDBResourcesMigrateMongoDBCollectionToManualThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface MongoDBResourcesRetrieveContinuousBackupInformationOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the retrieveContinuousBackupInformation operation. */
export type MongoDBResourcesRetrieveContinuousBackupInformationResponse = BackupInformation;

/** Optional parameters. */
export interface TableResourcesListTablesOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listTables operation. */
export type TableResourcesListTablesResponse = TableListResult;

/** Optional parameters. */
export interface TableResourcesGetTableOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getTable operation. */
export type TableResourcesGetTableResponse = TableGetResults;

/** Optional parameters. */
export interface TableResourcesCreateUpdateTableOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createUpdateTable operation. */
export type TableResourcesCreateUpdateTableResponse = TableGetResults;

/** Optional parameters. */
export interface TableResourcesDeleteTableOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface TableResourcesGetTableThroughputOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getTableThroughput operation. */
export type TableResourcesGetTableThroughputResponse = ThroughputSettingsGetResults;

/** Optional parameters. */
export interface TableResourcesUpdateTableThroughputOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds.
*/ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the updateTableThroughput operation. */ export type TableResourcesUpdateTableThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface TableResourcesMigrateTableToAutoscaleOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateTableToAutoscale operation. */ export type TableResourcesMigrateTableToAutoscaleResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface TableResourcesMigrateTableToManualThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateTableToManualThroughput operation. */ export type TableResourcesMigrateTableToManualThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesListCassandraKeyspacesOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listCassandraKeyspaces operation. */ export type CassandraResourcesListCassandraKeyspacesResponse = CassandraKeyspaceListResult; /** Optional parameters. */ export interface CassandraResourcesGetCassandraKeyspaceOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getCassandraKeyspace operation. */ export type CassandraResourcesGetCassandraKeyspaceResponse = CassandraKeyspaceGetResults; /** Optional parameters. 
*/ export interface CassandraResourcesCreateUpdateCassandraKeyspaceOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createUpdateCassandraKeyspace operation. */ export type CassandraResourcesCreateUpdateCassandraKeyspaceResponse = CassandraKeyspaceGetResults; /** Optional parameters. */ export interface CassandraResourcesDeleteCassandraKeyspaceOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface CassandraResourcesGetCassandraKeyspaceThroughputOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getCassandraKeyspaceThroughput operation. */ export type CassandraResourcesGetCassandraKeyspaceThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesUpdateCassandraKeyspaceThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the updateCassandraKeyspaceThroughput operation. */ export type CassandraResourcesUpdateCassandraKeyspaceThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesMigrateCassandraKeyspaceToAutoscaleOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. 
*/ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateCassandraKeyspaceToAutoscale operation. */ export type CassandraResourcesMigrateCassandraKeyspaceToAutoscaleResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesMigrateCassandraKeyspaceToManualThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateCassandraKeyspaceToManualThroughput operation. */ export type CassandraResourcesMigrateCassandraKeyspaceToManualThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesListCassandraTablesOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listCassandraTables operation. */ export type CassandraResourcesListCassandraTablesResponse = CassandraTableListResult; /** Optional parameters. */ export interface CassandraResourcesGetCassandraTableOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getCassandraTable operation. */ export type CassandraResourcesGetCassandraTableResponse = CassandraTableGetResults; /** Optional parameters. */ export interface CassandraResourcesCreateUpdateCassandraTableOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createUpdateCassandraTable operation. 
*/ export type CassandraResourcesCreateUpdateCassandraTableResponse = CassandraTableGetResults; /** Optional parameters. */ export interface CassandraResourcesDeleteCassandraTableOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface CassandraResourcesGetCassandraTableThroughputOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getCassandraTableThroughput operation. */ export type CassandraResourcesGetCassandraTableThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesUpdateCassandraTableThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the updateCassandraTableThroughput operation. */ export type CassandraResourcesUpdateCassandraTableThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface CassandraResourcesMigrateCassandraTableToAutoscaleOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateCassandraTableToAutoscale operation. */ export type CassandraResourcesMigrateCassandraTableToAutoscaleResponse = ThroughputSettingsGetResults; /** Optional parameters. 
*/ export interface CassandraResourcesMigrateCassandraTableToManualThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateCassandraTableToManualThroughput operation. */ export type CassandraResourcesMigrateCassandraTableToManualThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesListGremlinDatabasesOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listGremlinDatabases operation. */ export type GremlinResourcesListGremlinDatabasesResponse = GremlinDatabaseListResult; /** Optional parameters. */ export interface GremlinResourcesGetGremlinDatabaseOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getGremlinDatabase operation. */ export type GremlinResourcesGetGremlinDatabaseResponse = GremlinDatabaseGetResults; /** Optional parameters. */ export interface GremlinResourcesCreateUpdateGremlinDatabaseOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createUpdateGremlinDatabase operation. */ export type GremlinResourcesCreateUpdateGremlinDatabaseResponse = GremlinDatabaseGetResults; /** Optional parameters. */ export interface GremlinResourcesDeleteGremlinDatabaseOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. 
*/ export interface GremlinResourcesGetGremlinDatabaseThroughputOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getGremlinDatabaseThroughput operation. */ export type GremlinResourcesGetGremlinDatabaseThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesUpdateGremlinDatabaseThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the updateGremlinDatabaseThroughput operation. */ export type GremlinResourcesUpdateGremlinDatabaseThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesMigrateGremlinDatabaseToAutoscaleOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateGremlinDatabaseToAutoscale operation. */ export type GremlinResourcesMigrateGremlinDatabaseToAutoscaleResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesMigrateGremlinDatabaseToManualThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateGremlinDatabaseToManualThroughput operation. */ export type GremlinResourcesMigrateGremlinDatabaseToManualThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. 
*/ export interface GremlinResourcesListGremlinGraphsOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listGremlinGraphs operation. */ export type GremlinResourcesListGremlinGraphsResponse = GremlinGraphListResult; /** Optional parameters. */ export interface GremlinResourcesGetGremlinGraphOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getGremlinGraph operation. */ export type GremlinResourcesGetGremlinGraphResponse = GremlinGraphGetResults; /** Optional parameters. */ export interface GremlinResourcesCreateUpdateGremlinGraphOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createUpdateGremlinGraph operation. */ export type GremlinResourcesCreateUpdateGremlinGraphResponse = GremlinGraphGetResults; /** Optional parameters. */ export interface GremlinResourcesDeleteGremlinGraphOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface GremlinResourcesGetGremlinGraphThroughputOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getGremlinGraphThroughput operation. */ export type GremlinResourcesGetGremlinGraphThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesUpdateGremlinGraphThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. 
*/ resumeFrom?: string; } /** Contains response data for the updateGremlinGraphThroughput operation. */ export type GremlinResourcesUpdateGremlinGraphThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesMigrateGremlinGraphToAutoscaleOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateGremlinGraphToAutoscale operation. */ export type GremlinResourcesMigrateGremlinGraphToAutoscaleResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface GremlinResourcesMigrateGremlinGraphToManualThroughputOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the migrateGremlinGraphToManualThroughput operation. */ export type GremlinResourcesMigrateGremlinGraphToManualThroughputResponse = ThroughputSettingsGetResults; /** Optional parameters. */ export interface LocationsListOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the list operation. */ export type LocationsListResponse = LocationListResult; /** Optional parameters. */ export interface LocationsGetOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the get operation. */ export type LocationsGetResponse = LocationGetResult; /** Optional parameters. */ export interface NotebookWorkspacesListByDatabaseAccountOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listByDatabaseAccount operation. 
*/ export type NotebookWorkspacesListByDatabaseAccountResponse = NotebookWorkspaceListResult; /** Optional parameters. */ export interface NotebookWorkspacesGetOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the get operation. */ export type NotebookWorkspacesGetResponse = NotebookWorkspace; /** Optional parameters. */ export interface NotebookWorkspacesCreateOrUpdateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createOrUpdate operation. */ export type NotebookWorkspacesCreateOrUpdateResponse = NotebookWorkspace; /** Optional parameters. */ export interface NotebookWorkspacesDeleteOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface NotebookWorkspacesListConnectionInfoOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listConnectionInfo operation. */ export type NotebookWorkspacesListConnectionInfoResponse = NotebookWorkspaceConnectionInfoResult; /** Optional parameters. */ export interface NotebookWorkspacesRegenerateAuthTokenOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface NotebookWorkspacesStartOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. 
*/ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface PrivateEndpointConnectionsListByDatabaseAccountOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listByDatabaseAccount operation. */ export type PrivateEndpointConnectionsListByDatabaseAccountResponse = PrivateEndpointConnectionListResult; /** Optional parameters. */ export interface PrivateEndpointConnectionsGetOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the get operation. */ export type PrivateEndpointConnectionsGetResponse = PrivateEndpointConnection; /** Optional parameters. */ export interface PrivateEndpointConnectionsCreateOrUpdateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createOrUpdate operation. */ export type PrivateEndpointConnectionsCreateOrUpdateResponse = PrivateEndpointConnection; /** Optional parameters. */ export interface PrivateEndpointConnectionsDeleteOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface PrivateLinkResourcesListByDatabaseAccountOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listByDatabaseAccount operation. */ export type PrivateLinkResourcesListByDatabaseAccountResponse = PrivateLinkResourceListResult; /** Optional parameters. 
*/ export interface PrivateLinkResourcesGetOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the get operation. */ export type PrivateLinkResourcesGetResponse = PrivateLinkResource; /** Optional parameters. */ export interface RestorableDatabaseAccountsListByLocationOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listByLocation operation. */ export type RestorableDatabaseAccountsListByLocationResponse = RestorableDatabaseAccountsListResult; /** Optional parameters. */ export interface RestorableDatabaseAccountsListOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the list operation. */ export type RestorableDatabaseAccountsListResponse = RestorableDatabaseAccountsListResult; /** Optional parameters. */ export interface RestorableDatabaseAccountsGetByLocationOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the getByLocation operation. */ export type RestorableDatabaseAccountsGetByLocationResponse = RestorableDatabaseAccountGetResult; /** Optional parameters. */ export interface RestorableSqlDatabasesListOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the list operation. */ export type RestorableSqlDatabasesListResponse = RestorableSqlDatabasesListResult; /** Optional parameters. */ export interface RestorableSqlContainersListOptionalParams extends coreClient.OperationOptions { /** The resource ID of the SQL database. */ restorableSqlDatabaseRid?: string; /** The snapshot create timestamp after which snapshots need to be listed. */ startTime?: string; /** The snapshot create timestamp before which snapshots need to be listed. */ endTime?: string; } /** Contains response data for the list operation. */ export type RestorableSqlContainersListResponse = RestorableSqlContainersListResult; /** Optional parameters. 
*/ export interface RestorableSqlResourcesListOptionalParams extends coreClient.OperationOptions { /** The location where the restorable resources are located. */ restoreLocation?: string; /** The timestamp when the restorable resources existed. */ restoreTimestampInUtc?: string; } /** Contains response data for the list operation. */ export type RestorableSqlResourcesListResponse = RestorableSqlResourcesListResult; /** Optional parameters. */ export interface RestorableMongodbDatabasesListOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the list operation. */ export type RestorableMongodbDatabasesListResponse = RestorableMongodbDatabasesListResult; /** Optional parameters. */ export interface RestorableMongodbCollectionsListOptionalParams extends coreClient.OperationOptions { /** The resource ID of the MongoDB database. */ restorableMongodbDatabaseRid?: string; } /** Contains response data for the list operation. */ export type RestorableMongodbCollectionsListResponse = RestorableMongodbCollectionsListResult; /** Optional parameters. */ export interface RestorableMongodbResourcesListOptionalParams extends coreClient.OperationOptions { /** The location where the restorable resources are located. */ restoreLocation?: string; /** The timestamp when the restorable resources existed. */ restoreTimestampInUtc?: string; } /** Contains response data for the list operation. */ export type RestorableMongodbResourcesListResponse = RestorableMongodbResourcesListResult; /** Optional parameters. */ export interface CassandraClustersListBySubscriptionOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listBySubscription operation. */ export type CassandraClustersListBySubscriptionResponse = ListClusters; /** Optional parameters. */ export interface CassandraClustersListByResourceGroupOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the listByResourceGroup operation. 
*/ export type CassandraClustersListByResourceGroupResponse = ListClusters; /** Optional parameters. */ export interface CassandraClustersGetOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the get operation. */ export type CassandraClustersGetResponse = ClusterResource; /** Optional parameters. */ export interface CassandraClustersDeleteOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface CassandraClustersCreateUpdateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createUpdate operation. */ export type CassandraClustersCreateUpdateResponse = ClusterResource; /** Optional parameters. */ export interface CassandraClustersUpdateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the update operation. */ export type CassandraClustersUpdateResponse = ClusterResource; /** Optional parameters. */ export interface CassandraClustersInvokeCommandOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the invokeCommand operation. 
*/ export type CassandraClustersInvokeCommandResponse = CommandOutput; /** Optional parameters. */ export interface CassandraClustersDeallocateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface CassandraClustersStartOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface CassandraClustersStatusOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the status operation. */ export type CassandraClustersStatusResponse = CassandraClusterPublicStatus; /** Optional parameters. */ export interface CassandraDataCentersListOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the list operation. */ export type CassandraDataCentersListResponse = ListDataCenters; /** Optional parameters. */ export interface CassandraDataCentersGetOptionalParams extends coreClient.OperationOptions {} /** Contains response data for the get operation. */ export type CassandraDataCentersGetResponse = DataCenterResource; /** Optional parameters. */ export interface CassandraDataCentersDeleteOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Optional parameters. */ export interface CassandraDataCentersCreateUpdateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. 
*/ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the createUpdate operation. */ export type CassandraDataCentersCreateUpdateResponse = DataCenterResource; /** Optional parameters. */ export interface CassandraDataCentersUpdateOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ resumeFrom?: string; } /** Contains response data for the update operation. */ export type CassandraDataCentersUpdateResponse = DataCenterResource; /** Optional parameters. */ export interface CosmosDBManagementClientOptionalParams extends coreClient.ServiceClientOptions { /** server parameter */ $host?: string; /** Api Version */ apiVersion?: string; /** Overrides client endpoint. */ endpoint?: string; }
// the_stack  -- file-boundary marker left over from dataset concatenation; not TypeScript code
module TDev {
    // Cross-browser "tap/click" recognizer. Normalizes pointer / MSPointer /
    // touch / mouse / keyboard activation into a single callback `f`, firing
    // only when the gesture stays within a 10px slop and (via setupVersion)
    // within a 300ms window. Registered via addEventListener(..., <any>this, ...)
    // so the browser dispatches to handleEvent() (EventListener-object pattern).
    export class ClickHandler {
        // Page coordinates captured at gesture start; exposed to the callback
        // as e.pgX / e.pgY in fireClick().
        public begX = 0;
        public begY = 0;
        // True once the current gesture has been abandoned or completed.
        private cleared = false;
        // Monotonic gesture counter; lets the 300ms timeout detect staleness.
        private version = 0;
        // Element that receives the transient pointer move/up listeners.
        // prepareMouseOverlay()/hideMouseOverlay() are no-op hooks here —
        // presumably overridden or patched elsewhere; TODO confirm.
        public mouseCaptureOverlay:HTMLElement = document.body;

        // helt: element being made clickable; f: click callback.
        constructor(public helt:HTMLElement, public f:(e:Event)=>void) {
            this.setup();
        }

        // Record gesture start position and arm the timeout.
        // `setFlag` is a project extension on HTMLElement (not standard DOM).
        public clickBegin(pos:any) {
            this.helt.setFlag("active", true);
            this.begX = pos.pageX;
            this.begY = pos.pageY;
            this.cleared = false;
            this.setupVersion();
        }

        // Auto-clear the gesture after 300ms unless a newer gesture started
        // (version bumped) or it already cleared.
        public setupVersion() {
            var v = ++this.version;
            Util.setTimeout(300, () => {
                if (!this.cleared && v == this.version) this.clear();
            });
        }

        // Abandon the gesture if the pointer drifted more than 10px either axis.
        public onMove(e:any) {
            if (Math.abs(e.pageX - this.begX) > 10 || Math.abs(e.pageY - this.begY) > 10) this.clear();
        }

        // Extract page-ish coordinates from a (MS)PointerEvent; falls back to
        // clientX/Y when the legacy getPointerList API is absent.
        private getPos(e:MSPointerEvent) {
            // e.preventMouseEvent();
            // var pp = Util.offsetIn(e.target, TDev.elt("root"));
            if (!e.getPointerList) return { pageX: e.clientX, pageY: e.clientY };
            var pt = e.getPointerList()[0];
            return { pageX: pt.clientX, pageY: pt.clientY };
        }

        // Skip events that originate from nested editable controls so they keep
        // their native behavior.
        public skipIt(e:Event) {
            var targ = <HTMLElement>e.target;
            if (e.target != e.currentTarget &&
                (targ.nodeName == "INPUT" || targ.nodeName == "TEXTAREA")) return true;
            return false;
        }

        // Claim the event for this handler: returns true if some other
        // ClickHandler already consumed it (utilClickCancel expando), otherwise
        // marks it consumed and returns false.
        private isCanceled(e:Event) {
            if (this.skipIt(e)) return true;
            if ((<any>e).utilClickCancel) return true;
            (<any>e).utilClickCancel = true;
            return false;
        }

        // Absolute page position of an element by walking offsetParent chain
        // (used only for diagnostic logging below).
        public getPosition(element:HTMLElement) {
            var xPosition = 0;
            var yPosition = 0;
            while (element) {
                xPosition += (element.offsetLeft - element.scrollLeft + element.clientLeft);
                yPosition += (element.offsetTop - element.scrollTop + element.clientTop);
                element = <HTMLElement>element.offsetParent;
            }
            return { x: xPosition, y: yPosition };
        }

        // EventListener entry point: dispatches every subscribed event type.
        // Exceptions are trapped so a buggy callback can't break dispatch.
        public handleEvent(e:Event) {
            try {
                var ep = <any>e;
                if (Util.mouseLogging) {
                    Util.log("executing click handler " + e.type + " on " + this.helt.id);
                    Util.log("pageX:{0} pageY:{1} offsetX:{2} offsetY:{3} clientX:{4} clientY:{5}",
                             ep.pageX, ep.pageY, ep.offsetX, ep.offsetY, ep.clientX, ep.clientY);
                }
                // For touch events, use the first (changed) touch point as the
                // coordinate source. Note the second branch is unreachable when
                // touches[0] exists — kept as-is (NOTE(review): ep.touches &&
                // ep.touches.item(0) duplicates the first condition).
                if (ep.touches && ep.touches[0]) {
                    ep = ep.touches[0];
                } else if (ep.touches && ep.touches.item(0)) {
                    ep = ep.touches.item(0);
                } else if (ep.changedTouches && ep.changedTouches.item(0)) {
                    ep = ep.changedTouches.item(0);
                }
                if (Util.mouseLogging) {
                    Util.log("after touch adjustment");
                    var pos = this.getPosition(this.helt);
                    Util.log("pageX:{0} pageY:{1} offsetX:{2} offsetY:{3} clientX:{4} clientY:{5} eltX:{6} eltY:{7}",
                             ep.pageX, ep.pageY, ep.offsetX, ep.offsetY, ep.clientX, ep.clientY, pos.x, pos.y);
                }
                switch (e.type) {
                    /* case "MSGestureTap":
                        var ge = <MSGestureEvent>e;
                        ge.preventMouseEvent();
                        var pp = Util.offsetIn(e.target, TDev.elt("root"));
                        begX = ge.offsetX + pp.x;
                        begX = ge.offsetY + pp.y;
                        fireClick(e);
                        break; */
                    // Standard Pointer Events path: start gesture, then track
                    // move/up on the capture overlay.
                    case "pointerdown":
                        if (this.isCanceled(e)) break;
                        this.prepareMouseOverlay();
                        this.clickBegin(this.getPos(<MSPointerEvent>e));
                        this.mouseCaptureOverlay.addEventListener("pointermove", <any>this, false);
                        this.mouseCaptureOverlay.addEventListener("pointerup", <any>this, false);
                        break;
                    // Legacy IE10 vendor-prefixed pointer path.
                    case "MSPointerDown":
                        if (this.isCanceled(e)) break;
                        this.prepareMouseOverlay();
                        this.clickBegin(this.getPos(<MSPointerEvent>e));
                        this.mouseCaptureOverlay.addEventListener("MSPointerMove", <any>this, false);
                        this.mouseCaptureOverlay.addEventListener("MSPointerUp", <any>this, false);
                        break;
                    case "pointermove":
                    case "MSPointerMove":
                        if (!this.cleared) this.onMove(this.getPos(<MSPointerEvent>e));
                        break;
                    case "pointerup":
                    case "MSPointerUp":
                        if (!this.cleared) this.fireClick(e);
                        break;
                    // Touch path: listen for end on both element and body so a
                    // finger lifted off-element still completes/clears.
                    case "touchstart":
                        if (this.isCanceled(e)) break;
                        // e.stopPropagation();
                        this.helt.addEventListener("touchend", <any>this, false);
                        document.body.addEventListener("touchmove", <any>this, false);
                        document.body.addEventListener("touchend", <any>this, false);
                        this.clickBegin(ep);
                        break;
                    case "touchmove":
                        if (!this.cleared) this.onMove(ep);
                        break;
                    case "touchend":
                        if (!this.cleared) this.fireClick(ep);
                        break;
                    // Mouse path: only the primary button starts a gesture.
                    case "mousedown":
                        if (this.isCanceled(e)) break;
                        // e.stopPropagation();
                        if ((<MouseEvent> e).button != 0) break;
                        this.helt.hideFocus = true;
                        this.helt.addEventListener("mouseup", <any>this, false);
                        document.addEventListener("mouseup", <any>this, false);
                        document.body.addEventListener("mousemove", <any>this, false);
                        this.clickBegin(e);
                        break;
                    case "mousemove":
                        if (!this.cleared) this.onMove(e);
                        break;
                    case "mouseup":
                        if (!this.cleared) this.fireClick(e);
                        break;
                    // Keyboard activation: Enter (13) or Space (32) on the
                    // element itself invokes the callback directly.
                    case "keypress":
                        var ke = <KeyboardEvent>e;
                        if (ke.target == this.helt && (ke.which == 13 || ke.which == 32)) {
                            this.f(e);
                        }
                        break;
                    /* case "click":
                        e.stopPropagation();
                        clear();
                        f(e);
                        break; */
                }
            } catch (err) {
                Util.reportError("clickHandler", err);
            }
        }

        // Complete the gesture: tear down listeners, then invoke the callback
        // once per event (clickCancelled expando dedupes across handlers).
        // Start coordinates are attached as e.pgX / e.pgY for the callback.
        public fireClick(e:Event) {
            this.clear();
            var canc = (<any> e).clickCancelled;
            (<any> e).clickCancelled = true;
            if (!canc) {
                (<any>e).pgX = this.begX;
                (<any>e).pgY = this.begY;
                this.f(e);
            }
        }

        // Tear down every transient listener this handler may have added and
        // reset the "active" flag. Removals are unconditional, which is safe:
        // removeEventListener on a non-registered listener is a no-op.
        public clear() {
            this.hideMouseOverlay();
            this.cleared = true;
            // NOTE(review): hideMouseOverlay() is invoked twice in this method;
            // redundant (it is currently a no-op) but harmless — consider
            // dropping one call.
            this.hideMouseOverlay();
            this.helt.removeEventListener("touchend", <any>this, false);
            this.helt.removeEventListener("mouseup", <any>this, false);
            this.helt.setFlag("active", false);
            document.body.removeEventListener("touchmove", <any>this, false);
            document.removeEventListener("mouseup", <any>this, false);
            document.body.removeEventListener("mouseup", <any>this, false);
            document.body.removeEventListener("mousemove", <any>this, false);
            document.body.removeEventListener("touchend", <any>this, false);
            this.mouseCaptureOverlay.removeEventListener("MSPointerMove", <any>this, false);
            this.mouseCaptureOverlay.removeEventListener("MSPointerUp", <any>this, false);
            this.mouseCaptureOverlay.removeEventListener("pointermove", <any>this, false);
            this.mouseCaptureOverlay.removeEventListener("pointerup", <any>this, false);
        }

        // Overlay hooks; intentionally empty here (see mouseCaptureOverlay).
        public prepareMouseOverlay() {
        }

        public hideMouseOverlay() {
        }

        // Pick exactly one "down" event family by feature detection (pointer >
        // MSPointer > touch > mouse); keypress is always registered (capture
        // phase) for keyboard accessibility.
        private setup() {
            if (window.navigator.pointerEnabled)
                this.helt.addEventListener("pointerdown", <any>this, false);
            else if (window.navigator.msPointerEnabled)
                // this guy triggers just once for double tap, and in general seems to have delay
                // e.addEventListener("MSGestureTap", self, false);
                this.helt.addEventListener("MSPointerDown", <any>this, false);
            else if (Browser.touchStart)
                this.helt.addEventListener("touchstart", <any>this, false);
            else
                this.helt.addEventListener("mousedown", <any>this, false);
            this.helt.addEventListener("keypress", <any>this, true);
        }

        // Remove whichever "down"/keypress listeners setup() may have added
        // (unconditional removals are no-ops when not registered).
        public unhook() {
            this.helt.removeEventListener("MSPointerDown", <any>this, false);
            this.helt.removeEventListener("touchstart", <any>this, false);
            this.helt.removeEventListener("mousedown", <any>this, false);
            this.helt.removeEventListener("keypress", <any>this, true);
        }
    }

    export module Util {
        // Attach (or replace) a ClickHandler on `e`, making it act as an
        // accessible button (role="button", tabIndex=0 unless opted out via
        // data-norole). The callback is wrapped so exceptions are reported,
        // not thrown. allowSelect controls text selection on the element.
        export function clickHandler(e:HTMLElement, cb:(e:any) => void, allowSelect?:boolean) {
            if (e) {
                if (!e.getAttribute("role") && !e.getAttribute("data-norole"))
                    e.setAttribute("role", "button")
                e.tabIndex = 0;
            }
            // NOTE(review): the statement above relies on ASI (no semicolon
            // after setAttribute), and the `if (e)` guard does not cover the
            // uses of `e` below — a null element would throw here. Verify
            // callers never pass null.
            function newCb(e:any) {
                try {
                    return cb(e);
                } catch (err) {
                    Util.reportError("clickHandler " + cb.toString(), err);
                }
            }
            var oldH = (<any>e).clickHandler;
            if (oldH) oldH.unhook();
            var handler = new ClickHandler(e, newCb);
            (<any>e).clickHandler = handler;
            if (!allowSelect)
                e.onselectstart = () => { return <any> false; };
            else
                e.onselectstart = (e) => { e.stopImmediatePropagation(); return true; }
        }

        // Convenience: element.withClick(cb) chains Util.clickHandler.
        // Guarded so non-browser environments (no HTMLElement) don't crash.
        try {
            if (typeof HTMLElement != "undefined")
                HTMLElement.prototype.withClick = function (cb, allowSelect?) {
                    Util.clickHandler(this, cb, allowSelect);
                    return this;
                };
        } catch (e) { }
    }
}
the_stack
const en = { translation: { items: { flow: 'Flow', run: 'Run', step: 'Step', task: 'Task', artifact: 'Artifact', metadata: 'Metadata', }, home: { home: 'Home', 'show-all-runs': 'Show all runs', 'new-run-available': 'New runs at the top', 'scroll-to-top': 'Scroll to top', 'no-timeframe-warning': 'Handling runs without a Time frame can be slow.', }, help: { 'quick-links': 'Quick links', timezone: 'Timezone', 'local-time': 'Local time', 'local-timezone': 'Local timezone', 'selected-time': 'Selected timezone', timezones: 'Timezones', notifications: 'Notifications', documentation: 'Documentation', help: 'Help', 'application-version': 'Application version', 'service-version': 'Service version', }, fields: { group: { none: 'No grouping', flow: 'Group by flow', user: 'Group by user', }, flow: 'Flow', user: 'User', tag: 'Tag', project: 'Project', branch: 'Branch', 'user-tags': 'User tags', tasks: 'Tasks', flow_id: 'Flow', id: 'ID', status: 'Status', 'started-at': 'Started at', 'finished-at': 'Finished at', language: 'Language', duration: 'Duration', 'artifact-name': 'Artifact name', location: 'Location', 'datastore-type': 'Datastore type', type: 'Type', 'content-type': 'Content type', 'run-id': 'Run ID', 'task-id': 'Task ID', content: 'Content', }, filters: { 'group-by': 'Group by', 'reset-all': 'Reset view', running: 'Running', failed: 'Failed', completed: 'Completed', }, run: { 'run-details': 'Details', 'no-run-data': 'No run data', 'no-tags': 'No user tags', 'no-system-tags': 'No system tags', 'no-metadata': 'No run metadata found', 'metadata-not-available': 'Metadata not available for this attempt.', 'failed-to-load-metadata': 'Failed to load metadata', DAG: 'DAG', 'dag-not-available': 'DAG is currently not available', 'dag-only-available-AWS': 'DAG is only available when flow is executed on AWS.', 'dag-data-not-available': "DAG structure data doesn't exists. 
Data is only available when flow is executed on AWS", 'developer-comment': 'Developer comment', timeline: 'Timeline', parameters: 'Parameters', 'no-parameters': 'No run parameters.', 'run-parameters-error': 'Problem loading run parameters.', 'show-run-details': 'Show run details', 'hide-run-details': 'Hide run details', tags: 'User tags', 'system-tags': 'System tags', 'select-all-tags': 'Select all tags', 'scroll-to-bottom': 'Scroll to bottom', 'show-fullscreen': 'Show fullscreen', 'filter-all': 'All', 'filter-completed': 'Completed', 'filter-running': 'Running', 'filter-failed': 'Failed', 'filter-pending': 'Pending', 'filter-unknown': 'Unknown', mode: 'Mode', overview: 'Workflow', monitoring: 'New tasks', 'error-tracker': 'Failed tasks', custom: 'Custom', }, timeline: { 'no-run-data': 'No run data. You can wait if this run is created and see live updates.', 'no-rows': 'No tasks found', 'hidden-by-settings': 'rows are hidden by selected settings', 'expand-all': 'Expand all', 'collapse-all': 'Collapse all', relative: 'Relative', absolute: 'Absolute', 'group-by-step': 'Group by step', 'order-by': 'Order by', 'started-at': 'Started at', startTime: 'Started at', 'finished-at': 'Finished at', endTime: 'Finished at', duration: 'Duration', zoom: 'Zoom', 'fit-to-screen': 'Fit to screen', 'zoom-in': 'Zoom in', 'zoom-out': 'Zoom out', fullscreen: 'Fullscreen', 'show-all-steps': 'Show all steps', 'order-tasks-by': 'Order tasks by', status: 'Status', 'tasks-visibility': 'Task visibility', grouped: 'Group by step', 'not-grouped': 'Not grouped', asc: 'Ascending', desc: 'Descending', }, task: { 'no-metadata': 'No task metadata found.', 'metadata-not-available': 'Metadata not available for this attempt.', 'task-details': 'Task details', loading: 'Loading task data', 'no-task-selected': 'No task selected', 'could-not-find-task': 'Could not find the task', 'task-info': 'Task info', links: 'Links', 'std-out': 'stdout', 'std-err': 'stderr', artifacts: 'Artifacts', 'search-tasks-tip': 
'Search: artifact_name:value', 'no-logs': 'No logs', 'no-preload-logs': 'No logs. Logs will be checked again momentarily', 'logs-only-available-AWS': 'Logs were not found from AWS.', attempt: 'Attempt', 'copy-logs-to-clipboard': 'Copy to clipboard', 'all-logs-copied': 'Full log copied to clipboard', 'line-copied': 'Line copied to clipboard', 'copy-to-clipboard': 'Copy to clipboard', 'download-artifact-content': 'Download artifact content', 'download-logs': 'Download logs as txt file', 'downloading-logs': 'Downloading logs as txt file', 'waiting-for-task-to-start': 'Waiting for task to start', 'artifact-copied': 'Artifact content copied to clipboard', 'show-fullscreen': 'Show logs in fullscreen', 'no-artifacts-found': 'No artifacts found', 'artifact-remote': 'Remote', 'artifact-local': 'Local', metadata: 'Metadata', 'failed-to-load-metadata': 'Failed to load metadata', 'metadata-not-loaded': 'Metadata not loaded', 'show-task-metadata': 'Show task metadata', 'hide-task-metadata': 'Hide task metadata', 'poll-loader-msg': 'New logs are fetched in 20 seconds intervals', 'ui-content': 'UI Content', 'unable-to-find-status': 'Unable to find status for this task', 'log-search': 'Search', 'log-search-tip': 'Searching only from locally available lines', }, breadcrumb: { 'no-match': "Text doesn't match known patterns.", goto: 'Go to...', whereto: 'Where to?', 'example-flow': 'Flow Name', 'example-run': 'Flow Name / Run ID', 'example-step': 'Flow Name / Run ID / Step Name', 'example-task': 'Flow Name / Run ID / Step Name / Task ID', }, search: { search: 'Search', artifact: 'Artifact', artifactInfo: 'You can wrap value "" to search for an exact match.', 'no-results': 'No search results', 'no-tasks': 'No tasks with selected settings', 'failed-to-search': 'Failed to search', }, connection: { connected: 'Connected for real-time updates', 'waiting-for-connection': 'Waiting for connection', 'data-might-be-stale': 'Reconnected, but data might be stale. 
Click here to reconnect', }, status: { fail: 'failed', }, error: { 'error-details': 'Error details', details: 'Details', 'show-more-details': 'Show error details', 'hide-more-details': 'Hide error details', 'generic-error': 'An error occurred', 'load-error': 'Error loading data', 'no-results': 'No results found', 'no-runs': 'No runs found', 'no-tasks': 'No tasks found', 'not-found': 'Not available', 'dag-not-found': 'DAG visualization is not available for this run', 'dag-not-found-running': 'DAG visualization was not found for this run. Try again later.', 'stack-trace': 'Stack trace', 'copy-stack-trace': 'Copy error stack trace', 'stack-trace-copied': 'Stack trace copied', 'download-stack-trace': 'Download stack trace', DataException: 'There was a problem accessing data with the Metaflow client.', MetaflowS3AccessDenied: 'Access denied. There was a problem with AWS credentials.', MetaflowS3NotFound: 'S3 bucket was not found.', MetaflowS3URLException: 'Error in S3 URL.', MetaflowS3Exception: 'There was an error with S3 access.', DAGUnsupportedFlowLanguage: 'Unsupported language. DAG is only supported for flows ran with Python.', DAGParsingFailed: 'DAG was found but something went wrong with processing the data.', 'log-error': 'There was a problem loading logs.', 'application-error': 'Application encountered an unexpected error. This should not happen and might be caused by unexpected data.', 'sidebar-error': 'Sidebar encountered an unexpected error. This should not happen and might be caused by unexpected data or parameters.', 'home-error': 'Run listing encountered an unexpected error. This should not happen and might be caused by unexpected data.', 'run-header-error': 'Run info section encountered an unexpected error. This should not happen and might be caused by unexpected data.', 'dag-error': 'DAG encountered an unexpected error. This should not happen and might be caused by unexpected data.', 'timeline-error': 'Timeline encountered an unexpected error. 
This should not happen and might be caused by unexpected data.', 'task-error': 'Task page encountered an unexpected error. This should not happen and might be caused by unexpected data.', 'artifact-too-large': 'Artifact too large to display, use the client to access it.', 'artifact-handle-failed': 'Artifact not currently displayable in the UI, use the client to access it.', 'artifact-unknown-error': 'Artifact not currently displayable in the UI, use the client to access it.', 'no-run-version-info': 'Since this run did not record Metaflow version, some information may be incomplete.', 'old-metaflow-client-warning': 'Since this run uses an old version of Metaflow, some information may be incomplete.', 'local-datastore-warning': 'Since this run uses local datastore instead of cloud services, some information may be incomplete.', }, notifications: { dateMissing: 'Date missing', header: 'Notifications', published: 'Published', unsorted: 'Unsorted', }, component: { show: 'Show', hide: 'Hide', startTime: 'Start time', endTime: 'End time', cancel: 'Cancel', set: 'Set', }, date: { 'select-preset': 'Select preset', month: 'Last 30 days', twoweeks: 'Last 14 days', yesterday: 'Yesterday', today: 'Today', }, debug: { title: 'Debug', feature_flags: 'Feature flags', log_recording: 'Log recording', start_recording: 'Start recording', stop_recording: 'Stop recording', recording_logs: 'Recording logs', stop_and_download: 'Stop and download logs', stop_and_discard: 'Stop and discard logs', DAG_msg: 'New tab in runs view which has graphical presentation of the run.', RUN_GROUPS_msg: 'Grouping feature for runs list on home page.', TASK_METADATA_msg: 'Show metadata for each task on task view.', TIMELINE_MINIMAP_msg: 'Show rough presentation of lines in timeline minimap.', ARTIFACT_TABLE_msg: 'Show artifact table on task view.', ARTIFACT_SEARCH_msg: 'Enable search field in timeline view to filter tasks by artifact values.', DEBUG_VIEW_msg: 'Expose this view in help menu as a link.', 
CACHE_DISABLE_msg: 'Disable cache from server side.', DB_LISTEN_DISABLE_msg: 'Disable real time update features from database.', HEARTBEAT_DISABLE_msg: 'Disable heartbeat for tasks and runs.', PREFETCH_DISABLE_msg: 'Disable preloading data to cache service.', REFINE_DISABLE_msg: 'Disable refined queries for tasks and artifacts.', S3_DISABLE_msg: 'Disable fetching extra data from AWS S3.', WS_DISABLE_msg: 'Disable websocket real time messaging.', generic_msg: 'Value of feature flag can be changed server side.', logging_msg: 'Logging can be used to gather information in problem situations. Logging will record all HTTP and websocket traffic which can then be downloaded as text files even in case of application crash.', }, card: { card_id_title: 'Card ID', card_title: 'Card', download_card: 'Download card HTML file', card_timeout: 'Timeout: loading cards', card_loading: 'Loading cards', }, }, }; // This model should be used for additional languages to come. For example to add finnish // language, add file fi.ts and define translation object as const fi: TranslationModel = .... export type TranslationModel = typeof en; export default en;
the_stack
import * as protos from '../../protos/iam_service';
import * as assert from 'assert';
import * as sinon from 'sinon';
import {SinonStub} from 'sinon';
import {describe, it} from 'mocha';
import {IamClient} from '../../src/iamService';
import * as protobuf from 'protobufjs';
import {GrpcClient} from '../../src/grpc';

// NOTE(review): this file follows the gapic generated-test pattern (same
// arrange/act/assert triple per RPC) — if it is generated, change the
// template rather than this file. TODO confirm.

// Round-trips a protobuf message through toObject/fromObject with
// {defaults: true} so every field is populated with its default value,
// producing a fully-formed sample request/response.
function generateSampleMessage<T extends object>(instance: T) {
  const filledObject = (
    instance.constructor as typeof protobuf.Message
  ).toObject(instance as protobuf.Message<T>, {defaults: true});
  return (instance.constructor as typeof protobuf.Message).fromObject(
    filledObject
  ) as T;
}

// Stub for the promise-style API surface: resolves with [response] (the
// gax-style tuple) or rejects with the given error.
function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) {
  return error
    ? sinon.stub().rejects(error)
    : sinon.stub().resolves([response]);
}

// Stub for the callback-style API surface: invokes the third argument
// (the callback) as (error) or (null, response).
function stubSimpleCallWithCallback<ResponseType>(
  response?: ResponseType,
  error?: Error
) {
  return error
    ? sinon.stub().callsArgWith(2, error)
    : sinon.stub().callsArgWith(2, null, response);
}

describe('IAM service', () => {
  describe('getIamPolicy', () => {
    // Promise path: inner call resolves; client returns the gax tuple and the
    // stub must have been invoked with (request, options, undefined-callback).
    it('invokes getIamPolicy without error', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.GetIamPolicyRequest()
      );
      request.resource = '';
      // Routing header built from the (empty) resource field.
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.iam.v1.Policy()
      );
      client.innerApiCalls.getIamPolicy = stubSimpleCall(expectedResponse);
      const response = await client.getIamPolicy(request, expectedOptions);
      assert.deepStrictEqual(response, [expectedResponse]);
      assert(
        (client.innerApiCalls.getIamPolicy as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });

    // Callback path: same behavior surfaced through the node-style callback.
    it('invokes getIamPolicy without error using callback', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.GetIamPolicyRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.iam.v1.Policy()
      );
      client.innerApiCalls.getIamPolicy =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.getIamPolicy(
          request,
          expectedOptions,
          (err?: Error | null, result?: protos.google.iam.v1.Policy | null) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.getIamPolicy as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /* callback function above */)
      );
    });

    // Error path: the inner-call rejection propagates to the caller.
    it('invokes getIamPolicy with error', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.GetIamPolicyRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.getIamPolicy = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(async () => {
        await client.getIamPolicy(request, expectedOptions);
      }, expectedError);
      assert(
        (client.innerApiCalls.getIamPolicy as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });

  // Same three scenarios (promise / callback / error) for setIamPolicy.
  describe('setIamPolicy', () => {
    it('invokes setIamPolicy without error', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.SetIamPolicyRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.iam.v1.Policy()
      );
      client.innerApiCalls.setIamPolicy = stubSimpleCall(expectedResponse);
      const response = await client.setIamPolicy(request, expectedOptions);
      assert.deepStrictEqual(response, [expectedResponse]);
      assert(
        (client.innerApiCalls.setIamPolicy as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });

    it('invokes setIamPolicy without error using callback', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.SetIamPolicyRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.iam.v1.Policy()
      );
      client.innerApiCalls.setIamPolicy =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.setIamPolicy(
          request,
          expectedOptions,
          (err?: Error | null, result?: protos.google.iam.v1.Policy | null) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.setIamPolicy as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /* callback function above */)
      );
    });

    it('invokes setIamPolicy with error', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.SetIamPolicyRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.setIamPolicy = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(async () => {
        await client.setIamPolicy(request, expectedOptions);
      }, expectedError);
      assert(
        (client.innerApiCalls.setIamPolicy as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });

  // Same three scenarios (promise / callback / error) for testIamPermissions.
  describe('testIamPermissions', () => {
    it('invokes testIamPermissions without error', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.TestIamPermissionsRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.iam.v1.TestIamPermissionsResponse()
      );
      client.innerApiCalls.testIamPermissions =
        stubSimpleCall(expectedResponse);
      const response = await client.testIamPermissions(
        request,
        expectedOptions
      );
      assert.deepStrictEqual(response, [expectedResponse]);
      assert(
        (client.innerApiCalls.testIamPermissions as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });

    it('invokes testIamPermissions without error using callback', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.TestIamPermissionsRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.iam.v1.TestIamPermissionsResponse()
      );
      client.innerApiCalls.testIamPermissions =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.testIamPermissions(
          request,
          expectedOptions,
          (
            err?: Error | null,
            result?: protos.google.iam.v1.TestIamPermissionsResponse | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.testIamPermissions as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /* callback function above */)
      );
    });

    it('invokes testIamPermissions with error', async () => {
      const grpcClient = new GrpcClient();
      const client = new IamClient(grpcClient, {
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.iam.v1.TestIamPermissionsRequest()
      );
      request.resource = '';
      const expectedHeaderRequestParams = 'resource=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.testIamPermissions = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(async () => {
        await client.testIamPermissions(request, expectedOptions);
      }, expectedError);
      assert(
        (client.innerApiCalls.testIamPermissions as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
});
the_stack
class Vector/* /* */ */{/* *//* */ constructor( public x: number,public y: number,public z: number ) { }/* *//* */ static times( k: number,v: Vector )/* /* */ */ {/* */ return new Vector( k*v.x,k*v.y,k*v.z );/* */ }/* *//* */ static minus( v1: Vector,v2: Vector )/* /* */ */ {/* */ return new Vector( v1.x-v2.x,v1.y-v2.y,v1.z-v2.z );/* */ }/* *//* */ static plus( v1: Vector,v2: Vector )/* /* */ */ {/* */ return new Vector( v1.x+v2.x,v1.y+v2.y,v1.z+v2.z );/* */ }/* *//* */ static dot( v1: Vector,v2: Vector )/* /* */ */ {/* */ return v1.x*v2.x+v1.y*v2.y+v1.z*v2.z;/* */ }/* *//* */ static mag( v: Vector )/* /* */ */ {/* */ return Math.sqrt( v.x*v.x+v.y*v.y+v.z*v.z );/* */ }/* *//* */ static norm( v: Vector )/* /* */ */ {/* */ var mag=Vector.mag( v );/* */ var div=( mag===0 )? Infinity:1.0/mag;/* */ return Vector.times( div,v );/* */ }/* *//* */ static cross( v1: Vector,v2: Vector )/* /* */ */ {/* */ return new Vector( v1.y*v2.z-v1.z*v2.y,/* */ v1.z*v2.x-v1.x*v2.z,/* */ v1.x*v2.y-v1.y*v2.x );/* */ }/* *//* */}/* *//* */class Color/* /* */ */{/* *//* */ constructor( public r: number,public g: number,public b: number ) { }/* *//* */ static scale( k: number,v: Color )/* /* */ */ {/* */ return new Color( k*v.r,k*v.g,k*v.b );/* */ }/* *//* */ static plus( v1: Color,v2: Color )/* /* */ */ {/* */ return new Color( v1.r+v2.r,v1.g+v2.g,v1.b+v2.b );/* */ }/* *//* */ static times( v1: Color,v2: Color )/* /* */ */ {/* */ return new Color( v1.r*v2.r,v1.g*v2.g,v1.b*v2.b );/* */ }/* *//* */ static white=new Color( 1.0,1.0,1.0 );/* */ static grey=new Color( 0.5,0.5,0.5 );/* */ static black=new Color( 0.0,0.0,0.0 );/* */ static background=Color.black;/* */ static defaultColor=Color.black;/* *//* */ static toDrawingColor( c: Color )/* /* */ */ {/* */ var legalize=d => d>1? 
1:d;/* */ return {/* */ r: Math.floor( legalize( c.r )*255 ),/* */ g: Math.floor( legalize( c.g )*255 ),/* */ b: Math.floor( legalize( c.b )*255 )/* */ }/* */ }/* *//* */}/* *//* */class Camera/* /* */ */{/* *//* */ forward: Vector;/* */ right: Vector;/* */ up: Vector;/* *//* */ constructor( public pos: Vector,lookAt: Vector )/* /* */ */ {/* */ var down=new Vector( 0.0,-1.0,0.0 );/* */ this.forward=Vector.norm( Vector.minus( lookAt,this.pos ) );/* */ this.right=Vector.times( 1.5,Vector.norm( Vector.cross( this.forward,down ) ) );/* */ this.up=Vector.times( 1.5,Vector.norm( Vector.cross( this.forward,this.right ) ) );/* */ }/* *//* */}/* *//* */interface Ray/* /* */ */{/* */ start: Vector;/* */ dir: Vector;/* */}/* *//* */interface Intersection/* /* */ */{/* */ thing: Thing;/* */ ray: Ray;/* */ dist: number;/* */}/* *//* */interface Surface/* /* */ */{/* */ diffuse: ( pos: Vector ) => Color;/* */ specular: ( pos: Vector ) => Color;/* */ reflect: ( pos: Vector ) => number;/* */ roughness: number;/* */}/* *//* */interface Thing/* /* */ */{/* */ intersect: ( ray: Ray ) => Intersection;/* */ normal: ( pos: Vector ) => Vector;/* */ surface: Surface;/* */}/* *//* */interface Light/* /* */ */{/* */ pos: Vector;/* */ color: Color;/* */}/* *//* */interface Scene/* /* */ */{/* */ things: Thing[];/* */ lights: Light[];/* */ camera: Camera;/* */}/* *//* */class Sphere implements Thing/* /* */ */{/* *//* */ radius2: number;/* *//* */ constructor( public center: Vector,radius: number,public surface: Surface )/* /* */ */ {/* */ this.radius2=radius*radius;/* */ }/* *//* */ normal( pos: Vector ): Vector/* /* */ */ {/* */ return Vector.norm( Vector.minus( pos,this.center ) );/* */ }/* *//* */ intersect( ray: Ray )/* /* */ */ {/* */ var eo=Vector.minus( this.center,ray.start );/* */ var v=Vector.dot( eo,ray.dir );/* */ var dist=0;/* */ if( v>=0 )/* /* */ */ {/* */ var disc=this.radius2-( Vector.dot( eo,eo )-v*v );/* */ if( disc>=0 )/* /* */ */ {/* */ dist=v-Math.sqrt( disc );/* */ }/* 
*/ }/* */ if( dist===0 )/* /* */ */ {/* */ return null;/* */ } else/* /* */ */ {/* */ return { thing: this,ray: ray,dist: dist };/* */ }/* */ }/* *//* */}/* *//* */class Plane implements Thing/* /* */ */{/* *//* */ normal: ( pos: Vector ) => Vector;/* */ intersect: ( ray: Ray ) => Intersection;/* *//* */ constructor( norm: Vector,offset: number,public surface: Surface )/* /* */ */ {/* */ this.normal=function ( pos: Vector ) { return norm; }/* */ this.intersect=function ( ray: Ray ): Intersection/* /* */ */ {/* */ var denom=Vector.dot( norm,ray.dir );/* */ if( denom>0 )/* /* */ */ {/* */ return null;/* */ } else/* /* */ */ {/* */ var dist=( Vector.dot( norm,ray.start )+offset )/( -denom );/* */ return { thing: this,ray: ray,dist: dist };/* */ }/* */ }/* */ }/* *//* */}/* *//* */module Surfaces/* /* */ */{/* *//* */ export var shiny: Surface={/* */ diffuse: function ( pos ) { return Color.white; },/* */ specular: function ( pos ) { return Color.grey; },/* */ reflect: function ( pos ) { return 0.7; },/* */ roughness: 250/* */ }/* *//* */ export var checkerboard: Surface={/* */ diffuse: function ( pos )/* /* */ */ {/* */ if( ( Math.floor( pos.z )+Math.floor( pos.x ) )%2!==0 )/* /* */ */ {/* */ return Color.white;/* */ } else/* /* */ */ {/* */ return Color.black;/* */ }/* */ },/* */ specular: function ( pos ) { return Color.white; },/* */ reflect: function ( pos )/* /* */ */ {/* */ if( ( Math.floor( pos.z )+Math.floor( pos.x ) )%2!==0 )/* /* */ */ {/* */ return 0.1;/* */ } else/* /* */ */ {/* */ return 0.7;/* */ }/* */ },/* */ roughness: 150/* */ }/* *//* */}/* *//* *//* */class RayTracer/* /* */ */{/* *//* */ private maxDepth=5;/* *//* */ private intersections( ray: Ray,scene: Scene )/* /* */ */ {/* */ var closest=+Infinity;/* */ var closestInter: Intersection=undefined;/* */ for( var i in scene.things )/* /* */ */ {/* */ var inter=scene.things[i].intersect( ray );/* */ if( inter!=null&&inter.dist<closest )/* /* */ */ {/* */ closestInter=inter;/* */ 
closest=inter.dist;/* */ }/* */ }/* */ return closestInter;/* */ }/* *//* */ private testRay( ray: Ray,scene: Scene )/* /* */ */ {/* */ var isect=this.intersections( ray,scene );/* */ if( isect!=null )/* /* */ */ {/* */ return isect.dist;/* */ } else/* /* */ */ {/* */ return undefined;/* */ }/* */ }/* *//* */ private traceRay( ray: Ray,scene: Scene,depth: number ): Color/* /* */ */ {/* */ var isect=this.intersections( ray,scene );/* */ if( isect===undefined )/* /* */ */ {/* */ return Color.background;/* */ } else/* /* */ */ {/* */ return this.shade( isect,scene,depth );/* */ }/* */ }/* *//* */ private shade( isect: Intersection,scene: Scene,depth: number )/* /* */ */ {/* */ var d=isect.ray.dir;/* */ var pos=Vector.plus( Vector.times( isect.dist,d ),isect.ray.start );/* */ var normal=isect.thing.normal( pos );/* */ var reflectDir=Vector.minus( d,Vector.times( 2,Vector.times( Vector.dot( normal,d ),normal ) ) );/* */ var naturalColor=Color.plus( Color.background,/* */ this.getNaturalColor( isect.thing,pos,normal,reflectDir,scene ) );/* */ var reflectedColor=( depth>=this.maxDepth )? Color.grey:this.getReflectionColor( isect.thing,pos,normal,reflectDir,scene,depth );/* */ return Color.plus( naturalColor,reflectedColor );/* */ }/* *//* */ private getReflectionColor( thing: Thing,pos: Vector,normal: Vector,rd: Vector,scene: Scene,depth: number )/* /* */ */ {/* */ return Color.scale( thing.surface.reflect( pos ),this.traceRay( { start: pos,dir: rd },scene,depth+1 ) );/* */ }/* *//* */ private getNaturalColor( thing: Thing,pos: Vector,norm: Vector,rd: Vector,scene: Scene )/* /* */ */ {/* */ var addLight=( col,light ) =>/* /* */ */ {/* */ var ldis=Vector.minus( light.pos,pos );/* */ var livec=Vector.norm( ldis );/* */ var neatIsect=this.testRay( { start: pos,dir: livec },scene );/* */ var isInShadow=( neatIsect===undefined )? 
false:( neatIsect<=Vector.mag( ldis ) );/* */ if( isInShadow )/* /* */ */ {/* */ return col;/* */ } else/* /* */ */ {/* */ var illum=Vector.dot( livec,norm );/* */ var lcolor=( illum>0 )? Color.scale( illum,light.color )/* */ :Color.defaultColor;/* */ var specular=Vector.dot( livec,Vector.norm( rd ) );/* */ var scolor=( specular>0 )? Color.scale( Math.pow( specular,thing.surface.roughness ),light.color )/* */ :Color.defaultColor;/* */ return Color.plus( col,Color.plus( Color.times( thing.surface.diffuse( pos ),lcolor ),/* */ Color.times( thing.surface.specular( pos ),scolor ) ) );/* */ }/* */ }/* */ return scene.lights.reduce( addLight,Color.defaultColor );/* */ }/* *//* */ render( scene,ctx,screenWidth,screenHeight )/* /* */ */ {/* */ var getPoint=( x,y,camera ) =>/* /* */ */ {/* */ var recenterX=x => ( x-( screenWidth/2.0 ) )/2.0/screenWidth;/* */ var recenterY=y => -( y-( screenHeight/2.0 ) )/2.0/screenHeight;/* */ return Vector.norm( Vector.plus( camera.forward,Vector.plus( Vector.times( recenterX( x ),camera.right ),Vector.times( recenterY( y ),camera.up ) ) ) );/* */ }/* */ for( var y=0;y<screenHeight;y++ )/* /* */ */ {/* */ for( var x=0;x<screenWidth;x++ )/* /* */ */ {/* */ var color=this.traceRay( { start: scene.camera.pos,dir: getPoint( x,y,scene.camera ) },scene,0 );/* */ var c=Color.toDrawingColor( color );/* */ ctx.fillStyle="rgb("+String( c.r )+", "+String( c.g )+", "+String( c.b )+")";/* */ ctx.fillRect( x,y,x+1,y+1 );/* */ }/* */ }/* */ }/* *//* */}/* *//* *//* */function defaultScene(): Scene/* /* */ */{/* */ return {/* */ things: [new Plane( new Vector( 0.0,1.0,0.0 ),0.0,Surfaces.checkerboard ),/* */ new Sphere( new Vector( 0.0,1.0,-0.25 ),1.0,Surfaces.shiny ),/* */ new Sphere( new Vector( -1.0,0.5,1.5 ),0.5,Surfaces.shiny )],/* */ lights: [{ pos: new Vector( -2.0,2.5,0.0 ),color: new Color( 0.49,0.07,0.07 ) },/* */ { pos: new Vector( 1.5,2.5,1.5 ),color: new Color( 0.07,0.07,0.49 ) },/* */ { pos: new Vector( 1.5,2.5,-1.5 ),color: new Color( 
0.07,0.49,0.071 ) },/* */ { pos: new Vector( 0.0,3.5,0.0 ),color: new Color( 0.21,0.21,0.35 ) }],/* */ camera: new Camera( new Vector( 3.0,2.0,4.0 ),new Vector( -1.0,0.5,0.0 ) )/* */ };/* */}/* *//* */function exec()/* /* */ */{/* */ var canv=document.createElement( "canvas" );/* */ canv.width=256;/* */ canv.height=256;/* */ document.body.appendChild( canv );/* */ var ctx=canv.getContext( "2d" );/* */ var rayTracer=new RayTracer();/* */ return rayTracer.render( defaultScene(),ctx,256,256 );/* */}/* *//* */exec();/* */
the_stack
// tslint:disable:max-file-line-count export type ResourceEvents = 'cached' | 'error' | 'abort' | 'load' | 'beforeunload' | 'unload' export type NetworkEvents = 'online' | 'offline' export type FocusEvents = 'focus' | 'blur' export type WebsocketEvents = 'open' | 'message' | 'error' | 'close' export type SessionHistoryEvents = 'pagehide' | 'pageshow' | 'popstate' export type CssAnimationEvents = 'animationstart' | 'animationend' | 'animationiteration' export type FormEvents = 'reset' | 'submit' | 'invalid' export type PrintingEvents = 'beforeprint' | 'afterprint' export type TextCompositionEvents = 'compositionstart' | 'compositionupdate' | 'compositionend' export type ViewEvents = 'fullscreenchange' | 'fullscreenerror' | 'resize' | 'scroll' export type KeyboardEvents = 'keydown' | 'keypress' | 'keyup' export type MouseEvents = 'mouseenter' | 'mouseover' | 'mousemove' | 'mousedown' | 'mouseup' | 'click' | 'dblclick' | 'contextmenu' | 'wheel' | 'mouseleave' | 'mouseout' | 'select' | 'pointerlockchange' | 'pointerlockerror' export type DragAndDropEvents = 'dragstart' | 'drag' | 'dragend' | 'dragend' | 'dragenter' | 'dragover' | 'dragleave' | 'drop' export type MediaEvents = 'durationchange' | 'loadedmetadata' | 'loadeddata' | 'canplay' | 'canplaythrough' | 'ended' | 'emptied' | 'stalled' | 'suspend' | 'play' | 'playing' | 'pause' | 'waiting' | 'seeking' | 'ratechange' | 'timeupdate' | 'volumechange' | 'complete' | 'ended' | 'audioprocess' export type ProgressEvents = 'loadstart' | 'progress' | 'error' | 'timeout' | 'abort' | 'load' | 'loaded' export type StorageEvents = 'change' | 'storage' export type UpdateEvents = 'checking' | 'downloading' | 'error' | 'noupdate' | 'obsolete' | 'updateready' export type ValueChangeEvents = 'broadcast' | 'CheckboxStateChange' | 'hashchange' | 'input' | 'RadioStateChange' | 'readystatechange' | 'ValueChange' export type LocalizationEvents = 'localized' export type WebWorkerEvents = 'message' export type ContextMenuEvents = 'show' 
export type SvgEvents = 'SVGAbort' | 'SVGError' | 'SVGLoad' | 'SVGResize' | 'SVGScroll' | 'SVGUnload' | 'SVGZoom' export type DatabaseEvents = 'abort' | 'blocked' | 'complete' | 'error' | 'success' | 'upgradeneeded' | 'versionchange' export type NotificationEvents = 'AlertActive' | 'AlertClose' export type CSSEvents = 'CssRuleViewRefreshed' | 'CssRuleViewChanged' | 'CssRuleViewCSSLinkClicked' | 'transitionend' export type ScriptEvents = 'afterscriptexecute' | 'beforescriptexecute' export type MenuEvents = 'DOMMenutItemActive' | 'DOMMenutItemInactive' export type WindowEvents = 'DOMWindowCreated' | 'DOMTitleChanged' | 'DOMWindowClose' | 'SSWindowClosing' | 'SSWindowStateReady' | 'SSWindowStateBusy' | 'close' export type DocumentEvents = 'DOMLinkAdded' | 'DOMLinkRemoved' | 'DOMMetaAdded' | 'DOMMetaRemoved' | 'DOMWillOpenModalDialog' | 'DOMModalDialogClosed' export type PopupEvents = 'popuphidden' | 'popuphiding' | 'popupshowing' | 'popupshown' | 'DOMPopupBlocked' export type TabEvents = 'TabOpen' | 'TabClose' | 'TabSelect' | 'TabShow' | 'TabHide' | 'TabPinned' | 'TabUnpinned' | 'SSTabClosing' | 'SSTabRestoring' | 'SSTabRestored' | 'visibilitychange' export type BatteryEvents = 'chargingchange' | 'chargingtimechange' | 'dischargingtimechange' | 'levelchange' export type CallEvents = 'alerting' | 'busy' | 'callschanged' | 'connected' | 'connecting' | 'dialing' | 'disconnected' | 'disconnecting' | 'error' | 'held' | 'holding' | 'incoming' | 'resuming' | 'statechange' export type SensorEvents = 'devicelight' | 'devicemotion' | 'deviceorientation' | 'deviceproximity' | // 'MozOrientation' | 'orientationchange' | 'userproximity' export type SmartcardEvents = 'smartcard-insert' | 'smartcard-remove' export type SMSAndUSSDEvents = 'delivered' | 'received' | 'sent' export type FrameEvents = // 'mozbrowserclose' | 'mozbrowsercontextmenu' | 'mozbrowsererror' | // 'mozbrowsericonchange' | 'mozbrowserlocationchange' | 'mozbrowserloadend' | // 'mozbrowserloadstart' | 
'mozbrowseropenwindow' | 'mozbrowsersecuritychange' // | 'mozbrowsershowmodalprompt' | 'mozbrowsertitlechange' | 'DOMFrameContentLoaded' export type DOMMutationEvents = 'DOMAttributeNameChanged' | 'DOMAttrModified' | 'DOMCharacterDataModified' | 'DOMContentLoaded' | 'DOMElementNamedChanged' | 'DOMNodeInserted' | 'DOMNodeInsertedIntoDocument' | 'DOMNodeRemoved' | 'DOMNodeRemovedFromDocument' | 'DOMSubtreeModified' export type TouchEvents = // 'MozEdgeUiGestor' | 'MozMagnifyGesture' | 'MozMagnifyGestureStart' | // 'MozMagnifyGestureUpdate' | 'MozPressTapGesture' | 'MozRotateGesture' | // 'MozRotateGestureStart' | 'MozRotateGestureUpdate' | 'MozSwipeGesture' | // 'MozTapGesture' | 'MozTouchDown' | 'MozTouchMove' | 'MozTouchUp' | 'touchcancel' | 'touchend' | 'touchenter' | 'touchleave' | 'touchmove' | 'touchstart' export type PointerEvents = 'pointerover' | 'pointerenter' | 'pointerdown' | 'pointermove' | 'pointerup' | 'pointercancel' | 'pointerout' | 'pointerleave' | 'gotpointercapture' | 'lostpointercapture' // the events that are in the var browser specifications // all browsers should have these implemented the same export type StandardEvents = // name - Event Types 'abort' | // UIEvent, ProgressEvent, Event 'afterprint' | // Event; 'animationend' | // AnimationEvent 'animationiteration' | // AnimationEvent 'animationstart' | // AnimationEvent 'audioprocess' | // AudioProcessingEvent 'audioend' | // Event 'audiostart' | // Event 'beforprint' | // Event 'beforeunload' | // BeforeUnloadEvent 'beginEvent' | // TimeEvent 'blocked' | // Event 'blur' | // FocusEvent 'boundary' | // SpeechsynthesisEvent 'cached' | // Event 'canplay' | // Event 'canplaythrough' | // Event 'change' | // Event 'chargingchange' | // Event 'chargingtimechange' | // Event 'checking' | // Event 'click' | // MouseEvent 'close' | // Event 'complete' | // Event, OfflineAudioCompletionEvent 'compositionend' | // CompositionEvent 'compositionstart' | // CompositionEvent 'compositionupdate' | // 
CompositionEvent 'contextmenu' | // MoustEvent 'copy' | // ClipboardEvent 'cut' | // ClipboardEvent 'dblclick' | // MouseEvent 'devicechange' | // Event 'devicelight' | // DeviceLightEvent 'devicemotion' | // DeviceMotionEvent 'deviceorientation' | // DeviceOrientationEvent 'deviceproximity' | // DeviceProximityEvent 'dischargingtimechange' | // Event 'DOMActivate' | // UIEvent 'DOMAttributeNameChanged' | // MutationNameEvent 'DOMAttrModified' | // Mutationevent 'DOMCharacterDataModified' | // MutationEvent 'DOMContentLoaded' |// Event 'DOMElementNamedChanged' | // MutationNameEvent 'DOMNodeInserted' | // MutationEvent 'DOMNodeInsertedIntoDocument' | // MutationEvent 'DOMNodeRemoved' | // MutationEvent 'DOMNodeRemovedFromDocument' | // MutationEvent 'DOMSubtreeModified' | // MutationEvent 'downloaded' | // Event 'drag' | // DragEvent 'dragend' | // DragEvent 'dragenter' | // DragEvent 'dragleave' | // DragEvent 'dragover' | // DragEvent 'dragstart' | // DragEvent 'drop' | // DragEvent 'durationchange' | // Event 'emptied' | // Event 'end' | // Event, SpeechSynthesisEvent 'ended' | // Event 'endEvent' | // TimeEvent 'error' | // UIEvent | ProgressEvent | Event 'focus' | // FocusEvent 'fullscreenchange' | // Event 'fullscreenerror' | // Event 'gamepadconnected' | // GamepadEvent 'gamepaddisconnected' | // GamepadEvent 'gotpointercapture' | // PointerEvent 'hashchange' | // HashChangEvent 'lostpointercapture' | // PointerEvent 'input' | // event 'invalid' | // Event 'keydown' | // KeyboardEvent 'keypress' | // KeyboardEvent 'keyup' | // KeyboardEvent 'languagechange' | // Event 'levelchange' | // Event 'load' | // UIEvent, ProgressEvent 'loadeddata' | // Event 'loadedmetadata' | // Event 'loadend' | // ProgressEvent 'loadstart' | // ProgressEvent 'mark' | // SpeechSynthesisEvent 'message' | // MessageEvent, ServiceWorkerMessageEvent, ExtendableMessageEvent 'mousedown' | // MouseEvent 'mouseenter' | // MouseEvent 'mouseleave' | // MouseEvent 'mousemove' | // MouseEvent 
'mouseout' | // MouseEvent 'mouseover' | // Mouseevent 'nomatch' | // SpeechRecognitionEvent 'notificationclick' | // NotificationEvent 'noupdate' | // event 'obsolete' | // Event 'offline' | // event 'online' | // Event 'open' | // event 'orientationchange' | // Event 'pagehide' | // PageTransitionEvent 'pageshow' | // PageTransitionEvent 'paste' | // ClipboardEvent 'pause' | // Event, SpeechSynthesisEvent 'pointercancel' | // PointerEvent 'pointerdown' | // PointerEvent 'pointerenter' | // PointerEvent 'pointerleave' | // PointerEvent 'pointerlockchange' | // Event 'pointerlockerror' | // Event 'pointermove' | // PointerEvent 'pointerout' | // PointerEvent 'pointerover' | // PointerEvent 'pointerup' | // PointerEvent 'play' | // Event 'playing' | // Event 'popstate' | // PopStateEvent 'progress' | // ProgressEvent 'push' | // PushEvent 'pushsubscriptionchange' | // PushEvent 'ratechange' | // Event 'readystatechange' | // Event 'repeatEvent' | // TimeEvent 'reset' | // Event 'resize' | // UIEvent 'resourcetimingbufferfull' | // Performance 'result' | // SpeechRecognitionEvent 'resume' | // SpeechSynthesisEvent 'scroll' | // UIEvent 'seeked' | // Event 'seeking' | // Event 'select' | // UIEvent 'selectstart' | // UIEvent 'selectionchange' | // Event 'show' | // MouseEvent 'soundend' | // Event 'soundstart' | // Event 'speechend' | // Event 'speechstart' | // Event 'stalled' | // Event 'start' | // SpeechSynthesisEvent 'storage' | // StorageEvent 'submit' | // Event 'success' | // Event 'suspend' | // Event 'SVGAbort' | // SvgEvent 'SVGError' | // SvgEvent 'SVGLoad' | // SvgEvent 'SVGResize' | // SvgEvent 'SVGScroll' | // SvgEvent 'SVGUnload' | // SvgEvent 'SVGZoom' | // SvgEvent 'timeout' | // ProgressEvent 'timeupdate' | // Event 'touchcancel' | // TouchEvent 'touchend' | // TouchEvent 'touchenter' | // TouchEvent 'touchleave' | // TouchEvent 'touchmove' | // TouchEvent 'touchstart' | // TouchEvent ; 'transitionend' | // Transitionevent 'unload' | // UIEvent 
'updateready' | // Event 'upgradeneeded' | // Event 'userproximity' | // UserProximityEvent 'voiceschanged' | // Event 'versionchange' | // Event 'visibilitychange' | // Event 'volumechange' | // Event 'vrdisplayconnected' | // Event 'vrdisplaydisconnected' | // Event 'vrdisplaypresentchange' | // Event 'waiting' | // Event 'wheel' // WheelEvent
the_stack
import express, { response } from "express";
import { BubbleWrapper } from "../build/bubbleWrapper";
import { AndroidPackageOptions as AndroidPackageOptions } from "../build/androidPackageOptions";
import path from "path";
import tmp, { dir } from "tmp";
import archiver from "archiver";
import fs from "fs-extra";
import { LocalKeyFileSigningOptions, SigningOptions } from "../build/signingOptions";
import del from "del";
import { GeneratedAppPackage } from "../build/generatedAppPackage";
import { AppPackageRequest } from "../build/appPackageRequest";
import generatePassword from "password-generator";
import fetch, { Response } from "node-fetch";
import { logUrlResult } from "../build/urlLogger";
import { errorToString } from "../build/utils";

const router = express.Router();

const tempFileRemovalTimeoutMs = 1000 * 60 * 5; // 5 minutes
tmp.setGracefulCleanup(); // remove any tmp file artifacts on process exit

/**
 * Generates an APK package and zips it up along with the signing key info. Sends back the zip file.
 * Expects a POST body containing @see ApkOptions form data.
 */
router.post(["/generateAppPackage", "/generateApkZip"], async function (request: express.Request, response: express.Response) {
    const apkRequest = validateApkRequest(request);
    if (apkRequest.validationErrors.length > 0 || !apkRequest.options) {
        const errorMessage = "Invalid PWA settings: " + apkRequest.validationErrors.join(", ");
        logUrlResult(apkRequest.options?.host || "", false, errorMessage);
        response.status(500).send(errorMessage);
        return;
    }

    try {
        const appPackage = await createAppPackage(apkRequest.options);

        // Create our zip file containing the APK, readme, and signing info.
        const zipFile = await zipAppPackage(appPackage, apkRequest.options);

        response.sendFile(zipFile, {});
        logUrlResult(apkRequest.options.host, true, null);
        console.info("Process completed successfully.");
    } catch (err) {
        console.error("Error generating app package", err);
        const errorString = errorToString(err);
        // FIX: previously concatenated the errorToString *function* itself
        // (logging its source text) instead of the stringified error.
        logUrlResult(apkRequest.options.host, false, "Error generating app package " + errorString);
        response.status(500).send("Error generating app package: \r\n" + errorString);
    }
});

/**
 * This endpoint tries to fetch a URL. This is useful because we occasionally have bug reports
 * where the Android packaging service can't fetch an image or other resource.
 * Example: https://github.com/pwa-builder/PWABuilder/issues/1166
 *
 * Often, the cause is the developer's web server is blocking an IP address range that includes
 * our published app service.
 *
 * This endpoint checks for that by performing a simple fetch.
 *
 * Usage: /fetch?type=blob&url=https://somewebsite.com/favicon-512x512.png
 */
router.get("/fetch", async function (request: express.Request, response: express.Response) {
    const url = request.query.url as string;
    if (!url) {
        response.status(500).send("You must specify a URL");
        return;
    }

    // How the fetched body should be relayed; defaults to plain text.
    const type = (request.query.type || "text") as "blob" | "json" | "text";
    let fetchResult: Response;
    try {
        fetchResult = await fetch(url);
    } catch (fetchError) {
        response.status(500).send(`Unable to initiate fetch for ${url}. Error: ${fetchError}`);
        return;
    }

    if (!fetchResult.ok) {
        response.status(fetchResult.status).send(`Unable to fetch ${url}. Status: ${fetchResult.status}, ${fetchResult.statusText}`);
        return;
    }

    // Mirror the upstream content type and headers on our response.
    if (fetchResult.type) {
        response.type(fetchResult.type);
    }
    if (fetchResult.headers) {
        fetchResult.headers.forEach((value, name) => response.setHeader(name, value));
    }

    try {
        if (type === "blob") {
            const blob = await fetchResult.arrayBuffer();
            response.status(fetchResult.status).send(Buffer.from(blob));
        } else if (type === "json") {
            // FIX: fetchResult.json() already returns a parsed object; the old
            // code ran it through JSON.parse again, which coerced the object to
            // "[object Object]" and always threw.
            const json = await fetchResult.json();
            response.status(fetchResult.status).send(json);
        } else {
            const text = await fetchResult.text();
            response.status(fetchResult.status).send(text);
        }
    } catch (getResultError) {
        response.status(500).send(`Unable to fetch result from ${url} using type ${type}. Error: ${getResultError}`);
    }
});

/**
 * Validates the incoming request and extracts the AndroidPackageOptions from its body.
 * Returns the parsed options (or null) together with any validation error messages.
 */
function validateApkRequest(request: express.Request): AppPackageRequest {
    const validationErrors: string[] = [];

    // If we were unable to parse ApkOptions, there's no more validation to do.
    const options: AndroidPackageOptions | null = tryParseOptionsFromRequest(request);
    if (!options) {
        validationErrors.push("Malformed argument. Coudn't find ApkOptions in body");
        return {
            options: null,
            validationErrors,
        };
    }

    // Ensure we have required fields.
    const requiredFields: Array<keyof AndroidPackageOptions> = [
        "appVersion",
        "appVersionCode",
        "backgroundColor",
        "display",
        "fallbackType",
        "host",
        "iconUrl",
        "launcherName",
        "navigationColor",
        "packageId",
        "signingMode",
        "startUrl",
        "themeColor",
        "webManifestUrl"
    ];
    validationErrors.push(...requiredFields
        .filter(f => !options[f])
        .map(f => `${f} is required`));

    // We must have signing options if the signing is enabled.
    if (options.signingMode !== "none" && !options.signing) {
        validationErrors.push(`Signing options are required when signing mode = '${options.signingMode}'`);
    }

    // If the user is supplying their own signing key, we have some additional requirements:
    // - A signing key file must be specified
    // - The signing key file must be a base64 encoded string.
    // - A store password must be supplied
    // - A key password must be supplied
    if (options.signingMode === "mine" && options.signing) {
        // We must have a keystore file uploaded if the signing mode is use existing.
        if (!options.signing.file) {
            validationErrors.push("You must supply a signing key file when signing mode = 'mine'");
        }
        // Signing file must be a base 64 encoded string.
        if (options.signing.file && !options.signing.file.startsWith("data:")) {
            validationErrors.push("Signing file must be a base64 encoded string containing the Android keystore file");
        }
        if (!options.signing.storePassword) {
            validationErrors.push("You must supply a store password when signing mode = 'mine'");
        }
        if (!options.signing.keyPassword) {
            validationErrors.push("You must supply a key password when signing mode = 'mine'");
        }
    }

    // Validate signing option fields
    if (options.signingMode !== "none" && options.signing) {
        // If we don't have a key password or store password, create one now.
        if (!options.signing.keyPassword) {
            options.signing.keyPassword = generatePassword(12, false);
        }
        if (!options.signing.storePassword) {
            options.signing.storePassword = generatePassword(12, false);
        }

        // Verify we have the required signing options.
        const requiredSigningOptions: Array<keyof SigningOptions> = [
            "alias",
            "keyPassword",
            "storePassword"
        ];

        // If we're creating a new key, we require additional info.
        if (options.signingMode === "new") {
            requiredSigningOptions.push("countryCode", "fullName", "organization", "organizationalUnit");
        }
        validationErrors.push(...requiredSigningOptions
            .filter(f => !options.signing![f])
            .map(f => `Signing option ${f} is required`));
    }

    return {
        options: options,
        validationErrors
    };
}

/** Attempts to read AndroidPackageOptions from the request body; null when absent. */
function tryParseOptionsFromRequest(request: express.Request): AndroidPackageOptions | null {
    // See if the body is our options request.
    if (request.body["packageId"]) {
        return request.body as AndroidPackageOptions;
    }
    return null;
}

/**
 * Generates the app package (APK, keys, digital asset links) in a temp
 * directory that is scheduled for cleanup afterwards.
 */
async function createAppPackage(options: AndroidPackageOptions): Promise<GeneratedAppPackage> {
    let projectDir: tmp.DirResult | null = null;
    try {
        // Create a temporary directory where we'll do all our work.
        projectDir = tmp.dirSync({ prefix: "pwabuilder-cloudapk-" });
        const projectDirPath = projectDir.name;

        // Get the signing information.
        const signing = await createLocalSigninKeyInfo(options, projectDirPath);

        // Generate the APK, keys, and digital asset links.
        return await createAppPackageWith403Fallback(options, projectDirPath, signing);
    } finally {
        // Schedule this directory for cleanup in the near future.
        scheduleTmpDirectoryCleanup(projectDir?.name);
    }
}

/**
 * Creates the app package. If the fetch of app resources is rejected with a
 * 403 / connection-refused error, retries once through our safe URL proxy.
 */
async function createAppPackageWith403Fallback(options: AndroidPackageOptions, projectDirPath: string, signing: LocalKeyFileSigningOptions | null) {
    // We've witnessed dozens of cases where we receive a 403 forbidden from accessing a server:
    // - https://github.com/pwa-builder/PWABuilder/issues/1499
    // - https://github.com/pwa-builder/PWABuilder/issues/1476
    // - https://github.com/pwa-builder/PWABuilder/issues/1375
    // - https://github.com/pwa-builder/PWABuilder/issues/1320
    //
    // When this happens, we can swap out the APK url items with a safe proxy server that doesn't have the same issues.
    // For example, if the icon is https://foo.com/img.png, we change this to
    // https://pwabuilder-safe-url.azurewebsites.net/api/getsafeurl?url=https://foo.com/img/png
    const http1Fetch = "node-fetch";
    const http2Fetch = "fetch-h2";
    try {
        const bubbleWrapper = new BubbleWrapper(options, projectDirPath, signing, http1Fetch);
        return await bubbleWrapper.generateAppPackage();
    } catch (error) {
        const errorMessage = (error as Error)?.message || "";
        const is403Error = errorMessage.includes("403") || errorMessage.includes("ECONNREFUSED");
        if (is403Error) {
            const optionsWithSafeUrl = getAndroidOptionsWithSafeUrls(options);
            console.warn("Encountered 403 error when generating app package. Retrying with safe URL proxy.", error, optionsWithSafeUrl);
            const bubbleWrapper = new BubbleWrapper(optionsWithSafeUrl, projectDirPath, signing, http2Fetch);
            return await bubbleWrapper.generateAppPackage();
        }

        // It's not a 403 / connection refused? Just throw it.
        console.error("Bubblewrap failed to generated app package.", error);
        throw error;
    }
}

/**
 * Builds the local key-file signing options: when the user supplied their own
 * keystore ("mine"), the base64 payload is written to disk first.
 * Returns null when signing mode is "none".
 */
async function createLocalSigninKeyInfo(apkSettings: AndroidPackageOptions, projectDir: string): Promise<LocalKeyFileSigningOptions | null> {
    // If we're told not to sign it, skip this.
    if (apkSettings.signingMode === "none") {
        return null;
    }

    // Did the user upload a key file for signing? If so, download it to our directory.
    const keyFilePath = path.join(projectDir, "signingKey.keystore");
    if (apkSettings.signingMode === "mine") {
        if (!apkSettings.signing?.file) {
            throw new Error("Signing mode is 'mine', but no signing key file was supplied.");
        }
        const fileBuffer = base64ToBuffer(apkSettings.signing.file);
        await fs.promises.writeFile(keyFilePath, fileBuffer);
    }

    // Decodes a "data:<mime>;base64,<payload>" string into raw bytes.
    function base64ToBuffer(base64: string): Buffer {
        const matches = base64.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
        if (!matches || matches.length !== 3) {
            throw new Error("Invalid base 64 string");
        }
        return Buffer.from(matches[2], "base64");
    }

    // Make sure we have signing info supplied, otherwise we received bad data.
    if (!apkSettings.signing) {
        throw new Error(`Signing mode was set to ${apkSettings.signingMode}, but no signing information was supplied.`);
    }

    return {
        keyFilePath: keyFilePath,
        ...apkSettings.signing
    };
}

/***
 * Creates a zip file containing the app package and associated artifacts.
 */
async function zipAppPackage(appPackage: GeneratedAppPackage, apkOptions: AndroidPackageOptions): Promise<string> {
    console.info("Zipping app package with options", appPackage, apkOptions);
    const apkName = `${apkOptions.name}${apkOptions.signingMode === "none" ? "-unsigned" : ""}.apk`;
    let tmpZipFile: string | null = null;

    return new Promise((resolve, reject) => {
        try {
            const archive = archiver("zip", { zlib: { level: 5 } });
            archive.on("warning", function (zipWarning: any) {
                console.warn("Warning during zip creation", zipWarning);
            });
            archive.on("error", function (zipError: any) {
                console.error("Error during zip creation", zipError);
                reject(zipError);
            });

            tmpZipFile = tmp.tmpNameSync({ prefix: "pwabuilder-cloudapk-", postfix: ".zip" });
            const output = fs.createWriteStream(tmpZipFile);
            output.on("close", () => {
                if (tmpZipFile) {
                    resolve(tmpZipFile);
                } else {
                    reject("No zip file was created");
                }
            });

            archive.pipe(output);

            // Append the APK and next steps readme.
            const isSigned = !!appPackage.signingInfo;
            archive.file(appPackage.apkFilePath, { name: apkName });
            archive.file(isSigned ? "./Next-steps.html" : "./Next-steps-unsigned.html", { name: "Readme.html" });

            // If we've signed it, we should have signing info, asset links file, and app bundle.
            if (appPackage.signingInfo && appPackage.signingInfo.keyFilePath) {
                archive.file(appPackage.signingInfo.keyFilePath, { name: "signing.keystore" });
                const readmeContents = [
                    "Keep your this file and signing.keystore in a safe place. You'll need these files if you want to upload future versions of your PWA to the Google Play Store.\r\n",
                    "Key store file: signing.keystore",
                    `Key store password: ${appPackage.signingInfo.storePassword}`,
                    `Key alias: ${appPackage.signingInfo.alias}`,
                    `Key password: ${appPackage.signingInfo.keyPassword}`,
                    `Signer's full name: ${appPackage.signingInfo.fullName}`,
                    `Signer's organization: ${appPackage.signingInfo.organization}`,
                    `Signer's organizational unit: ${appPackage.signingInfo.organizationalUnit}`,
                    `Signer's country code: ${appPackage.signingInfo.countryCode}`
                ];
                archive.append(readmeContents.join("\r\n"), { name: "signing-key-info.txt" });

                // Zip up the asset links.
                if (appPackage.assetLinkFilePath) {
                    archive.file(appPackage.assetLinkFilePath, { name: "assetlinks.json" });
                }

                // Zip up the app bundle as well.
                if (appPackage.appBundleFilePath) {
                    archive.file(appPackage.appBundleFilePath, { name: `${apkOptions.name}.aab` });
                }
            }

            // Add the source code directory if need be.
            if (apkOptions.includeSourceCode) {
                archive.directory(appPackage.projectDirectory, "source");
            }

            archive.finalize();
        } catch (err) {
            reject(err);
        } finally {
            scheduleTmpFileCleanup(tmpZipFile);
        }
    });
}

/** Schedules deletion of a temp file a few minutes from now (best effort). */
function scheduleTmpFileCleanup(file: string | null) {
    if (file) {
        console.info("Scheduled cleanup for tmp file", file);
        const delFile = function () {
            const filePath = file.replace(/\\/g, "/"); // Use / instead of \ otherwise del gets failed to delete files on Windows
            del([filePath], { force: true })
                .then((deletedPaths: string[]) => console.info("Cleaned up tmp file", deletedPaths))
                .catch((err: any) => console.warn("Unable to cleanup tmp file. It will be cleaned up on process exit", err, filePath));
        };
        setTimeout(() => delFile(), tempFileRemovalTimeoutMs);
    }
}

function scheduleTmpDirectoryCleanup(dir?: string | null) { // We can't use dir.removeCallback() because it will fail with "ENOTEMPTY: directory not empty" error. // We can't use fs.rmdir(path, { recursive: true }) as it's supported only in Node 12+, which isn't used by our docker image. if (dir) { const dirToDelete = dir.replace(/\\/g, "/"); // Use '/' instead of '\', otherwise del gets confused and won't cleanup on Windows. const dirPatternToDelete = dirToDelete + "/**"; // Glob pattern to delete subdirectories and files console.info("Scheduled cleanup for tmp directory", dirPatternToDelete); const delDir = function () { del([dirPatternToDelete], { force: true }) // force allows us to delete files outside of workspace .then((deletedPaths: string[]) => console.info("Cleaned up tmp directory", dirPatternToDelete, deletedPaths?.length, "subdirectories and files were deleted")) .catch((err: any) => console.warn("Unable to cleanup tmp directory.
It will be cleaned up on process exit", err)); }; setTimeout(() => delDir(), tempFileRemovalTimeoutMs); } } function getAndroidOptionsWithSafeUrls(options: AndroidPackageOptions): AndroidPackageOptions { const absoluteUrlProps: Array<keyof AndroidPackageOptions> = [ "maskableIconUrl", "monochromeIconUrl", "iconUrl", "webManifestUrl", ]; const newOptions: AndroidPackageOptions = { ...options }; for (let prop of absoluteUrlProps) { const url = newOptions[prop]; if (url && typeof url === "string") { const safeUrlFetcherEndpoint = "https://pwabuilder-safe-url.azurewebsites.net/api/getsafeurl"; const safeUrl = `${safeUrlFetcherEndpoint}?url=${encodeURIComponent(url)}`; (newOptions[prop] as any) = safeUrl; } } return newOptions; } module.exports = router;
the_stack
import { ipcRenderer } from 'electron' import { update } from './update-preview' import { MathJaxController, processHTMLString } from './mathjax-helper' import * as util from './util' import { getMedia } from '../src/util-common' import { ChannelMap } from './ipc' let handlerId: number let nativePageScrollKeys = false function uncaughtError(err: Error) { ipcRenderer.send<'atom-markdown-preview-plus-ipc-uncaught-error'>( 'atom-markdown-preview-plus-ipc-uncaught-error', handlerId, { message: err.message, name: err.name, stack: err.stack, }, ) } window.addEventListener('error', (e) => { uncaughtError(e.error as Error) }) window.addEventListener('unhandledrejection', (evt) => { uncaughtError((evt as any).reason as Error) }) function mkResPromise<T>() { let resFn: (value: T | PromiseLike<T>) => void const p = new Promise<T>((resolve) => (resFn = resolve)) as Promise<T> & { resolve: typeof resFn } p.resolve = resFn! return p } const atomVars = { mathJax: mkResPromise<MathJaxController>(), sourceLineMap: new Map<number, Element>(), revSourceMap: new WeakMap<Element, number[]>(), } ipcRenderer.on<'set-id'>('set-id', (_evt, id) => { handlerId = id }) ipcRenderer.on<'init'>('init', (_evt, params) => { atomVars.mathJax.resolve( MathJaxController.create(params.userMacros, params.mathJaxConfig), ) document.documentElement!.dataset.markdownPreviewPlusContext = params.context // tslint:disable-next-line:totality-check if (params.context === 'pdf-export') { document.documentElement!.style.setProperty( 'width', `${params.pdfExportOptions.width}mm`, 'important', ) } }) ipcRenderer.on<'set-native-keys'>('set-native-keys', (_evt, val) => { nativePageScrollKeys = val }) function scrollSync({ firstLine, lastLine }: ChannelMap['scroll-sync']) { if (firstLine === 0) { window.scroll({ top: 0 }) return } const slm = atomVars.sourceLineMap const lines = Array.from(slm.keys()).sort((a, b) => a - b) let lowix = lines.findIndex((x) => x >= firstLine) if (lowix > 0) lowix-- let highix = 
lines.findIndex((x) => x >= lastLine) if (highix === -1) highix = lines.length - 1 else if (highix < lines.length - 1) highix++ const low = lines[lowix] const high = lines[highix] let norm = 0 let meanScroll = 0 const entries = Array.from(slm.entries()).slice(lowix, highix + 1) for (const [line, item] of entries) { const weight = line <= (high + low) / 2 ? line - low + 1 : high - line + 1 norm += weight meanScroll += item.getBoundingClientRect().top * weight } if (norm === 0) return const offset = document.documentElement!.scrollTop const clientHeight = document.documentElement!.clientHeight const top = offset - clientHeight / 2 + meanScroll / norm window.scroll({ top }) } ipcRenderer.on<'scroll-sync'>('scroll-sync', (_evt, params) => { scrollSync(params) }) ipcRenderer.on<'style'>('style', (_event, { styles }) => { let styleElem = document.head!.querySelector('style#atom-styles') if (!styleElem) { styleElem = document.createElement('style') styleElem.id = 'atom-styles' document.head!.appendChild(styleElem) } styleElem.innerHTML = styles.join('\n') }) ipcRenderer.on<'update-images'>('update-images', (_event, { oldsrc, v }) => { const imgs = getMedia(document) for (const img of Array.from(imgs)) { let ovs: string | undefined let ov: number | undefined let attrName: 'href' | 'src' if (img.tagName === 'LINK') attrName = 'href' else attrName = 'src' let src = img.getAttribute(attrName)! const match = src.match(/^(.*)\?v=(\d+)$/) if (match) [, src, ovs] = match if (src === oldsrc) { if (ovs !== undefined) ov = parseInt(ovs, 10) if (v !== ov) img[attrName] = v ? 
`${src}?v=${v}` : `${src}` } } }) ipcRenderer.on<'sync'>('sync', (_event, { line, flash }) => { const root = document.querySelector('div.update-preview') if (!root) return let element = atomVars.sourceLineMap.get(line) if (!element) { for (let i = line - 1; i >= 0; i -= 1) { element = atomVars.sourceLineMap.get(line) if (element) break } } if (!element) return element.scrollIntoViewIfNeeded(true) if (flash) { element.classList.add('flash') setTimeout(() => element!.classList.remove('flash'), 1000) } }) let updatePromise: Promise<void> | undefined let nextUpdateParams: ChannelMap['update-preview'] | undefined async function doUpdate({ id, html, renderLaTeX, map, diffMethod, scrollSyncParams, }: ChannelMap['update-preview']) { // div.update-preview created after constructor st UpdatePreview cannot // be instanced in the constructor const preview = document.querySelector('div.update-preview') if (!preview) return const parser = new DOMParser() const domDocument = parser.parseFromString(html, 'text/html') const doc = document if (doc && domDocument.head!.hasChildNodes()) { let container = doc.head!.querySelector('original-elements') if (!container) { container = doc.createElement('original-elements') doc.head!.insertBefore(container, doc.head!.firstElementChild) } container.innerHTML = '' for (const headElement of Array.from(domDocument.head!.childNodes)) { container.appendChild(headElement) } } const visibleElements = scrollSyncParams ? 
undefined : Array.from(preview.children) .map((x) => ({ el: x, r: x.getBoundingClientRect() })) .filter(({ r }) => r.top <= window.innerHeight && r.bottom >= 0) await update(preview, domDocument.body, { renderLaTeX, diffMethod, mjController: await atomVars.mathJax, }) if (visibleElements) { const stillVisibleElements = visibleElements.filter( ({ el }) => (el as HTMLElement).offsetParent, ) const lastEl = stillVisibleElements[stillVisibleElements.length - 1] if (lastEl) { window.scrollBy({ top: lastEl.el.getBoundingClientRect().bottom - lastEl.r.bottom, }) } } if (map) { const slsm = new Map<number, Element>() const rsm = new WeakMap<Element, number[]>() for (const [lineS, path] of Object.entries(map)) { const line = parseInt(lineS, 10) const elem = util.resolveElement(preview, path) if (elem) { slsm.set(line, elem) const rsmel = rsm.get(elem) if (rsmel) rsmel.push(line) else rsm.set(elem, [line]) } } atomVars.sourceLineMap = slsm atomVars.revSourceMap = rsm } if (scrollSyncParams) scrollSync(scrollSyncParams) ipcRenderer.send<'atom-markdown-preview-plus-ipc-request-reply'>( 'atom-markdown-preview-plus-ipc-request-reply', handlerId, { id, request: 'update-preview', result: processHTMLString(preview), }, ) } function delayedUpdate(): Promise<void> | undefined { let res if (nextUpdateParams) res = doUpdate(nextUpdateParams).then(delayedUpdate) nextUpdateParams = undefined return res } ipcRenderer.on<'update-preview'>('update-preview', (_event, params) => { if (!updatePromise) { updatePromise = doUpdate(params) .then(delayedUpdate) .catch(uncaughtError) .then(() => { updatePromise = undefined }) } else { nextUpdateParams = params } }) ipcRenderer.on<'await-fully-ready'>('await-fully-ready', (_event, { id }) => { function sendLoaded() { requestAnimationFrame(function () { ipcRenderer.send<'atom-markdown-preview-plus-ipc-request-reply'>( 'atom-markdown-preview-plus-ipc-request-reply', handlerId, { id, request: 'await-fully-ready', result: void 0, }, ) }) } // 
tslint:disable-next-line: totality-check if (document.readyState === 'complete') { sendLoaded() return } function loaded() { sendLoaded() document.removeEventListener('load', loaded) } document.addEventListener('load', loaded) }) const baseElement = document.createElement('base') document.head!.appendChild(baseElement) ipcRenderer.on<'set-base-path'>('set-base-path', (_evt, { path }) => { if (path) baseElement.href = path else baseElement.href = '' }) ipcRenderer.on<'error'>('error', (_evt, { msg }) => { const preview = document.querySelector('div.update-preview') if (!preview) return const errorDiv = document.createElement('div') errorDiv.innerHTML = `<h2>Previewing Markdown Failed</h2><h3>${msg}</h3>` if (preview.firstElementChild) { preview.insertBefore(errorDiv, preview.firstElementChild) } else { preview.appendChild(errorDiv) } }) document.addEventListener('wheel', (event) => { if (event.ctrlKey) { if (event.deltaY > 0) { ipcRenderer.send<'atom-markdown-preview-plus-ipc-zoom-in'>( 'atom-markdown-preview-plus-ipc-zoom-in', handlerId, ) } else if (event.deltaY < 0) { ipcRenderer.send<'atom-markdown-preview-plus-ipc-zoom-out'>( 'atom-markdown-preview-plus-ipc-zoom-out', handlerId, ) } event.preventDefault() event.stopPropagation() } }) document.addEventListener('scroll', (_event) => { const el = document.documentElement! 
const height = el.clientHeight const visible = Array.from(atomVars.sourceLineMap.entries()) .filter(([_line, elem]) => { const { top, bottom } = elem.getBoundingClientRect() return top > 0 && bottom < height }) .map(([line, _elem]) => line) ipcRenderer.send<'atom-markdown-preview-plus-ipc-did-scroll-preview'>( 'atom-markdown-preview-plus-ipc-did-scroll-preview', handlerId, { max: Math.max(...visible), min: Math.min(...visible), }, ) }) function keyEventHandler(type: 'keydown' | 'keyup', e: KeyboardEvent) { if ( nativePageScrollKeys && !e.altKey && !e.ctrlKey && !e.shiftKey && !e.metaKey && e.code.match(/^(Arrow.*|Page.*|Space|Home|End)$/) ) { return } const data = { type: type, altKey: e.altKey, ctrlKey: e.ctrlKey, bubbles: e.bubbles, cancelable: e.cancelable, code: e.code, composed: e.composed, detail: e.detail, isComposing: e.isComposing, key: e.key, location: e.location, metaKey: e.metaKey, repeat: e.repeat, shiftKey: e.shiftKey, } as const ipcRenderer.send<'atom-markdown-preview-plus-ipc-key'>( 'atom-markdown-preview-plus-ipc-key', handlerId, data, ) e.preventDefault() } document.addEventListener('keydown', keyEventHandler.bind(this, 'keydown')) document.addEventListener('keyup', keyEventHandler.bind(this, 'keyup')) let lastContextMenuTarget: HTMLElement document.addEventListener('contextmenu', (e) => { lastContextMenuTarget = e.target as HTMLElement ipcRenderer.send<'atom-markdown-preview-plus-ipc-show-context-menu'>( 'atom-markdown-preview-plus-ipc-show-context-menu', handlerId, ) }) ipcRenderer.on<'sync-source'>('sync-source', (_, { id }) => { let element = lastContextMenuTarget const rsm = atomVars.revSourceMap let lines = rsm.get(element) while (!lines && element.parentElement) { element = element.parentElement lines = rsm.get(element) } if (!lines) return ipcRenderer.send<'atom-markdown-preview-plus-ipc-request-reply'>( 'atom-markdown-preview-plus-ipc-request-reply', handlerId, { id, request: 'sync-source', result: Math.min(...lines), }, ) }) 
ipcRenderer.on<'reload'>('reload', (_, { id }) => { window.onbeforeunload = null ipcRenderer.send<'atom-markdown-preview-plus-ipc-request-reply'>( 'atom-markdown-preview-plus-ipc-request-reply', handlerId, { id, request: 'reload', result: undefined, }, ) }) window.onbeforeunload = function () { return false } ipcRenderer.on<'get-tex-config'>('get-tex-config', async (_, { id }) => { ipcRenderer.send<'atom-markdown-preview-plus-ipc-request-reply'>( 'atom-markdown-preview-plus-ipc-request-reply', handlerId, { id, request: 'get-tex-config', result: (await atomVars.mathJax).jaxTeXConfig(), }, ) }) ipcRenderer.on<'get-selection'>('get-selection', async (_, { id }) => { const selection = window.getSelection() const selectedText = selection && selection.toString() const selectedNode = selection && selection.anchorNode ipcRenderer.send<'atom-markdown-preview-plus-ipc-request-reply'>( 'atom-markdown-preview-plus-ipc-request-reply', handlerId, { id, request: 'get-selection', result: selectedText && selectedNode ? selectedText : undefined, }, ) }) document.addEventListener('click', (event) => { if (!event.target) return const el = event.target as HTMLElement if (el.tagName === 'A') { const href = el.getAttribute('href') if (href && href.startsWith('#')) { event.preventDefault() const anchor = document.getElementById(decodeURIComponent(href).slice(1)) if (anchor) anchor.scrollIntoView() } } })
the_stack
import { Component, ElementRef, EventEmitter, Input, OnInit, Output, ViewChild, OnDestroy } from '@angular/core';
import { Subscription } from 'rxjs';

import { isFirefox, isIE, isIEOrEdge, openExternalLink } from './../../../../utils/util';
import { PoKeyCodeEnum } from './../../../../enums/po-key-code.enum';
import { PoRichTextService } from '../po-rich-text.service';

// execCommand states polled on each selection change to highlight toolbar buttons.
const poRichTextBodyCommands = [
  'bold',
  'italic',
  'underline',
  'justifyleft',
  'justifycenter',
  'justifyright',
  'justifyfull',
  'insertUnorderedList',
  'Createlink'
];

/**
 * Editable body of the po-rich-text component. Hosts a contenteditable-style
 * element (designMode 'on'), applies document.execCommand editing commands,
 * and emits the resulting HTML plus the active command/selection state.
 * Contains browser-specific workarounds for IE, Edge and Firefox.
 */
@Component({
  selector: 'po-rich-text-body',
  templateUrl: './po-rich-text-body.component.html'
})
export class PoRichTextBodyComponent implements OnInit, OnDestroy {
  @ViewChild('bodyElement', { static: true }) bodyElement: ElementRef;

  @Input('p-height') height?: string;

  @Input('p-model-value') modelValue?: string;

  @Input('p-placeholder') placeholder?: string;

  @Input('p-readonly') readonly?: string;

  @Output('p-change') change = new EventEmitter<any>();

  @Output('p-commands') commands = new EventEmitter<any>();

  @Output('p-selected-link') selectedLink = new EventEmitter<any>();

  @Output('p-shortcut-command') shortcutCommand = new EventEmitter<any>();

  @Output('p-value') value = new EventEmitter<any>();

  @Output('p-blur') blur = new EventEmitter<any>();

  // True while the link modal is editing an existing link (affects Firefox workaround).
  private isLinkEditing: boolean;
  // Anchor element the cursor is currently positioned in, if any.
  private linkElement: any;
  // Debounce handle for the p-change emission on blur.
  private timeoutChange: any;
  // Snapshot of modelValue taken on focus, compared on blur to detect changes.
  private valueBeforeChange: any;
  private modelSubscription: Subscription;

  constructor(private richTextService: PoRichTextService) {}

  ngOnInit() {
    this.bodyElement.nativeElement.designMode = 'on';

    // Re-render the body whenever the external model pushes a new value.
    this.modelSubscription = this.richTextService.getModel().subscribe(modelValue => {
      this.modelValue = modelValue;
      this.bodyElement.nativeElement.innerHTML = '';
      this.updateValueWithModelValue();
      this.addClickListenerOnAnchorElements();
    });
  }

  ngOnDestroy() {
    this.modelSubscription?.unsubscribe();
  }

  /**
   * Applies an editing command to the body. A plain string is executed
   * directly; an object form carries a value, with 'InsertHTML' routed
   * through the link-specific handling. Emits the updated value afterwards.
   */
  executeCommand(command: string | { command: any; value: string | any }) {
    this.bodyElement.nativeElement.focus();

    if (typeof command === 'object') {
      if (command.command === 'InsertHTML') {
        const { command: linkCommand, value: { urlLink }, value: { urlLinkText } } = command;
        this.handleCommandLink(linkCommand, urlLink, urlLinkText);
      } else {
        document.execCommand(command.command, false, command.value);
      }
    } else {
      document.execCommand(command, false, null);
    }

    this.updateModel();
    this.value.emit(this.modelValue);
  }

  // Tracks whether the link modal is editing an existing link.
  linkEditing(event) {
    this.isLinkEditing = !!event;
  }

  onBlur() {
    this.blur.emit();
    if (this.modelValue !== this.valueBeforeChange) {
      clearTimeout(this.timeoutChange);
      // Debounced so rapid focus changes emit a single p-change.
      this.timeoutChange = setTimeout(() => {
        this.change.emit(this.modelValue);
      }, 200);
    }
  }

  focus(): void {
    this.bodyElement.nativeElement.focus();
  }

  onClick() {
    this.emitSelectionCommands();
  }

  onFocus() {
    this.valueBeforeChange = this.modelValue;
  }

  onKeyDown(event) {
    // Ctrl+K / Cmd+K opens the link dialog instead of the browser default.
    const keyK = event.keyCode === PoKeyCodeEnum.keyK;
    const isLinkShortcut = (keyK && event.ctrlKey) || (keyK && event.metaKey);

    if (isLinkShortcut) {
      event.preventDefault();
      this.shortcutCommand.emit();
    }

    this.toggleCursorOnLink(event, 'add');
  }

  onKeyUp(event: any) {
    this.toggleCursorOnLink(event, 'remove');
    this.removeBrElement();
    this.updateModel();
    this.emitSelectionCommands();
  }

  onPaste() {
    this.update();
    // Deferred so pasted anchors exist in the DOM before listeners are attached.
    setTimeout(() => this.addClickListenerOnAnchorElements());
  }

  // Refreshes the model/commands after the DOM has settled (two macrotasks).
  update() {
    setTimeout(() => this.updateModel());

    setTimeout(() => {
      this.removeBrElement();
      this.updateModel();
      this.emitSelectionCommands();
    });
  }

  private addClickListenerOnAnchorElements() {
    this.bodyElement.nativeElement.querySelectorAll('a').forEach(element => {
      element.addEventListener('click', this.onAnchorClick);
    });
  }

  // Emits the currently active commands, fore color and selected link.
  private emitSelectionCommands() {
    const commands = poRichTextBodyCommands.filter(command => document.queryCommandState(command));
    const rgbColor = document.queryCommandValue('ForeColor');

    let hexColor;
    if (!isIE()) {
      hexColor = this.rgbToHex(rgbColor);
    }

    if (this.isCursorPositionedInALink()) {
      commands.push('Createlink');
    }

    this.selectedLink.emit(this.linkElement); // Important: kept outside the if so it also emits undefined.

    this.commands.emit({ commands, hexColor });
  }

  // Returns the element surrounding the caret and its tag name, or undefined.
  private getTextSelection() {
    const textSelection = document.getSelection();

    if (!textSelection) {
      return;
    }

    const focusNode = textSelection.focusNode ? textSelection.focusNode.parentElement : undefined;
    const anchorNode = textSelection.anchorNode ? textSelection.anchorNode.parentNode : undefined;
    const node = focusNode || anchorNode;
    let tagName;

    if (node) {
      tagName = node['tagName'] || node['nodeName'];
      return { node, tagName };
    }
  }

  private handleCommandLink(linkCommand: string, urlLink: string, urlLinkText: string) {
    if (isIE()) {
      this.insertHtmlLinkElement(urlLink, urlLinkText);
    } else {
      // '&nbsp;' is needed so the cursor does not get stuck inside the link on Firefox.
      const linkValue =
        isFirefox() && !this.isLinkEditing
          ? `&nbsp;${this.makeLinkTag(urlLink, urlLinkText)}&nbsp;`
          : this.makeLinkTag(urlLink, urlLinkText);

      document.execCommand(linkCommand, false, linkValue);
    }

    this.addClickListenerOnAnchorElements();
  }

  // IE-specific handling, since IE does not support the 'insertHTML' command.
  private insertHtmlLinkElement(urlLink: string, urlLinkText: string) {
    const selection = document.getSelection();
    const selectionRange = selection.getRangeAt(0);
    const elementLink = document.createElement('a');
    const elementlinkText = document.createTextNode(urlLinkText);

    elementLink.appendChild(elementlinkText);
    elementLink.href = urlLink;
    elementLink.setAttribute('target', '_blank');
    elementLink.classList.add('po-rich-text-link');

    selectionRange.deleteContents();
    selectionRange.insertNode(elementLink);
  }

  // True when the caret sits inside an anchor; also updates this.linkElement.
  private isCursorPositionedInALink(): boolean {
    const textSelection = this.getTextSelection();
    this.linkElement = undefined;

    let isLink = false;

    if (textSelection && textSelection.node && textSelection.tagName === 'A') {
      this.linkElement = textSelection.node;
      isLink = true;
    } else if ((isFirefox() || isIEOrEdge()) && this.verifyCursorPositionInFirefoxIEEdge()) {
      isLink = true;
    } else {
      isLink = textSelection ? this.isParentNodeAnchor(textSelection) : false;
    }

    return isLink;
  }

  // Walks up from the selection node looking for an enclosing <a> element.
  private isParentNodeAnchor(textSelection): boolean {
    let element = textSelection.node;
    let isLink = false;

    while (element && (element.tagName !== null || element.nodeName !== null)) {
      if (element.tagName === 'A' || element.nodeName === 'A') {
        this.linkElement = element;
        isLink = true;
        return isLink;
      }
      element = element.parentElement || element.parentNode;
    }

    this.linkElement = undefined;
    return isLink;
  }

  private makeLinkTag(urlLink: string, urlLinkText: string) {
    return `<a class="po-rich-text-link" href="${urlLink}" target="_blank">${urlLinkText || urlLink}</a>`;
  }

  // Ctrl/Cmd+click on a link opens it externally instead of navigating the editor.
  private onAnchorClick = event => {
    const { target, ctrlKey, metaKey } = event;
    let url;
    let elementLink;

    if (ctrlKey || metaKey) {
      // event.path is non-standard (Chromium); fall back to the target's href attribute.
      if (event.path) {
        event.path.forEach(element => {
          if (element.nodeName === 'A') {
            url = element.href;
            elementLink = element;
          }
        });
      } else {
        url = target.attributes.href.value;
        elementLink = target;
      }
      openExternalLink(url);
      elementLink.classList.remove('po-clickable');
    }
  };

  // Removes the <br> tag Firefox creates when the body is cleared.
  private removeBrElement() {
    const bodyElement = this.bodyElement.nativeElement;

    if (!bodyElement.innerText.trim() && bodyElement.childNodes.length === 1 && bodyElement.querySelector('br')) {
      bodyElement.querySelector('br').remove();
    }
  }

  private rgbToHex(rgb) {
    // Converts the rgb() color string returned by queryCommandValue to hexadecimal.
    const sep = rgb.indexOf(',') > -1 ? ',' : ' ';
    rgb = rgb.substr(4).split(')')[0].split(sep);

    let r = (+rgb[0]).toString(16);
    let g = (+rgb[1]).toString(16);
    let b = (+rgb[2]).toString(16);

    if (r.length === 1) {
      r = '0' + r;
    }
    if (g.length === 1) {
      g = '0' + g;
    }
    if (b.length === 1) {
      b = '0' + b;
    }

    return '#' + r + g + b;
  }

  // Adds/removes the 'po-clickable' class while Ctrl/Cmd is held over a link.
  private toggleCursorOnLink(event: any, action: 'add' | 'remove') {
    const selection = document.getSelection();
    const element = selection.focusNode ? selection.focusNode.parentNode : undefined;

    const isCtrl = event.key === 'Control';
    const isCommand = event.key === 'Meta';

    const isOnCtrlLink = this.isCursorPositionedInALink() && (isCtrl || isCommand);

    if (element) {
      if (isOnCtrlLink) {
        element['classList'][action]('po-clickable');
      } else {
        const isClickable = element['classList'] && element['classList'].contains('po-clickable');
        if (isClickable) {
          element['classList'].remove('po-clickable');
        }
      }
      this.updateModel();
    }
  }

  private updateModel() {
    this.modelValue = this.bodyElement.nativeElement.innerHTML;
    this.value.emit(this.modelValue);
  }

  private updateValueWithModelValue() {
    if (this.modelValue) {
      this.bodyElement.nativeElement.insertAdjacentHTML('afterbegin', this.modelValue);
    }
  }

  // Firefox/IE/Edge report the caret differently; inspect the focus node and,
  // failing that, the first node of the cloned selection range.
  private verifyCursorPositionInFirefoxIEEdge(): boolean {
    const textSelection = document.getSelection();
    const nodeLink = textSelection.focusNode;
    let isLink = false;

    if (nodeLink && nodeLink.nodeName === 'A') {
      this.linkElement = nodeLink;
      isLink = true;
    } else {
      const range = textSelection.getRangeAt(0);
      const fragmentDocument = range.cloneContents();
      const element = fragmentDocument.childNodes[0] || fragmentDocument.firstElementChild;

      this.linkElement = element && element.nodeName === 'A' ? element : undefined;
      isLink = !!this.linkElement;
    }

    return isLink;
  }
}
the_stack
import { deepStrictEqual, throws, doesNotThrow, ok } from "assert"; import { deflateRawSync, inflateRawSync } from "zlib"; import { Parser } from "../lib/binary_parser"; function compositeParserTests( name: string, factory: (array: Uint8Array | number[]) => Uint8Array ) { describe(`Composite parser (${name})`, () => { function hexToBuf(hex: string): Uint8Array { return factory(hex.match(/.{1,2}/g)!.map((byte) => parseInt(byte, 16))); } describe("Array parser", () => { it("should parse array of primitive types", () => { const parser = Parser.start().uint8("length").array("message", { length: "length", type: "uint8", }); const buffer = factory([12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); deepStrictEqual(parser.parse(buffer), { length: 12, message: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], }); }); it("should parse array of primitive types with lengthInBytes", () => { const parser = Parser.start().uint8("length").array("message", { lengthInBytes: "length", type: "uint8", }); const buffer = factory([12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); deepStrictEqual(parser.parse(buffer), { length: 12, message: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], }); }); it("should parse array of user defined types", () => { const elementParser = new Parser().uint8("key").int16le("value"); const parser = Parser.start().uint16le("length").array("message", { length: "length", type: elementParser, }); const buffer = factory([ 0x02, 0x00, 0xca, 0xd2, 0x04, 0xbe, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x02, message: [ { key: 0xca, value: 1234 }, { key: 0xbe, value: 1235 }, ], }); }); it("should parse array of user defined types and have access to parent context", () => { const elementParser = new Parser().uint8("key").array("value", { type: "uint8", length: function (this: any) { return this.$parent.valueLength; }, }); const parser = Parser.start() .useContextVars() .uint16le("length") .uint16le("valueLength") .array("message", { length: "length", type: elementParser, 
}); const buffer = factory([ 0x02, 0x00, 0x02, 0x00, 0xca, 0xd2, 0x04, 0xbe, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x02, valueLength: 0x02, message: [ { key: 0xca, value: [0xd2, 0x04] }, { key: 0xbe, value: [0xd3, 0x04] }, ], }); }); it("should parse array of user defined types and have access to root context", () => { const elementParser = new Parser().uint8("key").nest("data", { type: new Parser().array("value", { type: "uint8", length: "$root.valueLength", }), }); const parser = Parser.start() .useContextVars() .uint16le("length") .uint16le("valueLength") .array("message", { length: "length", type: elementParser, }); const buffer = factory([ 0x02, 0x00, 0x02, 0x00, 0xca, 0xd2, 0x04, 0xbe, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x02, valueLength: 0x02, message: [ { key: 0xca, data: { value: [0xd2, 0x04] } }, { key: 0xbe, data: { value: [0xd3, 0x04] } }, ], }); }); it("should parse array of user defined types with lengthInBytes", () => { const elementParser = new Parser().uint8("key").int16le("value"); const parser = Parser.start().uint16le("length").array("message", { lengthInBytes: "length", type: elementParser, }); const buffer = factory([ 0x06, 0x00, 0xca, 0xd2, 0x04, 0xbe, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x06, message: [ { key: 0xca, value: 1234 }, { key: 0xbe, value: 1235 }, ], }); }); it("should parse array of user defined types with lengthInBytes literal", () => { const elementParser = new Parser().uint8("key").int16le("value"); const parser = Parser.start().array("message", { lengthInBytes: 0x06, type: elementParser, }); const buffer = factory([0xca, 0xd2, 0x04, 0xbe, 0xd3, 0x04]); deepStrictEqual(parser.parse(buffer), { message: [ { key: 0xca, value: 1234 }, { key: 0xbe, value: 1235 }, ], }); }); it("should parse array of user defined types with lengthInBytes function", () => { const elementParser = new Parser().uint8("key").int16le("value"); const parser = Parser.start() 
.uint16le("length") .array("message", { lengthInBytes: function (this: any) { return this.length; }, type: elementParser, }); const buffer = factory([ 0x06, 0x00, 0xca, 0xd2, 0x04, 0xbe, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x06, message: [ { key: 0xca, value: 1234 }, { key: 0xbe, value: 1235 }, ], }); }); it("should parse array of arrays", () => { const rowParser = Parser.start().uint8("length").array("cols", { length: "length", type: "int32le", }); const parser = Parser.start().uint8("length").array("rows", { length: "length", type: rowParser, }); const size = 1 + 10 * (1 + 5 * 4); const buffer = Buffer.alloc ? Buffer.alloc(size) : new Buffer(size); const dataView = new DataView(buffer.buffer); let iterator = 0; buffer[iterator] = 10; iterator += 1; for (let i = 0; i < 10; i++) { buffer[iterator] = 5; iterator += 1; for (let j = 0; j < 5; j++) { dataView.setInt32(iterator, i * j, true); iterator += 4; } } deepStrictEqual(parser.parse(buffer), { length: 10, rows: [ { length: 5, cols: [0, 0, 0, 0, 0] }, { length: 5, cols: [0, 1, 2, 3, 4] }, { length: 5, cols: [0, 2, 4, 6, 8] }, { length: 5, cols: [0, 3, 6, 9, 12] }, { length: 5, cols: [0, 4, 8, 12, 16] }, { length: 5, cols: [0, 5, 10, 15, 20] }, { length: 5, cols: [0, 6, 12, 18, 24] }, { length: 5, cols: [0, 7, 14, 21, 28] }, { length: 5, cols: [0, 8, 16, 24, 32] }, { length: 5, cols: [0, 9, 18, 27, 36] }, ], }); }); it("should parse until eof when readUntil is specified", () => { const parser = Parser.start().array("data", { readUntil: "eof", type: "uint8", }); const buffer = factory([ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, ]); deepStrictEqual(parser.parse(buffer), { data: [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff], }); }); it("should parse until function returns true when readUntil is function", () => { const parser = Parser.start().array("data", { readUntil: (item: number, _: Buffer) => item === 0, type: "uint8", }); const buffer = factory([ 0xff, 
0xff, 0xff, 0x01, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, ]); deepStrictEqual(parser.parse(buffer), { data: [0xff, 0xff, 0xff, 0x01, 0x00], }); }); it("should parse until function returns true when readUntil is function (using read-ahead)", () => { const parser = Parser.start().array("data", { readUntil: (_: number, buf: Buffer) => buf.length > 0 && buf[0] === 0, type: "uint8", }); const buffer = factory([ 0xff, 0xff, 0xff, 0x01, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, ]); deepStrictEqual(parser.parse(buffer), { data: [0xff, 0xff, 0xff, 0x01], }); }); it("should parse associative arrays", () => { const parser = Parser.start() .int8("numlumps") .array("lumps", { type: Parser.start() .int32le("filepos") .int32le("size") .string("name", { length: 8, encoding: "utf8" }), length: "numlumps", key: "name", }); const buffer = factory([ 0x02, 0xd2, 0x04, 0x00, 0x00, 0x2e, 0x16, 0x00, 0x00, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x2e, 0x16, 0x00, 0x00, 0xd2, 0x04, 0x00, 0x00, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, ]); deepStrictEqual(parser.parse(buffer), { numlumps: 2, lumps: { AAAAAAAA: { filepos: 1234, size: 5678, name: "AAAAAAAA", }, bbbbbbbb: { filepos: 5678, size: 1234, name: "bbbbbbbb", }, }, }); }); it("should use formatter to transform parsed array", () => { const parser = Parser.start().array("data", { type: "uint8", length: 4, formatter: (arr: number[]) => arr.join("."), }); const buffer = factory([0x0a, 0x0a, 0x01, 0x6e]); deepStrictEqual(parser.parse(buffer), { data: "10.10.1.110", }); }); it("should be able to go into recursion", () => { const parser = Parser.start() .namely("self") .uint8("length") .array("data", { type: "self", length: "length", }); const buffer = factory([1, 1, 1, 0]); deepStrictEqual(parser.parse(buffer), { length: 1, data: [ { length: 1, data: [ { length: 1, data: [{ length: 0, data: [] }], }, ], }, ], }); }); it("should be able to go into even deeper recursion", () => { const parser = Parser.start() .namely("self") 
.uint8("length") .array("data", { type: "self", length: "length", }); // 2 // / \ // 3 1 // / | \ \ // 1 0 2 0 // / / \ // 0 1 0 // / // 0 // prettier-ignore const buffer = factory([ 2, /* 0 */ 3, /* 0 */ 1, /* 0 */ 0, /* 1 */ 0, /* 2 */ 2, /* 0 */ 1, /* 0 */ 0, /* 1 */ 0, /* 1 */ 1, /* 0 */ 0, ]); deepStrictEqual(parser.parse(buffer), { length: 2, data: [ { length: 3, data: [ { length: 1, data: [{ length: 0, data: [] }] }, { length: 0, data: [] }, { length: 2, data: [ { length: 1, data: [{ length: 0, data: [] }] }, { length: 0, data: [] }, ], }, ], }, { length: 1, data: [{ length: 0, data: [] }], }, ], }); }); it("should allow parent parser attributes as choice key", () => { const ChildParser = Parser.start().choice("data", { tag: (vars: { version: number }) => vars.version, choices: { 1: Parser.start().uint8("v1"), 2: Parser.start().uint16("v2"), }, }); const ParentParser = Parser.start() .uint8("version") .nest("child", { type: ChildParser }); const buffer1 = factory([0x1, 0x2]); deepStrictEqual(ParentParser.parse(buffer1), { version: 1, child: { data: { v1: 2 } }, }); const buffer2 = factory([0x2, 0x3, 0x4]); deepStrictEqual(ParentParser.parse(buffer2), { version: 2, child: { data: { v2: 0x0304 } }, }); }); it("should be able to access to index context variable when using length", () => { const elementParser = new Parser() .uint8("key", { formatter: function (this: any, item: number) { return this.$index % 2 === 0 ? 
item : String.fromCharCode(item); }, }) .nest("data", { type: new Parser().array("value", { type: "uint8", length: "$root.valueLength", }), }); const parser = Parser.start() .useContextVars() .uint16le("length") .uint16le("valueLength") .array("message", { length: "length", type: elementParser, }); const buffer = factory([ 0x02, 0x00, 0x02, 0x00, 0x50, 0xd2, 0x04, 0x51, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x02, valueLength: 0x02, message: [ { key: 0x50, data: { value: [0xd2, 0x04] } }, { key: "Q", data: { value: [0xd3, 0x04] } }, ], }); }); it("should be able to access to index context variable when using length on named parser", () => { // @ts-ignore const elementParser = new Parser() .uint8("key", { formatter: function (this: any, item: number) { return this.$index % 2 === 0 ? item : String.fromCharCode(item); }, }) .nest("data", { type: new Parser().array("value", { type: "uint8", length: "$root.valueLength", }), }) .namely("ArrayLengthIndexTest"); const parser = Parser.start() .useContextVars() .uint16le("length") .uint16le("valueLength") .array("message", { length: "length", type: "ArrayLengthIndexTest", }); const buffer = factory([ 0x02, 0x00, 0x02, 0x00, 0x50, 0xd2, 0x04, 0x51, 0xd3, 0x04, ]); deepStrictEqual(parser.parse(buffer), { length: 0x02, valueLength: 0x02, message: [ { key: 0x50, data: { value: [0xd2, 0x04] } }, { key: "Q", data: { value: [0xd3, 0x04] } }, ], }); }); }); describe("Choice parser", () => { it("should parse choices of primitive types", () => { const parser = Parser.start() .uint8("tag1") .choice("data1", { tag: "tag1", choices: { 0: "int32le", 1: "int16le", }, }) .uint8("tag2") .choice("data2", { tag: "tag2", choices: { 0: "int32le", 1: "int16le", }, }); const buffer = factory([0x0, 0x4e, 0x61, 0xbc, 0x00, 0x01, 0xd2, 0x04]); deepStrictEqual(parser.parse(buffer), { tag1: 0, data1: 12345678, tag2: 1, data2: 1234, }); }); it("should parse default choice", () => { const parser = Parser.start() .uint8("tag") 
.choice("data", { tag: "tag", choices: { 0: "int32le", 1: "int16le", }, defaultChoice: "uint8", }) .int32le("test"); const buffer = factory([0x03, 0xff, 0x2f, 0xcb, 0x04, 0x0]); deepStrictEqual(parser.parse(buffer), { tag: 3, data: 0xff, test: 314159, }); }); it("should parse choices of user defined types", () => { const parser = Parser.start() .uint8("tag") .choice("data", { tag: "tag", choices: { 1: Parser.start() .uint8("length") .string("message", { length: "length" }), 3: Parser.start().int32le("number"), }, }); const buffer1 = factory([ 0x1, 0xc, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, ]); deepStrictEqual(parser.parse(buffer1), { tag: 1, data: { length: 12, message: "hello, world", }, }); const buffer2 = factory([0x03, 0x4e, 0x61, 0xbc, 0x00]); deepStrictEqual(parser.parse(buffer2), { tag: 3, data: { number: 12345678, }, }); }); it("should be able to go into recursion", () => { const stop = Parser.start(); const parser = Parser.start() .namely("self") .uint8("type") .choice("data", { tag: "type", choices: { 0: stop, 1: "self", }, }); const buffer = factory([1, 1, 1, 0]); deepStrictEqual(parser.parse(buffer), { type: 1, data: { type: 1, data: { type: 1, data: { type: 0, data: {} }, }, }, }); }); it("should be able to go into recursion with simple nesting", () => { const stop = Parser.start(); const parser = Parser.start() .namely("self") .uint8("type") .choice("data", { tag: "type", choices: { 0: stop, 1: "self", 2: Parser.start() .nest("left", { type: "self" }) .nest("right", { type: stop }), }, }); const buffer = factory([2, /* left */ 1, 1, 0, /* right */ 0]); deepStrictEqual(parser.parse(buffer), { type: 2, data: { left: { type: 1, data: { type: 1, data: { type: 0, data: {} } }, }, right: {}, }, }); }); it("should be able to refer to other parsers by name", () => { const parser = Parser.start().namely("self"); // @ts-ignore const stop = Parser.start().namely("stop"); // @ts-ignore const twoCells = Parser.start() 
.namely("twoCells") .nest("left", { type: "self" }) .nest("right", { type: "stop" }); parser.uint8("type").choice("data", { tag: "type", choices: { 0: "stop", 1: "self", 2: "twoCells", }, }); const buffer = factory([2, /* left */ 1, 1, 0, /* right */ 0]); deepStrictEqual(parser.parse(buffer), { type: 2, data: { left: { type: 1, data: { type: 1, data: { type: 0, data: {} } }, }, right: {}, }, }); }); it("should be able to refer to other parsers both directly and by name", () => { const parser = Parser.start().namely("self"); const stop = Parser.start(); const twoCells = Parser.start() .nest("left", { type: "self" }) .nest("right", { type: stop }); parser.uint8("type").choice("data", { tag: "type", choices: { 0: stop, 1: "self", 2: twoCells, }, }); const buffer = factory([2, /* left */ 1, 1, 0, /* right */ 0]); deepStrictEqual(parser.parse(buffer), { type: 2, data: { left: { type: 1, data: { type: 1, data: { type: 0, data: {} } }, }, right: {}, }, }); }); it("should be able to go into recursion with complex nesting", () => { const stop = Parser.start(); const parser = Parser.start() .namely("self") .uint8("type") .choice("data", { tag: "type", choices: { 0: stop, 1: "self", 2: Parser.start() .nest("left", { type: "self" }) .nest("right", { type: "self" }), 3: Parser.start() .nest("one", { type: "self" }) .nest("two", { type: "self" }) .nest("three", { type: "self" }), }, }); // 2 // / \ // 3 1 // / | \ \ // 1 0 2 0 // / / \ // 0 1 0 // / // 0 // prettier-ignore const buffer = factory([ 2, /* left -> */ 3, /* one -> */ 1, /* -> */ 0, /* two -> */ 0, /* three -> */ 2, /* left -> */ 1, /* -> */ 0, /* right -> */ 0, /* right -> */ 1, /* -> */ 0, ]); deepStrictEqual(parser.parse(buffer), { type: 2, data: { left: { type: 3, data: { one: { type: 1, data: { type: 0, data: {} } }, two: { type: 0, data: {} }, three: { type: 2, data: { left: { type: 1, data: { type: 0, data: {} } }, right: { type: 0, data: {} }, }, }, }, }, right: { type: 1, data: { type: 0, data: {} }, }, }, 
}); }); it("should be able to 'flatten' choices when using null varName", () => { const parser = Parser.start() .uint8("tag") .choice({ tag: "tag", choices: { 1: Parser.start() .uint8("length") .string("message", { length: "length" }), 3: Parser.start().int32le("number"), }, }); const buffer1 = factory([ 0x1, 0xc, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, ]); deepStrictEqual(parser.parse(buffer1), { tag: 1, length: 12, message: "hello, world", }); const buffer2 = factory([0x03, 0x4e, 0x61, 0xbc, 0x00]); deepStrictEqual(parser.parse(buffer2), { tag: 3, number: 12345678, }); }); it("should be able to 'flatten' choices when omitting varName parameter", () => { const parser = Parser.start() .uint8("tag") .choice({ tag: "tag", choices: { 1: Parser.start() .uint8("length") .string("message", { length: "length" }), 3: Parser.start().int32le("number"), }, }); const buffer1 = factory([ 0x1, 0xc, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, ]); deepStrictEqual(parser.parse(buffer1), { tag: 1, length: 12, message: "hello, world", }); const buffer2 = factory([0x03, 0x4e, 0x61, 0xbc, 0x00]); deepStrictEqual(parser.parse(buffer2), { tag: 3, number: 12345678, }); }); it("should be able to use function as the choice selector", () => { const parser = Parser.start() .string("selector", { length: 4 }) .choice({ tag: function (this: { selector: string }) { return parseInt(this.selector, 2); // string base 2 to integer decimal }, choices: { 2: Parser.start() .uint8("length") .string("message", { length: "length" }), 7: Parser.start().int32le("number"), }, }); const buffer1 = factory([ 48, 48, 49, 48, 0xc, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, ]); deepStrictEqual(parser.parse(buffer1), { selector: "0010", // -> choice 2 length: 12, message: "hello, world", }); const buffer2 = factory([48, 49, 49, 49, 0x4e, 0x61, 0xbc, 0x00]); deepStrictEqual(parser.parse(buffer2), { selector: "0111", // -> choice 
7 number: 12345678, }); }); it("should be able to use parsing context", () => { const parser = Parser.start() .useContextVars() .uint8("tag") .uint8("items") .choice("data", { tag: "tag", choices: { 1: Parser.start() .uint8("length") .string("message", { length: "length" }) .array("value", { type: "uint8", length: "$parent.items", }), 3: Parser.start().int32le("number"), }, }); const buffer1 = factory([ 0x1, 0x2, 0xc, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, 0x01, 0x02, 0x02, 0x02, ]); deepStrictEqual(parser.parse(buffer1), { tag: 1, items: 2, data: { length: 12, message: "hello, world", value: [0x01, 0x02], }, }); const buffer2 = factory([0x03, 0x0, 0x4e, 0x61, 0xbc, 0x00]); deepStrictEqual(parser.parse(buffer2), { tag: 3, items: 0, data: { number: 12345678, }, }); }); }); describe("Nest parser", () => { it("should parse nested parsers", () => { const nameParser = new Parser() .string("firstName", { zeroTerminated: true, }) .string("lastName", { zeroTerminated: true, }); const infoParser = new Parser().uint8("age"); const personParser = new Parser() .nest("name", { type: nameParser, }) .nest("info", { type: infoParser, }); const name = Array.from(new TextEncoder().encode("John\0Doe\0")); const age = [0x20]; const buffer = [...name, ...age]; deepStrictEqual(personParser.parse(factory(buffer)), { name: { firstName: "John", lastName: "Doe", }, info: { age: 0x20, }, }); }); it("should format parsed nested parser", () => { const nameParser = new Parser() .string("firstName", { zeroTerminated: true, }) .string("lastName", { zeroTerminated: true, }); const personParser = new Parser().nest("name", { type: nameParser, formatter: (name: { firstName: string; lastName: string }) => name.firstName + " " + name.lastName, }); const buffer = factory(new TextEncoder().encode("John\0Doe\0")); deepStrictEqual(personParser.parse(buffer), { name: "John Doe", }); }); it("should 'flatten' output when using null varName", () => { const parser = new Parser() 
.string("s1", { zeroTerminated: true }) .nest({ type: new Parser().string("s2", { zeroTerminated: true }), }); const buf = factory(new TextEncoder().encode("foo\0bar\0")); deepStrictEqual(parser.parse(buf), { s1: "foo", s2: "bar" }); }); it("should 'flatten' output when omitting varName", () => { const parser = new Parser() .string("s1", { zeroTerminated: true }) .nest({ type: new Parser().string("s2", { zeroTerminated: true }), }); const buf = factory(new TextEncoder().encode("foo\0bar\0")); deepStrictEqual(parser.parse(buf), { s1: "foo", s2: "bar" }); }); it("should be able to use parsing context", () => { const parser = Parser.start() .useContextVars() .uint8("items") .nest("data", { type: Parser.start() .uint8("length") .string("message", { length: "length" }) .array("value", { type: "uint8", length: "$parent.items", }), }); const buffer = factory([ 0x2, 0xc, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, 0x01, 0x02, 0x02, 0x02, ]); deepStrictEqual(parser.parse(buffer), { items: 2, data: { length: 12, message: "hello, world", value: [0x01, 0x02], }, }); }); }); describe("Constructors", () => { it("should create a custom object type", () => { class Person { name: string = ""; } const parser = Parser.start().create(Person).string("name", { zeroTerminated: true, }); const buffer = factory(new TextEncoder().encode("John Doe\0")); const person = parser.parse(buffer); ok(person instanceof Person); deepStrictEqual(person.name, "John Doe"); }); }); describe("Pointer parser", () => { it("should move pointer to specified offset", () => { const parser = Parser.start().pointer("x", { type: "uint8", offset: 2, }); const buf = factory([0x1, 0x2, 0x3, 0x4, 0x5]); deepStrictEqual(parser.parse(buf), { x: 3 }); }); it("should restore pointer to original position", () => { const parser = Parser.start() .pointer("x", { type: "uint8", offset: 2 }) .uint16be("y"); const buf = factory([0x1, 0x2, 0x3, 0x4, 0x5]); deepStrictEqual(parser.parse(buf), { x: 0x3, y: 
0x0102 });
});

it("should work with child parser", () => {
  const parser = Parser.start()
    .uint32le("x")
    .pointer("y", {
      type: Parser.start().string("s", { zeroTerminated: true }),
      offset: 4,
    });
  const buf = factory([
    1, 2, 3, 4,
    ...Array.from(new TextEncoder().encode("hello")),
    0, 6,
  ]);
  deepStrictEqual(parser.parse(buf), {
    x: 0x04030201,
    y: { s: "hello" },
  });
});

// FIX: this test's callback was empty (`() => {}`) and its body was placed
// directly inside the describe callback, so the assertions ran at suite
// registration time and the test itself verified nothing. The statements
// now live inside the `it` callback where they belong.
it("should pass variable context to child parser", () => {
  const parser = Parser.start()
    .uint16be("len")
    .pointer("child", {
      offset: 4,
      type: Parser.start().array("a", {
        type: "uint8",
        length: (vars: { len: number }) => vars.len,
      }),
    });
  const buf = factory([0, 6, 0, 0, 1, 2, 3, 4, 5, 6]);
  deepStrictEqual(parser.parse(buf), {
    len: 6,
    child: { a: [1, 2, 3, 4, 5, 6] },
  });
});
});

describe("SaveOffset", () => {
  it("should save the offset", () => {
    const buff = factory([0x01, 0x00, 0x02]);
    const parser = Parser.start()
      .int8("a")
      .int16("b")
      .saveOffset("bytesRead");
    deepStrictEqual(parser.parse(buff), {
      a: 1,
      b: 2,
      bytesRead: 3,
    });
  });

  it("should save the offset if not at end", () => {
    const buff = factory([0x01, 0x00, 0x02]);
    const parser = Parser.start()
      .int8("a")
      .saveOffset("bytesRead")
      .int16("b");
    deepStrictEqual(parser.parse(buff), {
      a: 1,
      b: 2,
      bytesRead: 1,
    });
  });

  it("should save the offset with a dynamic parser", () => {
    const buff = factory([0x74, 0x65, 0x73, 0x74, 0x00]);
    const parser = Parser.start()
      .string("name", { zeroTerminated: true })
      .saveOffset("bytesRead");
    deepStrictEqual(parser.parse(buff), {
      name: "test",
      bytesRead: 5,
    });
  });
});

describe("Utilities", () => {
  it("should count size for fixed size structs", () => {
    const parser = Parser.start()
      .int8("a")
      .int32le("b")
      .string("msg", { length: 10 })
      .seek(2)
      .array("data", {
        length: 3,
        type: "int8",
      })
      .buffer("raw", { length: 8 });
    deepStrictEqual(parser.sizeOf(), 1 + 4 + 10 + 2 + 3 + 8);
  });

  it("should assert parsed values", () => {
    const parser1 = Parser.start().string("msg", {
      encoding: "utf8",
      zeroTerminated: true,
      assert:
"hello, world", }); const buffer1 = hexToBuf("68656c6c6f2c20776f726c6400"); doesNotThrow(() => { parser1.parse(buffer1); }); const buffer2 = hexToBuf("68656c6c6f2c206a7300"); throws(() => { parser1.parse(buffer2); }); let parser2 = new Parser() .int16le("a") .int16le("b") .int16le("c", { assert: function ( this: { a: number; b: number }, x: number | string ) { return this.a + this.b === x; }, }); const buffer3 = hexToBuf("d2042e16001b"); doesNotThrow(() => { parser2.parse(buffer3); }); const buffer4 = hexToBuf("2e16001bd204"); throws(() => { parser2.parse(buffer4); }); }); }); describe("Parse other fields after bit", () => { it("Parse uint8", () => { const buffer = factory([0, 1, 0, 4]); const parser1 = Parser.start().bit17("a").uint8("b"); deepStrictEqual(parser1.parse(buffer), { a: 1 << 1, b: 4, }); const parser2 = Parser.start().bit18("a").uint8("b"); deepStrictEqual(parser2.parse(buffer), { a: 1 << 2, b: 4, }); const parser3 = Parser.start().bit19("a").uint8("b"); deepStrictEqual(parser3.parse(buffer), { a: 1 << 3, b: 4, }); const parser4 = Parser.start().bit20("a").uint8("b"); deepStrictEqual(parser4.parse(buffer), { a: 1 << 4, b: 4, }); const parser5 = Parser.start().bit21("a").uint8("b"); deepStrictEqual(parser5.parse(buffer), { a: 1 << 5, b: 4, }); const parser6 = Parser.start().bit22("a").uint8("b"); deepStrictEqual(parser6.parse(buffer), { a: 1 << 6, b: 4, }); const parser7 = Parser.start().bit23("a").uint8("b"); deepStrictEqual(parser7.parse(buffer), { a: 1 << 7, b: 4, }); const parser8 = Parser.start().bit24("a").uint8("b"); deepStrictEqual(parser8.parse(buffer), { a: 1 << 8, b: 4, }); }); }); describe("Wrapper", () => { it("should parse deflated then inflated data", () => { const text = "This is compressible text.\0"; const bufferBefore = factory([ 12, ...Array.from(new TextEncoder().encode(text)), 34, ]); // Skip if we are testing with Uint8Array since the zlib polyfill does // not support Uint8Array if (bufferBefore instanceof Uint8Array) return; 
const compressedData = factory(deflateRawSync(bufferBefore)); const buffer = factory([ ...Array.from( new Uint8Array(new Uint32Array([compressedData.length]).buffer) ), ...Array.from(compressedData), 42, ]); const bufferParser = Parser.start() .uint8("a") .string("b", { zeroTerminated: true, }) .uint8("c"); const mainParser = Parser.start() .uint32le("length") .wrapped("compressedData", { length: "length", wrapper: (x: Uint8Array) => inflateRawSync(x), type: bufferParser, }) .uint8("answer"); deepStrictEqual(mainParser.parse(buffer), { length: compressedData.length, compressedData: { a: 12, b: text.substring(0, text.length - 1), c: 34, }, answer: 42, }); }); }); }); } compositeParserTests("Buffer", (arr) => Buffer.from(arr)); compositeParserTests("Uint8Array", (arr) => Uint8Array.from(arr));
the_stack
import { call, put, select, take, takeEvery } from 'redux-saga/effects' import { WSS_ENABLED, WITH_FIXED_COLLECTIONS, getAssetBundlesBaseUrl } from 'config' import defaultLogger from 'shared/logger' import { RENDERER_INITIALIZED } from 'shared/renderer/types' import { catalogLoaded, CATALOG_LOADED, WearablesFailure, wearablesFailure, WearablesRequest, WearablesSuccess, wearablesSuccess, WEARABLES_FAILURE, WEARABLES_REQUEST, WEARABLES_SUCCESS } from './actions' import { baseCatalogsLoaded, getPlatformCatalog } from './selectors' import { WearablesRequestFilters, WearableV2, BodyShapeRepresentationV2, PartialWearableV2, UnpublishedWearable } from './types' import { WORLD_EXPLORER } from '../../config/index' import { getResourcesURL } from '../location' import { RendererInterfaces } from 'unity-interface/dcl' import { StoreContainer } from '../store/rootTypes' import { ensureRealmInitialized } from 'shared/dao/sagas' import { ensureRenderer } from 'shared/renderer/sagas' import { CatalystClient, OwnedWearablesWithDefinition } from 'dcl-catalyst-client' import { fetchJson } from 'dcl-catalyst-commons' import { getCatalystServer, getFetchContentServer } from 'shared/dao/selectors' import { BASE_BUILDER_SERVER_URL, BASE_DOWNLOAD_URL, BuilderServerAPIManager } from 'shared/apis/SceneStateStorageController/BuilderServerAPIManager' import { getCurrentIdentity } from 'shared/session/selectors' import { userAuthentified } from 'shared/session' declare const globalThis: Window & RendererInterfaces & StoreContainer export const BASE_AVATARS_COLLECTION_ID = 'urn:decentraland:off-chain:base-avatars' export const WRONG_FILTERS_ERROR = `You must set one and only one filter for V1. Also, the only collection id allowed is '${BASE_AVATARS_COLLECTION_ID}'` /** * This saga handles wearable definition fetching. * * When the renderer detects a new wearable, but it doesn't know its definition, then it will create a catalog request. 
* * This request will include the ids of the unknown wearables. We will then find the appropriate definition, and return it to the renderer. * */ export function* catalogsSaga(): any { yield takeEvery(RENDERER_INITIALIZED, initialLoad) yield takeEvery(WEARABLES_REQUEST, handleWearablesRequest) yield takeEvery(WEARABLES_SUCCESS, handleWearablesSuccess) yield takeEvery(WEARABLES_FAILURE, handleWearablesFailure) } function* initialLoad() { yield call(ensureRealmInitialized) if (!WORLD_EXPLORER) { let baseCatalog = [] try { const catalogPath = '/default-profile/basecatalog.json' const response = yield fetch(getResourcesURL(catalogPath)) baseCatalog = yield response.json() if (WSS_ENABLED) { for (let item of baseCatalog) { item.baseUrl = `http://localhost:8000${item.baseUrl}` } } } catch (e) { defaultLogger.warn(`Could not load base catalog`) } yield put(catalogLoaded('base-avatars', baseCatalog)) yield put(catalogLoaded('base-exclusive', [])) } } export function* handleWearablesRequest(action: WearablesRequest) { const { filters, context } = action.payload const valid = areFiltersValid(filters) if (valid) { try { const shouldUseLocalCatalog = WORLD_EXPLORER const downloadUrl = yield select(getFetchContentServer) const response: PartialWearableV2[] = shouldUseLocalCatalog ? yield call(fetchWearablesFromCatalyst, filters) : yield call(fetchWearablesFromLocalCatalog, filters) const assetBundlesBaseUrl: string = getAssetBundlesBaseUrl() + '/' const v2Wearables: WearableV2[] = response.map((wearable) => ({ ...wearable, baseUrl: wearable.baseUrl ?? 
downloadUrl + '/contents/', baseUrlBundles: assetBundlesBaseUrl })) yield put(wearablesSuccess(v2Wearables, context)) } catch (error) { yield put(wearablesFailure(context, error.message)) } } else { yield put(wearablesFailure(context, WRONG_FILTERS_ERROR)) } } function* fetchWearablesFromCatalyst(filters: WearablesRequestFilters) { const catalystUrl = yield select(getCatalystServer) const client: CatalystClient = new CatalystClient(catalystUrl, 'EXPLORER') const result: any[] = [] if (filters.ownedByUser) { if (WITH_FIXED_COLLECTIONS) { // The WITH_FIXED_COLLECTIONS config can only be used in zone. However, we want to be able to use prod collections for testing. // That's why we are also querying a prod catalyst for the given collections const collectionIds: string[] = WITH_FIXED_COLLECTIONS.split(',') // Fetch published collections const urnCollections = collectionIds.filter((collectionId) => collectionId.startsWith('urn')) if (urnCollections.length > 0) { const orgClient: CatalystClient = yield CatalystClient.connectedToCatalystIn('mainnet', 'EXPLORER') const zoneWearables = yield client.fetchWearables({ collectionIds: urnCollections }) const orgWearables = yield orgClient.fetchWearables({ collectionIds: urnCollections }) result.push(...zoneWearables, ...orgWearables) } // Fetch unpublished collections from builder server const uuidCollections = collectionIds.filter((collectionId) => !collectionId.startsWith('urn')) if (uuidCollections.length > 0) { yield userAuthentified() const identity = yield select(getCurrentIdentity) for (const collectionUuid of uuidCollections) { const path = `collections/${collectionUuid}/items` const headers = BuilderServerAPIManager.authorize(identity, 'get', `/${path}`) const collection: { data: UnpublishedWearable[] } = yield fetchJson(`${BASE_BUILDER_SERVER_URL}${path}`, { headers }) const v2Wearables = collection.data.map((wearable) => mapUnpublishedWearableIntoCatalystWearable(wearable)) result.push(...v2Wearables) } } } else { 
const ownedWearables: OwnedWearablesWithDefinition[] = yield call( fetchOwnedWearables, filters.ownedByUser, client ) for (const { amount, definition } of ownedWearables) { if (definition) { for (let i = 0; i < amount; i++) { result.push(definition) } } } } } else { const wearables = yield call(fetchWearablesByFilters, filters, client) result.push(...wearables) } return result.map(mapCatalystWearableIntoV2) } function fetchOwnedWearables(ethAddress: string, client: CatalystClient) { return client.fetchOwnedWearables(ethAddress, true) } async function fetchWearablesByFilters(filters: WearablesRequestFilters, client: CatalystClient) { return client.fetchWearables(filters) } /** * We are now mapping wearables that were fetched from the builder server into the same format that is returned by the catalysts */ function mapUnpublishedWearableIntoCatalystWearable(wearable: UnpublishedWearable): any { const { id, rarity, name, thumbnail, description, data, contents: contentToHash } = wearable return { id, rarity, i18n: [{ code: 'en', text: name }], thumbnail: `${BASE_DOWNLOAD_URL}/${contentToHash[thumbnail]}`, description, data: { ...data, representations: data.representations.map(({ contents, ...other }) => ({ ...other, contents: contents.map((key) => ({ key, url: `${BASE_DOWNLOAD_URL}/${contentToHash[key]}` })) })) } } } function mapCatalystRepresentationIntoV2(representation: any): BodyShapeRepresentationV2 { const { contents, ...other } = representation const newContents = contents.map(({ key, url }: { key: string; url: string }) => ({ key, hash: url.substring(url.lastIndexOf('/') + 1) })) return { ...other, contents: newContents } } function mapCatalystWearableIntoV2(v2Wearable: any): PartialWearableV2 { const { id, data, rarity, i18n, thumbnail, description } = v2Wearable const { category, tags, hides, replaces, representations } = data const newRepresentations: BodyShapeRepresentationV2[] = representations.map(mapCatalystRepresentationIntoV2) const index = 
thumbnail.lastIndexOf('/') const newThumbnail = thumbnail.substring(index + 1) const baseUrl = thumbnail.substring(0, index + 1) return { id, rarity, i18n, thumbnail: newThumbnail, description, data: { category, tags, hides, replaces, representations: newRepresentations }, baseUrl } } function* fetchWearablesFromLocalCatalog(filters: WearablesRequestFilters) { yield call(ensureBaseCatalogs) const platformCatalog = yield select(getPlatformCatalog) let response: PartialWearableV2[] if (filters.wearableIds) { // Filtering by ids response = filters.wearableIds.map((wearableId) => platformCatalog[wearableId]).filter((wearable) => !!wearable) } else if (filters.collectionIds) { // We assume that the only collection id used is base-avatars response = Object.values(platformCatalog) } else { throw new Error('Unknown filter') } return response } export function* handleWearablesSuccess(action: WearablesSuccess) { const { wearables, context } = action.payload yield call(ensureRenderer) yield call(sendWearablesCatalog, wearables, context) } export function* handleWearablesFailure(action: WearablesFailure) { const { context, error } = action.payload defaultLogger.error(`Failed to fetch wearables for context '${context}'`, error) yield call(ensureRenderer) yield call(informRequestFailure, error, context) } function areFiltersValid(filters: WearablesRequestFilters) { let filtersSet = 0 let ok = true if (filters.collectionIds) { filtersSet += 1 if (filters.collectionIds.some((id) => id !== BASE_AVATARS_COLLECTION_ID)) { ok = false } } if (filters.ownedByUser) { filtersSet += 1 } if (filters.wearableIds) { filtersSet += 1 } return filtersSet === 1 && ok } export function informRequestFailure(error: string, context: string | undefined) { globalThis.unityInterface.WearablesRequestFailed(error, context) } export function sendWearablesCatalog(wearables: WearableV2[], context: string | undefined) { globalThis.unityInterface.AddWearablesToCatalog(wearables, context) } export function* 
ensureBaseCatalogs() { while (!WORLD_EXPLORER && !(yield select(baseCatalogsLoaded))) { yield take(CATALOG_LOADED) } }
the_stack
import type { TVec2, TMat3, TMat4 } from '@oito/types';

/**
 * 3x3 matrix backed directly by a Float32Array of 9 elements
 * (indices 0-2 = first column, 3-5 = second, 6-8 = third — gl-matrix layout).
 * All mutating methods return `this` so calls can be chained.
 */
class Mat3 extends Float32Array{
    //#region STATIC VALUES
    /** Size of one Mat3 in bytes (9 floats). */
    static BYTESIZE = 9 * Float32Array.BYTES_PER_ELEMENT;
    //#endregion ////////////////////////////////////////////////////////

    //#region CONSTRUCTORS
    /** Creates an identity matrix. */
    constructor(){
        super(9);
        this[0] = this[4] = this[8] = 1;
    }
    //#endregion ////////////////////////////////////////////////////////

    //#region GETTERS / SETTERS

    /** Copy another matrix's data to this one, reading 9 floats starting at `offset`. */
    copy( mat : TMat3, offset=0 ) : this{
        for( let i=0; i < 9; i++ ) this[ i ] = mat[ offset + i ];
        return this;
    }

    /** Reset this matrix to the identity. */
    identity(): this {
        this[0] = 1; this[1] = 0; this[2] = 0;
        this[3] = 0; this[4] = 1; this[5] = 0;
        this[6] = 0; this[7] = 0; this[8] = 1;
        return this;
    }

    /** Determinant via cofactor expansion along the first row/column. */
    determinant() : number {
        const a00 = this[0], a01 = this[1], a02 = this[2];
        const a10 = this[3], a11 = this[4], a12 = this[5];
        const a20 = this[6], a21 = this[7], a22 = this[8];
        return (
            a00 * ( a22 * a11 - a12 * a21) +
            a01 * (-a22 * a10 + a12 * a20) +
            a02 * ( a21 * a10 - a11 * a20)
        );
    }

    /** Frobenius norm of a Matrix (sqrt of the sum of squared components). */
    frob() : number{
        return Math.hypot( this[0], this[1], this[2], this[3], this[4], this[5], this[6], this[7], this[8] );
    }
    //#endregion ////////////////////////////////////////////////////////

    //#region FROM SETTERS

    /** Build a pure 2D translation matrix. */
    fromTranslation( v: TVec2 ): this{
        this[0] = 1;    this[1] = 0;    this[2] = 0;
        this[3] = 0;    this[4] = 1;    this[5] = 0;
        this[6] = v[0]; this[7] = v[1]; this[8] = 1;
        return this;
    }

    /** Build a pure 2D rotation matrix from an angle in radians. */
    fromRotation( rad: number ) : this{
        const s = Math.sin( rad ), c = Math.cos( rad );
        this[0] = c;  this[1] = s; this[2] = 0;
        this[3] = -s; this[4] = c; this[5] = 0;
        this[6] = 0;  this[7] = 0; this[8] = 1;
        return this;
    }

    /** Build a pure 2D scaling matrix. */
    fromScaling( v: TVec2 ) : this{
        this[0] = v[ 0 ]; this[1] = 0;      this[2] = 0;
        this[3] = 0;      this[4] = v[ 1 ]; this[5] = 0;
        this[6] = 0;      this[7] = 0;      this[8] = 1;
        return this;
    }

    /** Compose a rotation (radians), translation and scale into one matrix. */
    fromRotTranScale( rad: number, tran: TVec2, scl: TVec2 ) : this{
        const s = Math.sin(rad), c = Math.cos(rad), sx = scl[ 0 ], sy = scl[ 1 ];
        this[0] = c * sx;  this[1] = s * sx;  this[2] = 0;
        this[3] = -s * sy; this[4] = c * sy;  this[5] = 0;
        this[6] = tran[0]; this[7] = tran[1]; this[8] = 1;
        return this;
    }

    /** Calculates a 3x3 normal matrix (transpose inverse) from the 4x4 matrix */
    fromMat4Normal( a: TMat4 ) : this{
        const a00 = a[0],  a01 = a[1],  a02 = a[2],  a03 = a[3];
        const a10 = a[4],  a11 = a[5],  a12 = a[6],  a13 = a[7];
        const a20 = a[8],  a21 = a[9],  a22 = a[10], a23 = a[11];
        const a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];

        // 2x2 sub-determinants reused by the cofactor expansion below
        const b00 = a00 * a11 - a01 * a10;
        const b01 = a00 * a12 - a02 * a10;
        const b02 = a00 * a13 - a03 * a10;
        const b03 = a01 * a12 - a02 * a11;
        const b04 = a01 * a13 - a03 * a11;
        const b05 = a02 * a13 - a03 * a12;
        const b06 = a20 * a31 - a21 * a30;
        const b07 = a20 * a32 - a22 * a30;
        const b08 = a20 * a33 - a23 * a30;
        const b09 = a21 * a32 - a22 * a31;
        const b10 = a21 * a33 - a23 * a31;
        const b11 = a22 * a33 - a23 * a32;

        // Calculate the determinant
        let det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;
        if (!det) return this; // singular matrix: leave contents unchanged

        det = 1.0 / det;
        this[0] = (a11 * b11 - a12 * b10 + a13 * b09) * det;
        this[1] = (a12 * b08 - a10 * b11 - a13 * b07) * det;
        this[2] = (a10 * b10 - a11 * b08 + a13 * b06) * det;
        this[3] = (a02 * b10 - a01 * b11 - a03 * b09) * det;
        this[4] = (a00 * b11 - a02 * b08 + a03 * b07) * det;
        this[5] = (a01 * b08 - a00 * b10 - a03 * b06) * det;
        this[6] = (a31 * b05 - a32 * b04 + a33 * b03) * det;
        this[7] = (a32 * b02 - a30 * b05 - a33 * b01) * det;
        this[8] = (a30 * b04 - a31 * b02 + a33 * b00) * det;
        return this;
    }

    /** Generates a 2D projection matrix with the given bounds */
    fromProjection( width: number, height: number) : this{
        this[0] = 2 / width; this[1] = 0;           this[2] = 0;
        this[3] = 0;         this[4] = -2 / height; this[5] = 0;
        this[6] = -1;        this[7] = 1;           this[8] = 1;
        return this;
    }
    //#endregion ////////////////////////////////////////////////////////

    //#region FROM OPS

    /** this = a + b, component-wise. */
    fromAdd( a: TMat3, b: TMat3) : this{
        this[0] = a[0] + b[0]; this[1] = a[1] + b[1]; this[2] = a[2] + b[2];
        this[3] = a[3] + b[3]; this[4] = a[4] + b[4]; this[5] = a[5] + b[5];
        this[6] = a[6] + b[6]; this[7] = a[7] + b[7]; this[8] = a[8] + b[8];
        return this;
    }

    /** this = a - b, component-wise. */
    fromSub( a: TMat3, b: TMat3) : this{
        this[0] = a[0] - b[0]; this[1] = a[1] - b[1]; this[2] = a[2] - b[2];
        this[3] = a[3] - b[3]; this[4] = a[4] - b[4]; this[5] = a[5] - b[5];
        this[6] = a[6] - b[6]; this[7] = a[7] - b[7]; this[8] = a[8] - b[8];
        return this;
    }

    /** this = a * b, scalar multiply every component. */
    fromScalar( a: TMat3, b: number) : this{
        this[0] = a[0] * b; this[1] = a[1] * b; this[2] = a[2] * b;
        this[3] = a[3] * b; this[4] = a[4] * b; this[5] = a[5] * b;
        this[6] = a[6] * b; this[7] = a[7] * b; this[8] = a[8] * b;
        return this;
    }

    /** this = a * b (matrix product). */
    fromMul( a: TMat3, b: TMat3 ) : this{
        const a00 = a[0], a01 = a[1], a02 = a[2];
        const a10 = a[3], a11 = a[4], a12 = a[5];
        const a20 = a[6], a21 = a[7], a22 = a[8];
        const b00 = b[0], b01 = b[1], b02 = b[2];
        const b10 = b[3], b11 = b[4], b12 = b[5];
        const b20 = b[6], b21 = b[7], b22 = b[8];

        this[0] = b00 * a00 + b01 * a10 + b02 * a20;
        this[1] = b00 * a01 + b01 * a11 + b02 * a21;
        this[2] = b00 * a02 + b01 * a12 + b02 * a22;
        this[3] = b10 * a00 + b11 * a10 + b12 * a20;
        this[4] = b10 * a01 + b11 * a11 + b12 * a21;
        this[5] = b10 * a02 + b11 * a12 + b12 * a22;
        this[6] = b20 * a00 + b21 * a10 + b22 * a20;
        this[7] = b20 * a01 + b21 * a11 + b22 * a21;
        this[8] = b20 * a02 + b21 * a12 + b22 * a22;
        return this;
    }

    /** this = inverse(a); leaves this unchanged if a is singular (det == 0). */
    fromInvert( a: TMat3 ) : this {
        const a00 = a[0], a01 = a[1], a02 = a[2];
        const a10 = a[3], a11 = a[4], a12 = a[5];
        const a20 = a[6], a21 = a[7], a22 = a[8];
        const b01 = a22 * a11 - a12 * a21;
        const b11 = -a22 * a10 + a12 * a20;
        const b21 = a21 * a10 - a11 * a20;

        // Calculate the determinant
        let det = a00 * b01 + a01 * b11 + a02 * b21;
        if( !det ) return this;
        det = 1.0 / det;

        this[0] = b01 * det;
        this[1] = (-a22 * a01 + a02 * a21) * det;
        this[2] = (a12 * a01 - a02 * a11) * det;
        this[3] = b11 * det;
        this[4] = (a22 * a00 - a02 * a20) * det;
        this[5] = (-a12 * a00 + a02 * a10) * det;
        this[6] = b21 * det;
        this[7] = (-a21 * a00 + a01 * a20) * det;
        this[8] = (a11 * a00 - a01 * a10) * det;
        return this;
    }

    /** this = transpose(a). Safe because each target cell is written from `a`. */
    fromTranspose( a: TMat3 ) : this{
        this[0] = a[0]; this[1] = a[3]; this[2] = a[6];
        this[3] = a[1]; this[4] = a[4]; this[5] = a[7];
        this[6] = a[2]; this[7] = a[5]; this[8] = a[8];
        return this;
    }

    /** this = adjugate(a) (classical adjoint: transpose of the cofactor matrix). */
    fromAdjoint( a: TMat3 ) : this{
        const a00 = a[0], a01 = a[1], a02 = a[2];
        const a10 = a[3], a11 = a[4], a12 = a[5];
        const a20 = a[6], a21 = a[7], a22 = a[8];
        this[0] = a11 * a22 - a12 * a21;
        this[1] = a02 * a21 - a01 * a22;
        this[2] = a01 * a12 - a02 * a11;
        this[3] = a12 * a20 - a10 * a22;
        this[4] = a00 * a22 - a02 * a20;
        this[5] = a02 * a10 - a00 * a12;
        this[6] = a10 * a21 - a11 * a20;
        this[7] = a01 * a20 - a00 * a21;
        this[8] = a00 * a11 - a01 * a10;
        return this;
    }
    //#endregion ////////////////////////////////////////////////////////

    //#region OPERATIONS

    /** this = this * b (post-multiply in place). All inputs are cached first. */
    mul( b: TMat3 ) : this{
        const a00 = this[0], a01 = this[1], a02 = this[2];
        const a10 = this[3], a11 = this[4], a12 = this[5];
        const a20 = this[6], a21 = this[7], a22 = this[8];
        const b00 = b[0], b01 = b[1], b02 = b[2];
        const b10 = b[3], b11 = b[4], b12 = b[5];
        const b20 = b[6], b21 = b[7], b22 = b[8];

        this[0] = b00 * a00 + b01 * a10 + b02 * a20;
        this[1] = b00 * a01 + b01 * a11 + b02 * a21;
        this[2] = b00 * a02 + b01 * a12 + b02 * a22;
        this[3] = b10 * a00 + b11 * a10 + b12 * a20;
        this[4] = b10 * a01 + b11 * a11 + b12 * a21;
        this[5] = b10 * a02 + b11 * a12 + b12 * a22;
        this[6] = b20 * a00 + b21 * a10 + b22 * a20;
        this[7] = b20 * a01 + b21 * a11 + b22 * a21;
        this[8] = b20 * a02 + b21 * a12 + b22 * a22;
        return this;
    }

    /** this = a * this (pre-multiply in place). All inputs are cached first. */
    pmul( a: TMat3 ) : this{
        const a00 = a[0], a01 = a[1], a02 = a[2];
        const a10 = a[3], a11 = a[4], a12 = a[5];
        const a20 = a[6], a21 = a[7], a22 = a[8];
        const b00 = this[0], b01 = this[1], b02 = this[2];
        const b10 = this[3], b11 = this[4], b12 = this[5];
        const b20 = this[6], b21 = this[7], b22 = this[8];

        this[0] = b00 * a00 + b01 * a10 + b02 * a20;
        this[1] = b00 * a01 + b01 * a11 + b02 * a21;
        this[2] = b00 * a02 + b01 * a12 + b02 * a22;
        this[3] = b10 * a00 + b11 * a10 + b12 * a20;
        this[4] = b10 * a01 + b11 * a11 + b12 * a21;
        this[5] = b10 * a02 + b11 * a12 + b12 * a22;
        this[6] = b20 * a00 + b21 * a10 + b22 * a20;
        this[7] = b20 * a01 + b21 * a11 + b22 * a21;
        this[8] = b20 * a02 + b21 * a12 + b22 * a22;
        return this;
    }

    /** Invert this matrix in place; no-op when singular (det == 0). */
    invert() : this{
        const a00 = this[0], a01 = this[1], a02 = this[2];
        const a10 = this[3], a11 = this[4], a12 = this[5];
        const a20 = this[6], a21 = this[7], a22 = this[8];
        const b01 = a22 * a11 - a12 * a21;
        const b11 = -a22 * a10 + a12 * a20;
        const b21 = a21 * a10 - a11 * a20;

        // Calculate the determinant
        let det = a00 * b01 + a01 * b11 + a02 * b21;
        if( !det ) return this;
        det = 1.0 / det;

        this[0] = b01 * det;
        this[1] = (-a22 * a01 + a02 * a21) * det;
        this[2] = (a12 * a01 - a02 * a11) * det;
        this[3] = b11 * det;
        this[4] = (a22 * a00 - a02 * a20) * det;
        this[5] = (-a12 * a00 + a02 * a10) * det;
        this[6] = b21 * det;
        this[7] = (-a21 * a00 + a01 * a20) * det;
        this[8] = (a11 * a00 - a01 * a10) * det;
        return this;
    }

    /** Transpose in place. */
    transpose() : this{
        // If we are transposing ourselves we can skip a few steps but have to cache some values
        const a01 = this[1], a02 = this[2], a12 = this[5];
        this[1] = this[3];
        this[2] = this[6];
        this[3] = a01;
        this[5] = this[7];
        this[6] = a02;
        this[7] = a12;
        return this;
    }

    /** Post-multiply this matrix by a translation; only the last column changes. */
    translate( v: TVec2 ) : this{
        const a00 = this[0], a01 = this[1], a02 = this[2],
              a10 = this[3], a11 = this[4], a12 = this[5],
              a20 = this[6], a21 = this[7], a22 = this[8],
              x = v[0], y = v[1];
        // Rows 0 and 1 are unchanged by a translation:
        //this[0] = a00;
        //this[1] = a01;
        //this[2] = a02;
        //this[3] = a10;
        //this[4] = a11;
        //this[5] = a12;
        this[6] = x * a00 + y * a10 + a20;
        this[7] = x * a01 + y * a11 + a21;
        this[8] = x * a02 + y * a12 + a22;
        return this;
    }

    /** Post-multiply this matrix by a rotation (radians); last column unchanged. */
    rotate( rad: number) : this{
        const a00 = this[0], a01 = this[1], a02 = this[2],
              a10 = this[3], a11 = this[4], a12 = this[5],
              //a20 = this[6], a21 = this[7], a22 = this[8],
              s = Math.sin(rad), c = Math.cos(rad);

        this[0] = c * a00 + s * a10;
        this[1] = c * a01 + s * a11;
        this[2] = c * a02 + s * a12;
        this[3] = c * a10 - s * a00;
        this[4] = c * a11 - s * a01;
        this[5] = c * a12 - s * a02;
        // Translation column is unchanged by a rotation:
        //this[6] = a20;
        //this[7] = a21;
        //this[8] = a22;
        return this;
    }

    /** Post-multiply this matrix by a scale; last column unchanged. */
    scale( v: TVec2 ) : this{
        const x = v[0], y = v[1];
        this[0] = x * this[0];
        this[1] = x * this[1];
        this[2] = x * this[2];
        this[3] = y * this[3];
        this[4] = y * this[4];
        this[5] = y * this[5];
        // Translation column is unchanged by a scale:
        //this[6] = this[6];
        //this[7] = this[7];
        //this[8] = this[8];
        return this;
    }

    /** Transform a 2D point by this matrix; writes into `out` (defaults to `v`). */
    transformVec2( v: TVec2, out ?: TVec2 ) : TVec2{
        const x = v[0], y = v[1];
        out = out || v; // default: transform in place
        out[0] = this[0] * x + this[3] * y + this[6];
        out[1] = this[1] * x + this[4] * y + this[7];
        return out;
    }
    //#endregion ////////////////////////////////////////////////////////

    //#region STATIC
    /** New scaling matrix (delegates to fromScaling). */
    static fromScale( v: TVec2 ) : Mat3{ return new Mat3().fromScaling( v ); }
    /** New translation matrix. */
    static fromTranslation( v: TVec2 ) : Mat3{ return new Mat3().fromTranslation( v ); }
    /** New rotation matrix from an angle in radians. */
    static fromRotation( v: number ) : Mat3{ return new Mat3().fromRotation( v ); }
    //#endregion
}

/*
 * Kept for reference: gl-matrix style quaternion-to-mat3 conversion,
 * not currently used by this class.
 *
 * @param {mat3} out mat3 receiving operation result
 * @param {ReadonlyQuat} q Quaternion to create matrix from
 * @returns {mat3} out
export function fromQuat(out, q) {
  let x = q[0],
    y = q[1],
    z = q[2],
    w = q[3];
  let x2 = x + x;
  let y2 = y + y;
  let z2 = z + z;
  let xx = x * x2;
  let yx = y * x2;
  let yy = y * y2;
  let zx = z * x2;
  let zy = z * y2;
  let zz = z * z2;
  let wx = w * x2;
  let wy = w * y2;
  let wz = w * z2;
  out[0] = 1 - yy - zz;
  out[3] = yx - wz;
  out[6] = zx + wy;
  out[1] = yx + wz;
  out[4] = 1 - xx - zz;
  out[7] = zy - wx;
  out[2] = zx - wy;
  out[5] = zy + wx;
  out[8] = 1 - xx - yy;
  return out;
}
*/

export default Mat3;
the_stack
import { Router } from 'express'; import asyncHandler from 'express-async-handler'; import { jsonError } from '../../middleware'; import { ICrossOrganizationMembersResult, MemberSearch, Operations } from '../../business'; import { ICorporateLink } from '../../interfaces'; import { IApiRequest } from '../../middleware/apiReposAuth'; import postLinkApi from './link'; import { ErrorHelper, getProviders } from '../../transitional'; import { wrapError } from '../../utils'; const router: Router = Router(); const unsupportedApiVersions = [ '2016-12-01', ]; const extendedLinkApiVersions = [ '2019-02-01', ]; router.use(function (req: IApiRequest, res, next) { const token = req.apiKeyToken; if (!token.scopes) { return next(jsonError('The key is not authorized for specific APIs', 401)); } if (!token.hasScope('links') && !token.hasScope('link')) { return next(jsonError('The key is not authorized to use the links API', 401)); } return next(); }); router.post('/', asyncHandler(postLinkApi)); router.get('/', asyncHandler(async (req: IApiRequest, res, next) => { const { operations } = getProviders(req); const skipOrganizations = req.query.showOrganizations !== undefined && !!req.query.showOrganizations; const showTimestamps = req.query.showTimestamps !== undefined && req.query.showTimestamps === 'true'; const results = await getAllUsers(req.apiVersion, operations, skipOrganizations, showTimestamps); req.insights.trackMetric({ name: 'ApiRequestLinks', value: 1 }); res.set('Content-Type', 'application/json'); res.send(JSON.stringify(results, undefined, 2)); })); router.get('/:linkid', asyncHandler(async (req: IApiRequest, res, next) => { if (unsupportedApiVersions.includes(req.apiVersion)) { return next(jsonError('This API is not supported by the API version you are using.', 400)); } const linkid = req.params.linkid.toLowerCase(); const { operations } = getProviders(req); const skipOrganizations = req.query.showOrganizations !== undefined && !!req.query.showOrganizations; const 
showTimestamps = req.query.showTimestamps !== undefined && req.query.showTimestamps === 'true'; if (operations.providers.queryCache && operations.providers.queryCache.supportsOrganizationMembership) { // faster implementation const links = (await operations.providers.linkProvider.getAll()).filter(lid => lid['id'] === linkid); let link = links.length === 1 ? links[0] : null; if (!link) { return next(jsonError('Could not find the link', 404)); } let entry = null; const thirdPartyId = link.thirdPartyId; try { entry = await getByThirdPartyId(thirdPartyId, req.apiVersion, operations, skipOrganizations, showTimestamps); } catch (error) { if (ErrorHelper.IsNotFound(error)) { return next(jsonError('Could not find the link', 404)); } else { return next(jsonError(error, 500)); } } req.insights.trackMetric({ name: 'ApiRequestLinkByLinkId', value: 1 }); return res.json(entry); } const results = await getAllUsers(req.apiVersion, operations, skipOrganizations, showTimestamps, true); for (let i = 0; i < results.length; i++) { const entry = results[i]; if (entry && entry.id === linkid) { req.insights.trackMetric({ name: 'ApiRequestLinkByLinkId', value: 1 }); return res.json(entry); } } return next(jsonError('Could not find the link', 404)); })); router.get('/github/:username', asyncHandler(async (req: IApiRequest, res, next) => { if (unsupportedApiVersions.includes(req.apiVersion)) { return next(jsonError('This API is not supported by the API version you are using.', 400)); } const username = req.params.username.toLowerCase(); const { operations } = getProviders(req); const skipOrganizations = req.query.showOrganizations !== undefined && !!req.query.showOrganizations; const showTimestamps = req.query.showTimestamps !== undefined && req.query.showTimestamps === 'true'; if (operations.providers.queryCache && operations.providers.queryCache.supportsOrganizationMembership) { // faster implementation let account = null; try { account = await operations.getAccountByUsername(username); } 
catch (getAccountError) { if (ErrorHelper.IsNotFound(account)) { return next(jsonError('Could not find a link for the user', 404)); } return next(jsonError(getAccountError, 500)); } try { const entry = await getByThirdPartyId(String(account.id), req.apiVersion, operations, skipOrganizations, showTimestamps); req.insights.trackMetric({ name: 'ApiRequestLinkByGitHubUsername', value: 1 }); return res.json(entry); } catch (entryError) { return next(jsonError(entryError, ErrorHelper.GetStatus(entryError) || 500)); } } const results = await getAllUsers(req.apiVersion, operations, skipOrganizations, showTimestamps); for (let i = 0; i < results.length; i++) { const entry = results[i]; if (entry && entry.github && entry.github.login.toLowerCase() === username) { req.insights.trackMetric({ name: 'ApiRequestLinkByGitHubUsername', value: 1 }); return res.json(entry); } } return next(jsonError('Could not find a link for the user', 404)); })); router.get('/aad/userPrincipalName/:upn', asyncHandler(async (req: IApiRequest, res, next) => { const upn = req.params.upn; const { operations } = getProviders(req); const skipOrganizations = req.query.showOrganizations !== undefined && !!req.query.showOrganizations; const showTimestamps = req.query.showTimestamps !== undefined && req.query.showTimestamps === 'true'; if (operations.providers.queryCache && operations.providers.queryCache.supportsOrganizationMembership) { // faster implementation const links = await operations.providers.linkProvider.queryByCorporateUsername(upn); const r = []; for (const link of links) { const thirdPartyId = link.thirdPartyId; try { const entry = await getByThirdPartyId(thirdPartyId, req.apiVersion, operations, skipOrganizations, showTimestamps); if (entry) { r.push(entry); } } catch (partialIgnoreError) { if (!ErrorHelper.IsNotFound(partialIgnoreError)) { console.dir(partialIgnoreError); } } } req.insights.trackEvent({ name: 'ApiRequestLinkByAadUpnResult', properties: { length: r.length.toString(), 
userPrincipalName: upn, }, }); return res.json(r); } const results = await getAllUsers(req.apiVersion, operations, skipOrganizations, showTimestamps); let r = []; for (let i = 0; i < results.length; i++) { const entry = results[i]; if (entry && entry.aad && entry.aad.userPrincipalName === upn) { r.push(entry); } } req.insights.trackEvent({ name: 'ApiRequestLinkByAadUpnResult', properties: { length: r.length.toString(), userPrincipalName: upn, }, }); if (r.length === 0) { return next(jsonError('Could not find a link for the user', 404)); } req.insights.trackMetric({ name: 'ApiRequestLinkByAadUpn', value: 1 }); return res.json(r); })); router.get('/aad/:id', asyncHandler(async (req: IApiRequest, res, next) => { if (req.apiVersion == '2016-12-01') { return next(jsonError('This API is not supported by the API version you are using.', 400)); } const id = req.params.id; const skipOrganizations = req.query.showOrganizations !== undefined && !!req.query.showOrganizations; const showTimestamps = req.query.showTimestamps !== undefined && req.query.showTimestamps === 'true'; const { operations } = getProviders(req); if (operations.providers.queryCache && operations.providers.queryCache.supportsOrganizationMembership) { // faster implementation const links = await operations.providers.linkProvider.queryByCorporateId(id); const r = []; for (const link of links) { const thirdPartyId = link.thirdPartyId; try { const entry = await getByThirdPartyId(thirdPartyId, req.apiVersion, operations, skipOrganizations, showTimestamps); if (entry) { r.push(entry); } } catch (partialIgnoreError) { if (!ErrorHelper.IsNotFound(partialIgnoreError)) { console.dir(partialIgnoreError); } } } req.insights.trackMetric({ name: 'ApiRequestLinkByAadId', value: 1 }); return res.json(r); } const results = await getAllUsers(req.apiVersion, operations, skipOrganizations, showTimestamps); let r = []; for (let i = 0; i < results.length; i++) { const entry = results[i]; if (entry && entry.aad && entry.aad.id 
=== id) { r.push(entry); } } if (r.length === 0) { return next(jsonError('Could not find a link for the user', 404)); } req.insights.trackMetric({ name: 'ApiRequestLinkByAadId', value: 1 }); return res.json(r); })); async function getByThirdPartyId(thirdPartyId: string, apiVersion, operations: Operations, skipOrganizations: boolean, showTimestamps: boolean, showLinkIds?: boolean): Promise<any> { const providers = operations.providers; const { graphProvider } = providers; let link: ICorporateLink = null; try { link = await providers.linkProvider.getByThirdPartyId(thirdPartyId); } catch (linksError) { if (ErrorHelper.IsNotFound(linksError)) { throw jsonError(`${thirdPartyId} is not linked`, 404); } else { linksError = wrapError(linksError, 'There was a problem retrieving link information to display alongside the member.'); throw jsonError(linksError, 500); } } const account = operations.getAccount(thirdPartyId); await account.getDetails(); let orgMembershipNames: string[] = []; if (providers.queryCache && operations.providers.queryCache.supportsOrganizationMembership) { orgMembershipNames = (await providers.queryCache.userOrganizations(thirdPartyId)).map(org => org.organization.name); } else { // TODO: not implemented for performance reasons now throw ErrorHelper.NotImplemented(); } const isExpandedView = extendedLinkApiVersions.includes(apiVersion); const entry = { github: { id: Number(link.thirdPartyId), login: account.login, organizations: undefined, }, isServiceAccount: undefined, serviceAccountContact: undefined, }; if (isExpandedView) { entry.github['avatar'] = account.avatar_url; } if (showLinkIds && link) { entry['id'] = link['id']; // not part of the interface } if (!skipOrganizations) { entry.github.organizations = orgMembershipNames; } // '2017-09-01' added 'isServiceAccount'; so '2016-12-01' & '2017-03-08' do not have it if (showTimestamps && link && link['created']) { entry['timestamp'] = link['created']; } if (link && link.isServiceAccount === true && 
apiVersion !== '2016-12-01' && apiVersion !== '2017-03-08') { entry.isServiceAccount = true; if (isExpandedView && link.isServiceAccount && link.serviceAccountMail) { entry.serviceAccountContact = link.serviceAccountMail; } } if (link?.corporateAlias || link?.corporateDisplayName || link?.corporateMailAddress || link?.corporateUsername) { const corporatePropertyName = apiVersion === '2016-12-01' ? 'corporate' : 'aad'; // This was renamed to be provider name-based entry[corporatePropertyName] = { alias: link?.corporateAlias, preferredName: link?.corporateDisplayName, userPrincipalName: link?.corporateUsername, emailAddress: link?.corporateMailAddress, }; const corporateIdPropertyName = apiVersion === '2016-12-01' ? 'aadId' : 'id'; // Now just 'id' entry[corporatePropertyName][corporateIdPropertyName] = link.corporateId; } return entry; } async function getAllUsers(apiVersion, operations: Operations, skipOrganizations: boolean, showTimestamps: boolean, showLinkIds?: boolean): Promise<any[]> { let links: ICorporateLink[] = null; try { links = await operations.getLinks(); } catch (linksError) { linksError = wrapError(linksError, 'There was a problem retrieving link information to display alongside members.'); throw jsonError(linksError, 500); } let crossOrganizationMembers: ICrossOrganizationMembersResult; try { // TODO: this is a cross-org map!? validate return type... 
crossOrganizationMembers = await operations.getMembers(); } catch (error) { error = wrapError(error, 'There was a problem getting the members list.'); throw jsonError(error, 500); } const search = new MemberSearch({ crossOrganizationMembers, type: 'linked', links, providers: operations.providers, pageSize: Number.MAX_SAFE_INTEGER, }); try { await search.search(1); const sr = search.members; const isExpandedView = extendedLinkApiVersions.includes(apiVersion); const results = []; sr.forEach(member => { const entry = { github: { id: member['account'].id, login: member['account'].login, organizations: undefined, }, isServiceAccount: undefined, serviceAccountContact: undefined, }; if (isExpandedView) { entry.github['avatar'] = member['account'].avatar_url; } if (showLinkIds && member && member.link && member.link['id']) { entry['id'] = member.link['id']; } if (!skipOrganizations && member['orgs']) { entry.github.organizations = Object.getOwnPropertyNames(member['orgs']); } // '2017-09-01' added 'isServiceAccount'; so '2016-12-01' & '2017-03-08' do not have it const link = member.link as ICorporateLink; if (showTimestamps && link && link['created']) { entry['timestamp'] = link['created']; } if (link && link.isServiceAccount === true && apiVersion !== '2016-12-01' && apiVersion !== '2017-03-08') { entry.isServiceAccount = true; if (isExpandedView && link.isServiceAccount && link.serviceAccountMail) { entry.serviceAccountContact = link.serviceAccountMail; } } const corporate = member.link; if (corporate) { const corporatePropertyName = apiVersion === '2016-12-01' ? 'corporate' : 'aad'; // This was renamed to be provider name-based entry[corporatePropertyName] = { alias: corporate.corporateAlias, preferredName: corporate.corporateDisplayName, userPrincipalName: corporate.corporateUsername, emailAddress: corporate.corporateMailAddress, }; const corporateIdPropertyName = apiVersion === '2016-12-01' ? 
'aadId' : 'id'; // Now just 'id' entry[corporatePropertyName][corporateIdPropertyName] = corporate.corporateId; } results.push(entry); }); return results; } catch (error) { throw jsonError(error, 400); } } export default router;
the_stack
import * as path from 'path';
import R from 'ramda';
import fs from 'fs-extra';
// @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
import Ajv from 'ajv';
import semver from 'semver';
import logger, { createExtensionLogger } from '../logger/logger';
import { Scope } from '../scope';
import { BitId } from '../bit-id';
import { EnvExtensionOptions } from './env-extension-types';
import { ExtensionOptions } from './extension';
import ExtensionNameNotValid from './exceptions/extension-name-not-valid';
import ExtensionGetDynamicConfigError from './exceptions/extension-get-dynamic-config-error';
import { PathOsBased } from '../utils/path';
import { Analytics } from '../analytics/analytics';
import ExtensionLoadError from './exceptions/extension-load-error';
import Environment from '../environment';
import ExtensionSchemaError from './exceptions/extension-schema-error';

// Shared Ajv instance used to validate extension rawConfig against the schema
// optionally exposed by an extension script via getSchema().
const ajv = new Ajv();

const CORE_EXTENSIONS_PATH = './core-extensions';

export type BaseExtensionOptions = {
  file?: string | null | undefined;
};

type BaseArgs = {
  name: string;
  rawConfig: Record<string, any>;
  // options: BaseExtensionOptions
  options: ExtensionOptions | EnvExtensionOptions;
};

export type BaseLoadArgsProps = BaseArgs & {
  consumerPath?: PathOsBased | null | undefined;
  scopePath?: PathOsBased | null | undefined;
  context?: Record<string, any> | null | undefined;
  throws?: boolean;
};

type BaseLoadFromFileArgsProps = BaseArgs & {
  filePath: string;
  rootDir?: string;
  throws?: boolean;
};

type StaticProps = BaseArgs & {
  dynamicConfig: Record<string, any>;
  filePath: string;
  rootDir?: string | null | undefined;
  schema?: Record<string, any> | null | undefined;
  script?: Function;
  disabled: boolean;
  loaded: boolean;
  context?: Record<string, any> | null | undefined;
};

type InstanceSpecificProps = {
  api: Record<string, any>;
};

export type BaseExtensionProps = InstanceSpecificProps & StaticProps;

export type BaseExtensionModel = {
  name: string;
  config: Record<string, any>;
};

type ExtensionPath = {
  resolvedPath: string;
  componentPath: string;
};

export type InitOptions = {
  writeConfigFilesOnAction: boolean | null | undefined;
};

// export type BaseExtensionProps = {
//   ...InstanceSpecificProps,
//   ...StaticProps
// };

// type staticProps = $Diff<BaseExtensionProps, instanceSpecificProps>

/**
 * Base class for a loadable extension: holds the extension's raw/dynamic config,
 * the required script (plugin module), and lifecycle state (disabled/loaded/initialized).
 * Static loaders (`load`, `loadFromFile`, `loadFromModelObjectBase`) produce the
 * props objects consumed by the constructor.
 */
export default class BaseExtension {
  name: string;
  loaded: boolean;
  // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
  initialized: boolean;
  disabled: boolean;
  filePath: string;
  rootDir: string;
  rawConfig: Record<string, any>;
  schema: Record<string, any> | null | undefined;
  options: Record<string, any>;
  dynamicConfig: Record<string, any>;
  context: Record<string, any> | null | undefined;
  script: Function | null | undefined; // Store the required plugin
  _initOptions: InitOptions | null | undefined; // Store the required plugin
  // NOTE(review): this field initializer runs before the constructor body, so
  // `this.name` is still undefined here; the constructor immediately overwrites
  // `api` from extensionProps anyway — confirm the initializer is only a fallback.
  api = _getConcreteBaseAPI({ name: this.name });

  constructor(extensionProps: BaseExtensionProps) {
    this.name = extensionProps.name;
    this.rawConfig = extensionProps.rawConfig;
    this.schema = extensionProps.schema;
    this.options = extensionProps.options;
    // dynamicConfig falls back to rawConfig when the script exposes none
    this.dynamicConfig = extensionProps.dynamicConfig || extensionProps.rawConfig;
    this.context = extensionProps.context;
    this.script = extensionProps.script;
    this.disabled = extensionProps.disabled;
    this.filePath = extensionProps.filePath;
    this.rootDir = extensionProps.rootDir || '';
    this.loaded = extensionProps.loaded;
    this.api = extensionProps.api;
  }

  // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
  get writeConfigFilesOnAction() {
    if (!this.initOptions) {
      return false;
    }
    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
    return this.initOptions.writeConfigFilesOnAction;
  }

  // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
  get initOptions() {
    return this._initOptions;
  }

  // Normalizes the options returned by a script's init() into InitOptions.
  // NOTE(review): when opts is truthy but has no `write` key, `res` stays {} and
  // the getter above returns undefined (falsy) — presumably intentional; verify.
  // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
  set initOptions(opts: Record<string, any> | null | undefined) {
    const defaultInitOpts = {
      writeConfigFilesOnAction: false
    };
    if (!opts) {
      this._initOptions = defaultInitOpts;
      return;
    }
    const res = {};
    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
    if (opts.write) {
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      res.writeConfigFilesOnAction = true;
    }
    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
    this._initOptions = res;
  }

  /**
   * Run the extension's init function
   */
  async init(throws = false): Promise<boolean> {
    Analytics.addBreadCrumb('base-extension', 'initialize extension');
    try {
      let initOptions = {};
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      if (this.script && this.script.init && typeof this.script.init === 'function') {
        // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
        initOptions = this.script.init({ rawConfig: this.rawConfig, dynamicConfig: this.dynamicConfig, api: this.api });
      }
      // wrap in promise, in case a script has async init
      this.initOptions = await Promise.resolve(initOptions);
      this.initialized = true;
      // Make sure to not kill the process if an extension didn't load correctly
    } catch (err) {
      logger.error(`initialized extension ${this.name} failed`, err);
      if (throws) {
        throw new ExtensionLoadError(err, this.name);
      }
      this.initialized = false;
      return false;
    }
    return true;
  }

  // Merge an extension-specific api on top of the base api (base keys win are
  // overridden by `api` keys per R.merge semantics: right side wins).
  extendAPI(baseApi: Record<string, any>, api: Record<string, any>): void {
    this.api = R.merge(baseApi, api);
  }

  toString(): string {
    return JSON.stringify(this, null, 2);
  }

  // Shape used when serializing this extension into bit.json.
  toBitJsonObject() {
    return {
      [this.name]: {
        rawConfig: this.rawConfig,
        options: this.options
      }
    };
  }

  // Shape used when persisting this extension into the scope's model.
  toModelObject() {
    return {
      name: this.name,
      config: this.dynamicConfig
    };
  }

  toObject(): Record<string, any> {
    return this.toModelObject();
  }

  /**
   * Reload the extension, this mainly contain the process of going to the extension file requiring it and get the dynamic config
   * It mostly used for env extension when sometime on the first load the env didn't installed yet (only during build / test) phase
   */
  // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
  async reload(scopePath: string, { throws }: Record<string, any>): Promise<void> {
    Analytics.addBreadCrumb('base-extension', 'reload extension');
    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
    if (!this.filePath && !this.options.core) {
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      const { resolvedPath, componentPath } = _getExtensionPath(this.name, scopePath, this.options.core);
      this.filePath = resolvedPath;
      this.rootDir = componentPath;
    }
    this.name = _addVersionToNameFromPathIfMissing(this.name, this.rootDir, this.options);
    const baseProps = await BaseExtension.loadFromFile({
      name: this.name,
      filePath: this.filePath,
      rootDir: this.rootDir,
      rawConfig: this.rawConfig,
      options: this.options,
      throws
    });
    if (baseProps.loaded) {
      this.loaded = baseProps.loaded;
      this.script = baseProps.script;
      this.dynamicConfig = baseProps.dynamicConfig;
      await this.init();
    }
  }

  // Resolve and cache this extension's file path / root dir inside the scope.
  setExtensionPathInScope(scopePath: string): void {
    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
    const { resolvedPath, componentPath } = _getExtensionPath(this.name, scopePath, this.options.core);
    this.filePath = resolvedPath;
    this.rootDir = componentPath;
  }

  static transformStringToModelObject(name: string): BaseExtensionModel {
    return {
      name,
      config: {}
    };
  }

  /**
   * Load extension by name
   * The extension will be from scope by default or from file
   * if there is file(path) in the options
   * The file path is relative to the bit.json of the project or absolute
   * @param {string} name - name of the extension
   * @param {Object} rawConfig - raw config for the extension
   * @param {Object} options - extension options such as - disabled, file, core
   * @param {string} consumerPath - path to the consumer folder (to load the file relatively)
   * @param {string} scopePath - scope which stores the extension code
   */
  static async load({
    name,
    rawConfig = {},
    options = {},
    consumerPath,
    scopePath,
    throws = false,
    context
  }: BaseLoadArgsProps): Promise<BaseExtensionProps | BaseExtension> {
    logger.debug(`base-extension loading ${name}`);
    const concreteBaseAPI = _getConcreteBaseAPI({ name });
    if (options.file) {
      let absPath = options.file;
      const file = options.file || '';
      if (!path.isAbsolute(options.file) && consumerPath) {
        absPath = path.resolve(consumerPath, file);
      }
      const staticExtensionProps: StaticProps = await BaseExtension.loadFromFile({ name, filePath: absPath, rawConfig, options, throws });
      const extensionProps: BaseExtensionProps = { api: concreteBaseAPI, context, ...staticExtensionProps };
      extensionProps.api = concreteBaseAPI;
      return extensionProps;
    }
    // Default props for when the extension can't be resolved from the scope.
    let staticExtensionProps: StaticProps = {
      name,
      rawConfig,
      dynamicConfig: rawConfig,
      options,
      disabled: false,
      loaded: false,
      filePath: ''
    };
    // Require extension from scope
    if (scopePath) {
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      const { resolvedPath, componentPath } = _getExtensionPath(name, scopePath, options.core);
      const nameWithVersion = _addVersionToNameFromPathIfMissing(name, componentPath, options);
      staticExtensionProps = await BaseExtension.loadFromFile({ name: nameWithVersion, filePath: resolvedPath, rootDir: componentPath, rawConfig, options, throws });
    }
    const extensionProps: BaseExtensionProps = { api: concreteBaseAPI, context, ...staticExtensionProps };
    return extensionProps;
  }

  // Build extension props from a persisted model entry (string name or {name, config}).
  static loadFromModelObjectBase(modelObject: string | BaseExtensionModel): BaseExtensionProps {
    let staticExtensionProps: StaticProps;
    if (typeof modelObject === 'string') {
      staticExtensionProps = {
        name: modelObject,
        rawConfig: {},
        dynamicConfig: {},
        options: {},
        disabled: false,
        loaded: false,
        filePath: ''
      };
    } else {
      staticExtensionProps = {
        name: modelObject.name,
        rawConfig: modelObject.config,
        dynamicConfig: modelObject.config,
        options: {},
        disabled: false,
        loaded: false,
        filePath: ''
      };
    }
    const concreteBaseAPI = _getConcreteBaseAPI({ name: staticExtensionProps.name });
    const extensionProps: BaseExtensionProps = { api: concreteBaseAPI, ...staticExtensionProps };
    return extensionProps;
  }

  /**
   * Require the extension script from `filePath` and validate its config.
   * Never throws unless `throws` is set; missing files / uninstalled envs
   * result in `loaded: false` so installation can happen later.
   */
  static async loadFromFile({
    name,
    filePath,
    rootDir,
    rawConfig = {},
    options = {},
    throws = false
  }: BaseLoadFromFileArgsProps): Promise<StaticProps> {
    logger.debug(`base-extension, loading extension ${name} from ${filePath}`);
    const extensionProps: StaticProps = {
      name,
      rawConfig,
      dynamicConfig: rawConfig,
      options,
      disabled: false,
      loaded: false,
      filePath: '',
      rootDir: ''
    };
    // Skip disabled extensions
    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
    if (options.disabled) {
      extensionProps.disabled = true;
      logger.info(`skip extension ${extensionProps.name} because it is disabled`);
      extensionProps.loaded = false;
      return extensionProps;
    }
    extensionProps.filePath = filePath;
    extensionProps.rootDir = rootDir;
    const isFileExist = await fs.pathExists(filePath);
    if (!isFileExist) {
      // Do not throw an error if the file not exist since we will install it later
      // unless you specify the options.file which means you want a specific file which won't be installed automatically later
      if (throws && options.file) {
        const err = new Error(`the file ${filePath} not found`);
        throw new ExtensionLoadError(err, extensionProps.name);
      }
      extensionProps.loaded = false;
      return extensionProps;
    }
    if (rootDir && !Environment.isEnvironmentInstalled(rootDir)) {
      extensionProps.loaded = false;
      return extensionProps;
    }
    try {
      const script = require(filePath); // eslint-disable-line
      // Support both ES-module default exports and CommonJS module.exports.
      extensionProps.script = script.default ? script.default : script;
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
      if (extensionProps.script.getSchema && typeof extensionProps.script.getSchema === 'function') {
        // the function may or may not be a promise
        // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
        extensionProps.schema = await Promise.resolve(extensionProps.script.getSchema());
        // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
        const valid = ajv.validate(extensionProps.schema, rawConfig);
        if (!valid) {
          throw new ExtensionSchemaError(name, ajv.errorsText());
        }
      }
      // Make sure to not kill the process if an extension didn't load correctly
    } catch (err) {
      if (err.code === 'MODULE_NOT_FOUND') {
        const msg = `loading extension ${extensionProps.name} failed, the file ${extensionProps.filePath} not found`;
        logger.warn(msg);
        // console.error(msg); // eslint-disable-line no-console
      }
      logger.error(`loading extension ${extensionProps.name} failed`, err);
      extensionProps.loaded = false;
      if (throws) {
        let printStack = true;
        if (err instanceof ExtensionSchemaError) {
          printStack = false;
        }
        throw new ExtensionLoadError(err, extensionProps.name, printStack);
      }
      return extensionProps;
    }
    extensionProps.loaded = true;
    return extensionProps;
  }

  // Invoke the script's optional getDynamicConfig(rawConfig) hook, wrapping
  // any failure in a typed error; returns undefined when the hook is absent.
  static loadDynamicConfig(extensionProps: StaticProps): Record<string, any> | null | undefined {
    logger.debug('base-extension - loadDynamicConfig');
    const getDynamicConfig = R.path(['script', 'getDynamicConfig'], extensionProps);
    if (getDynamicConfig && typeof getDynamicConfig === 'function') {
      try {
        const dynamicConfig = getDynamicConfig({
          rawConfig: extensionProps.rawConfig
        });
        return dynamicConfig;
      } catch (err) {
        throw new ExtensionGetDynamicConfigError(err, extensionProps.name);
      }
    }
    return undefined;
  }
}

// Dispatch to the core-extension or scope-based path resolver.
function _getExtensionPath(name: string, scopePath: string, isCore = false): ExtensionPath {
  if (isCore) {
    return _getCoreExtensionPath(name);
  }
  if (!scopePath) {
    throw new Error('base-extension._getExtensionPath expects to get scopePath');
  }
  return _getRegularExtensionPath(name, scopePath);
}

function _getCoreExtensionPath(name: string): ExtensionPath {
  const componentPath = path.join(__dirname, CORE_EXTENSIONS_PATH, name);
  return {
    resolvedPath: componentPath,
    componentPath
  };
}

function _getRegularExtensionPath(name: string, scopePath: string): ExtensionPath {
  let bitId: BitId;
  try {
    bitId = BitId.parse(name, true); // todo: make sure it always has a scope
  } catch (err) {
    throw new ExtensionNameNotValid(name);
  }
  if (!bitId || !bitId.scope) throw new ExtensionNameNotValid(name);

  const internalComponentsPath = Scope.getComponentsRelativePath();
  const internalComponentPath = Scope.getComponentRelativePath(bitId, scopePath);
  const componentPath = path.join(scopePath, internalComponentsPath, internalComponentPath);
  try {
    // This might throw an error in case of imported component when the env
    // isn't installed yet
    // It will be handled in higher functions
    const resolved = require.resolve(componentPath);
    return {
      resolvedPath: typeof resolved === 'string' ? resolved : componentPath,
      componentPath
    };
  } catch (e) {
    return {
      resolvedPath: componentPath,
      componentPath
    };
  }
}

// The version is assumed to be the last path segment of the component dir;
// returns undefined when that segment is not a valid semver.
function _getExtensionVersionFromComponentPath(componentPath: string): string | undefined {
  const parsed = path.parse(componentPath);
  const version = parsed.base;
  if (!semver.valid(version)) {
    return undefined;
  }
  return version;
}

function _addVersionToNameFromPathIfMissing(name: string, componentPath: string, options: Record<string, any>): string {
  // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
  if (options && options.core) return name; // if it's a core extension, it's not a bit-id.
  let bitId: BitId;
  try {
    bitId = BitId.parse(name, true); // @todo: make sure it always has a scope name
  } catch (err) {
    throw new ExtensionNameNotValid(name);
  }
  if (bitId.getVersion().latest) {
    const version = _getExtensionVersionFromComponentPath(componentPath);
    return bitId.changeVersion(version).toString();
  }
  return name;
}

const baseApi = {
  /**
   * API to get logger
   */
  getLogger: (name): Function => () => createExtensionLogger(name)
};

/**
 * Function which get actual params and return a concrete base api
 */
function _getConcreteBaseAPI({ name }: { name: string }) {
  const concreteBaseAPI = R.clone(baseApi);
  concreteBaseAPI.getLogger = baseApi.getLogger(name);
  return concreteBaseAPI;
}
the_stack
import * as console from 'console'; import { gzipSync, gunzipSync } from 'zlib'; import { metricScope, Configuration, MetricsLogger, Unit } from 'aws-embedded-metrics'; import type { Context, ScheduledEvent } from 'aws-lambda'; import { captureHTTPsGlobal } from 'aws-xray-sdk-core'; // eslint-disable-next-line @typescript-eslint/no-require-imports import { DenyListClient } from '../../backend/deny-list/client.lambda-shared'; import { LicenseListClient } from '../../backend/license-list/client.lambda-shared'; import * as aws from '../../backend/shared/aws.lambda-shared'; import { requireEnv } from '../../backend/shared/env.lambda-shared'; import { MetricName, MARKER_FILE_NAME, METRICS_NAMESPACE } from './constants.lambda-shared'; import { CouchChanges, DatabaseChange } from './couch-changes.lambda-shared'; import { PackageVersion } from './stage-and-notify.lambda'; // eslint-disable-next-line @typescript-eslint/no-require-imports const normalizeNPMMetadata = require('normalize-registry-metadata'); const CONSTRUCT_KEYWORDS: ReadonlySet<string> = new Set(['cdk', 'aws-cdk', 'awscdk', 'cdk8s', 'cdktf']); const NPM_REPLICA_REGISTRY_URL = 'https://replicate.npmjs.com/'; /** * The release date of `aws-cdk@0.8.0`. Anything earlier than this basically is * not a relevant package, as it cannot possibly be a constructs-based package. * This is used to fast-forward over boring stuff when the sequence number is * reset. */ const DAWN_OF_CONSTRUCTS = new Date('2018-07-31T13:43:04.615Z'); // Configure embedded metrics format Configuration.namespace = METRICS_NAMESPACE; // Make sure X-Ray traces will include HTTP(s) calls. // eslint-disable-next-line @typescript-eslint/no-require-imports captureHTTPsGlobal(require('https')); // eslint-disable-next-line @typescript-eslint/no-require-imports captureHTTPsGlobal(require('http')); /** * This function triggers on a fixed schedule and reads a stream of changes from npmjs couchdb _changes endpoint. 
* Upon invocation the function starts reading from a sequence stored in an s3 object - the `marker`. * If the marker fails to load (or do not exist), the stream will start from `now` - the latest change. * For each change: * - the package version tarball will be copied from the npm registry to a stating bucket. * - a message will be sent to an sqs queue * npm registry API docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md * @param context a Lambda execution context */ export async function handler(event: ScheduledEvent, context: Context) { console.log(`Event: ${JSON.stringify(event, null, 2)}`); const stagingBucket = requireEnv('BUCKET_NAME'); const stagingFunction = requireEnv('FUNCTION_NAME'); const denyList = await DenyListClient.newClient(); const licenseList = await LicenseListClient.newClient(); const npm = new CouchChanges(NPM_REPLICA_REGISTRY_URL, 'registry'); const { marker: initialMarker, knownVersions } = await loadLastTransactionMarker(stagingBucket, npm); // The last written marker seq id. let updatedMarker = initialMarker; // The slowest batch processing time so far (starts at 30 seconds). This is how much time should // be left before timeout if a new batch is to be fetched. let maxBatchProcessingTime = 30_000; // Whether we should continue reading more items or not... This is set to false when the current // latest change is reached (i.e: next page of changes is empty). let shouldContinue = true; do { await metricScope((metrics) => async () => { const changes = await npm.changes(updatedMarker); // Clear automatically set dimensions - we don't need them (see https://github.com/awslabs/aws-embedded-metrics-node/issues/73) metrics.setDimensions(); // Recording current seq range and updating the `updatedMarker`. 
metrics.setProperty('StartSeq', updatedMarker); updatedMarker = changes.last_seq; metrics.setProperty('EndSeq', updatedMarker); const startTime = Date.now(); try { const batch = changes.results as readonly Change[]; // The most recent "modified" timestamp observed in the batch. let lastModified: Date | undefined; // Emit npm.js replication lag for (const { doc } of batch) { if (doc?.time?.modified) { const modified = new Date(doc.time.modified); metrics.putMetric( MetricName.NPMJS_CHANGE_AGE, startTime - modified.getTime(), Unit.Milliseconds, ); if (lastModified == null || lastModified < modified) { lastModified = modified; } } } console.log(`Received a batch of ${batch.length} element(s)`); metrics.putMetric(MetricName.CHANGE_COUNT, batch.length, Unit.Count); if (lastModified && lastModified < DAWN_OF_CONSTRUCTS) { console.log(`Skipping batch as the latest modification is ${lastModified}, which is pre-Constructs`); } else if (batch.length === 0) { console.log('Received 0 changes, caught up to "now", exiting...'); shouldContinue = false; } else { // Obtain the modified package version from the update event, and filter // out packages that are not of interest to us (not construct libraries). const versionInfos = getRelevantVersionInfos(batch, metrics, denyList, licenseList, knownVersions); console.log(`Identified ${versionInfos.length} relevant package version update(s)`); metrics.putMetric(MetricName.RELEVANT_PACKAGE_VERSIONS, versionInfos.length, Unit.Count); // Process all remaining updates await Promise.all(versionInfos.map(async ({ infos, modified, seq }) => { const invokeArgs: PackageVersion = { integrity: infos.dist.shasum, modified: modified.toISOString(), name: infos.name, seq: seq?.toString(), tarballUrl: infos.dist.tarball, version: infos.version, }; // "Fire-and-forget" invocation here. 
await aws.lambda().invokeAsync({ FunctionName: stagingFunction, InvokeArgs: JSON.stringify(invokeArgs, null, 2), }).promise(); // Record that this is now a "known" version (no need to re-discover) knownVersions.set(`${infos.name}@${infos.version}`, modified); })); } // Updating the S3 stored marker with the new seq id as communicated by nano. await saveLastTransactionMarker(context, stagingBucket, updatedMarker, knownVersions); } finally { // Markers may not always be numeric (but in practice they are now), so we protect against that... if (typeof updatedMarker === 'number' || /^\d+$/.test(updatedMarker)) { metrics.putMetric(MetricName.LAST_SEQ, typeof updatedMarker === 'number' ? updatedMarker : parseInt(updatedMarker), Unit.None); } metrics.putMetric(MetricName.BATCH_PROCESSING_TIME, Date.now() - startTime, Unit.Milliseconds); metrics.putMetric(MetricName.REMAINING_TIME, context.getRemainingTimeInMillis(), Unit.Milliseconds); } })(); } while (shouldContinue && context.getRemainingTimeInMillis() >= maxBatchProcessingTime); console.log('All done here, we have success!'); return { initialMarker, updatedMarker }; } //#region Last transaction marker /** * Loads the last transaction marker from S3. * * @param registry a Nano database corresponding to the Npmjs.com CouchDB instance. * * @returns the value of the last transaction marker and the map of package names + versions to the last modification * of that package version that was processed. */ async function loadLastTransactionMarker( stagingBucket: string, registry: CouchChanges, ): Promise<{ marker: string | number; knownVersions: Map<string, Date> }> { try { const response = await aws.s3().getObject({ Bucket: stagingBucket, Key: MARKER_FILE_NAME, }).promise(); if (response.ContentEncoding === 'gzip') { response.Body = gunzipSync(Buffer.from(response.Body! 
as any)); } let data = JSON.parse( response.Body!.toString('utf-8'), (key, value) => { if (key !== 'knownVersions') { return value; } const map = new Map<string, Date>(); for (const [pkgVersion, iso] of Object.entries(value)) { if (typeof iso === 'string' || typeof iso === 'number') { map.set(pkgVersion, new Date(iso)); } else { console.error(`Ignoring invalid entry: ${pkgVersion} => ${iso}`); } } return map; }, ); if (typeof data === 'number') { data = { marker: data.toFixed(), knownVersions: new Map() }; } console.log(`Read last transaction marker: ${data.marker}`); const dbUpdateSeq = (await registry.info()).update_seq; if (dbUpdateSeq < data.marker) { console.warn(`Current DB update_seq (${dbUpdateSeq}) is lower than marker (CouchDB instance was likely replaced), resetting to 0!`); return { marker: '0', knownVersions: data.knownVersion }; } return data; } catch (error) { if (error.code !== 'NoSuchKey') { throw error; } console.warn(`Marker object (s3://${stagingBucket}/${MARKER_FILE_NAME}) does not exist, starting from scratch`); return { marker: '0', knownVersions: new Map() }; } } /** * Updates the last transaction marker in S3. * * @param marker the last transaction marker value * @param knownVersions the map of package name + version to last modified timestamp of packages that have been processed. 
*/ async function saveLastTransactionMarker(context: Context, stagingBucket: string, marker: string | number, knownVersions: Map<string, Date>) { console.log(`Updating last transaction marker to ${marker}`); return putObject( context, stagingBucket, MARKER_FILE_NAME, gzipSync( JSON.stringify( { marker, knownVersions }, (_, value) => { if (value instanceof Date) { return value.toISOString(); } else if (value instanceof Map) { return Object.fromEntries(value); } else { return value; } }, 2, ), { level: 9 }, ), { ContentType: 'application/json', ContentEncoding: 'gzip', }, ); } //#endregion //#region Asynchronous Primitives /** * Puts an object in the staging bucket, with standardized object metadata. * * @param key the key for the object to be put. * @param body the body of the object to be put. * @param opts any other options to use when sending the S3 request. * * @returns the result of the S3 request. */ function putObject(context: Context, bucket: string, key: string, body: AWS.S3.Body, opts: Omit<AWS.S3.PutObjectRequest, 'Bucket' | 'Key' | 'Body'> = {}) { return aws.s3().putObject({ Bucket: bucket, Key: key, Body: body, Metadata: { 'Lambda-Log-Group': context.logGroupName, 'Lambda-Log-Stream': context.logStreamName, 'Lambda-Run-Id': context.awsRequestId, ...opts.Metadata, }, ...opts, }).promise(); } //#endregion /** * Obtains the `VersionInfo` corresponding to the modified version(s) in the * provided `Change` objects, ensures they are relevant (construct libraries), * and returns those only. * * @param changes the changes to be processed. * @param metrics the metrics logger to use. 
* @param denyList deny list client * * @returns a list of `VersionInfo` objects */ function getRelevantVersionInfos( changes: readonly Change[], metrics: MetricsLogger, denyList: DenyListClient, licenseList: LicenseListClient, knownVersions: Map<string, Date>, ): readonly UpdatedVersion[] { const result = new Array<UpdatedVersion>(); for (const change of changes) { // Filter out all elements that don't have a "name" in the document, as // these are schemas, which are not relevant to our business here. if (change.doc.name === undefined) { console.error(`[${change.seq}] Changed document contains no 'name': ${change.id}`); metrics.putMetric(MetricName.UNPROCESSABLE_ENTITY, 1, Unit.Count); continue; } // The normalize function change the object in place, if the doc object is invalid it will return undefined if (normalizeNPMMetadata(change.doc) === undefined) { console.error(`[${change.seq}] Changed document invalid, npm normalize returned undefined: ${change.id}`); metrics.putMetric(MetricName.UNPROCESSABLE_ENTITY, 1, Unit.Count); continue; } // Sometimes, there are no versions in the document. We skip those. if (change.doc.versions == null) { console.error(`[${change.seq}] Changed document contains no 'versions': ${change.id}`); metrics.putMetric(MetricName.UNPROCESSABLE_ENTITY, 1, Unit.Count); continue; } // Sometimes, there is no 'time' entry in the document. We skip those. 
if (change.doc.time == null) { console.error(`[${change.seq}] Changed document contains no 'time': ${change.id}`); metrics.putMetric(MetricName.UNPROCESSABLE_ENTITY, 1, Unit.Count); continue; } // Get the last modification date from the change const packageVersionUpdates = Object.entries(change.doc.time) // Ignore the "created" and "modified" keys here .filter(([key]) => key !== 'created' && key !== 'modified') // Parse all the dates to ensure they are comparable .map(([version, isoDate]) => [version, new Date(isoDate)] as const); metrics.putMetric(MetricName.PACKAGE_VERSION_COUNT, packageVersionUpdates.length, Unit.Count); for (const [version, modified] of packageVersionUpdates) { const knownKey = `${change.doc.name}@${version}`; const known = knownVersions.get(knownKey); if (known == null || known < modified) { const infos = change.doc.versions[version]; if (infos == null) { // Could be the version in question was un-published. console.log(`[${change.seq}] Could not find info for "${change.doc.name}@${version}". Was it un-published?`); } else if (isConstructLibrary(infos)) { // skip if this package is denied const denied = denyList.lookup(infos.name, infos.version); if (denied) { console.log(`[${change.seq}] Package denied: ${JSON.stringify(denied)}`); knownVersions.set(knownKey, modified); metrics.putMetric(MetricName.DENY_LISTED_COUNT, 1, Unit.Count); continue; } metrics.putMetric(MetricName.PACKAGE_VERSION_AGE, Date.now() - modified.getTime(), Unit.Milliseconds); const isEligible = licenseList.lookup(infos.license ?? 'UNLICENSED') != null; metrics.putMetric(MetricName.INELIGIBLE_LICENSE, isEligible ? 0 : 1, Unit.Count); if (isEligible) { result.push({ infos, modified, seq: change.seq }); } else { console.log(`[${change.seq}] Package "${change.doc.name}@${version}" does not use allow-listed license: ${infos.license ?? 'UNLICENSED'}`); knownVersions.set(knownKey, modified); } } // Else this is not a construct library, so we'll just ignore it... 
} } } return result; /** * This determines whether a package is "interesting" to ConstructHub or not. This is related but * not necessarily identical to the logic in the ingestion process that annotates package metadata * with a construct framework name + version (those could ultimately be re-factored to share more * of the logic/heuristics, though). * * Concretely, it checks for a list of known "official" packages for various construct frameworks, * and packages that have a dependency on such a package. It also has a keywords allow-list as a * fall-back (the current dependency-based logic does not consider transitive dependencies and * might hence miss certain rare use-cases, which keywords would rescue). */ function isConstructLibrary(infos: VersionInfo): boolean { if (infos.jsii == null) { return false; } // The "constructs" package is a sign of a constructs library return isConstructFrameworkPackage(infos.name) // Recursively apply on dependencies || Object.keys(infos.dependencies ?? {}).some(isConstructFrameworkPackage) || Object.keys(infos.devDependencies ?? {}).some(isConstructFrameworkPackage) || Object.keys(infos.peerDependencies ?? {}).some(isConstructFrameworkPackage) // Keyword-based fallback || infos.keywords?.some((kw) => CONSTRUCT_KEYWORDS.has(kw)); } /** * Package is one of the known construct framework's first party packages: * - @aws-cdk/* * - @cdktf/* * - cdk8s or cdk8s-plus */ function isConstructFrameworkPackage(name: string): boolean { // IMPORTANT NOTE: Prefix matching should only be used for @scope/ names. // The low-level constructs package return name === 'constructs' // AWS CDK Packages || name === 'aws-cdk-lib' || name === 'monocdk' || name.startsWith('@aws-cdk/') // CDK8s packages || name === 'cdk8s' || /^cdk8s-plus(?:-(?:17|20|21|22))?$/.test(name) // CDKTf packages || name === 'cdktf' || name.startsWith('@cdktf/'); } } /** * The scheme of a package version in the update. 
Includes the package.json keys, as well as some additional npm metadata * @see https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#version */ interface VersionInfo { readonly dependencies?: { readonly [name: string]: string }; readonly devDependencies?: { readonly [name: string]: string }; readonly peerDependencies?: { readonly [name: string]: string }; readonly jsii: unknown; readonly license?: string; readonly name: string; readonly [key: string]: unknown; readonly keywords: string[]; readonly dist: { readonly shasum: string; readonly tarball: string; }; readonly version: string; } interface UpdatedVersion { /** * The `VersionInfo` for the modified package version. */ readonly infos: VersionInfo; /** * The time at which the `VersionInfo` was last modified. */ readonly modified: Date; /** * The CouchDB transaction number for the update. */ readonly seq?: string | number; } interface Document { /** * a List of all Version objects for the package */ readonly versions: { [key:string]: VersionInfo | undefined }; /** * The package's name. */ readonly name: string; /** * Timestamps associated with this document. The values are ISO-8601 encoded * timestamps. */ readonly time: { readonly created: string; readonly modified: string; readonly [version: string]: string; }; readonly [key: string]: unknown; } interface Change extends DatabaseChange { readonly doc: Document; }
the_stack
import * as Fs from 'fs-extra'; import * as Minimist from 'minimist'; import * as Path from 'path'; import { Git } from './git'; import { localStorage as LocalStorage } from './local-storage'; import { Paths } from './paths'; import { prompt } from './prompt'; import * as Rebase from './rebase'; import { Utils } from './utils'; // Get recent commit by specified arguments function getRecentCommit(offset, format, grep) { if (typeof offset === 'string') { if (!grep) { grep = format; } format = offset; offset = 0; } const argv = []; if (format) { argv.push(`--format=${format}`); } if (grep) { argv.push(`--grep=${grep}`); } return Git.recentCommit(offset, argv); } // Get the recent step commit function getRecentStepCommit(offset, format?) { return getRecentCommit(offset, format, '^Step [0-9]\\+'); } // Get the recent super step commit function getRecentSuperStepCommit(offset, format?) { return getRecentCommit(offset, format, '^Step [0-9]\\+:'); } // Get the recent sub step commit function getRecentSubStepCommit(offset, format?) { return getRecentCommit(offset, format, '^Step [0-9]\\+\\.[0-9]\\+:'); } // Extract step json from message function getStepDescriptor(message): { number: string, message: string, type: string } { if (message == null) { throw TypeError('A message must be provided'); } const match = message.match(/^Step (\d+(?:\.\d+)?)\: ((?:.|\n)*)$/); return match && { number: match[1], message: match[2], type: match[1].split('.')[1] ? 
'sub' : 'super', }; } // Extract super step json from message function getSuperStepDescriptor(message) { if (message == null) { throw TypeError('A message must be provided'); } const match = message.match(/^Step (\d+)\: ((?:.|\n)*)$/); return match && { number: Number(match[1]), message: match[2], }; } // Extract sub step json from message function getSubStepDescriptor(message) { if (message == null) { throw TypeError('A message must be provided'); } const match = message.match(/^Step ((\d+)\.(\d+))\: ((?:.|\n)*)$/); return match && { number: match[1], superNumber: Number(match[2]), subNumber: Number(match[3]), message: match[4], }; } // Push a new step with the provided message function pushStep(message, options) { const step = getNextStep(); commitStep(step, message, options); // Meta-data for step editing LocalStorage.setItem('REBASE_NEW_STEP', step); } // Pop the last step function popStep() { const headHash = Git(['rev-parse', 'HEAD']); const rootHash = Git.rootHash(); if (headHash === rootHash) { throw Error("Can't remove root"); } const removedCommitMessage = Git.recentCommit(['--format=%s']); const stepDescriptor = getStepDescriptor(removedCommitMessage); Git.print(['reset', '--hard', 'HEAD~1']); // Meta-data for step editing if (stepDescriptor) { LocalStorage.setItem('REBASE_NEW_STEP', getCurrentStep()); // This will be used later on to update the manuals if (ensureStepMap()) { updateStepMap('remove', { step: stepDescriptor.number }); } // Delete branch referencing the super step unless we're rebasing, in which case the // branches will be reset automatically at the end of the rebase if (stepDescriptor.type === 'super' && !Git.rebasing()) { const branch = Git.activeBranchName(); Git(['branch', '-D', `${branch}-step${stepDescriptor.number}`]); } } else { console.warn('Removed commit was not a step'); return; } } // Finish the current with the provided message and tag it function tagStep(message) { const step = getNextSuperStep(); const tag = `step${step}`; 
const manualFile = `${tag}.tmpl`;
  const manualTemplatePath = Path.resolve(Paths.manuals.templates, manualFile);

  Fs.ensureDirSync(Paths.manuals.templates);
  Fs.ensureDirSync(Paths.manuals.views);

  // If file exists, use it instead of overriding it
  if (!Fs.existsSync(manualTemplatePath)) {
    Fs.writeFileSync(manualTemplatePath, '');
  }

  Git(['add', manualTemplatePath]);
  commitStep(step, message);

  // If we're in edit mode all the branches will be set after the rebase
  if (!Git.rebasing()) {
    const branch = Git.activeBranchName();
    // This branch will be used to run integration testing
    Git(['branch', `${branch}-step${step}`]);
  }

  // Meta-data for step editing
  LocalStorage.setItem('REBASE_NEW_STEP', step);
}

// The opposite of git rebase --continue: Will step back to the previously edited step
async function stepBack(targetStep: string, options = { interactive: false }) {
  if (!Git.rebasing()) {
    throw Error('fatal: No rebase in progress?');
  }

  // If prior to that we did `edit 1.1..1.3` and we're in 1.3 this will result in [1.1, 1.2]
  const previousSteps = Rebase.getPreviousEditedSteps();

  if (!previousSteps.length) {
    throw Error('No previous steps found');
  }

  if (targetStep) {
    // Multiplier e.g. x3 — "x3" means 3 steps back in the edit history
    if (/x\d+/.test(targetStep)) {
      const times = Number(targetStep.match(/x(\d+)/)![1]);
      targetStep = previousSteps[times - 1];
    }
    // Step e.g. 1.1
    else if (!/\d+(\.\d+)?/.test(targetStep)) {
      throw TypeError('Provided argument is neither a step or a multiplier');
    }
  }
  // No explicit target — interactively prompt the user to pick one
  else if (options.interactive) {
    targetStep = await prompt([
      {
        type: 'list',
        name: 'stepback',
        message: 'Which step would you like to go back to?',
        choices: previousSteps,
      }
    ]);
  }
  // No target step was provided — default to the most recently edited step
  else {
    targetStep = previousSteps[0];
  }

  if (!targetStep) {
    throw TypeError('targetStep must be provided');
  }

  // Make sure it's actually relevant
  if (previousSteps.every(s => s !== targetStep)) {
    throw TypeError(`Provided target step ${targetStep} was not edited`);
  }

  // After retrieving target step, this is where the magic happens
  // Message will be printed over here
  Rebase.hardResetRebaseState(targetStep);
}

// Get the hash of the step followed by ~1, mostly useful for a rebase
function getStepBase(step) {
  // No step provided — derive it from the most recent step commit
  if (!step) {
    const message = getRecentStepCommit('%s');

    if (!message) {
      return '--root';
    }

    // NOTE(review): getStepDescriptor() yields null for a non-step message; the
    // grep above should guarantee a match, but this would crash otherwise — confirm
    step = getStepDescriptor(message).number;
  }

  if (step === 'root') {
    return '--root';
  }

  const hash = Git.recentCommit([
    `--grep=^Step ${step}:`,
    '--format=%h',
  ]);

  if (!hash) {
    throw Error('Step not found');
  }

  return `${hash}~1`;
}

// Edit the provided step
function editStep(steps, options: any = {}) {
  const rootSha1 = Git.rootHash();
  const allSteps = getAllSteps();
  // Normalize to a non-empty array of step refs
  steps = [].concat(steps).filter(Boolean);

  // Unwrap ranges, e.g.
// 1...3.1 may become 1 2.1 2.2 2.3 2 3.1
  steps = steps.reduce((flattened, step) => {
    const range = step.match(/(\d+(?:\.\d+)?)?\.\.(?:\.+)?(\d+(?:\.\d+)?)?/);

    if (!range) {
      return flattened.concat(step);
    }

    // An open-ended range defaults to root..<last known step>
    const start = range[1] || 'root';
    const end = range[2] || allSteps[allSteps.length - 1];

    let startIndex = allSteps.findIndex(s => s === start);
    const endIndex = allSteps.findIndex(s => s === end);

    if (startIndex === -1) {
      startIndex = 0;
    }

    // NOTE(review): when the end of the range is unknown, startIndex is pushed past
    // the end so the slice below yields an empty list (endIndex is a const here)
    if (endIndex === -1) {
      startIndex = Infinity;
    }

    return flattened.concat(allSteps.slice(startIndex, endIndex + 1));
  }, []);

  // Map git-refs to step indexes
  steps = steps.map((step) => {
    // If an index was provided, return it; otherwise try to find the index by SHA1
    if (/^\d{1,5}(\.\d+)?$/.test(step) || step === 'root') {
      return step;
    }

    if (step === rootSha1) {
      return 'root';
    }

    const commitMessage = Git(['log', step, '-1', '--format=%s'])
    const descriptor = getStepDescriptor(commitMessage);

    return descriptor && descriptor.number;
  }).filter(Boolean);

  steps = steps.slice().sort((a, b) => {
    // Pad super steps with 'Infinity' so a bare "2" sorts after "2.x" sub steps
    const [superA, subA] = a.split('.').concat('Infinity');
    const [superB, subB] = b.split('.').concat('Infinity');

    // Always put the root on top
    if (a === 'root') {
      return -1;
    }
    if (b === 'root') {
      return 1;
    }

    // Put first steps first
    return (
      (superA - superB) ||
      (subA - subB)
    );
  });

  // The rebase would always have to start from the first step
  const base = getStepBase(steps[0]);

  // '--root' might be fetched in case no steps were provided. We need to fill up
  // this missing information in the steps array
  if (!steps.length && base === '--root') {
    steps[0] = 'root';
  }

  const argv = [Paths.tortilla.editor, 'edit', ...steps];

  // Update diffSteps
  if (options.udiff != null) {
    argv.push('--udiff');
  }

  // Update diffSteps in another repo
  if (options.udiff) {
    argv.push(options.udiff.toString());
  }

  // Storing locally so it can be used in further processes
  // Indicates that this operation is hooked into a submodule
  if (process.env.TORTILLA_SUBMODULE_CWD) {
    LocalStorage.setItem('SUBMODULE_CWD', process.env.TORTILLA_SUBMODULE_CWD);
  }

  // Initialize rebase_states git project
  Fs.removeSync(Paths.rebaseStates);
  Git(['init', Paths.rebaseStates]);

  Git.print(['rebase', '-i', base, '--keep-empty'], {
    env: {
      GIT_SEQUENCE_EDITOR: `node ${argv.join(' ')}`,
    },
  });
}

// Adjust all the step indexes from the provided step
function sortStep(step) {
  // If no step was provided, take the most recent one
  if (!step) {
    step = getRecentStepCommit('%s');
    step = getStepDescriptor(step);
    step = step ?
step.number : 'root';
  }

  let newStep;
  let oldStep;
  let base;

  // If root, make sure to sort all step indexes since the beginning of history
  if (step === 'root') {
    newStep = '1';
    oldStep = 'root';
    base = '--root';
  } else {
    // Else, adjust only the steps in the given super step
    newStep = step.split('.').map(Number)[0];
    oldStep = newStep - 1 || 'root';
    newStep = `${newStep}.${1}`;
    base = getStepBase(newStep);
  }

  // Setting local storage variables so re-sortment could be done properly
  LocalStorage.setItem('REBASE_NEW_STEP', newStep);
  LocalStorage.setItem('REBASE_OLD_STEP', oldStep);

  Git.print(['rebase', '-i', base, '--keep-empty'], {
    env: {
      GIT_SEQUENCE_EDITOR: `node ${Paths.tortilla.editor} sort`,
    },
  });
}

// Reword the provided step with the provided message
function rewordStep(step, message) {
  const base = getStepBase(step);
  const argv = [Paths.tortilla.editor, 'reword'];

  if (message) {
    argv.push('-m', `"${message}"`);
  }

  Git.print(['rebase', '-i', base, '--keep-empty'], {
    env: {
      GIT_SEQUENCE_EDITOR: `node ${argv.join(' ')}`,
    },
  });
}

// Run git-show for given step index
function showStep(step, ...args) {
  assertStep(step)

  // Escape the dot so it isn't treated as a regex wildcard by the grep below
  step = step.split('.').join('\\.')

  const hash = Git(['log', `--grep=^Step ${step}`, '--format=%H'])

  if (!hash) {
    throw Error('Step not found')
  }

  Git.print(['show', hash, ...args])
}

// Asserts whether provided string is a step index or not
function assertStep(step: string | number, silent = false) {
  if (typeof step !== 'string' && typeof step !== 'number') {
    if (silent) {
      return false
    }

    throw TypeError('Provided argument is not of type string or number')
  }

  step = step.toString()

  // NOTE(review): these regexes are unanchored, so any string containing a digit
  // passes (e.g. "abc1"). Anchoring with ^...$ looks like the intent, but confirm
  // before tightening since callers may rely on the loose check.
  if (!/\d+/.test(step) && !/\d+\.\d+/.test(step)) {
    if (silent) {
      return false
    }

    throw TypeError('Provided argument is not a step')
  }

  return true
}

// Add a new commit of the provided step with the provided message
function commitStep(step, message, options: any = {}) {
  const argv = ['commit'];

  if (message) {
    argv.push('-m', message);
  }

  if (options.allowEmpty) {
    argv.push('--allow-empty');
  }

  // Specified step is gonna be used for when forming the commit message
  LocalStorage.setItem('HOOK_STEP', step);

  try {
    // commit
    Git.print(argv);
  } catch (err) {
    // Clearing storage to prevent conflicts with upcoming commits
    LocalStorage.removeItem('HOOK_STEP');

    throw err;
  }
}

// Get the current step
function getCurrentStep() {
  // Probably root commit
  const recentStepCommit = getRecentStepCommit('%s');

  if (!recentStepCommit) {
    return 'root';
  }

  // Cover unexpected behavior
  const descriptor = getStepDescriptor(recentStepCommit);

  if (!descriptor) {
    return 'root';
  }

  return descriptor.number;
}

// Get the current super step
function getCurrentSuperStep() {
  // Probably root commit
  const recentStepCommit = getRecentSuperStepCommit('%s');

  if (!recentStepCommit) {
    return 'root';
  }

  // Cover unexpected behavior
  const descriptor = getSuperStepDescriptor(recentStepCommit);

  if (!descriptor) {
    return 'root';
  }

  return descriptor.number;
}

// Get the next step
function getNextStep(offset?)
{
  // Fetch data about recent step commit
  const stepCommitMessage = getRecentStepCommit(offset, '%s');
  const followedByStep = !!stepCommitMessage;

  // If no previous steps found return the first one
  if (!followedByStep) {
    return '1.1';
  }

  // Fetch data about current step
  const stepDescriptor = getStepDescriptor(stepCommitMessage);
  const stepNumbers = stepDescriptor.number.split('.');
  const superStepNumber = Number(stepNumbers[0]);
  const subStepNumber = Number(stepNumbers[1]);
  // A super step has no sub index (subStepNumber is NaN or 0 → falsy)
  const isSuperStep = !subStepNumber;

  if (!offset) {
    // If this is a super step return the first sub step of a new step
    if (isSuperStep) {
      return `${superStepNumber + 1}.${1}`;
    }
    // Else, return the next step as expected
    return `${superStepNumber}.${subStepNumber + 1}`;
  }

  // Fetch data about next step
  const nextStepCommitMessage = getRecentStepCommit(offset - 1, '%s');
  const nextStepDescriptor = getStepDescriptor(nextStepCommitMessage);
  const nextStepNumbers = nextStepDescriptor.number.split('.');
  const nextSubStepNumber = Number(nextStepNumbers[1]);
  const isNextSuperStep = !nextSubStepNumber;

  if (isNextSuperStep) {
    // If this is a super step return the next super step right away
    if (isSuperStep) {
      return (superStepNumber + 1).toString();
    }
    // Else, return the current super step
    return superStepNumber.toString();
  }

  // If this is a super step return the first sub step of the next step
  if (isSuperStep) {
    return `${superStepNumber + 1}.${1}`;
  }
  // Else, return the next step as expected
  return `${superStepNumber}.${subStepNumber + 1}`;
}

// Get the next super step
function getNextSuperStep(offset?)
{
  return getNextStep(offset).split('.')[0];
}

// Pending flag indicates that this step map will be used in another tortilla repo
function initializeStepMap(pending) {
  // Seed the map as an identity mapping of every existing step number
  const map = Git([
    'log', '--format=%s', '--grep=^Step [0-9]\\+',
  ])
    .split('\n')
    .filter(Boolean)
    .reduce((m, subject) => {
      const num = getStepDescriptor(subject).number;
      m[num] = num;
      return m;
    }, {});

  LocalStorage.setItem('STEP_MAP', JSON.stringify(map));

  if (pending) {
    LocalStorage.setItem('STEP_MAP_PENDING', true);
  } else {
    LocalStorage.removeItem('STEP_MAP_PENDING');
  }
}

// First argument represents the module we would like to read the steps map from
function getStepMap(submoduleCwd?, checkPending?) {
  let localStorage;

  // In case this process was launched from a submodule
  if (submoduleCwd) {
    localStorage = LocalStorage.create(submoduleCwd);
  } else {
    localStorage = LocalStorage;
  }

  // Returns undefined when the map does not exist (or is pending, if checked)
  if (ensureStepMap(submoduleCwd, checkPending)) {
    return JSON.parse(localStorage.getItem('STEP_MAP'));
  }
}

// Provided argument will run an extra condition to check whether the pending flag
// exists or not
function ensureStepMap(submoduleCwd?, checkPending?)
{
  // Step map shouldn't be used in this process
  if (checkPending && LocalStorage.getItem('STEP_MAP_PENDING')) {
    return false;
  }

  let paths;

  // In case this process was launched from a submodule
  if (submoduleCwd) {
    paths = Paths.resolveProject(submoduleCwd);
  } else {
    paths = Paths;
  }

  return Utils.exists(Path.resolve(paths.storage, 'STEP_MAP'), 'file');
}

// Remove the step map and its pending flag from local storage
function disposeStepMap() {
  LocalStorage.deleteItem('STEP_MAP');
  LocalStorage.deleteItem('STEP_MAP_PENDING');
}

// Apply a mutation ('remove' or 'reset') to the persisted step map
function updateStepMap(type, payload) {
  const map = getStepMap();

  switch (type) {
    case 'remove':
      delete map[payload.step];
      break;

    case 'reset':
      map[payload.oldStep] = payload.newStep;
      break;
  }

  LocalStorage.setItem('STEP_MAP', JSON.stringify(map));
}

// Gets a list of all steps, from root to the most recent step
function getAllSteps() {
  // git log emits newest-first, hence the reverse() below
  const allSteps = Git(['log', '--grep=^Step [0-9]\\+.\\?[0-9]*:', '--format=%s'])
    .split('\n')
    .map(message => getStepDescriptor(message))
    .filter(Boolean)
    .map(descriptor => descriptor.number)
    .reverse();

  allSteps.unshift('root');

  return allSteps;
}

/** Contains step related utilities.
 */
(() => {
  // CLI entry point — only runs when this module is executed directly
  if (require.main !== module) {
    return;
  }

  const argv = Minimist(process.argv.slice(2), {
    string: ['_', 'message', 'm'],
    boolean: ['root', 'udiff', 'allow-empty'],
  });

  const method = argv._[0];
  let step = argv._[1];
  const message = argv.message || argv.m;
  const root = argv.root;
  const allowEmpty = argv['allow-empty'];
  const udiff = argv.udiff;

  if (!step && root) {
    step = 'root';
  }

  const options = {
    allowEmpty,
    udiff,
  };

  switch (method) {
    case 'push':
      return pushStep(message, options);
    case 'pop':
      return popStep();
    case 'tag':
      return tagStep(message);
    case 'edit':
      return editStep(step, options);
    case 'sort':
      return sortStep(step);
    case 'reword':
      return rewordStep(step, message);
  }
})();

export const Step = {
  push: pushStep,
  pop: popStep,
  tag: tagStep,
  back: stepBack,
  edit: editStep,
  sort: sortStep,
  reword: rewordStep,
  show: showStep,
  assert: assertStep,
  commit: commitStep,
  current: getCurrentStep,
  currentSuper: getCurrentSuperStep,
  next: getNextStep,
  nextSuper: getNextSuperStep,
  base: getStepBase,
  recentCommit: getRecentStepCommit,
  recentSuperCommit: getRecentSuperStepCommit,
  recentSubCommit: getRecentSubStepCommit,
  descriptor: getStepDescriptor,
  superDescriptor: getSuperStepDescriptor,
  subDescriptor: getSubStepDescriptor,
  initializeStepMap,
  getStepMap,
  ensureStepMap,
  disposeStepMap,
  updateStepMap,
  all: getAllSteps,
};
the_stack
import {
  StrokeJoin,
  StrokeCap,
  Orientation,
  Interpolation,
  HorizontalAlignment,
  SymbolType,
  VerticalTextAlignment,
  TextDirection,
  FontWeight,
  MarkType,
  Gradient,
} from './common-types'

export enum SGNodeType {
  Mark = 'mark',
  Item = 'item',
}

export interface Metadata {
  id: string
  index?: number
  [key: string]: any
}

export interface SGNode {
  readonly nodetype: SGNodeType
  readonly parent?: SGNode
  readonly parentType?: SGNodeType
  readonly metadata: Metadata
}

export interface SGMark<Item extends SGItem> extends SGNode {
  /**
   * The type of mark this is
   */
  readonly marktype?: MarkType
  /**
   * The mark items
   */
  readonly items: Item[]
  /**
   * Whether to clip children of this mark
   */
  readonly clip?: boolean
  /**
   * Whether this mark responds to interactive events
   */
  readonly interactive?: boolean
  /**
   * The custom role of this mark, used to emit class information
   */
  readonly role?: string
  /**
   * A helpful name for this mark
   */
  readonly name?: string
  /**
   * The z-index of this mark
   */
  readonly zIndex?: number
}

export interface SGItem extends SGNode {
  readonly itemtype: string
  /**
   * The primary x-coordinate in pixels.
   */
  readonly x?: number
  /**
   * The secondary x-coordinate in pixels.
   */
  readonly x2?: number
  /**
   * The center x-coordinate. Incompatible with x and x2.
   */
  readonly xc?: number
  /**
   * The width of the mark in pixels, if supported.
   */
  readonly width?: number
  /**
   * The primary y-coordinate in pixels.
   */
  readonly y?: number
  /**
   * The secondary y-coordinate in pixels.
   */
  readonly y2?: number
  /**
   * The center y-coordinate. Incompatible with y and y2.
   */
  readonly yc?: number
  /**
   * The height of the mark in pixels, if supported.
   */
  readonly height?: number
  /**
   * The mark opacity from 0 (transparent) to 1 (opaque).
   */
  readonly opacity?: number
  /**
   * The fill color.
   */
  readonly fill?: string | Gradient
  /**
   * The fill opacity from 0 (transparent) to 1 (opaque).
   */
  readonly fillOpacity?: number
  /**
   * The stroke color.
   */
  readonly stroke?: string | Gradient
  /**
   * The stroke opacity from 0 (transparent) to 1 (opaque).
   */
  readonly strokeOpacity?: number
  /**
   * The stroke width in pixels.
   */
  readonly strokeWidth?: number
  /**
   * The stroke cap for line ending style. One of butt (default), round or square.
   */
  readonly strokeCap?: StrokeCap
  /**
   * An array of [stroke, space] lengths for creating dashed or dotted lines.
   */
  readonly strokeDash?: [number, number]
  /**
   * The pixel offset at which to start the stroke dash array.
   */
  readonly strokeDashOffset?: number
  /**
   * The stroke line join method. One of miter (default), round or bevel.
   */
  readonly strokeJoin?: StrokeJoin
  /**
   * The miter limit at which to bevel a line join.
   */
  readonly strokeMiterLimit?: number
  /**
   * The mouse cursor used over the mark. Any valid CSS cursor type can be used.
   */
  readonly cursor?: string
  /**
   * A URL to load upon mouse click. If defined, the mark acts as a hyperlink.
   */
  readonly href?: string
  /**
   * The tooltip text to show upon mouse hover. If the value is an object (other than a Date or an array),
   * then all key-value pairs in the object will be shown in the tooltip, one per line
   * (e.g., "key1: value1\nkey2: value2"). Array values will be shown in brackets [value1, value2, ...].
   *
   * Other values will be coerced to strings. Nested object values will not be recursively printed.
   */
  readonly tooltip?: any
  /**
   * An integer z-index indicating the layering order of sibling mark items. The default value is 0.
   * Higher values (1) will cause marks to be drawn on top of those with lower z-index values. Setting
   * the z-index as an encoding property only affects ordering among sibling mark items; it will not change
   * the layering relative to other mark definitions. Unlike the mark-level sort property, zindex changes the
   * rendering order only; it does not otherwise change mark item order (such as line or area point order).
   *
   * The most common use of zindex is to ensure that a mark is drawn over its siblings when selected, such as
   * by mouse hover.
   */
  readonly zIndex?: number
  /**
   * The accessible title to apply to the scenegraph item
   */
  readonly ariaTitle?: string
  /**
   * The accessible description to apply to the scenegraph item
   */
  readonly ariaDescription?: string
  /**
   * The tab-index to use for the given item. If defined, it is a tab stop. This is important to use
   * when making charts accessible, as it allows screen-reader users to navigate the chart via keyboard.
   */
  readonly tabIndex?: number
  /**
   * A mapping of client event-names to channel-identifiers
   */
  readonly channels?: { [key: string]: string }
}

/**
 * Arc marks are circular arcs defined by a center point plus angular and radial extents.
 * Arc marks are typically used for radial plots such as pie and donut charts, but are
 * also useful for radial space-filling visualizations of hierarchical data.
 */
export interface SGArcItem extends SGItem {
  /**
   * The start angle in radians. A value of 0 indicates up or “north”, increasing values proceed clockwise.
   */
  startAngle?: number
  /**
   * The end angle in radians. A value of 0 indicates up or “north”, increasing values proceed clockwise.
   */
  endAngle?: number
  /**
   * The angular padding applied to sides of the arc, in radians.
   */
  padAngle?: number
  /**
   * The inner radius in pixels.
   */
  innerRadius?: number
  /**
   * The outer radius in pixels.
   */
  outerRadius?: number
  /**
   * The radius in pixels of rounded arc corners (default 0).
   */
  cornerRadius?: number
}

/**
 * Area marks are filled areas with either horizontal or vertical alignment.
 * Area marks are often used to show change over time, using either a single area or stacked areas.
 * Area marks can also be used to encode value ranges (min, max) or uncertainty over time.
 */
export interface SGAreaItem extends SGItem {
  /**
   * The orientation of the area mark. One of horizontal or vertical (the default).
* With a vertical orientation, an area mark is defined by the x, y, and (y2 or height) * properties; with a horizontal orientation, the y, x and (x2 or width) properties must * be specified instead. */ orient?: Orientation /** * The interpolation method to use. One of basis, cardinal, catmull-rom, linear, monotone, * natural, step, step-after, step-before. The default is linear. */ interpolate: Interpolation /** * The tension value in the range [0, 1] to parameterize cardinal (default 0) or * catmull-rom (default 0.5) interpolation. */ tension?: number /** * A boolean flag indicating if the current data point is defined. * If false, the corresponding area segment will be omitted, creating a “break”. */ defined?: boolean } /** * Group marks are containers for other marks, and used to create visualizations with multiple views or layers. * Each group instance recursively defines its own nested visualization specification. * Group marks provide their own coordinate space and can include nested data, signal, scale, axis, legend, * title and mark definitions. In addition a group mark may have a colored background, similar to a rect mark. */ export interface SGGroupItem extends SGItem { /** * A boolean flag indicating if the visible group content should be clipped to the group’s * specified width and height. */ clip?: boolean /** * The radius in pixels of rounded rectangle corners for the group background (default 0). */ cornerRadius?: number /** * The nested marks for this item */ items: Array<SGMark<any>> } /** * Line marks are stroked paths with constant width, defined by an ordered set of (x, y) coordinates. * While line marks default to using straight line segments, different interpolation methods can be * used to create smoothed or stepped paths. Line marks are commonly used to depict trajectories or * change over time. * * Note: If a data point on a line is surrounded by points with defined: false, it may not be visible. 
* Use a strokeCap of round or square to ensure a visible point is drawn. */ export interface SGLineItem extends SGItem { /** * The interpolation method to use. One of basis, bundle, cardinal, catmull-rom, linear, * monotone, natural, step, step-after, step-before. The default is linear. You can find * explanations for these line interpolators in the d3-shape documentation. */ interpolate?: Interpolation /** * The tension value in the range [0, 1] to parameterize bundle (default 0.8), * cardinal (default 0) or catmull-rom (default 0.5) interpolation. */ tension?: number /** * A boolean flag indicating if the current data point is defined. * If false, the corresponding line segment will be omitted, creating a “break”. */ defined?: boolean } /** * Path marks are arbitrary shapes, defined as an SVG path. Path marks can be used to represent custom shapes, * including geographic regions on maps. */ export interface SGPathItem extends SGItem { /** * An SVG path string describing the geometry of the path. */ path?: string } /** * Rect marks are rectangles with a given position, width and height. * Rect marks are useful in a wide variety of visualizations, including bar charts and timelines. */ export interface SGRectItem extends SGItem { /** * The radius in pixels of rounded rectangle corners (default 0). */ cornerRadius?: number } /** * Rule marks provide a convenient way to draw individual line segments. * A rule is simply a line from (x, y) to (x2, y2). * One of the primary uses of rule marks is to draw axis ticks and grid lines. */ export type SGRuleItem = SGItem /** * Symbol marks are shapes useful for plotting data, and include circles, squares and oriented triangles. * Symbol size can be scaled to indicate magnitudes. In addition to a set of built-in shapes, custom shapes * can be defined using SVG path strings. */ export interface SGSymbolItem extends SGItem { /** * The area in pixels of the symbols bounding box. 
Note that this value sets the area of the symbol;
   * the side lengths will increase with the square root of this value.
   */
  size?: number
  /**
   * The symbol shape. One of circle (default), square, cross, diamond, triangle-up, triangle-down,
   * triangle-right, triangle-left. Alternatively, a custom SVG path string can be provided.
   *
   * For correct sizing, custom shape paths should be defined within a square with coordinates
   * ranging from -1 to 1 along both the x and y dimensions.
   */
  shape?: SymbolType | string
}

/**
 * Text marks can be used to annotate data, and provide labels and titles for axes and legends.
 */
export interface SGTextItem extends SGItem {
  /**
   * The horizontal text alignment. One of left (default), center, or right.
   */
  align?: HorizontalAlignment
  /**
   * The rotation angle of the text in degrees (default 0).
   */
  angle?: number
  /**
   * The vertical text baseline. One of alphabetic (default), top, middle, bottom.
   */
  baseline?: VerticalTextAlignment
  /**
   * The direction of the text. One of ltr (left-to-right, default) or rtl (right-to-left).
   * This property determines on which side is truncated in response to the limit parameter.
   */
  dir?: TextDirection
  /**
   * The horizontal offset in pixels (before rotation), between the text and anchor point.
   */
  dx?: number
  /**
   * The vertical offset in pixels (before rotation), between the text and anchor point.
   */
  dy?: number
  /**
   * The ellipsis string for text truncated in response to the limit parameter (default “…”).
   */
  ellipsis?: string
  /**
   * The typeface to set the text in (e.g., Helvetica Neue).
   */
  font?: string
  /**
   * The font size in pixels.
   */
  fontSize?: number
  /**
   * The font weight (e.g., normal or bold).
   */
  fontWeight?: FontWeight
  /**
   * The variant of the font to use
   */
  fontVariant?: string | number
  /**
   * The font style (e.g., normal or italic).
   */
  fontStyle?: string
  /**
   * The maximum length of the text mark in pixels (default 0, indicating no limit).
   * The text value will be automatically truncated if the rendered size exceeds the limit.
   */
  limit?: number
  /**
   * Polar coordinate radial offset in pixels, relative to the origin determined by the
   * x and y properties (default 0).
   */
  radius?: number
  /**
   * The text to display. This text may be truncated if the rendered length of the text exceeds the limit parameter.
   */
  text?: string
  /**
   * Polar coordinate angle in radians, relative to the origin determined by the x and y properties (default 0).
   * Values for theta follow the same convention of arc marks: angles are measured in radians, with 0 indicating
   * up or “north”.
   */
  theta?: number
}
the_stack
import _ from 'lodash' import Knex from 'knex' import { BuilderOptions, Dialect, Expressions, FromFindBuilder, FromPaginateBuilder, OrderBy, QueryBuilderContext, Models, Where, } from '../types' import { BaseBuilder } from './base' import { PaginateBuilder } from './paginate' import { getAlias, getJsonObjectFunctionByDialect } from './utilities' import { parseResolveInfo, FlattenedResolveTree } from '../utilities' import { GraphQLResolveInfo } from 'graphql' export abstract class FindBuilder< TDialect extends Dialect, TFields extends Record<string, any>, TIds, TEnums, TAssociations extends Record< string, [ FindBuilder<any, any, any, any, any, any, any, any, any>, PaginateBuilder<any, any, any, any, any, any, any, any, any> ] >, TMany extends boolean = true, TSelected extends Pick<TFields, any> = TFields, TRawSelected extends Record<string, any> = {}, TLoaded extends Record<string, any> = {} > extends BaseBuilder { protected readonly _isMany: boolean constructor(options: BuilderOptions, modelName: string, models: Models, isMany: boolean) { super(options, modelName, models) this._isMany = isMany this._limit = isMany ? 0 : 1 this._select = Object.keys(this._model.fields) } /** * Sets the SELECT clause for the query. */ public select<T extends keyof TFields>( ...fields: T[] ): FindBuilder<TDialect, TFields, TIds, TEnums, TAssociations, TMany, Pick<TFields, T>, TRawSelected, TLoaded> { this._select = fields return this } /** * Sets the SELECT clause for the query to all available fields. */ public selectAll(): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TFields, TRawSelected, TLoaded > { this._select = Object.keys(this._model.fields) return this } /** * Adds on to the SELECT clause for the query. 
*/ public addSelect<T extends keyof TFields>( ...fields: T[] ): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TSelected & Pick<TFields, T>, TRawSelected, TLoaded > { this._select = [...this._select, ...fields] return this } /** * Adds an additional column to the SELECT clause under the provided alias. */ public addSelectRaw<TColumn extends string, TAlias extends string = TColumn>( column: TColumn, as?: TAlias ): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TSelected, TRawSelected & { [key in TAlias]: any }, TLoaded > { this._rawSelect[as || column] = column return this } /** * Sets the WHERE clause for the query. */ public where(where: Where<TDialect, TFields, TIds, TEnums, TAssociations>) { this._where = where return this } /** * Deep merges the provided object with the existing WHERE options for the query. */ public mergeWhere(where: Where<TDialect, TFields, TIds, TEnums, TAssociations>) { this._where = _.merge({}, this._where, where) return this } /** * Sets the ORDER BY clause for the query. */ public orderBy(orderBy: OrderBy<TFields, TAssociations>[]) { this._orderBy = orderBy return this } /** * Sets the LIMIT for the query. */ public limit(value: number) { this._limit = value return this } /** * Sets the OFFSET for the query. */ public offset(value: number) { this._offset = value return this } /** * Eager loads the specified model relation. An optional alias can be provided to return the related model or models under a * different property name. The `getBuilder` parameter is a function that's passed a fresh `FindOneBuilder` or `FindManyBuilder` * instance for the associated model and should return the same kind of Builder instance. 
*/ public load< TName extends Extract<keyof TAssociations, string>, TGetBuilder extends (builder: TAssociations[TName][0]) => FindBuilder<any, any, any, any, any, any, any, any> = ( builder: TAssociations[TName][0] ) => TAssociations[TName][0] >( associationName: TName, getBuilder?: TGetBuilder ): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TSelected, TRawSelected, TLoaded & { [key in TName]: FromFindBuilder<ReturnType<TGetBuilder>> } > public load< TName extends Extract<keyof TAssociations, string>, TAlias extends string, TGetBuilder extends (builder: TAssociations[TName][0]) => FindBuilder<any, any, any, any, any, any, any, any> = ( builder: TAssociations[TName][0] ) => TAssociations[TName][0] >( associationName: TName, as: TAlias, getBuilder?: TGetBuilder ): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TSelected, TRawSelected, TLoaded & { [key in TAlias]: FromFindBuilder<ReturnType<TGetBuilder>> } > public load< TName extends Extract<keyof TAssociations, string>, TAlias extends string, TGetBuilder extends ( builder: TAssociations[TName][0] ) => FindBuilder<any, any, any, any, any, any, any, any, any> = ( builder: TAssociations[TName][0] ) => TAssociations[TName][0] >(associationName: TName, aliasOrGetBuilder?: TAlias | TGetBuilder, getBuilder?: TGetBuilder) { const association = this._model.associations[associationName] if (!association) { throw new Error(`Invalid association name: ${associationName}`) } const alias = typeof aliasOrGetBuilder === 'string' ? aliasOrGetBuilder : associationName const getBuilderFn = typeof aliasOrGetBuilder === 'string' ? getBuilder : aliasOrGetBuilder const builders = this._models[association.modelName].builders const Builder = association.isMany ? builders.findMany : builders.findOne const initialBuilder = new Builder(this._options) this._loadedAssociations[alias] = [ associationName, getBuilderFn ? 
getBuilderFn(initialBuilder as TAssociations[TName][0]) : initialBuilder, ] return this } public loadPaginated< TName extends Extract<keyof TAssociations, string>, TGetBuilder extends ( builder: TAssociations[TName][1] ) => PaginateBuilder<any, any, any, any, any, any, any, any, any> = ( builder: TAssociations[TName][1] ) => TAssociations[TName][1] >( associationName: TName, getBuilder?: TGetBuilder ): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TSelected, TRawSelected, TLoaded & { [key in TName]: FromFindBuilder<ReturnType<TGetBuilder>> } > public loadPaginated< TName extends Extract<keyof TAssociations, string>, TAlias extends string, TGetBuilder extends ( builder: TAssociations[TName][1] ) => PaginateBuilder<any, any, any, any, any, any, any, any, any> = ( builder: TAssociations[TName][1] ) => TAssociations[TName][1] >( associationName: TName, alias: TAlias, getBuilder?: TGetBuilder ): FindBuilder< TDialect, TFields, TIds, TEnums, TAssociations, TMany, TSelected, TRawSelected, TLoaded & { [key in TAlias]: FromPaginateBuilder<ReturnType<TGetBuilder>> } > public loadPaginated< TName extends Extract<keyof TAssociations, string>, TAlias extends string, TGetBuilder extends ( builder: TAssociations[TName][1] ) => PaginateBuilder<any, any, any, any, any, any, any, any, any> = ( builder: TAssociations[TName][1] ) => TAssociations[TName][1] >(associationName: TName, aliasOrGetBuilder?: TAlias | TGetBuilder, getBuilder?: TGetBuilder) { const association = this._model.associations[associationName] if (!association) { throw new Error(`Invalid association name: ${associationName}`) } const alias = typeof aliasOrGetBuilder === 'string' ? aliasOrGetBuilder : associationName const getBuilderFn = typeof aliasOrGetBuilder === 'string' ? getBuilder : aliasOrGetBuilder const Builder = this._models[association.modelName].builders.paginate const initialBuilder = new Builder(this._options) this._loadedPaginated[alias] = [ associationName, getBuilderFn ? 
getBuilderFn(initialBuilder as TAssociations[TName][1]) : initialBuilder, ] return this } /** * Modifies the query based on the passed in GraphQLResolveInfo object. The selection set will determine what columns * should be selected and which related models should be loaded. The `where`, `orderBy`, `limit` and `offset` arguments, * if they exist on the field and were provided, will be used to set the corresponding clauses in the query. * * An optional `path` parameter can be passed in when the model will be returned as part of a more deeply nested field. * For example, the type of the field being returned might be `CreatePostPayload` with a field named `post` and it's this * field we're populating using a PostFindOneBuilder instance. In this case, we would pass in a value of "post" for the * `path` to identify the correct selection set and arguments to be parsed. The path can be arbitrarily deep, with each * level separated by a period, for example: "result.post". */ public resolveInfo(info: GraphQLResolveInfo | FlattenedResolveTree, path?: string) { let tree: FlattenedResolveTree if ('path' in info) { tree = parseResolveInfo(info)! 
} else { tree = info } if (path) { tree = _.get( tree, path .split('.') .map((fieldName) => `fields.${fieldName}`) .join('.') ) if (!tree) { return this } } const { fields, args } = tree // The builder defaults to selecting all fields, so we need to clear the selected fields first this.select() Object.keys(fields).forEach((fieldName) => { const field = fields[fieldName] if (field.name in this._model.fields) { this.addSelect(field.name) } else if (field.name in this._model.dependencies) { this._model.dependencies[field.name].forEach((columnName) => this.addSelectRaw(columnName)) } else if (field.name in this._model.associations) { const association = this._model.associations[field.name] if (association.pagination) { this.loadPaginated(field.name as Extract<keyof TAssociations, string>, field.alias, (builder) => builder.resolveInfo(field) ) } else { this.load(field.name as Extract<keyof TAssociations, string>, field.alias, (builder) => builder.resolveInfo(field) ) } } }) const { where, orderBy, limit, offset } = args if (!_.isNil(where)) { this.where(where) } if (!_.isNil(orderBy)) { this.orderBy(orderBy) } if (!_.isNil(limit)) { this.limit(limit) } if (!_.isNil(offset)) { this.offset(offset) } return this } /** * Executes the query and returns a Promise that will resolve to the found row or rows. */ public async execute<TRow = TSelected & TRawSelected & TLoaded>() { const rows = await this.toQueryBuilder() // We use JSON aggregation for loading related models and SQLite returns those fields as strings if (this._dialect === 'sqlite') { const jsonFields = [...Object.keys(this._loadedAssociations), ...Object.keys(this._loadedPaginated)] rows.forEach((row: any) => { Object.keys(row).forEach((fieldName) => { if (jsonFields.includes(fieldName)) { row[fieldName] = JSON.parse(row[fieldName]) } }) }) } return (this._isMany ? rows : rows[0] || null) as TMany extends true ? 
TRow[] : TRow | null } /** * Compiles the query into a Knex QueryBuilder instance */ public toQueryBuilder(context?: QueryBuilderContext): Knex.QueryBuilder { if (!context) { return this.toQueryBuilder({ alias: {} }) } const tableAlias = getAlias(this._tableName || this._modelName, context) const throughAlias = context.nested && context.nested.association.through ? getAlias(context.nested.association.through, context) : '' const expressions: Expressions = { select: {}, join: [], where: [], groupBy: [], orderBy: [], } this._addOrderByExpressions(tableAlias, expressions, context) this._addSelectExpressions(tableAlias, expressions, context) this._addJoinExpressions(tableAlias, throughAlias, expressions, context) this._addWhereExpressions(tableAlias, throughAlias, expressions, context) const query = this._knex.queryBuilder() if (context.nested) { const fields = _.toPairs(expressions.select) const jsonObjectFn = getJsonObjectFunctionByDialect(this._dialect) query.select( this._knex.raw( `${jsonObjectFn}(${fields .map(([fieldName, value]) => `'${fieldName}', ${value.constructor.name === 'Builder' ? '(??)' : '??'}`) .join(', ')}) as ${this._knex.ref('o')}`, fields.map(([, value]) => value) ) ) } else if (Object.keys(expressions.select).length) { query.select(expressions.select) } else { query.select(this._knex.raw('null')) } if (this._tableName) { query.from({ [tableAlias]: this._tableName }) } else { query.with(tableAlias, this._knex.raw(this._cte!)).from(tableAlias) } this._applyExpressions(query, expressions) if (this._transaction) { query.transacting(this._transaction) } return query } }
the_stack
import { expect } from "chai"; import { IModelConnection, SnapshotConnection } from "@itwin/core-frontend"; import { ContentSpecificationTypes, Field, KeySet, RelationshipDirection, RelationshipMeaning, Ruleset, RuleTypes } from "@itwin/presentation-common"; import { Presentation } from "@itwin/presentation-frontend"; import { initialize, terminate } from "../../../IntegrationTests"; import { printRuleset } from "../../Utils"; describe("Learning Snippets", () => { let imodel: IModelConnection; beforeEach(async () => { await initialize(); imodel = await SnapshotConnection.openFile("assets/datasets/Properties_60InstancesWithUrl2.ibim"); }); afterEach(async () => { await imodel.close(); await terminate(); }); describe("Content Customization", () => { describe("RelatedPropertiesSpecification", () => { it("uses `propertiesSource` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.PropertiesSource.Ruleset // There's a content rule for returning content of given `bis.Subject` instance. The produced content is customized to // additionally include properties of parent element by following the `bis.ElementOwnsChildElements` relationship // in backwards direction. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ElementOwnsChildElements" }, direction: RelationshipDirection.Backward, }], }], }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure that the custom property was created const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:Subject", id: "0x12" }]), descriptor: {}, }))!; expect(content.descriptor.fields).to.containSubset([{ label: "Element", nestedFields: [{ label: "Model", }, { label: "Code", }, { label: "User Label", }], }]); }); it("uses `handleTargetClassPolymorphically` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.HandleTargetClassPolymorphically.Ruleset // There's a content rule for returning content of given `bis.Subject` instance. The produced content is customized to // additionally include properties of parent element by following the `bis.ElementOwnsChildElements` relationship // in backwards direction. Setting `handleTargetClassPolymorphically` to `true` makes sure that the concrete target class is // determined and all its properties are loaded. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ElementOwnsChildElements" }, direction: RelationshipDirection.Backward, }], handleTargetClassPolymorphically: true, }], }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure that the custom property was created const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:Subject", id: "0x12" }]), descriptor: {}, }))!; expect(content.descriptor.fields).to.containSubset([{ label: "Subject", nestedFields: [{ label: "Model", }, { label: "Code", }, { label: "User Label", }, { label: "Description", }], }]); }); it("uses `relationshipMeaning` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.RelationshipMeaning.Ruleset // There's a content rule for returning content of given `bis.PhysicalModel` instance. The produced content is customized to // additionally include properties of modeled element by following the `bis.ModelModelsElement` relationship. // Setting `relationshipMeaning` to `SameInstance` makes sure that all related properties are placed into a category // nested under the default category. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ModelModelsElement" }, direction: RelationshipDirection.Forward, targetClass: { schemaName: "BisCore", className: "PhysicalPartition" }, }], relationshipMeaning: RelationshipMeaning.SameInstance, }], }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure that all related properties are placed into a category nested under the default category const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:PhysicalModel", id: "0x1c" }]), descriptor: {}, }))!; const defaultCategory = content.descriptor.categories[0]; expect(content.descriptor.fields).to.containSubset([{ label: "Physical Partition", category: defaultCategory, nestedFields: [{ label: "Model", category: { parent: defaultCategory, }, }, { label: "Code", category: { parent: defaultCategory, }, }, { label: "User Label", category: { parent: defaultCategory, }, }, { label: "Description", category: { parent: defaultCategory, }, }], }]); }); it("uses `properties` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.Properties.Ruleset // There's a content rule for returning content of given `bis.PhysicalModel` instance. The produced content is customized to // additionally include specific properties of modeled Element by following the `bis.ModelModelsElement` relationship. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ModelModelsElement" }, direction: RelationshipDirection.Forward, targetClass: { schemaName: "BisCore", className: "PhysicalPartition" }, }], properties: ["UserLabel", "Description"], }], }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure that the two related properties are picked up const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:PhysicalModel", id: "0x1c" }]), descriptor: {}, }))!; expect(content.descriptor.fields).to.containSubset([{ label: "Physical Partition", nestedFields: [{ label: "User Label", }, { label: "Description", }], }]); }); it("uses `autoExpand` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.AutoExpand.Ruleset // There's a content rule for returning content of given `bis.Subject` instance. The produced content is customized to // additionally include all properties of child subjects by following the `bis.SubjectOwnsSubjects` relationship and that // the properties should be automatically expanded. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "SubjectOwnsSubjects" }, direction: RelationshipDirection.Forward, }], autoExpand: true, }], }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure the field has `autoExpand` attribute set to `true` const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:Subject", id: "0x1" }]), descriptor: {}, }))!; expect(content.descriptor.fields).to.containSubset([{ label: "Subject", autoExpand: true, nestedFields: [{ label: "Model", }, { label: "Code", }, { label: "User Label", }, { label: "Description", }], }]); }); it("uses `skipIfDuplicate` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.SkipIfDuplicate.Ruleset // There's a content rule for returning content of given `bis.PhysicalModel` instance. There are also two specifications // requesting to load related properties: // - the one specified through a content modifier requests all properties of the target class and has `skipIfDuplicate` flag. // - the one specified through the content specification requests only `UserLabel` property. // The specification at content specification level takes precedence and loads the `UserLabel` property. The other is completely // ignored due to `skipIfDuplicate` attribute being set to `true`. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ModelModelsElement" }, direction: RelationshipDirection.Forward, targetClass: { schemaName: "BisCore", className: "PhysicalPartition" }, }], properties: ["UserLabel"], }], }], }, { ruleType: RuleTypes.ContentModifier, class: { schemaName: "BisCore", className: "Model" }, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ModelModelsElement" }, direction: RelationshipDirection.Forward, targetClass: { schemaName: "BisCore", className: "PhysicalPartition" }, }], skipIfDuplicate: true, }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure only one related property is loaded const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:PhysicalModel", id: "0x1c" }]), descriptor: {}, }))!; expect(content.descriptor.fields).to.containSubset([{ label: "Physical Partition", nestedFields: (nestedFields: Field[]) => { return nestedFields.length === 1 && nestedFields[0].label === "User Label"; }, }]); }); it("uses `nestedRelatedProperties` attribute", async () => { // __PUBLISH_EXTRACT_START__ Presentation.Content.Customization.RelatedPropertiesSpecification.NestedRelatedProperties.Ruleset // There's a content rule for returning content of given `bis.PhysicalModel` instance. There's also a related properties // specification that loads modeled element properties and properties of `bis.LinkElement` related to the modeled element. 
const ruleset: Ruleset = { id: "example", rules: [{ ruleType: RuleTypes.Content, specifications: [{ specType: ContentSpecificationTypes.SelectedNodeInstances, relatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ModelModelsElement" }, direction: RelationshipDirection.Forward, targetClass: { schemaName: "BisCore", className: "PhysicalPartition" }, }], nestedRelatedProperties: [{ propertiesSource: [{ relationship: { schemaName: "BisCore", className: "ElementHasLinks" }, direction: RelationshipDirection.Forward, targetClass: { schemaName: "BisCore", className: "RepositoryLink" }, }], }], }], }], }], }; // __PUBLISH_EXTRACT_END__ printRuleset(ruleset); // Ensure properties of physical partition and repository link are loaded const content = (await Presentation.presentation.getContent({ imodel, rulesetOrId: ruleset, keys: new KeySet([{ className: "BisCore:PhysicalModel", id: "0x1c" }]), descriptor: {}, }))!; expect(content.descriptor.fields).to.containSubset([{ label: "Physical Partition", nestedFields: [{ label: "Repository Link", nestedFields: [{ label: "URL", }], }], }]); }); }); }); });
the_stack