text
stringlengths
2.5k
6.39M
kind
stringclasses
3 values
import { CycleResolutionInputChrono } from "../../src/chrono/CycleResolver.js" import { ProposedArgumentsOf, ProposedOrPrevious, ProposedValueOf } from "../../src/chrono/Effect.js" import { Identifier } from "../../src/chrono/Identifier.js" import { Quark } from "../../src/chrono/Quark.js" import { SyncEffectHandler, Transaction } from "../../src/chrono/Transaction.js" import { Base } from "../../src/class/Base.js" import { Formula, CycleDescription, CycleResolution, FormulaId, CalculateProposed } from "../../src/cycle_resolver/CycleResolver.js" import { CalculationIterator } from "../../src/primitives/Calculation.js" import { build_proposed, calculate, Entity, field } from "../../src/replica/Entity.js" import { Replica } from "../../src/replica/Replica.js" declare const StartTest : any //--------------------------------------------------------------------------------------------------------------------- const StartVar = Symbol('Start') const EndVar = Symbol('End') const DurationVar = Symbol('Duration') //--------------------------------------------------------------------------------------------------------------------- const startFormula = Formula.new({ output : StartVar, inputs : new Set([ DurationVar, EndVar ]) }) const endFormula = Formula.new({ output : EndVar, inputs : new Set([ DurationVar, StartVar ]) }) const durationFormula = Formula.new({ output : DurationVar, inputs : new Set([ StartVar, EndVar ]) }) //--------------------------------------------------------------------------------------------------------------------- const cycleDescription = CycleDescription.new({ variables : new Set([ StartVar, EndVar, DurationVar ]), formulas : new Set([ startFormula, endFormula, durationFormula ]) }) const cycleResolution = CycleResolution.new({ description : cycleDescription, defaultResolutionFormulas : new Set([ endFormula ]) }) //--------------------------------------------------------------------------------------------------------------------- enum Instruction 
{ KeepDuration = 'KeepDuration', KeepStart = 'KeepStart', KeepEnd = 'KeepEnd' } //--------------------------------------------------------------------------------------------------------------------- class CycleDispatcher extends CycleResolutionInputChrono { addInstruction (instruction : Instruction) { if (instruction === Instruction.KeepStart) this.addKeepIfPossibleFlag(StartVar) if (instruction === Instruction.KeepEnd) this.addKeepIfPossibleFlag(EndVar) if (instruction === Instruction.KeepDuration) this.addKeepIfPossibleFlag(DurationVar) } } const isNotNumber = (value : any) : boolean => value !== Number(value) const dispatcherEq = (v1 : CycleDispatcher, v2 : CycleDispatcher) : boolean => { const resolution1 = v1.resolution const resolution2 = v2.resolution return resolution1.get(StartVar) === resolution2.get(StartVar) && resolution1.get(EndVar) === resolution2.get(EndVar) && resolution1.get(DurationVar) === resolution2.get(DurationVar) } const defaultDispatcher = CycleDispatcher.new({ context : cycleResolution }) defaultDispatcher.addPreviousValueFlag(StartVar) defaultDispatcher.addPreviousValueFlag(EndVar) defaultDispatcher.addPreviousValueFlag(DurationVar) class Event extends Entity.mix(Base) { @field() start : number @field() end : number @field() duration : number @field({ equality : dispatcherEq }) dispatcher : CycleDispatcher setStart : (value : number, instruction : Instruction) => any setEnd : (value : number, instruction : Instruction) => any setDuration : (value : number, instruction : Instruction) => any @calculate('start') calculateStart (Y) : number { const dispatch : CycleDispatcher = this.dispatcher const instruction : FormulaId = dispatch.resolution.get(StartVar) if (instruction === startFormula.formulaId) { const endValue : number = this.end const durationValue : number = this.duration if (isNotNumber(endValue) || isNotNumber(durationValue)) return null return endValue - durationValue } else if (instruction === CalculateProposed) { return 
Y(ProposedOrPrevious) } } @calculate('end') calculateEnd (Y) : number { const dispatch : CycleDispatcher = this.dispatcher const instruction : FormulaId = dispatch.resolution.get(EndVar) if (instruction === endFormula.formulaId) { const startValue : number = this.start const durationValue : number = this.duration if (isNotNumber(startValue) || isNotNumber(durationValue)) return null return startValue + durationValue } else if (instruction === CalculateProposed) { return Y(ProposedOrPrevious) } } @calculate('duration') calculateDuration (Y) : number { const dispatch : CycleDispatcher = this.dispatcher const instruction : FormulaId = dispatch.resolution.get(DurationVar) if (instruction === durationFormula.formulaId) { const startValue : number = this.start const endValue : number = this.end if (isNotNumber(startValue) || isNotNumber(endValue)) return null return endValue - startValue } else if (instruction === CalculateProposed) { return Y(ProposedOrPrevious) } } @build_proposed('dispatcher') buildProposedDispatcher (me : Identifier, quark : Quark, transaction : Transaction) : CycleDispatcher { return defaultDispatcher } @calculate('dispatcher') calculateDispatcher (Y : SyncEffectHandler) : CycleDispatcher { const proposedOrPrevious = Y(ProposedOrPrevious) const cycleDispatcher = CycleDispatcher.new({ context : cycleResolution }) cycleDispatcher.collectInfo(Y, this.$.start, StartVar) cycleDispatcher.collectInfo(Y, this.$.end, EndVar) cycleDispatcher.collectInfo(Y, this.$.duration, DurationVar) //--------------- const startProposedArgs = Y(ProposedArgumentsOf(this.$.start)) const startInstruction : Instruction = startProposedArgs ? startProposedArgs[ 0 ] : undefined if (startInstruction) cycleDispatcher.addInstruction(startInstruction) //--------------- const endProposedArgs = Y(ProposedArgumentsOf(this.$.end)) const endInstruction : Instruction = endProposedArgs ? 
endProposedArgs[ 0 ] : undefined if (endInstruction) cycleDispatcher.addInstruction(endInstruction) //--------------- const durationProposedArgs = Y(ProposedArgumentsOf(this.$.duration)) const durationInstruction : Instruction = durationProposedArgs ? durationProposedArgs[ 0 ] : undefined if (durationInstruction) cycleDispatcher.addInstruction(durationInstruction) return cycleDispatcher } } StartTest(t => { let replica : Replica let event : Event let var0 const read = () => [ event.start, event.end, event.duration ] t.beforeEach(t => { replica = Replica.new() event = Event.new() var0 = replica.variable(0) replica.addEntity(event) }) t.it('Should keep all-null state', async t => { replica.commit() t.isDeeply(read(), [ null, null, null ], 'Initial propagation is ok') }) t.it('Should keep partial data - start', async t => { event.start = 10 replica.commit() t.isDeeply(read(), [ 10, null, null ], 'Initial propagation is ok') }) t.it('Should keep partial data - end', async t => { event.end = 10 replica.commit() t.isDeeply(read(), [ null, 10, null ], 'Initial propagation is ok') }) t.it('Should keep partial data - duration', async t => { event.duration = 10 replica.commit() t.isDeeply(read(), [ null, null, 10 ], 'Initial propagation is ok') }) t.it('Should normalize end date', async t => { event.start = 10 event.duration = 5 replica.commit() t.isDeeply(read(), [ 10, 15, 5 ], 'Initial propagation is ok') }) t.it('Should normalize duration', async t => { event.start = 10 event.end = 15 replica.commit() t.isDeeply(read(), [ 10, 15, 5 ], 'Initial propagation is ok') }) t.it('Should normalize start and recalculate everything after', async t => { const spyDispatcher = t.spyOn(event.$.dispatcher, 'calculation') const spyStart = t.spyOn(event.$.start, 'calculation') const spyEnd = t.spyOn(event.$.end, 'calculation') const spyDuration = t.spyOn(event.$.duration, 'calculation') event.end = 15 event.duration = 5 replica.commit() t.isDeeply(read(), [ 10, 15, 5 ], 'Initial 
propagation is ok') // 1st time calculation is done during the propagate - 2nd during read t.expect(spyDispatcher).toHaveBeenCalled(1) t.expect(spyStart).toHaveBeenCalled(1) t.expect(spyEnd).toHaveBeenCalled(1) t.expect(spyDuration).toHaveBeenCalled(1) //---------------- // tslint:disable-next-line ;[ spyDispatcher, spyStart, spyEnd, spyDuration ].forEach(spy => spy.reset()) replica.write(var0, 1) replica.commit() // no calculations during the propagate, as those were already done during the read t.expect(spyDispatcher).toHaveBeenCalled(1) t.expect(spyStart).toHaveBeenCalled(1) t.expect(spyEnd).toHaveBeenCalled(1) t.expect(spyDuration).toHaveBeenCalled(1) }) t.it('Should normalize end date by default', async t => { event.start = 10 event.end = 18 event.duration = 5 replica.commit() t.isDeeply(read(), [ 10, 15, 5 ], 'Initial propagation is ok') }) t.it('Should not recalculate everything on 2nd propagation', async t => { const spy = t.spyOn(event.$.dispatcher, 'calculation') event.start = 10 event.end = 18 event.duration = 5 replica.commit() t.isDeeply(read(), [ 10, 15, 5 ], 'Initial propagation is ok') t.expect(spy).toHaveBeenCalled(1) //---------------- spy.reset() replica.write(var0, 1) replica.commit() t.expect(spy).toHaveBeenCalled(0) }) t.it('Should rebuild edges dynamically', async t => { event.start = 10 event.duration = 5 replica.commit() t.isDeeply(read(), [ 10, 15, 5 ], 'Initial propagation is ok') //----------------------- await event.setDuration(1, Instruction.KeepEnd) replica.commit() t.isDeeply(read(), [ 14, 15, 1 ], 'Edges rebuilt correctly') //----------------------- await event.setDuration(3, Instruction.KeepStart) replica.commit() t.isDeeply(read(), [ 14, 17, 3 ], 'Edges rebuilt correctly') //----------------------- await event.setStart(5, Instruction.KeepDuration) replica.commit() t.isDeeply(read(), [ 5, 8, 3 ], 'Edges rebuilt correctly') }) })
the_stack
'use strict'; import { existsSync, readFileSync } from 'fs'; import * as path from 'path'; import { PriorityQueue, Stack } from 'typescript-collections'; import { Range, TextEditor } from 'vscode'; import * as yamlMetadata from '../helper/yaml-metadata'; import * as common from './common'; import * as utilityHelper from './utility'; import jsyaml = require('js-yaml'); import lodash = require('lodash.merge'); import matcher = require('matcher'); export enum MetadataSourceContentType { MarkdownFile, DocFxFile, GlobalMetadataFx, FileMetadataFx, YamlContent } export const dateTimeFormat = 'MM/dd/yyyy'; export class EmptyYamlHeaderError extends Error { public constructor(message: string) { super(message); Object.setPrototypeOf(this, EmptyYamlHeaderError.prototype); } } export class MetadataContentBase { public MetadataType: MetadataSourceContentType; public OriginalData: string; public FileName: string; public RawMetadata: string; constructor( metadataType: MetadataSourceContentType, dataContent: string, fileName: string, rawMetadata: string ) { this.MetadataType = metadataType; this.OriginalData = dataContent; this.FileName = fileName; this.RawMetadata = rawMetadata; } public getYamlMetadataContent(): string { try { return this.getYamlMetadataContentInner(); } catch (err) { this.checkSyntaxError(err.toString()); throw new Error( 'Yaml header could not be parsed. If any free-form text spans multiple lines, please wrap the entire string in quotes.' ); } } public getRawMetadataContent(): string { try { return this.getRawMetadataContentInner(); } catch (err) { throw new Error( 'Yaml header could not be parsed. If any free-form text spans multiple lines, please wrap the entire string in quotes.' 
); } } public getYamlMetadataContentInner(): string { return this.OriginalData; } public getRawMetadataContentInner(): string { return this.RawMetadata; } private translateSyntaxErrorMessage(errMsg: string): string { if (errMsg.indexOf('ms.date:') >= 0) { return 'ms.date format is incorrect. Change to ' + dateTimeFormat + ' and re-run validation.'; } else if (errMsg.indexOf('Malformed inline YAML string') >= 0) { return ( errMsg.replace('Malformed inline YAML string', 'Incorrect YAML syntax in string') + '. Please fix YAML syntax and re-run validation.' ); } return errMsg; } private checkSyntaxError(errMsg: string) { if (errMsg.indexOf('<ParseException> ') >= 0) { errMsg = errMsg.replace('<ParseException> ', ''); throw new Error(this.translateSyntaxErrorMessage(errMsg).toString()); } } } export class YamlMetadataContent extends MetadataContentBase { public MetadataType!: MetadataSourceContentType; public OriginalData!: string; public FileName!: string; public RawMetadata!: string; constructor(originalContent: string, fileName: string, rawMetadata: string = '') { super(MetadataSourceContentType.YamlContent, originalContent, fileName, rawMetadata); } public getYamlMetadataContentInner(): string { return this.OriginalData; } public getRawMetadataContentInner(): string { return this.RawMetadata; } } export class MarkdownFileMetadataContent extends MetadataContentBase { constructor(originalContent: string, fileName: string, rawMetadata: string = '') { super(MetadataSourceContentType.MarkdownFile, originalContent, fileName, rawMetadata); } public getYamlMetadataContentInner(): string { const re = /^(-{3}(?:\n|\r)([\w\W]+?)(?:\n|\r)-{3})?([\w\W]*)*/; const results = re.exec(utilityHelper.stripBOMFromString(this.OriginalData.toString()) || '{}'); if (results !== null) { const result = results[1]; const trimmed = common.rtrim(result.trim(), '---'); const parsed = jsyaml.load(trimmed); if (parsed === null) { // fix if yaml header is empty or contains only comments return ''; 
} return JSON.stringify(parsed); } return ''; } public getRawMetadataContentInner(): string { const re = /^(-{3}(?:\n|\r)([\w\W]+?)(?:\n|\r)-{3})?([\w\W]*)*/; const results = re.exec(this.OriginalData.toString()); if (results !== null) { const result = results[1]; if (result === undefined) { return ''; } return result; } return ' '; } } export class DocFxMetadataContent extends MetadataContentBase { private referenceTemplateFileName: string; constructor( originalContent: string, fileName: string, referenceTemplateFileName: string, rawMetadata: string = '' ) { super(MetadataSourceContentType.DocFxFile, originalContent, fileName, rawMetadata); this.referenceTemplateFileName = referenceTemplateFileName; } public Expand(): YamlMetadataContent[] { try { const result: yamlMetadata.YamlMetadataContent[] = []; const docfxParsed = JSON.parse( utilityHelper.stripBOMFromString(this.OriginalData.toString()) || '{}' ); if (docfxParsed === undefined || docfxParsed.build === undefined) { // log.debug("file '" + this.FileName + "' does not contain requested root element 'build'"); return result; } if (docfxParsed.build.fileMetadata !== undefined) { result.push( new FileFxMetadataContent( JSON.stringify(docfxParsed.build.fileMetadata), this.FileName, this.referenceTemplateFileName ) ); } if (docfxParsed.build.globalMetadata !== undefined) { result.push( new GlobalFxMetadataContent( JSON.stringify(docfxParsed.build.globalMetadata), this.FileName ) ); } return result; } catch (err) { throw new Error( "Yaml headers could not be parsed from file '" + this.FileName + "'. 
Original error :" + err.toString() ); } } } export class GlobalFxMetadataContent extends MetadataContentBase { constructor(originalContent: string, fileName: string, rawMetadata: string = '') { super(MetadataSourceContentType.GlobalMetadataFx, originalContent, fileName, rawMetadata); } public getYamlMetadataContentInner(): string { if (this.OriginalData === undefined || this.OriginalData === '') { return ''; } const parsed = JSON.parse( utilityHelper.stripBOMFromString(this.OriginalData.toString()) || '{}' ); return JSON.stringify(parsed); } } export class FileFxMetadataContent extends MetadataContentBase { private referenceTopicFileName: string; constructor( originalContent: string, fileName: string, referenceTemplateFileName: string, rawMetadata: string = '' ) { super(MetadataSourceContentType.FileMetadataFx, originalContent, fileName, rawMetadata); this.referenceTopicFileName = referenceTemplateFileName; } public getYamlMetadataContentInner(): string { // Parses file-base metadata passed from from the docfx.json file. If filepattern is specified it's taken only if // this.referenceTopicFileName is matched. 
const parsed = JSON.parse( utilityHelper.stripBOMFromString(this.OriginalData.toString()) || '{}' ); const returned: any = {}; for (const key in parsed) { if (parsed.hasOwnProperty(key)) { const atributeItem = parsed[key]; if (atributeItem instanceof Object) { for (const filePattern in atributeItem) { if (atributeItem.hasOwnProperty(filePattern)) { if (this.matchFilePattern(this.referenceTopicFileName.toString(), filePattern)) { returned[key] = atributeItem[filePattern]; } } } } else { if (atributeItem instanceof String) { returned[key] = parsed[key]; } } } } return JSON.stringify(returned); } private matchFilePattern(referenceFile: string, filePattern: string): boolean { const newSearchPattern = path.dirname(this.FileName).replace(/\\/g, '/') + '/' + filePattern; // normalizes path and replaces backslashes const filePath = referenceFile.replace(/\\/g, '/'); // normalizes path and replaces backslashes const isJS = matcher.isMatch(filePath, newSearchPattern); return isJS; } } /** * Returns docfx.json metadata representation if found in the specified directory. */ export function findDocFxMetadataForDir( dirname: string, referenceTopicFileName: string ): DocFxMetadataContent | undefined { const searchedMetadata = path.join(dirname, GetDocFxMetadataName()); if (existsSync(searchedMetadata)) { return new DocFxMetadataContent( readFileSync(searchedMetadata, 'utf8'), searchedMetadata, referenceTopicFileName ); } return; } /** * Merges 2 metadata content specified by the holder class. Higher priority overwrites lower priority metadata. * Return type is yaml metadata content only. 
*/
function mergeYamlMetadata(
    higherPriorityMetadata: MetadataContentBase,
    lowerPriorityMetadata: MetadataContentBase
): YamlMetadataContent {
    if (higherPriorityMetadata === undefined) {
        throw new RangeError('higherPriorityMetadata must be defined.');
    }
    if (lowerPriorityMetadata === undefined) {
        throw new RangeError('lowerPriorityMetadata must be defined.');
    }
    const contentHi = higherPriorityMetadata.getYamlMetadataContent();
    const contentLo = lowerPriorityMetadata.getYamlMetadataContent();
    const contentRaw = higherPriorityMetadata.getRawMetadataContent();
    const newFileName = higherPriorityMetadata.FileName;

    let mergedContent: string;
    if (contentHi === undefined || contentHi === '') {
        // Nothing on the higher-priority side: the lower side wins (or empty).
        mergedContent = contentLo === undefined ? '' : contentLo;
    } else if (contentLo === undefined || contentLo === '') {
        // Nothing on the lower-priority side: the higher side wins.
        // (The original re-checked contentHi for undefined here, but that
        // branch was unreachable — this else-if already implies contentHi
        // is defined and non-empty.)
        mergedContent = contentHi;
    } else {
        const yamlFrontHi = jsyaml.load(contentHi);
        const yamlFrontLo = jsyaml.load(contentLo);
        // lodash.merge: properties of the later argument overwrite the
        // earlier one, so the higher-priority side wins on key conflicts.
        const mergedContentAny = lodash(yamlFrontLo, yamlFrontHi);
        mergedContent = JSON.stringify(mergedContentAny);
        if (mergedContent.trim() === '{}') {
            mergedContent = '';
        }
    }
    return new YamlMetadataContent(mergedContent, newFileName, contentRaw);
}

/**
 * Merges all metadata in the passed priority queue into one. This merge does expansion if DocFxMetadataContent representing docfx.json is specified.
 * Highest priority item in the queue will be merged as highest priority metadata.
 */
export function mergeMetadata(
    priorityQueue: PriorityQueue<MetadataContentBase>
): MetadataContentBase {
    if (priorityQueue === undefined) {
        throw new RangeError('priorityStack must be defined.');
    }
    if (priorityQueue.isEmpty()) {
        throw new RangeError("priorityQueue can't be empty.");
    }
    const stack = new Stack<MetadataContentBase>();
    // BUG FIX: the original code had `return mergeMetadataFromTop(stack)`
    // inside this loop (after the switch), so only the first dequeued item
    // was ever pushed and all lower-priority metadata was silently ignored.
    // The merge must run only after the whole queue has been drained.
    while (!priorityQueue.isEmpty()) {
        const item = priorityQueue.dequeue();
        if (item === undefined) {
            continue;
        }
        switch (item.MetadataType) {
            case MetadataSourceContentType.MarkdownFile:
            case MetadataSourceContentType.GlobalMetadataFx:
            case MetadataSourceContentType.FileMetadataFx:
            case MetadataSourceContentType.YamlContent:
                stack.push(item);
                break;
            case MetadataSourceContentType.DocFxFile: {
                // docfx.json is not merged directly — expand it into its
                // global/file metadata holders first.
                const docfxContent = item as DocFxMetadataContent;
                for (const expandedItem of docfxContent.Expand()) {
                    stack.push(expandedItem);
                }
                break;
            }
            default:
                throw new RangeError('switch value:' + item.MetadataType + ' is not implemented');
        }
    }
    return mergeMetadataFromTop(stack);
}

/**
 * Merges all metadata in the passed stack specified by content holder class. Merging is done from the top of the stack to the bottom.
 * Bottom item has highest priority.
 * Does not expand docfx.json content representation!
 */
function mergeMetadataFromTop(stack: Stack<MetadataContentBase>): MetadataContentBase {
    let currentMergedItem = stack.pop()!;
    while (!stack.isEmpty()) {
        // Each popped item is lower in the stack, i.e. higher priority than
        // everything merged so far.
        currentMergedItem = mergeYamlMetadata(stack.pop()!, currentMergedItem);
    }
    return currentMergedItem;
}

/** Well-known name of the docfx configuration file. */
function GetDocFxMetadataName(): string {
    return 'docfx.json';
}

/**
 * Return true if cursor is within the YAML Header
 * @param
 */
export function isCursorInsideYamlHeader(editor: TextEditor) {
    const docText = editor.document.getText();
    // Position of the closing `---` fence (search starts past the opening one).
    // NOTE(review): if no closing fence exists, indexOf yields -1 and this
    // correctly reports "not in header" — verify that is the intended contract.
    const secondDashPosition = docText.indexOf('---', 4);
    const range = new Range(0, 0, editor.selection.end.line, editor.selection.end.character);
    const cursorText = editor.document.getText(range);
    const isInHeader = cursorText.length < secondDashPosition;
    return isInHeader;
}
the_stack
import { allHtmlAttribs } from "html-all-known-attributes"; import { isAttrNameChar } from "is-char-suitable-for-html-attr-name"; import { left, right } from "string-left-right"; import { matchRight } from "string-match-left-right"; import { ensureXIsNotPresentBeforeOneOfY, xBeforeYOnTheRight, plausibleAttrStartsAtX, guaranteedAttrStartsAtX, findAttrNameCharsChunkOnTheLeft, makeTheQuoteOpposite, } from "./util"; import { version as v } from "../package.json"; const version: string = v; function isAttrClosing( str: string, idxOfAttrOpening: number, isThisClosingIdx: number ): boolean { if ( typeof str !== "string" || !str.trim() || !Number.isInteger(idxOfAttrOpening) || !Number.isInteger(isThisClosingIdx) || !str[idxOfAttrOpening] || !str[isThisClosingIdx] || idxOfAttrOpening >= isThisClosingIdx ) { console.log( `031 ${`\u001b[${31}m${`WRONG INPUTS, RETURN FALSE`}\u001b[${39}m`}` ); return false; } const openingQuote = `'"`.includes(str[idxOfAttrOpening]) ? str[idxOfAttrOpening] : null; let oppositeToOpeningQuote = null; if (openingQuote) { oppositeToOpeningQuote = makeTheQuoteOpposite(openingQuote); } console.log( `044 ${`\u001b[${33}m${`openingQuote`}\u001b[${39}m`}: ${`\u001b[${35}m${openingQuote}\u001b[${39}m`} ${`\u001b[${33}m${`oppositeToOpeningQuote`}\u001b[${39}m`}: ${`\u001b[${35}m${oppositeToOpeningQuote}\u001b[${39}m`}` ); let chunkStartsAt; const quotesCount = new Map().set(`'`, 0).set(`"`, 0).set(`matchedPairs`, 0); let lastQuoteAt: number | null = null; let totalQuotesCount = 0; let lastQuoteWasMatched = false; let lastMatchedQuotesPairsStartIsAt: undefined | number; let lastMatchedQuotesPairsEndIsAt: undefined | number; // when suspected attribute name chunks end, we wipe them, but here // we store the last extracted chunk - then later, for example, when we // traverse further and meet opening quote (even with equal missing), // we can evaluate that chunk, was it a known attribute name (idea being, // known attribute name followed by quote is probably 
legit attribute starting) let lastCapturedChunk; let secondLastCapturedChunk; // this boolean flag signifies, was the last chunk captured after passing // "isThisClosingIdx": // idea being, if you pass suspected quotes, then encounter new-ones and // in-between does not resemble an attribute name, it's falsey result: // <img alt="so-called "artists"!' class='yo'/> // ^ ^ // start suspected // // that exclamation mark above doesn't resemble an attribute name, // so single quote that follows it is not a starting of its value let lastChunkWasCapturedAfterSuspectedClosing = false; // does what it says on the tin - flips on the first instance let closingBracketMet = false; let openingBracketMet = false; // let's traverse from opening to the end of the string, then in happy // path scenarios, let's exit way earlier, upon closing quote for (let i = idxOfAttrOpening, len = str.length; i < len; i++) { // // // // // // // // // // // // THE TOP // ███████ // // // // // // // // // // const rightVal = right(str, i) as number; const leftVal = left(str, i) as number; // // Logging: // ------------------------------------------------------------------------- console.log( `\u001b[${36}m${`===============================`}\u001b[${39}m \u001b[${35}m${`str[ ${i} ] = ${ str[i] && str[i].trim() ? str[i] : JSON.stringify(str[i], null, 4) }`}\u001b[${39}m \u001b[${36}m${`===============================`}\u001b[${39}m\n` ); console.log( i === isThisClosingIdx ? ` ██ isThisClosingIdx met at ${i} ██` : "" ); if ( // Imagine we're here: // <z bbb"c" ddd'e> // ^ ^ // start suspected closing // // this single quote at 13 is preceded by fully matched pair of quotes // there's also attribute-name-like chunk preceding in front. // Let's catch such case. // // 1. we're on a quote `'"`.includes(str[i]) && // 2. we ensure that a pair of quotes was catched so far lastQuoteWasMatched && // 3. 
lastMatchedQuotesPairsStartIsAt is our known opening lastMatchedQuotesPairsStartIsAt === idxOfAttrOpening && // 4. lastMatchedQuotesPairsEndIsAt is the last matched pair's closing: // <z bbb"c" ddd'e> // ^ // this if to reuse the example.. // lastMatchedQuotesPairsEndIsAt !== undefined && lastMatchedQuotesPairsEndIsAt < i && // rule must not trigger before the suspected quote index i >= isThisClosingIdx ) { console.log(`151 ███████████████████████████████████████`); console.log( `153 ${`\u001b[${33}m${`plausibleAttrStartsAtX(str, ${ i + 1 })`}\u001b[${39}m`} = ${JSON.stringify( plausibleAttrStartsAtX(str, i + 1), null, 4 )}` ); console.log( `163 FIY, ${`\u001b[${33}m${`lastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( lastCapturedChunk, null, 4 )}; ${`\u001b[${33}m${`secondLastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( secondLastCapturedChunk, null, 4 )}` ); // ███████████████████████████████████████ E1 // // consider WHERE WE ARE AT THE MOMENT in relation to // the INDEX THAT'S QUESTIONED FOR BEING A CLOSING-ONE // FALSEY result: // <z bbb"c" ddd'e'>.<z fff"g"> // ^ ^ // start suspected // // <z bbb"c" ddd'e'>.<z fff"g"> // ^ // we're here // TRUTHY result: // <img class="so-called "alt"!' border='10'/> // ^ ^ // start suspected // // where we're at: // <img class="so-called "alt"!' border='10'/> // ^ // const E1 = i !== isThisClosingIdx || guaranteedAttrStartsAtX(str, right(str, isThisClosingIdx) as number) || `/>`.includes(str[rightVal]); // ███████████████████████████████████████ E2 // // // ensure it's not a triplet of quotes: // <img alt="so-called "artists"!' class='yo'/> // ^ ^ ^ // start suspected | // current index // const E2 = !( i > isThisClosingIdx && str[idxOfAttrOpening] === str[isThisClosingIdx] && str[idxOfAttrOpening] === str[i] && // rule out cases where plausible attribute starts: // <img class="so-called "alt"!' 
border='10'/> // ^ ^ ^ // start | \ // suspected end currently on plausibleAttrStartsAtX(str, i + 1) ); // ███████████████████████████████████████ E3 const E31 = // or a proper recognised attribute follows: // <img alt="so-called "artists"class='yo'/> // ^ ^ // start suspected and currently on // // we're on a suspected quote i === isThisClosingIdx && // plus one because we're on a quote plausibleAttrStartsAtX(str, isThisClosingIdx + 1); const E32 = // or the last chunk is a known attribute name: // <img class="so-called "alt"!' border='10'/> // ^ ^ // start suspected/we're currently on // chunkStartsAt && chunkStartsAt < i && allHtmlAttribs.has(str.slice(chunkStartsAt, i).trim()); // imagine: // <z bbb"c" ddd"e'> // ^ ^ // / \ // start suspected // // <z bbb"c" ddd"e'> // ^ // currently on // E23, recognised attribute name is very weighty argument; however // in light of unrecognised attributes, we might still try to salvage // some, as long as they resemble valid attribute names. We just // validate each character and drop in more rules into the bag, // like requiring whitespace to be in front and opening/closing to match // there's a whitespace in front of last chunk ("ddd" in example above) let plausibleAttrName; if (chunkStartsAt) { plausibleAttrName = str.slice(chunkStartsAt, i).trim(); } console.log( `269 ${`\u001b[${33}m${`plausibleAttrName`}\u001b[${39}m`} = ${JSON.stringify( plausibleAttrName, null, 4 )}` ); const E33 = chunkStartsAt && chunkStartsAt < i && str[chunkStartsAt - 1] && !str[chunkStartsAt - 1].trim() && // and whole chunk is a plausible attribute name Array.from(str.slice(chunkStartsAt, i).trim()).every((char) => isAttrNameChar(char) ) && // known opening and suspected closing are both singles or doubles str[idxOfAttrOpening] === str[isThisClosingIdx] && !`/>`.includes(str[rightVal]) && ensureXIsNotPresentBeforeOneOfY(str, i + 1, "=", [`'`, `"`]); // anti-rule - it's fine if we're on suspected ending and to the left // it's not an attribute 
start // <img alt='Deal is your's!"/> // ^ ^ // start suspected/current // extract attr name characters chunk on the left, "s" in the case below // <img alt='Deal is your's"/> // ^ // start let attrNameCharsChunkOnTheLeft; if (i === isThisClosingIdx) { attrNameCharsChunkOnTheLeft = findAttrNameCharsChunkOnTheLeft(str, i); } console.log( `305 CALCULATED ${`\u001b[${33}m${`attrNameCharsChunkOnTheLeft`}\u001b[${39}m`} = ${JSON.stringify( attrNameCharsChunkOnTheLeft, null, 4 )}` ); const E34 = // we're on suspected i === isThisClosingIdx && // it's not a character suitable for attr name, (!isAttrNameChar(str[leftVal]) || // or it is, but whatever we extracted is not recognised attr name (attrNameCharsChunkOnTheLeft && !allHtmlAttribs.has(attrNameCharsChunkOnTheLeft))) && // rule out equal str[leftVal] !== "="; // ███████████████████████████████████████ E4 const E41 = // either it's a tag ending and we're at the suspected quote `/>`.includes(str[rightVal]) && i === isThisClosingIdx; const E42 = // or next character is suitable for a tag name: isAttrNameChar(str[rightVal]); const E43 = // or in case of: // <img class="so-called "alt"!' border='10'/> // ^ ^ // start suspected // // where we're at: // <img class="so-called "alt"!' border='10'/> // ^ // here lastQuoteWasMatched && i !== isThisClosingIdx; const E5 = // it's not a double-wrapped attribute value: // // <div style="float:"left"">z</div> // ^ ^ // start suspected // // we're at: // <div style="float:"left"">z</div> // ^ // here !( // rule must not trigger before the suspected quote index ( i >= isThisClosingIdx && // there's colon to the left of a suspected quote str[left(str, isThisClosingIdx) as number] === ":" ) ); console.log(`365 RES:`); console.log(`E1: ${`\u001b[${E1 ? 32 : 31}m${E1}\u001b[${39}m`}`); console.log(`E2: ${`\u001b[${E2 ? 32 : 31}m${E2}\u001b[${39}m`}`); console.log( `E3: ${`\u001b[${E31 ? 32 : 31}m${E31}\u001b[${39}m`} || ${`\u001b[${ E32 ? 
32 : 31 }m${E32}\u001b[${39}m`} || ${`\u001b[${ E33 ? 32 : 31 }m${E33}\u001b[${39}m`} || ${`\u001b[${ E34 ? 32 : 31 }m${E34}\u001b[${39}m`} ==> ${`\u001b[${ E31 || E32 || E33 || E34 ? 32 : 31 }m${E31 || E32 || E33 || E34}\u001b[${39}m`}` ); console.log( `E4: ${`\u001b[${E41 ? 32 : 31}m${E41}\u001b[${39}m`} || ${`\u001b[${ E42 ? 32 : 31 }m${E42}\u001b[${39}m`} || ${`\u001b[${ E43 ? 32 : 31 }m${E43}\u001b[${39}m`} ==> ${`\u001b[${E41 || E42 || E43 ? 32 : 31}m${ E41 || E42 || E43 }\u001b[${39}m`}` ); console.log(`E5: ${`\u001b[${E5 ? 32 : 31}m${E5}\u001b[${39}m`}`); return !!( E1 && E2 && (E31 || E32 || E33 || E34) && (E41 || E42 || E43) && E5 ); } // catch quotes if (`'"`.includes(str[i])) { // catch the non-overlapping matched pairs of quotes // for example that's three pairs in total below: // <z bbb"c" ddd'e'>.<z fff"g"> // Insurace against the Killer Triplet - a quoted quote if ( (str[i] === `'` && str[i - 1] === `"` && str[i + 1] === `"`) || (str[i] === `"` && str[i - 1] === `'` && str[i + 1] === `'`) ) { console.log( `411 killer triplet detected - ${`\u001b[${31}m${`CONTINUE`}\u001b[${39}m`}` ); continue; } console.log( `417 FIY, ${`\u001b[${33}m${`lastQuoteAt`}\u001b[${39}m`} = ${JSON.stringify( lastQuoteAt, null, 4 )}; ${`\u001b[${33}m${`str[lastQuoteAt]`}\u001b[${39}m`}: ${ str[lastQuoteAt as any] }` ); if (lastQuoteAt && str[i] === str[lastQuoteAt]) { console.log(`426 quotes matching`); quotesCount.set("matchedPairs", quotesCount.get("matchedPairs") + 1); lastMatchedQuotesPairsStartIsAt = lastQuoteAt; lastMatchedQuotesPairsEndIsAt = i; lastQuoteAt = null; lastQuoteWasMatched = true; console.log( `434 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`lastQuoteWasMatched`}\u001b[${39}m`} = ${JSON.stringify( lastQuoteWasMatched, null, 4 )}` ); } else { console.log(`441 quotes not matching`); lastQuoteWasMatched = false; console.log( `444 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`lastQuoteWasMatched`}\u001b[${39}m`} = ${JSON.stringify( 
lastQuoteWasMatched, null, 4 )}` ); } // bump total counts: quotesCount.set(str[i], quotesCount.get(str[i]) + 1); totalQuotesCount = quotesCount.get(`"`) + quotesCount.get(`'`); // lastQuoteAt = i; // console.log( // `325 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`lastQuoteAt`}\u001b[${39}m`} = ${JSON.stringify( // lastQuoteAt, // null, // 4 // )}` // ); } // catch closing brackets if (str[i] === ">" && !closingBracketMet) { closingBracketMet = true; console.log( `471 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`closingBracketMet`}\u001b[${39}m`} = ${JSON.stringify( closingBracketMet, null, 4 )}` ); // if all pairs of quotes were met, that's a good indicator, imagine // <z bbb"c" ddd'e'> // ^ if ( totalQuotesCount && quotesCount.get(`matchedPairs`) && totalQuotesCount === quotesCount.get(`matchedPairs`) * 2 && // we haven't reached the suspected quote and tag's already ending i < isThisClosingIdx ) { console.log( `489 all quotes matched so far and it looks like tag ending` ); console.log(`491 RETURN false`); return false; } } // catch opening brackets if ( str[i] === "<" && // consider ERB templating tags, <%= zzz %> str[rightVal] !== "%" && closingBracketMet && !openingBracketMet ) { openingBracketMet = true; console.log( `506 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`openingBracketMet`}\u001b[${39}m`} = ${JSON.stringify( openingBracketMet, null, 4 )}` ); // if it's past the "isThisClosingIdx", that's very falsey // if (i > isThisClosingIdx) { console.log( `516 new tag starts - ${`\u001b[${31}m${`RETURN FALSE`}\u001b[${39}m`}` ); return false; // } } // // // // // // // // // // // MIDDLE // ██████ // // // // // // // // // // // // before and after the suspected index, all the way while traversing the // string from known, starting quotes (or in their absence, starting of // the attribute's value, the second input argument "idxOfAttrOpening") // all the way until the end, we catch the first character past the // 
questioned attribute closing. // imagine // <img alt="so-called "artists"!' class='yo'/> // ^ ^ // opening suspected closing if (str[i].trim() && !chunkStartsAt) { console.log(`558 inside the attr name START catching clauses`); // <img alt="so-called "artists"!' class='yo'/> // ^ // we land here, on excl. mark if (isAttrNameChar(str[i])) { console.log( `564 ${`\u001b[${32}m${`██ new attribute name starts`}\u001b[${39}m`}` ); chunkStartsAt = i; console.log( `568 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`chunkStartsAt`}\u001b[${39}m`} = ${JSON.stringify( chunkStartsAt, null, 4 )}` ); } } else if (chunkStartsAt && !isAttrNameChar(str[i])) { console.log(`576 inside the attr name END catching clauses`); // ending of an attr name chunk console.log( `580 ${`\u001b[${32}m${`EXTRACTED`}\u001b[${39}m`}: "${str.slice( chunkStartsAt, i )}"` ); secondLastCapturedChunk = lastCapturedChunk; lastCapturedChunk = str.slice(chunkStartsAt, i); console.log( `588 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`lastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( lastCapturedChunk, null, 4 )}; ${`\u001b[${33}m${`secondLastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( secondLastCapturedChunk, null, 4 )}` ); lastChunkWasCapturedAfterSuspectedClosing = chunkStartsAt >= isThisClosingIdx; // console.log( // `434 ${`\u001b[${31}m${`RESET`}\u001b[${39}m`} ${`\u001b[${33}m${`chunkStartsAt`}\u001b[${39}m`}` // ); // chunkStartsAt = null; // imagine: // <z bbb"c' href"e> // ^ ^ // start suspected ending // // we're here: // <z bbb"c' href"e> // ^ if ( `'"`.includes(str[i]) && quotesCount.get(`matchedPairs`) === 0 && totalQuotesCount === 3 && str[idxOfAttrOpening] === str[i] && allHtmlAttribs.has(lastCapturedChunk) && !`'"`.includes(str[rightVal]) ) { console.log( `624 ${`\u001b[${32}m${`RETURN`}\u001b[${39}m`} ${ i > isThisClosingIdx }` ); console.log( `629 FIY, ${`\u001b[${33}m${`lastQuoteAt`}\u001b[${39}m`} = ${JSON.stringify( lastQuoteAt, null, 4 )}` ); 
console.log( `SPLIT: ${JSON.stringify( str .slice((lastQuoteAt as number) + 1, i) .trim() .split(/\s+/), null, 4 )}` ); const A1 = i > isThisClosingIdx; // // ensure that all continuous chunks since the last quote are // recognised attribute names const A21 = !lastQuoteAt; const A22 = (lastQuoteAt as number) + 1 >= i; const A23 = str .slice((lastQuoteAt as number) + 1, i) .trim() .split(/\s+/) .every((chunk) => allHtmlAttribs.has(chunk)); // <div style="float:'left"">z</div> // ^ ^ // start we're here const A3 = !lastCapturedChunk || !secondLastCapturedChunk || !secondLastCapturedChunk.endsWith(":"); const B1 = i === isThisClosingIdx; const B21 = totalQuotesCount < 3; const B22 = !!lastQuoteWasMatched; const B23 = !lastQuoteAt; const B24 = (lastQuoteAt as number) + 1 >= i; const B25 = !str .slice((lastQuoteAt as number) + 1, i) .trim() .split(/\s+/) .every((chunk) => allHtmlAttribs.has(chunk)); console.log(`678:`); console.log( `(A1=${`\u001b[${ A1 ? 32 : 31 }m${A1}\u001b[${39}m`} && (A21=${`\u001b[${ A21 ? 32 : 31 }m${A21}\u001b[${39}m`} || A22=${`\u001b[${ A22 ? 32 : 31 }m${A22}\u001b[${39}m`} || A23=${`\u001b[${ A23 ? 32 : 31 }m${A23}\u001b[${39}m`}) && A3=${`\u001b[${ A3 ? 32 : 31 }m${A3}\u001b[${39}m`}) ==> ${`\u001b[${ A1 && (A21 || A22 || A23) && A3 ? 32 : 31 }m${A1 && (A21 || A22 || A23) && A3}\u001b[${39}m`}` ); console.log(`OR`); console.log( `(B1=${`\u001b[${ B1 ? 32 : 31 }m${B1}\u001b[${39}m`} && (B21=${`\u001b[${ B21 ? 32 : 31 }m${B21}\u001b[${39}m`} || B22=${`\u001b[${ B22 ? 32 : 31 }m${B22}\u001b[${39}m`} || B23=${`\u001b[${ B23 ? 32 : 31 }m${B23}\u001b[${39}m`} || B24=${`\u001b[${ B24 ? 32 : 31 }m${B24}\u001b[${39}m`} || B25=${`\u001b[${ B25 ? 32 : 31 }m${B25}\u001b[${39}m`})) ==> ${`\u001b[${ B1 && (B21 || B22 || B23 || B24 || B25) ? 
32 : 31 }m${B1 && (B21 || B22 || B23 || B24 || B25)}\u001b[${39}m`}` ); return ( (A1 && (A21 || A22 || A23) && A3) || (B1 && (B21 || B22 || B23 || B24 || B25)) ); } if ( // this is a recognised attribute lastCapturedChunk && allHtmlAttribs.has(lastCapturedChunk) && lastMatchedQuotesPairsStartIsAt === idxOfAttrOpening && lastMatchedQuotesPairsEndIsAt === isThisClosingIdx ) { console.log(`725 pattern: matched pair + attribute name after`); console.log(`726 ${`\u001b[${32}m${`RETURN`}\u001b[${39}m`} true`); return true; } } // catching new attributes that follow after suspected quote. // Imagine // <a class "c" id 'e' href "www"> // ^ ^ // known start at 16 suspected ending at 29 console.log( `737 ${ i > isThisClosingIdx + 1 ? `FIY, the trim [${isThisClosingIdx + 1}, ${i}]: "${str .slice(isThisClosingIdx + 1, i) .trim()}"` : "z" }` ); if ( // if we're currently on some quote: `'"`.includes(str[i]) && // and if either quote count is an even number (the "!" checking is it zero) (!(quotesCount.get(`"`) % 2) || !(quotesCount.get(`'`) % 2)) && // and sum of quotes is odd, for example, // <a class "c" id 'e' href "www"> // ^ // reusing example above, let's say we're here // // in this situation, both single quotes around "e" add up to 2, then // current opening quote of "www" adds up to 3. 
// // In human language, this means, we check, was there a complete // set of quotes recorded by now, plus is current chunk a known // attribute name - this allows us to catch an attribute with equal missing (quotesCount.get(`"`) + quotesCount.get(`'`)) % 2 && // // last chunk is not falsey (thus a string): ((lastCapturedChunk && // and finally, perf resource-taxing evaluation, is it recognised: allHtmlAttribs.has(lastCapturedChunk)) || // imagine // <z bbb"c" ddd'e'> // ^ // a suspected closing // // alternatively, check the count of remaining quotes, ensure that // leading up to closing bracket, everything's neat (not overlapping // at least and opened and closed) // this catch is for the following attributes, for example, // <z bbb"c" ddd'e'> // ^ ^ // start suspected ending (i > isThisClosingIdx + 1 && allHtmlAttribs.has(str.slice(isThisClosingIdx + 1, i).trim()))) && // // the same quote doesn't follow on the right, // think <div style="float:"left"">z</div> // ^ ^ // start suspected closing !(str[i + 1] === str[i] && str[i] === str[idxOfAttrOpening]) && // // // and it's not this case: // // <div style="float:'left'">z</div> // ^ ^ // start suspected // // we're here: // <div style="float:'left'">z</div> // ^ // here !( // we're part the suspected closing, on another closing ( i > isThisClosingIdx + 1 && // colon is to the left of suspected str[left(str, isThisClosingIdx) as number] === ":" ) ) && // // the suspected quote is the fourth, // <div style="float:'left'">z</div> // ^ ^ // start suspected // // we want to exclude the quote on the left: // <div style="float:'left'">z</div> // ^ // this // // in which case, we'd have: // lastCapturedChunk = "left" // secondLastCapturedChunk = "float:" !( lastCapturedChunk && secondLastCapturedChunk && secondLastCapturedChunk.trim().endsWith(":") ) ) { console.log(`828 FIY, doubles count: ${quotesCount.get(`"`)}`); console.log(`829 FIY, singles count: ${quotesCount.get(`'`)}`); console.log( `831 FIY, 
lastCapturedChunk: ${JSON.stringify( lastCapturedChunk, null, 0 )}; secondLastCapturedChunk: ${JSON.stringify( secondLastCapturedChunk, null, 0 )}` ); console.log( `842 FIY, ${`\u001b[${33}m${`chunkStartsAt`}\u001b[${39}m`} = ${JSON.stringify( chunkStartsAt, null, 4 )}:` ); console.log(`:`); // rules: // before suspected index this pattern is falsey, after - truthy const R0 = i > isThisClosingIdx; // const R1 = !!openingQuote; const R2 = str[idxOfAttrOpening] !== str[isThisClosingIdx]; const R3 = allHtmlAttribs.has( str.slice(idxOfAttrOpening + 1, isThisClosingIdx).trim() ); // that quote we suspected as closing, is from an opening-closing // set on another attribute: const R4 = !xBeforeYOnTheRight( str, i + 1, str[isThisClosingIdx], makeTheQuoteOpposite(str[isThisClosingIdx]) ); // const R5 = plausibleAttrStartsAtX(str, start) console.log(`R0: ${`\u001b[${R0 ? 32 : 31}m${R0}\u001b[${39}m`}`); console.log(`&&`); // consider: // <z alt"href' www'/> // ^ ^ // start suspected ending // let's rule out the case where a whole (suspected) attribute's value is // a known attribute value, plus quotes mismatch plus that closing quote // is on the right, before the its opposite kind console.log(`!(`); console.log(`R1: ${`\u001b[${R1 ? 32 : 31}m${R1}\u001b[${39}m`}`); console.log(`R2: ${`\u001b[${R2 ? 32 : 31}m${R2}\u001b[${39}m`}`); console.log(`R3: ${`\u001b[${R3 ? 32 : 31}m${R3}\u001b[${39}m`}`); console.log(`R4: ${`\u001b[${R4 ? 32 : 31}m${R4}\u001b[${39}m`}`); console.log( `) ==> ${`\u001b[${!(R1 && R2 && R3 && R4) ? 
32 : 31}m${!( R1 && R2 && R3 && R4 )}\u001b[${39}m`}` ); console.log(`898 ${`\u001b[${32}m${`RETURN`}\u001b[${39}m`}`); return R0 && !(R1 && R2 && R3 && R4); } if ( // imagine // <a href=www" class=e'> // ^ ^ // start suspected // if it's equal following attribute name (str[i] === "=" || // OR // it's whitespace (!str[i].length && // and next non-whitespace character is "equal" character str[rightVal] === "=")) && // last chunk is not falsey (thus a string) lastCapturedChunk && // and finally, perf resource-taxing evaluation, is it recognised: allHtmlAttribs.has(lastCapturedChunk) ) { // definitely that's new attribute starting const W1 = i > isThisClosingIdx; const W2 = // insurance against: // <z alt"href' www' id=z"/> // ^ ^ // start suspected ending // // <z alt"href' www' id=z"/> // ^ // we're here currently !( !( // // first, rule out healthy code scenarios, // <a href="zzz" target="_blank" style="color: black;"> // ^ ^ ^ // / | \ // start suspected we're here ( (lastQuoteWasMatched && lastMatchedQuotesPairsStartIsAt === idxOfAttrOpening && lastMatchedQuotesPairsEndIsAt === isThisClosingIdx) || // or quotes can be mismatching, but last chunk's start should // match a confirmed attribute regex (with matching quotes and // equal present) guaranteedAttrStartsAtX(str, chunkStartsAt as number) ) ) && // // continuing with catch clauses of the insurance case: lastQuoteWasMatched && lastMatchedQuotesPairsStartIsAt !== undefined && lastMatchedQuotesPairsStartIsAt <= isThisClosingIdx ); console.log(`956 new attr starting`); console.log( `W1=${`\u001b[${W1 ? 32 : 31}m${W1}\u001b[${39}m`} && W2=${`\u001b[${ W2 ? 32 : 31 }m${W2}\u001b[${39}m`} ===> ${`\u001b[${W1 && W2 ? 32 : 31}m${`RETURN ${ W1 && W2 }`}\u001b[${39}m`}` ); return W1 && W2; } // when index "isThisClosingIdx" has been passed... 
if (i > isThisClosingIdx) { console.log(`970 i > isThisClosingIdx`); // if current quote matches the opening if (openingQuote && str[i] === openingQuote) { console.log( `975 a true opening quote matched beyond the suspected-one - ${`\u001b[${32}m${`RETURN`}\u001b[${39}m`}` ); console.log( `978 ! ${`\u001b[${33}m${`lastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( lastCapturedChunk, null, 4 )}` ); console.log( `985 ! ${`\u001b[${33}m${`lastQuoteAt`}\u001b[${39}m`} = ${JSON.stringify( lastQuoteAt, null, 4 )}` ); console.log( `992 ! ${`\u001b[${33}m${`isThisClosingIdx`}\u001b[${39}m`} = ${JSON.stringify( isThisClosingIdx, null, 4 )}` ); // we want to return false as default... // except if we're able to extract a clean recognised attribute name // in front of here and prove that it's actually a new attribute starting // here, then it's true // imagine // <img alt="somethin' fishy going on' class">z<a class="y"> // ^ ^ ^ // start suspected we're here const Y1 = !!lastQuoteAt; const Y2 = lastQuoteAt === isThisClosingIdx; // ensure there's some content between suspected and "here": const Y3 = (lastQuoteAt as number) + 1 < i && str.slice((lastQuoteAt as number) + 1, i).trim(); const Y4 = str .slice((lastQuoteAt as number) + 1, i) .trim() .split(/\s+/) .every((chunk) => allHtmlAttribs.has(chunk)); const Y5 = i >= isThisClosingIdx; const Y6 = !str[rightVal] || !`'"`.includes(str[rightVal]); console.log(`Y1: ${`\u001b[${Y1 ? 32 : 31}m${Y1}\u001b[${39}m`}`); console.log(`Y2: ${`\u001b[${Y2 ? 32 : 31}m${Y2}\u001b[${39}m`}`); console.log(`Y3: ${`\u001b[${Y3 ? 32 : 31}m${Y3}\u001b[${39}m`}`); console.log(`Y4: ${`\u001b[${Y4 ? 32 : 31}m${Y4}\u001b[${39}m`}`); console.log(`Y5: ${`\u001b[${Y5 ? 32 : 31}m${Y5}\u001b[${39}m`}`); console.log(`Y6: ${`\u001b[${Y6 ? 32 : 31}m${Y6}\u001b[${39}m`}`); console.log( `1033 ${`\u001b[${ Y1 && Y2 && Y3 && Y4 && Y5 && Y6 ? 
32 : 31 }m${`RETURN`}\u001b[${39}m`} Y1 && Y2 && Y3 && Y4 && Y5 && Y6 ===> ${`\u001b[${ Y1 && Y2 && Y3 && Y4 && Y5 && Y6 ? 32 : 31 }m${Y1 && Y2 && Y3 && Y4 && Y5 && Y6}\u001b[${39}m`}` ); return !!(Y1 && Y2 && Y3 && Y4 && Y5 && Y6); } // if we have passed the suspected closing quote // and we meet another quote of the same kind, // it's false result. Imagine code: // <img alt='so-called "artists"!" class='yo'/> // ^ ^ // questioned | // index we're here // so it's false if ( // if attribute starts with a quote openingQuote && // and we're suspecting a mismatching pair: str[isThisClosingIdx] === oppositeToOpeningQuote && // we're questioning, maybe current // suspected closing quote is of the // opposite kind (single-double, double-single) str[i] === oppositeToOpeningQuote ) { console.log( `1062 another quote same as suspected was met - ${`\u001b[${31}m${`RETURN FALSE`}\u001b[${39}m`}` ); return false; } // if the tag closing was met, that's fine, imagine: // <div class='c">.</div> // ^ // we went past this suspected closing quote // and reached the tag ending... if (str[i] === "/" || str[i] === ">" || str[i] === "<") { console.log(`1071 ${`\u001b[${32}m${`RETURN`}\u001b[${39}m`}`); // happy path scenario const R0 = // opening matches closing str[idxOfAttrOpening] === str[isThisClosingIdx] && // last captured quote was the suspected ("isThisClosingIdx") lastQuoteAt === isThisClosingIdx && // all is clean inside - there are no quotes of the ones used in // opening/closing (there can be opposite type quotes though) !str .slice(idxOfAttrOpening + 1, isThisClosingIdx) .includes(str[idxOfAttrOpening]); // Not more than one pair of non-overlapping quotes should have been matched. 
const R11 = quotesCount.get(`matchedPairs`) < 2; // at least it's not a recognised attribute name on the left: const attrNameCharsChunkOnTheLeft = findAttrNameCharsChunkOnTheLeft( str, i ); const R12 = (!attrNameCharsChunkOnTheLeft || !allHtmlAttribs.has(attrNameCharsChunkOnTheLeft)) && // avoid cases where multiple pairs of mismatching quotes were matched // we're past suspected closing: (!( ( i > isThisClosingIdx && // and there were some single quotes recorded so far quotesCount.get(`'`) && // and doubles too quotesCount.get(`"`) && // and there were few quote pairs matched quotesCount.get(`matchedPairs`) > 1 ) // in which case, // too much fun is going on, like in: // <z bbb"c" ddd'e'>.<z fff"g"> // | ^^ // start | \ // suspected currently on ) || // but add escape latch for when tag closing follows: // <img alt='so-called "artists"!"/> // ^ ^^ // start suspected currently we're on slash `/>`.includes(str[rightVal])); const R2 = totalQuotesCount < 3 || // there's only two quotes mismatching: quotesCount.get(`"`) + quotesCount.get(`'`) - quotesCount.get(`matchedPairs`) * 2 !== 2; const R31 = !lastQuoteWasMatched || (lastQuoteWasMatched && !( lastMatchedQuotesPairsStartIsAt !== undefined && Array.from( str .slice(idxOfAttrOpening + 1, lastMatchedQuotesPairsStartIsAt) .trim() ).every((char) => isAttrNameChar(char)) && allHtmlAttribs.has( str .slice(idxOfAttrOpening + 1, lastMatchedQuotesPairsStartIsAt) .trim() ) )); const R32 = !rightVal && totalQuotesCount % 2 === 0; const R33 = str[idxOfAttrOpening - 2] && str[idxOfAttrOpening - 1] === "=" && isAttrNameChar(str[idxOfAttrOpening - 2]); const R34 = !ensureXIsNotPresentBeforeOneOfY(str, i + 1, "<", [ `='`, `="`, ]); console.log(`1158:`); console.log(" "); console.log( `R0 (happy path): ${`\u001b[${R0 ? 32 : 31}m${R0}\u001b[${39}m`}` ); console.log(" "); console.log(`OR ALL OF THE FOLLOWING`); console.log(" "); console.log("("); console.log(` R11: ${`\u001b[${R11 ? 
32 : 31}m${R11}\u001b[${39}m`}`); console.log(` R12: ${`\u001b[${R12 ? 32 : 31}m${R12}\u001b[${39}m`}`); console.log( `) ==> ${`\u001b[${R11 || R12 ? 32 : 31}m${R11 || R12}\u001b[${39}m`}` ); console.log(" "); console.log(`AND`); console.log(" "); console.log(`R2: ${`\u001b[${R2 ? 32 : 31}m${R2}\u001b[${39}m`}`); console.log(" "); console.log(`AND`); console.log(" "); console.log("("); console.log(` R31: ${`\u001b[${R31 ? 32 : 31}m${R31}\u001b[${39}m`}`); console.log(` R32: ${`\u001b[${R32 ? 32 : 31}m${R32}\u001b[${39}m`}`); console.log(` R33: ${`\u001b[${R33 ? 32 : 31}m${R33}\u001b[${39}m`}`); console.log(` R34: ${`\u001b[${R34 ? 32 : 31}m${R34}\u001b[${39}m`}`); console.log( `) ==> ${`\u001b[${R31 || R32 || R33 || R34 ? 32 : 31}m${ R31 || R32 || R33 || R34 }\u001b[${39}m`}` ); return ( // happy path - known opening matched suspected closing and // that suspected closing was the last captured quote ("lastQuoteAt") // R0 || // The matched pair count total has not reach or exceed two // // because we're talking about fully matched opening-closing quote // pairs. // // Let me remind you the question algorithm is answering: // Is quote at index y closing quote, considering opening is at x? // // Now, imagine we went past index y, reached index z, and up to // this point two sets of quotes were caught, as in: // <z bbb"c" ddd"e"> // ^ ^ // start we're here, quote in question // // above, that's falsey result, it can't be fourth caught quote! ((R11 || R12) && // besides that, // We need to account for mismatching quote pair. If a pair is // mismatching, "matchedPairs" might not get bumped to two thus // leading to a mistake. // When pair is mismatching, we can tell it's so because total count // minus matched count times two would be equal to two - two // quotes left unmatched. // Mind you, it's not more because algorithm would exit by the time // we would reach 4 let's say... 
// either there's not more than one pair: R2 && // also, protection against cases like: // <z bbb"c" ddd'e> // ^ ^ // start suspected // // in case above, all the clauses up until now pass // // we need to check against "lastQuoteWasMatched" flag // // // or last pair was matched: (R31 || // either this closing bracket is the last: R32 || // or char before starting is equal and char before that // satisfies attribute name requirements R33 || // or it seems like it's outside rather inside a tag: R34)) ); } // if the true attribute ending was met passing // past the suspected one, this means that // suspected one was a false guess. Correct ending // is at this index "i" if ( str[i] === "=" && matchRight(str, i, [`'`, `"`], { trimBeforeMatching: true, trimCharsBeforeMatching: ["="], }) ) { console.log( `1260 new attribute starts - ${`\u001b[${32}m${`RETURN TRUE`}\u001b[${39}m`}` ); return true; } } else { console.log(`1265 i <= isThisClosingIdx`); // this clause is meant to catch the suspected quotes // which don't belong to the tag, it's where quotes // in question are way beyond the actual attribute's ending. // For example, consider // <div class="c' id="x'>.</div> // ^ ^ // | | // known suspected // opening closing // // That equal-quote after "id" would trigger the alarm, // that is the clause below.. 
// BUT mind the false positive: // <img src="xyz" alt="="/> // ^ ^ // | | // known opening/ \suspected closing // // by the way we use right() to jump over whitespace // for example, this will also catch: // <img src="xyz" alt="= "/> // console.log(`1290 *`); let firstNonWhitespaceCharOnTheLeft; if (str[i - 1] && str[i - 1].trim() && str[i - 1] !== "=") { // happy path firstNonWhitespaceCharOnTheLeft = i - 1; console.log(`1295 happy path`); console.log( `1297 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`firstNonWhitespaceCharOnTheLeft`}\u001b[${39}m`} = ${JSON.stringify( firstNonWhitespaceCharOnTheLeft, null, 4 )}` ); } else { console.log(`1304 traverse backwards`); for (let y = i; y--; ) { console.log( `1307 ${`\u001b[${33}m${`str[${y}]`}\u001b[${39}m`} = ${JSON.stringify( str[y], null, 4 )}` ); if (str[y].trim() && str[y] !== "=") { firstNonWhitespaceCharOnTheLeft = y; console.log( `1316 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`firstNonWhitespaceCharOnTheLeft`}\u001b[${39}m`} = ${JSON.stringify( firstNonWhitespaceCharOnTheLeft, null, 4 )}; BREAK` ); break; } } } if ( str[i] === "=" && matchRight(str, i, [`'`, `"`], { // ensure it's not tag ending on the right // before freaking out: cb: (char) => !`/>`.includes(char as string), trimBeforeMatching: true, trimCharsBeforeMatching: ["="], }) && // ensure it's a character suitable for attribute // name on the left of equal (if it's a real // attribute name its name characters must pass // the isAttrNameChar()...) isAttrNameChar(str[firstNonWhitespaceCharOnTheLeft as number]) && // ensure it's not // <img src="https://z.com/r.png?a=" /> // ^ // here // // in which case it's a false positive!!! 
!str.slice(idxOfAttrOpening + 1).startsWith("http") && !str.slice(idxOfAttrOpening + 1, i).includes("/") && !str.endsWith("src=", idxOfAttrOpening) && !str.endsWith("href=", idxOfAttrOpening) ) { console.log( `1353 new attribute starts - ${`\u001b[${31}m${`RETURN FALSE`}\u001b[${39}m`}` ); return false; } console.log(`1358 new attr didn't start`); if (i === isThisClosingIdx && guaranteedAttrStartsAtX(str, i + 1)) { console.log(`1361 another attribute starts on the right!`); return true; } // also some insurance for crazier patterns like: // <z alt"href" www'/> // ^ | ^ // start | suspected // | // currently on // // catch this pattern where initial equal to the left of start is missing // and this pattern implies equals will be missing further console.log( `1375 ${`\u001b[${35}m${`██`}\u001b[${39}m`} ${`\u001b[${35}m${`FIY`}\u001b[${39}m`}, ${`\u001b[${33}m${`lastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( lastCapturedChunk, null, 4 )}; ${`\u001b[${33}m${`secondLastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( secondLastCapturedChunk, null, 4 )}; ${`\u001b[${33}m${`firstNonWhitespaceCharOnTheLeft`}\u001b[${39}m`} = ${JSON.stringify( firstNonWhitespaceCharOnTheLeft, null, 4 )}; ${`\u001b[${33}m${`str[firstNonWhitespaceCharOnTheLeft]`}\u001b[${39}m`} = ${JSON.stringify( str[firstNonWhitespaceCharOnTheLeft as any], null, 4 )}` ); if ( i < isThisClosingIdx && `'"`.includes(str[i]) && lastCapturedChunk && str[left(str, idxOfAttrOpening) as number] && str[left(str, idxOfAttrOpening) as number] !== "=" && lastMatchedQuotesPairsStartIsAt === idxOfAttrOpening && allHtmlAttribs.has(lastCapturedChunk) ) { console.log( `1404 freak out clause, it seems an attribute started in between start and suspected` ); return false; } // catch // <div style="float:"left'">z</div> // ^ ^ // start we're here, and also it's suspected too // if ( i === isThisClosingIdx && `'"`.includes(str[i]) && (str[leftVal] === `'` || str[leftVal] === `"`) && lastCapturedChunk && 
secondLastCapturedChunk && totalQuotesCount % 2 === 0 && secondLastCapturedChunk.endsWith(":") ) { console.log( `1424 it's ending of an attribute with a double-wrapped value - RETURN ${`\u001b[${32}m${`true`}\u001b[${39}m`}` ); return true; } // catch mismatching pairs: case when definitely a tag end follows, // <td style='font-family:'AbCd-Ef', 'AbCd', Ab, cd-ef;"> // ^ ^ // opening suspected closing if ( i === isThisClosingIdx && `'"`.includes(str[i]) && str.slice(idxOfAttrOpening, isThisClosingIdx).includes(":") && (str[rightVal] === ">" || (str[rightVal] === "/" && str[right(str, rightVal) as number] === ">")) ) { console.log( `1442 tag ending to the right, RETURN ${`\u001b[${32}m${`true`}\u001b[${39}m`}` ); return true; } } // // // // // // // // // // BOTTOM // ██████ // // // // // // // // // // // catch quotes again - these clauses are specifically at the bottom // because they're depdendent on "lastCapturedChunk" which is calculated // after quote catching at the top if ( `'"`.includes(str[i]) && // if these quotes are after "isThisClosingIdx", a suspected closing i > isThisClosingIdx ) { // if these quotes are after "isThisClosingIdx", if there // was no chunk recorded after it until now, // ("lastChunkWasCapturedAfterSuspectedClosing" flag) // or there was but it's not recognised, that's falsey result console.log( `1483 FIY, ${`\u001b[${33}m${`lastCapturedChunk`}\u001b[${39}m`} = ${JSON.stringify( lastCapturedChunk, null, 4 )}; lastChunkWasCapturedAfterSuspectedClosing=${lastChunkWasCapturedAfterSuspectedClosing}` ); if ( // if there was no chunk recorded after it until now !lastChunkWasCapturedAfterSuspectedClosing || !lastCapturedChunk || // or there was but lastCapturedChunk is not recognised !allHtmlAttribs.has(lastCapturedChunk) ) { console.log( `1498 the slice "${str.slice( isThisClosingIdx, i )}" does not contain a new attribute name, ${`\u001b[${31}m${`RETURN FALSE`}\u001b[${39}m`}` ); return false; } // ELSE - it does match, so it seems legit 
console.log(`1506 ${`\u001b[${32}m${`RETURN TRUE`}\u001b[${39}m`}`); return true; } // at the bottom, PART II of catch quotes if (`'"`.includes(str[i])) { lastQuoteAt = i; console.log( `1514 ${`\u001b[${32}m${`SET`}\u001b[${39}m`} ${`\u001b[${33}m${`lastQuoteAt`}\u001b[${39}m`} = ${JSON.stringify( lastQuoteAt, null, 4 )}` ); } // at the bottom, PART II of reset chunk if (chunkStartsAt && !isAttrNameChar(str[i])) { console.log( `1525 ${`\u001b[${31}m${`RESET`}\u001b[${39}m`} ${`\u001b[${33}m${`chunkStartsAt`}\u001b[${39}m`}` ); chunkStartsAt = null; } // logging // ----------------------------------------------------------------------------- console.log( `${`\u001b[${90}m${`██ chunkStartsAt: ${chunkStartsAt}`}\u001b[${39}m`}` ); console.log( `${`\u001b[${90}m${`██ lastCapturedChunk: ${lastCapturedChunk}`}\u001b[${39}m`}; ${`\u001b[${90}m${`██ secondLastCapturedChunk: ${secondLastCapturedChunk}`}\u001b[${39}m`}` ); console.log( `${`\u001b[${90}m${`██ lastChunkWasCapturedAfterSuspectedClosing: ${lastChunkWasCapturedAfterSuspectedClosing}`}\u001b[${39}m`}` ); console.log( `${`\u001b[${90}m${`██ quotesCount: singles - ${quotesCount.get( `'` )}; doubles - ${quotesCount.get( `"` )}; matchedPairs - ${quotesCount.get(`matchedPairs`)}`}\u001b[${39}m`}` ); console.log( `${`\u001b[${90}m${`██ totalQuotesCount - ${totalQuotesCount}; lastQuoteWasMatched - ${lastQuoteWasMatched}`}\u001b[${39}m`}` ); console.log( `${`\u001b[${90}m${`██ lastMatchedQuotesPairsStartIsAt - ${lastMatchedQuotesPairsStartIsAt}; lastMatchedQuotesPairsEndIsAt - ${lastMatchedQuotesPairsEndIsAt}`}\u001b[${39}m`}` ); } // if this point was reached and loop didn't exit... // default is false console.log(`1558 ${`\u001b[${31}m${`RETURN DEFAULT FALSE`}\u001b[${39}m`}`); return false; } export { isAttrClosing, version };
// the_stack — stray dataset/concatenation marker separating two unrelated source files; not part of either program
import { Component, Input, Inject, HostListener, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { WebSite } from './site';
import { OrderBy, SortPipe } from 'common/sort.pipe';
import { Range } from 'common/virtual-list.component';
import { Status } from 'common/status';
import { resolveWebsiteRoute, resolveAppPoolRoute } from 'webserver/webserver-routing.module';
import { WebSitesService } from './websites.service';
import { NotificationService } from 'notification/notification.service';
import { ListOperationDef, ListOperationContext } from 'common/list';

// Operations that can be invoked on a web site from the list toolbar.
enum WebSiteOp {
    browse = 0,
    start,
    stop,
    edit,
    delete,
}

// Toolbar definitions for each operation: (id, display name, icon/css key).
const WebSiteOperations: ListOperationDef<WebSiteOp>[] = [
    new ListOperationDef<WebSiteOp>(WebSiteOp.browse, "Browse", "browse"),
    new ListOperationDef<WebSiteOp>(WebSiteOp.start, "Start", "start"),
    new ListOperationDef<WebSiteOp>(WebSiteOp.stop, "Stop", "stop"),
    new ListOperationDef<WebSiteOp>(WebSiteOp.edit, "Edit", "edit"),
    new ListOperationDef<WebSiteOp>(WebSiteOp.delete, "Delete", "delete"),
]

// An operation is only enabled when the site is in the mapped status
// (start requires Stopped, stop requires Started); see WebSiteItem.isDisabled.
const actionRestrictions: Map<WebSiteOp, Status> = new Map<WebSiteOp, Status>([
    [WebSiteOp.start, Status.Stopped],
    [WebSiteOp.stop, Status.Started],
]);

// Bit flags selecting which columns a list renders; combined per Perspective
// in the `perspectives` table below and tested with a bitwise AND.
enum WebSiteFields {
    path = 1,
    status = 2,
    appPool = 4,
}

@Component({
    selector: 'website-item',
    // NOTE: ${WebSiteFields.*} below is interpolated once at module load,
    // baking the numeric flag values into the template's field(...) calls.
    template: `
<div class="row grid-item border-color" [class.selected-for-edit]="selected" (click)="onItemSelected($event)" (keydown.space)="onItemSelected($event)" (dblclick)="onEnter($event)" (keydown.enter)="onEnter($event)">
    <div class='col-xs-7 col-sm-4 col-md-3 col-lg-3'>
        <div class='name'>
            <a tabindex="0" class="focusable color-normal hover-color-active" (click)="onEnter($event)">{{model.name}}</a>
            <small class='physical-path' *ngIf="field(${WebSiteFields.path})">{{model.physical_path}}</small>
        </div>
    </div>
    <div class='col-xs-3 col-sm-2 col-md-1 valign' *ngIf="field(${WebSiteFields.status})">
        <span class='status' [ngClass]="model.status">{{model.status}}</span>
        <span title="HTTPS is ON" class="visible-xs-inline https" *ngIf="hasHttps()"></span>
    </div>
    <div class='col-lg-2 visible-lg valign' *ngIf="field(${WebSiteFields.appPool})">
        <div *ngIf="model.application_pool">
            <a [routerLink]="appPoolRoute" (keydown.enter)="$event.stopPropagation()">
                <span [ngClass]="model.application_pool.status">{{model.application_pool.name}}
                    <span *ngIf="model.application_pool.status != 'started'">({{model.application_pool.status}})</span>
                </span>
            </a>
        </div>
    </div>
    <div class=' hidden-xs col-xs-4 col-xs-push-1 col-sm-3 col-md-3 valign overflow-visible'>
        <navigator [model]="model.bindings" [right]="true"></navigator>
    </div>
</div>`,
    styles: [`
.name {
    font-size: 16px;
    white-space: nowrap;
    text-overflow: ellipsis;
    overflow: hidden;
}

.https:after {
    font-family: FontAwesome;
    content: "\\f023";
    padding-left: 5px;
}

a {
    background: transparent;
    display: inline;
}

.name small {
    font-size: 12px;
}

.row {
    margin: 0;
}
`]
})
/**
 * A single row in the web-site grid list. Renders name/path, status,
 * application pool link and bindings, and implements the
 * ListOperationContext contract (getTitle/isDisabled/execute) for the
 * shared list toolbar.
 */
export class WebSiteItem extends ListOperationContext<WebSiteOp> implements OnInit {
    // The web site this row represents.
    @Input() model: WebSite;
    // Bitmask of WebSiteFields controlling which columns are shown.
    @Input() fields: WebSiteFields;
    // Default browse URL resolved once on init; falsy when unavailable.
    private siteUrl: string;

    constructor(
        private router: Router,
        private notifications: NotificationService,
        @Inject("WebSitesService") private service: WebSitesService,
    ) {
        super();
    }

    @HostListener('keydown', ['$event'])
    handleKeyboardEvent(event: KeyboardEvent) {
        // Disable AccessibilityManager's keyboardEvent handler due to this issue:
        // https://github.com/microsoft/IIS.WebManager/issues/360
        // Capture enter key
        if (event.keyCode === 13) {
            event.stopPropagation();
        }
    }

    ngOnInit(): void {
        // Resolve the URL used by the "browse" operation up front.
        this.siteUrl = this.service.getDefaultUrl(this.model);
    }

    // True when the site is currently running.
    started() {
        return this.model.status == Status.Started;
    }

    // Whether the given column flag is enabled in the `fields` bitmask.
    field(f: WebSiteFields): boolean {
        return (this.fields & f) != 0;
    }

    // Router link target for the site's application pool.
    get appPoolRoute() {
        return [resolveAppPoolRoute(this.model.application_pool.id)];
    }

    // Tooltip text for a toolbar operation; browse shows the URL, or an
    // "(unavailable)" suffix when no default URL could be resolved.
    getTitle(op: ListOperationDef<WebSiteOp>): string {
        if (op.id == WebSiteOp.browse) {
            return this.siteUrl || `${super.getTitle(op)} (unavailable)`;
        }
        return super.getTitle(op);
    }

    // Returns true when the operation must be disabled (wrong site status,
    // or browse with no URL); returns null — not false — otherwise,
    // presumably treated as "enabled" by the toolbar. TODO confirm contract.
    isDisabled(op: ListOperationDef<WebSiteOp>) {
        let restriction = actionRestrictions.get(op.id);
        if (restriction && this.model.status != restriction) {
            return true;
        }
        if (op.id == WebSiteOp.browse && !this.siteUrl) {
            return true;
        }
        return null;
    }

    // Dispatches a toolbar operation. Stop/delete prompt for confirmation
    // before calling the service; browse opens the site in a new tab.
    execute(op: ListOperationDef<WebSiteOp>): Promise<any> {
        switch (op.id) {
            case WebSiteOp.browse:
                return Promise.resolve(window.open(this.siteUrl, '_blank'));

            case WebSiteOp.start:
                return this.service.start(this.model);

            case WebSiteOp.stop:
                return this.notifications.confirmAsync(
                    "Stop Web Site",
                    `Are you sure you want to stop "${this.model.name}"?`,
                    () => this.service.stop(this.model),
                );

            case WebSiteOp.edit:
                return this.router.navigate([resolveWebsiteRoute(this.model.id)]);

            case WebSiteOp.delete:
                return this.notifications.confirmAsync(
                    "Delete Web Site",
                    `Are you sure you want to delete "${this.model.name}"?`,
                    () => this.service.delete(this.model),
                );
        }
    }

    // Navigates to the site's detail page (same as the edit operation).
    edit() {
        return this.router.navigate([resolveWebsiteRoute(this.model.id)]);
    }

    // True when any of the site's bindings uses HTTPS.
    hasHttps(): boolean {
        for (var i = 0; i < this.model.bindings.length; ++i) {
            if (this.model.bindings[i].is_https) {
                return true;
            }
        }
        return false;
    }
}

// Which host view the list is rendered in; used to index `perspectives`.
export enum Perspective {
    WebServer = 0,
    AppPool,
}

// Column bitmask per perspective (indexed by the Perspective enum value).
const perspectives = [
    // Index 0 => WebServer
    WebSiteFields.path | WebSiteFields.status | WebSiteFields.appPool,
    // Index 1 => AppPool
    WebSiteFields.path | WebSiteFields.status,
];

@Component({
    selector: 'website-list',
    template: `
<list-operations-bar *ngIf="!canAdd" [operations]="operations" [context]="selected"></list-operations-bar>
<list-operations-bar *ngIf="canAdd" [operations]="operations" [context]="selected">
    <selector class="container-fluid list-operation-addon-view" #newWebSite>
        <new-website *ngIf="newWebSite.opened" (created)="newWebSite.close()" (cancel)="newWebSite.close()">
        </new-website>
    </selector>
    <button class="list-operation-addon-left add list-action-button" [class.background-active]="newWebSite.opened" (click)="newWebSite.toggle()" title="Create">Create</button>
</list-operations-bar>
<div class="container-fluid">
    <div class="hidden-xs border-active grid-list-header row" [hidden]="model.length == 0">
        <label class="col-xs-8 col-sm-4 col-md-3 col-lg-3" [ngClass]="_orderBy.css('name')" (click)="doSort('name')" (keyup.enter)="doSort('name')" (keyup.space)="doSort('name')" tabindex="0" [attr.aria-sort]="_orderBy.ariaSort('name')" role="columnheader">Name</label>
        <label class="col-xs-3 col-md-1 col-lg-1" [ngClass]="_orderBy.css('status')" (click)="doSort('status')" (keyup.space)="doSort('status')" (keyup.enter)="doSort('status')" tabindex="0" [attr.aria-sort]="_orderBy.ariaSort('status')" role="columnheader">Status</label>
        <label class="col-lg-2 visible-lg" *ngIf="hasField(${WebSiteFields.appPool})" [ngClass]="_orderBy.css('application_pool.name')" (click)="doSort('application_pool.name')" (keyup.enter)="doSort('application_pool.name')" (keyup.space)="doSort('application_pool.name')" tabindex="0" [attr.aria-sort]="_orderBy.ariaSort('application_pool.name')" role="columnheader">Application Pool</label>
    </div>
    <virtual-list class="grid-list" *ngIf="model" [count]="model.length" [loaded]="this.model" emptyText="No website found" (rangeChange)="onRangeChange($event)">
        <li class="hover-editing" tabindex="-1" *ngFor="let s of _view">
            <website-item [model]="s" [fields]="fields" (onSelected)="onItemSelected($event)"></website-item>
        </li>
    </virtual-list>
</div>
`,
    styles: [`
.container-fluid, .row {
    margin: 0;
    padding: 0;
}
`]
})
/**
 * Sortable, virtualized list of web sites with a shared operations toolbar.
 * The visible columns depend on `perspective`; the "Create" add-on is only
 * shown in the WebServer perspective while the service is installed.
 */
export class WebSiteList implements OnInit {
    // Full set of sites; `_view` holds the currently visible window of it.
    @Input() model: Array<WebSite>;
    // Host view; selects the column bitmask from `perspectives`.
    @Input() perspective: Perspective = Perspective.WebServer;
    // Column bitmask derived from `perspective` in ngOnInit.
    fields: WebSiteFields;

    private _selected: WebSiteItem;
    private _orderBy: OrderBy = new OrderBy();
    private _sortPipe: SortPipe = new SortPipe();
    private _range: Range = new Range(0, 0);
    private _view: Array<WebSite> = [];

    constructor(
        @Inject("WebSitesService") private service: WebSitesService,
    ){}

    public ngOnInit() {
        this.onRangeChange(this._range);
        this.fields = perspectives[this.perspective];
    }

    // The "Create" add-on is only offered in the WebServer perspective and
    // when the web-site feature is not stopped/uninstalled.
    get canAdd() {
        return this.perspective == Perspective.WebServer && this.service.installStatus != Status.Stopped;
    }

    get operations() {
        return WebSiteOperations;
    }

    get selected() {
        return this._selected;
    }

    // Tracks the single selected row; deselects the previous one.
    onItemSelected(item: WebSiteItem) {
        if (this._selected) {
            this._selected.selected = false;
        }
        this._selected = item;
    }

    // Whether the given column flag is enabled in the `fields` bitmask.
    hasField(f: WebSiteFields): boolean {
        return (this.fields & f) != 0;
    }

    // Refills the visible window when the virtual list scrolls.
    onRangeChange(range: Range) {
        Range.fillView(this._view, this.model, range);
        this._range = range;
    }

    // Sorts the backing model in place and refreshes the visible window.
    doSort(field: string) {
        this._orderBy.sort(field);
        this._sortPipe.transform(this.model, this._orderBy.Field, this._orderBy.Asc, null, true);
        this.onRangeChange(this._range);
    }
}
the_stack
import { expect } from 'chai';
import 'mocha';

/**
 * Mocha/chai acceptance suite for a tldts-compatible API. The module under
 * test is injected (typed `any` so the same suite can run against multiple
 * bundles), and the suite exercises parse/getDomain/getPublicSuffix/
 * getHostname/getDomainWithoutSuffix/getSubdomain plus option handling.
 * Every literal below pins exact expected behavior — do not edit strings.
 */
export default function test(tldts: any): void {
  describe('from https://github.com/rushmorem/publicsuffix/blob/master/src/tests.rs', () => {
    // Copyright (c) 2016 Rushmore Mushambi
    it('should allow parsing IDN email addresses', () => {
      expect(tldts.parse('Pelé@example.com')).to.deep.include({
        domain: 'example.com',
        hostname: 'example.com',
        publicSuffix: 'com',
      });
      expect(tldts.parse('δοκιμή@παράδειγμα.δοκιμή')).to.deep.include({
        domain: 'παράδειγμα.δοκιμή',
        hostname: 'παράδειγμα.δοκιμή',
        publicSuffix: 'δοκιμή',
      });
      expect(tldts.parse('我買@屋企.香港')).to.deep.include({
        domain: '屋企.香港',
        hostname: '屋企.香港',
        publicSuffix: '香港',
      });
      expect(tldts.parse('甲斐@黒川.日本')).to.deep.include({
        domain: '黒川.日本',
        hostname: '黒川.日本',
        publicSuffix: '日本',
      });
      expect(tldts.parse('чебурашка@ящик-с-апельсинами.рф')).to.deep.include({
        domain: 'ящик-с-апельсинами.рф',
        hostname: 'ящик-с-апельсинами.рф',
        publicSuffix: 'рф',
      });
      expect(tldts.parse('用户@例子.广告')).to.deep.include({
        domain: '例子.广告',
        hostname: '例子.广告',
        publicSuffix: '广告',
      });
    });
  });

  // Domain extraction: option handling, IPs, malformed URLs, subdomains,
  // private-suffix interaction, and trailing-dot normalization.
  describe('#getDomain', () => {
    it('should allow disabling parsing/validation of hostnames', () => {
      expect(
        tldts.getDomain('foo.com', {
          extractHostname: false,
        }),
      ).to.equal('foo.com');
    });

    // JS reserved words used as labels must not confuse any internal
    // object/lookup-table based suffix matching.
    describe('supports reserved keywords', () => {
      [
        'abstract',
        'arguments',
        'await',
        'boolean',
        'break',
        'byte',
        'case',
        'catch',
        'char',
        'class',
        'const',
        'continue',
        'debugger',
        'default',
        'delete',
        'do',
        'double',
        'else',
        'enum',
        'eval',
        'export',
        'extends',
        'false',
        'final',
        'finally',
        'float',
        'for',
        'function',
        'goto',
        'if',
        'implements',
        'import',
        'in',
        'instanceof',
        'int',
        'interface',
        'let',
        'long',
        'native',
        'new',
        'null',
        'package',
        'private',
        'protected',
        'public',
        'return',
        'short',
        'static',
        'super',
        'switch',
        'synchronized',
        'this',
        'throw',
        'throws',
        'transient',
        'true',
        'try',
        'typeof',
        'var',
        'void',
        'volatile',
        'while',
        'with',
        'yield',
      ].forEach(keyword => {
        it(keyword, () => {
          expect(tldts.getDomain(`https://${keyword}.com`)).to.equal(
            `${keyword}.com`,
          );
          expect(tldts.getDomain(`https://foo.${keyword}.com`)).to.equal(
            `${keyword}.com`,
          );
          expect(tldts.getDomain(`https://foo.${keyword}`)).to.equal(
            `foo.${keyword}`,
          );
        });
      });
    });

    it('handle IPs', () => {
      expect(tldts.getDomain('1.2.3.4')).to.equal(null);
      expect(tldts.getHostname('1.2.3.4')).to.equal('1.2.3.4');
    });

    it('handle weird urls', () => {
      expect(tldts.getDomain(' ftp:/mapasamazonsa.com.ve./ ')).to.equal(
        'mapasamazonsa.com.ve',
      );
      expect(tldts.getDomain(' ftp://///mapasamazonsa.com.ve./ ')).to.equal(
        'mapasamazonsa.com.ve',
      );
      expect(tldts.getDomain(' ftp://///mapasamazonsa.com.ve/ ')).to.equal(
        'mapasamazonsa.com.ve',
      );
      expect(tldts.getDomain('ftp://///mapasamazonsa.com.ve/')).to.equal(
        'mapasamazonsa.com.ve',
      );
      // From https://github.com/peerigon/parse-domain/issues/49
      expect(tldts.getDomain('ftp://mapasamazonsa.com.ve/')).to.equal(
        'mapasamazonsa.com.ve',
      );
      expect(
        tldts.getDomain('http://y399.3466633.be:4/235222/399.html'),
      ).to.equal('3466633.be');
      expect(tldts.getDomain('this%20file%20was%')).to.equal(null);
      expect(
        tldts.getDomain(
          'wss://ie14.zopim.com/s/W/ws/zPYsGUAnWMyJ1XOL/c/1537265376519',
        ),
      ).to.equal('zopim.com');
      expect(tldts.getDomain('wss://mp.sparkchess.com/ ')).to.equal(
        'sparkchess.com',
      );
      expect(
        tldts.getDomain(
          'wss://119.92.223.221.prod.hosts.ooklaserver.net:8080/ws',
        ),
      ).to.equal('ooklaserver.net');
      expect(
        tldts.getDomain(
          'wss://gscspeedtest1.dctechmicro.com.prod.hosts.ooklaserver.net:8080/ws',
        ),
      ).to.equal('ooklaserver.net');
      expect(
        tldts.getDomain('ws://lhg2-speedtest.globe.com.ph:8080/ws'),
      ).to.equal('globe.com.ph');
      expect(
        tldts.getDomain('wss://s-usc1c-nss-218.firebaseio.com/.ws'),
      ).to.equal('firebaseio.com');
      expect(tldts.getDomain('http://server.dr.pt./')).to.equal('dr.pt');
    });

    it('should return the expected domain from a simple string', () => {
      expect(tldts.getDomain('google.com')).to.equal('google.com');
      expect(tldts.getDomain('t.co')).to.equal('t.co');
      expect(tldts.getDomain(' GOOGLE.COM ')).to.equal('google.com');
      expect(tldts.getDomain(' t.CO ')).to.equal('t.co');
    });

    it('should return the relevant domain of a two levels domain', () => {
      expect(tldts.getDomain('google.co.uk')).to.equal('google.co.uk');
    });

    it('should return the relevant domain from a subdomain string', () => {
      expect(tldts.getDomain('fr.google.com')).to.equal('google.com');
      expect(tldts.getDomain('foo.google.co.uk')).to.equal('google.co.uk');
      expect(tldts.getDomain('fr.t.co')).to.equal('t.co');
    });

    it('should handle domains with lots of subdomains', () => {
      expect(tldts.getDomain('a.f.g.h.i.bar.baz.google.com')).to.equal(
        'google.com',
      );
      expect(tldts.getDomain('foo.bar.baz.fr.t.co')).to.equal('t.co');
      expect(tldts.getDomain('sub.sub2.foo.bar.baz.fr.t.co')).to.equal('t.co');
    });

    it('should not break on specific RegExp characters', () => {
      expect(() => {
        // @see https://github.com/oncletom/tld.js/issues/33
        tldts.getDomain('www.weir)domain.com');
      }).not.to.throw();
      expect(() => {
        // @see https://github.com/oncletom/tld.js/issues/53
        tldts.getDomain(
          "http://('4drsteve.com', [], ['54.213.246.177'])/xmlrpc.php",
        );
      }).not.to.throw();
      expect(() => {
        // @see https://github.com/oncletom/tld.js/issues/53
        tldts.getDomain("('4drsteve.com', [], ['54.213.246.177'])");
      }).not.to.throw();
    });

    // @see https://github.com/oncletom/tld.js/issues/53
    it('should correctly extract domain from paths including "@" in the path', () => {
      const domain = tldts.getDomain(
        'http://cdn.jsdelivr.net/g/jquery@1.8.2,jquery.waypoints@2.0.2,qtip2@2.2.1,typeahead.js@0.9.3,sisyphus@0.1,jquery.slick@1.3.15,fastclick@1.0.3',
      );
      expect(domain).to.equal('jsdelivr.net');
    });

    it('should provide consistent results', () => {
      expect(tldts.getDomain('www.bl.uk')).to.equal('bl.uk');
      expect(tldts.getDomain('www.majestic12.co.uk')).to.equal(
        'majestic12.co.uk',
      );
    });

    // @see https://github.com/oncletom/tld.js/issues/25
    // @see https://github.com/oncletom/tld.js/issues/30
    it('existing rule constraint', () => {
      expect(tldts.getDomain('s3.amazonaws.com')).to.equal('amazonaws.com');
      expect(
        tldts.getDomain('s3.amazonaws.com', { allowPrivateDomains: true }),
      ).to.equal(null);
      expect(
        tldts.getDomain('blogspot.co.uk', { allowPrivateDomains: true }),
      ).to.equal(null);
      expect(tldts.getDomain('blogspot.co.uk')).to.equal('blogspot.co.uk');
    });

    it('should return nytimes.com even in a whole valid', () => {
      expect(tldts.getDomain('http://www.nytimes.com/')).to.equal('nytimes.com');
    });

    // @see https://github.com/oncletom/tld.js/issues/95
    it('should ignore the trailing dot in a domain', () => {
      expect(tldts.getDomain('https://www.google.co.uk./maps')).to.equal(
        'google.co.uk',
      );
    });
  });

  // Public-suffix extraction under the three private/ICANN option modes.
  describe('#getPublicSuffix', () => {
    describe('allowPrivateDomains', () => {
      const getPublicSuffix = (url: string) => {
        return tldts.getPublicSuffix(url, { allowPrivateDomains: true });
      };

      it('should return de if example.de', () => {
        expect(getPublicSuffix('example.de')).to.equal('de');
      });

      it('should return co.uk if google.co.uk', () => {
        expect(getPublicSuffix('google.co.uk')).to.equal('co.uk');
      });

      // @see https://github.com/oncletom/tld.js/pull/97
      it('should return www.ck if www.www.ck', () => {
        expect(getPublicSuffix('www.www.ck')).to.equal('ck');
      });

      // @see https://github.com/oncletom/tld.js/issues/30
      it('should return s3.amazonaws.com if s3.amazonaws.com', () => {
        expect(getPublicSuffix('s3.amazonaws.com')).to.equal('s3.amazonaws.com');
      });

      it('should return s3.amazonaws.com if www.s3.amazonaws.com', () => {
        expect(getPublicSuffix('www.s3.amazonaws.com')).to.equal(
          's3.amazonaws.com',
        );
      });

      it('should directly return the suffix if it matches a rule key', () => {
        expect(getPublicSuffix('youtube')).to.equal('youtube');
      });

      it('should return the suffix if a rule exists that has no exceptions', () => {
        expect(getPublicSuffix('microsoft.eu')).to.equal('eu');
      });

      // @see https://github.com/oncletom/tld.js/pull/97
      it('should return the string tldts if the publicsuffix does not exist', () => {
        expect(getPublicSuffix('www.freedom.nsa')).to.equal('nsa');
      });

      // @see https://github.com/oncletom/tld.js/issues/95
      it('should ignore the trailing dot in a domain', () => {
        expect(getPublicSuffix('https://www.google.co.uk./maps')).to.equal(
          'co.uk',
        );
      });
    });

    describe('ignoring Private domains', () => {
      const getPublicSuffix = (url: string) => {
        return tldts.getPublicSuffix(url, { allowPrivateDomains: false });
      };

      it('should return de if example.de', () => {
        expect(getPublicSuffix('example.de')).to.equal('de');
        expect(getPublicSuffix('example.foo.de')).to.equal('de');
      });

      it('should return de if example.gov', () => {
        expect(getPublicSuffix('example.gov')).to.equal('gov');
        expect(getPublicSuffix('example.foo.gov')).to.equal('gov');
      });

      it('should return de if example.edu', () => {
        expect(getPublicSuffix('example.edu')).to.equal('edu');
        expect(getPublicSuffix('example.foo.edu')).to.equal('edu');
      });

      it('should return de if example.org', () => {
        expect(getPublicSuffix('example.org')).to.equal('org');
        expect(getPublicSuffix('example.foo.org')).to.equal('org');
      });

      it('should return com if www.s3.amazonaws.com', () => {
        expect(getPublicSuffix('www.s3.amazonaws.com')).to.equal('com');
      });

      it('should return net if global.prod.fastly.net', () => {
        expect(getPublicSuffix('https://global.prod.fastly.net')).to.equal(
          'net',
        );
      });

      it('should return co.uk if google.co.uk', () => {
        expect(getPublicSuffix('google.co.uk')).to.equal('co.uk');
      });
    });

    describe('ignoring ICANN domains', () => {
      const getPublicSuffix = (url: string) => {
        return tldts.getPublicSuffix(url, {
          allowIcannDomains: false,
          allowPrivateDomains: true,
        });
      };

      it('should return s3.amazonaws.com if www.s3.amazonaws.com', () => {
        expect(getPublicSuffix('www.s3.amazonaws.com')).to.equal(
          's3.amazonaws.com',
        );
      });

      it('should return global.prod.fastly.net if global.prod.fastly.net', () => {
        expect(getPublicSuffix('https://global.prod.fastly.net')).to.equal(
          'global.prod.fastly.net',
        );
      });

      it('should return co.uk if google.co.uk', () => {
        expect(getPublicSuffix('google.co.uk')).to.equal('uk');
      });
    });
  });

  // Hostname extraction: trimming, trailing dots, scheme-less inputs,
  // credentials, ports, IPv6, and option handling.
  describe('#getHostname', () => {
    it('handles space only inputs', () => {
      expect(tldts.getHostname(' ')).to.equal('');
      expect(tldts.getHostname(' ')).to.equal('');
    });

    it('handles space corner-cases', () => {
      expect(tldts.getHostname(' a')).to.equal('a');
      expect(tldts.getHostname('a ')).to.equal('a');
      expect(tldts.getHostname(' a ')).to.equal('a');
      expect(tldts.getHostname(' a ')).to.equal('a');
    });

    it('should return a valid hostname as is', () => {
      expect(tldts.getHostname(' example.CO.uk ')).to.equal('example.co.uk');
      expect(tldts.getHostname(' example.CO.uk ')).to.equal('example.co.uk');
      expect(tldts.getHostname(' example.CO.uk ')).to.equal('example.co.uk');
    });

    it('should strip trailing dots', () => {
      expect(tldts.getHostname('example.co.uk.')).to.equal('example.co.uk');
      expect(tldts.getHostname('example.co.uk..')).to.equal('example.co.uk');
      expect(tldts.getHostname('example.co.uk...')).to.equal('example.co.uk');
    });

    it('should return the hostname of a scheme-less URL', () => {
      expect(
        tldts.getHostname('example.co.uk/some/path?and&query#hash'),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a scheme-less + port URL', () => {
      expect(
        tldts.getHostname('example.co.uk:8080/some/path?and&query#hash'),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a scheme-less + authentication URL', () => {
      expect(
        tldts.getHostname(
          'user:password@example.co.uk/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a scheme-less + passwordless URL', () => {
      expect(
        tldts.getHostname('user@example.co.uk/some/path?and&query#hash'),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a scheme-less + authentication + port URL', () => {
      expect(
        tldts.getHostname(
          'user:password@example.co.uk:8080/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a scheme-less + passwordless + port URL', () => {
      expect(
        tldts.getHostname('user@example.co.uk:8080/some/path?and&query#hash'),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a user-password same-scheme URL', () => {
      expect(
        tldts.getHostname(
          '//user:password@example.co.uk:8080/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
      expect(
        tldts.getHostname(
          ' //user:password@example.co.uk:8080/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a passwordless same-scheme URL', () => {
      expect(
        tldts.getHostname('//user@example.co.uk:8080/some/path?and&query#hash'),
      ).to.equal('example.co.uk');
      expect(
        tldts.getHostname(
          ' //user@example.co.uk:8080/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a complex user-password scheme URL', () => {
      expect(
        tldts.getHostname(
          'git+ssh://user:password@example.co.uk:8080/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
    });

    it('should return the hostname of a complex passwordless scheme URL', () => {
      expect(
        tldts.getHostname(
          'git+ssh://user@example.co.uk:8080/some/path?and&query#hash',
        ),
      ).to.equal('example.co.uk');
    });

    it('should return www.nytimes.com even with an URL as a parameter', () => {
      expect(
        tldts.getHostname(
          'http://www.nytimes.com/glogin?URI=http://www.notnytimes.com/2010/03/26/us/politics/26court.html&OQ=_rQ3D1Q26&OP=45263736Q2FKgi!KQ7Dr!K@@@Ko!fQ24KJg(Q3FQ5Cgg!Q60KQ60W.WKWQ22KQ60IKyQ3FKigQ24Q26!Q26(Q3FKQ60I(gyQ5C!Q2Ao!fQ24',
        ),
      ).to.equal('www.nytimes.com');
    });

    // @see https://github.com/oncletom/tld.js/issues/95
    it('should ignore the trailing dot in a domain', () => {
      expect(
        tldts.getHostname('http://example.co.uk./some/path?and&query#hash'),
      ).to.equal('example.co.uk');
    });

    it('should handle fragment URL', () => {
      expect(tldts.getHostname('http://example.co.uk.#hash')).to.equal(
        'example.co.uk',
      );
    });

    it('should handle parameter URL', () => {
      expect(tldts.getHostname('http://example.co.uk.?and&query#hash')).to.equal(
        'example.co.uk',
      );
    });

    it('should detect invalid protocol characters', () => {
      expect(tldts.getHostname('ht~tp://example.co.uk.')).to.equal(null);
    });

    it('should reject incomplete ipv6', () => {
      expect(tldts.getHostname('http://[::1')).to.equal(null);
    });

    it('should allow disabling parsing of hostnames', () => {
      expect(
        tldts.getHostname('http://foo.com', {
          extractHostname: false,
        }),
      ).to.equal('http://foo.com');
    });

    it('should allow disabling validation of hostnames', () => {
      expect(
        tldts.parse('http://f__.._oo.com', {
          validateHostname: true,
        }).hostname,
      ).to.equal(null);
      expect(
        tldts.parse('http://f__.._oo.com', {
          validateHostname: false,
        }).hostname,
      ).to.equal('f__.._oo.com');
    });

    it('should allow specifying no mixed inputs', () => {
      const url = 'http://foo.com/baz?param=31';
      expect(tldts.parse(url)).to.deep.equal(
        tldts.parse(url, { mixedInputs: false }),
      );
    });
  });

  describe('getDomainWithoutSuffix method', () => {
    it('should return null if the domain cannot be found', () => {
      expect(tldts.getDomainWithoutSuffix('not-a-validHost')).to.equal(null);
    });

    it('should return null if domain and suffix are the same', () => {
      expect(tldts.getDomainWithoutSuffix('co.uk')).to.equal(null);
    });

    it('should return domain without suffix if domain exists', () => {
      expect(tldts.getDomainWithoutSuffix('https://sub.foo.co.uk')).to.equal('foo');
    });
  });

  describe('getSubdomain method', () => {
    it('should return null if the domain cannot be found', () => {
      expect(tldts.getSubdomain('not-a-validHost')).to.equal(null);
    });

    it('should return the relevant subdomain of a hostname', () => {
      expect(tldts.getSubdomain('localhost')).to.equal(null);
      expect(tldts.getSubdomain('google.com')).to.equal('');
      expect(tldts.getSubdomain('fr.google.com')).to.equal('fr');
      expect(tldts.getSubdomain('random.fr.google.com')).to.equal('random.fr');
      expect(tldts.getSubdomain('my.custom.domain')).to.equal('my');
    });

    it('should return the relevant subdomain of a badly trimmed string', () => {
      expect(tldts.getSubdomain(' google.COM')).to.equal('');
      expect(tldts.getSubdomain(' fr.GOOGLE.COM ')).to.equal('fr');
      expect(tldts.getSubdomain(' random.FR.google.com')).to.equal('random.fr');
    });

    it('should return the subdomain of a tldts + SLD hostname', () => {
      expect(tldts.getSubdomain('love.fukushima.jp')).to.equal('');
      expect(tldts.getSubdomain('i.love.fukushima.jp')).to.equal('i');
      expect(tldts.getSubdomain('random.nuclear.strike.co.jp')).to.equal(
        'random.nuclear',
      );
    });

    it('should return the subdomain of a wildcard hostname', () => {
      expect(tldts.getSubdomain('google.co.uk')).to.equal('');
      expect(tldts.getSubdomain('fr.google.co.uk')).to.equal('fr');
      expect(tldts.getSubdomain('random.fr.google.co.uk')).to.equal('random.fr');
    });

    // @see https://github.com/oncletom/tld.js/issues/25
    it('should return the subdomain of reserved subdomains', () => {
      expect(tldts.getSubdomain('blogspot.co.uk')).to.equal('');
      expect(tldts.getSubdomain('emergency.blogspot.co.uk')).to.equal(
        'emergency',
      );
    });

    it('should not break on specific RegExp characters', () => {
      expect(() => {
        // @see https://github.com/oncletom/tld.js/issues/33
        tldts.getSubdomain('www.weir)domain.com');
      }).not.to.throw();
      expect(() => {
        // @see https://github.com/oncletom/tld.js/issues/53
        tldts.getSubdomain(
          "http://('4drsteve.com', [], ['54.213.246.177'])/xmlrpc.php",
        );
      }).not.to.throw();
      expect(() => {
        // @see https://github.com/oncletom/tld.js/issues/53
        tldts.getSubdomain("('4drsteve.com', [], ['54.213.246.177'])");
      }).not.to.throw();
    });

    // @see https://github.com/oncletom/tld.js/issues/53
    it('should correctly extract domain from paths including "@" in the path', () => {
      const domain = tldts.getSubdomain(
        'http://cdn.jsdelivr.net/g/jquery@1.8.2,jquery.waypoints@2.0.2,qtip2@2.2.1,typeahead.js@0.9.3,sisyphus@0.1,jquery.slick@1.3.15,fastclick@1.0.3',
      );
      expect(domain).to.equal('cdn');
    });

    // @see https://github.com/oncletom/tld.js/issues/35
    it('should provide consistent results', () => {
      expect(tldts.getSubdomain('www.bl.uk')).to.equal('www');
      expect(tldts.getSubdomain('www.majestic12.co.uk')).to.equal('www');
    });

    // @see https://github.com/oncletom/tld.js/issues/95
    it('should ignore the trailing dot in a domain', () => {
      expect(tldts.getSubdomain('random.fr.google.co.uk.')).to.equal(
        'random.fr',
      );
    });
  });

  // Full parse() result objects, including IP handling and wildcard fallback.
  describe('#parse', () => {
    // Expected parse() shape for an IP-only input (everything null but the
    // hostname and the isIp flag).
    const mockResponse = (hostname: string | null) => {
      return {
        domain: null,
        domainWithoutSuffix: null,
        hostname,
        isIcann: null,
        isIp: true,
        isPrivate: null,
        publicSuffix: null,
        subdomain: null,
      };
    };

    it('fallback to wildcard', () => {
      expect(tldts.parse('https://foo.bar.badasdasdada')).to.deep.equal({
        domain: 'bar.badasdasdada',
        domainWithoutSuffix: 'bar',
        hostname: 'foo.bar.badasdasdada',
        isIcann: false,
        isIp: false,
        isPrivate: false,
        publicSuffix: 'badasdasdada',
        subdomain: 'foo',
      });
    });

    it('should handle data URLs', () => {
      expect(
        tldts.parse('data:image/png,some-base-64-value'),
      ).to.deep.equal({ ...mockResponse(null), isIp: null });
    });

    it('should handle ipv6 addresses properly', () => {
      expect(
        tldts.parse('http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]'),
      ).to.deep.equal(mockResponse('2001:0db8:85a3:0000:0000:8a2e:0370:7334'));
      expect(
        tldts.parse('http://user:pass@[::1]/segment/index.html?query#frag'),
      ).to.deep.equal(mockResponse('::1'));
      expect(tldts.parse('https://[::1]')).to.deep.equal(mockResponse('::1'));
      expect(tldts.parse('http://[1080::8:800:200C:417A]/foo')).to.deep.equal(
        mockResponse('1080::8:800:200c:417a'),
      );
      expect(tldts.parse('http://[1080::8:800:200C:417A]:4242/foo')).to.deep.equal(
        mockResponse('1080::8:800:200c:417a'),
      );
    });

    it('handles ipv6 address when extractHostname is false', () => {
      const hostname = '1080::8:800:200C:417A';
      expect(tldts.parse(hostname, { extractHostname: false })).to.deep.equal({
        domain: null,
        domainWithoutSuffix: null,
        hostname,
        isIcann: null,
        isIp: true,
        isPrivate: null,
        publicSuffix: null,
        subdomain: null,
      });
    });

    it('handles ipv6 address when extractHostname is false (with brackets)', () => {
      const hostname = '[1080::8:800:200C:417A]';
      expect(tldts.parse(hostname, { extractHostname: false })).to.deep.equal({
        domain: null,
        domainWithoutSuffix: null,
        hostname,
        isIcann: null,
        isIp: true,
        isPrivate: null,
        publicSuffix: null,
        subdomain: null,
      });
    });

    it('should handle ipv4 addresses properly', () => {
      expect(tldts.parse('http://192.168.0.1/')).to.deep.equal(
        mockResponse('192.168.0.1'),
      );
    });

    it('disable ip detection', () => {
      expect(tldts.parse('http://192.168.0.1/', { detectIp: false })).to.deep.equal({
        domain: '0.1',
        domainWithoutSuffix: '0',
        hostname: '192.168.0.1',
        isIcann: false,
        isIp: null,
        isPrivate: false,
        publicSuffix: '1',
        subdomain: '192.168',
      });
    });
  });

  // The validHosts option injects extra hostnames treated as valid suffix
  // roots (e.g. 'localhost').
  describe('validHosts', () => {
    describe('non-empty array', () => {
      const options = {
        validHosts: ['localhost'],
      };

      it('should return the known valid host', () => {
        expect(tldts.getDomain('localhost', options)).to.equal('localhost');
        expect(tldts.getDomain('subdomain.localhost', options)).to.equal(
          'localhost',
        );
        expect(tldts.getDomain('subdomain.notlocalhost', options)).to.equal(
          'subdomain.notlocalhost',
        );
        expect(tldts.getDomain('subdomain.not-localhost', options)).to.equal(
          'subdomain.not-localhost',
        );
      });

      // @see https://github.com/oncletom/tld.js/issues/66
      it('should return the subdomain of a validHost', () => {
        expect(tldts.getSubdomain('vhost.localhost', options)).to.equal('vhost');
      });

      it('should fallback to normal extraction if no match in validHost', () => {
        expect(tldts.getSubdomain('vhost.evil.com', options)).to.equal('vhost');
      });
    });
  });
}
the_stack
import "jest"; import React from "react"; import ReactDOM from "react-dom"; import { act } from "react-dom/test-utils"; import { inMemory } from "@hickory/in-memory"; import { createRouter, prepareRoutes } from "@curi/router"; import { createRouterComponent, useNavigationFocus, useResponse } from "@curi/react-dom"; describe("useNavigationFocus", () => { let node; let router, Router; let routes = prepareRoutes([ { name: "Home", path: "" }, { name: "About", path: "about" } ]); beforeEach(() => { node = document.createElement("div"); document.body.appendChild(node); router = createRouter(inMemory, routes); Router = createRouterComponent(router); }); afterEach(() => { ReactDOM.unmountComponentAtNode(node); document.body.removeChild(node); }); describe("mounting", () => { it("focuses ref when mounting", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref); return <div id="test" tabIndex={-1} ref={ref} />; } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); let wrapper = document.querySelector("#test"); let focused = document.activeElement; expect(focused).toBe(wrapper); }); it("warns if ref isn't attached to an element (body focused)", () => { let realWarn = console.warn; let fakeWarn = (console.warn = jest.fn()); let routes = prepareRoutes([ { name: "Home", path: "" } ]); let router = createRouter(inMemory, routes); let Router = createRouterComponent(router); function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref); return <div id="test" tabIndex={-1} />; } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); expect(document.activeElement).toBe(document.body); expect(fakeWarn.mock.calls[0][0]).toBe( "There is no element to focus. Did you forget to add the ref to an element?" 
); console.warn = realWarn; }); }); describe("updates", () => { it("does not re-focus ref for regular re-renders", () => { function Focuser({ children }) { let ref = React.useRef(null); useNavigationFocus(ref); return ( <div id="test" tabIndex={-1} ref={ref}> {children} </div> ); } act(() => { ReactDOM.render( <Router> <Focuser> <input type="text" /> </Focuser> </Router>, node ); }); let wrapper = document.querySelector("#test"); let initialFocus = document.activeElement; expect(initialFocus).toBe(wrapper); let input = document.querySelector("input"); // steal the focus input.focus(); let stolenFocus = document.activeElement; expect(stolenFocus).toBe(input); act(() => { ReactDOM.render( <Router> <Focuser> <input type="number" /> </Focuser> </Router>, node ); }); expect(stolenFocus).toBe(input); }); describe("new response", () => { it("re-focuses ref for new response re-renders", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref); return ( <div id="test" tabIndex={-1} ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); let input = document.querySelector("input"); let wrapper = input.parentElement; let initialFocused = document.activeElement; expect(wrapper).toBe(initialFocused); // steal the focus input.focus(); let stolenFocus = document.activeElement; expect(input).toBe(stolenFocus); act(() => { // navigate and verify wrapper is re-focused let url = router.url({ name: "About" }); router.navigate({ url }); }); let postNavFocus = document.activeElement; expect(wrapper).toBe(postNavFocus); }); it("focuses new ref for new responses", () => { let Home = React.forwardRef((_, ref: React.Ref<any>) => ( <div id="home" tabIndex={-1} ref={ref}> <h1>Home</h1> </div> )); let About = React.forwardRef((_, ref: React.Ref<any>) => ( <div id="about" tabIndex={-1} ref={ref}> <h1>About</h1> </div> )); let routes = prepareRoutes([ { name: "Home", path: "", respond() { return { body: Home 
}; } }, { name: "About", path: "about", respond() { return { body: About }; } } ]); let router = createRouter(inMemory, routes); let Router = createRouterComponent(router); function Focuser() { let { response } = useResponse(); let { body: Body } = response; let ref = React.useRef(null); useNavigationFocus(ref); return <Body ref={ref} />; } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); let homeDiv = node.querySelector("#home"); expect(document.activeElement).toBe(homeDiv); act(() => { let url = router.url({ name: "About" }); router.navigate({ url }); }); let aboutDiv = node.querySelector("#about"); expect(document.activeElement).toBe(aboutDiv); }); it("warns if ref isn't attached to an element (body focused)", () => { let realWarn = console.warn; let fakeWarn = (console.warn = jest.fn()); let Home = ({ innerRef }) => ( <div id="home" tabIndex={-1} ref={innerRef}> <h1>Home</h1> </div> ); let About = () => ( <div id="about"> <h1>About</h1> </div> ); let routes = prepareRoutes([ { name: "Home", path: "", respond() { return { body: Home }; } }, { name: "About", path: "about", respond() { return { body: About }; } } ]); let router = createRouter(inMemory, routes); let Router = createRouterComponent(router); function Focuser() { let { response } = useResponse(); let { body: Body } = response; let ref = React.useRef(null); useNavigationFocus(ref); return <Body innerRef={ref} />; } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); let homeDiv = node.querySelector("#home"); expect(document.activeElement).toBe(homeDiv); expect(fakeWarn.mock.calls.length).toBe(0); act(() => { let url = router.url({ name: "About" }); router.navigate({ url }); }); expect(document.activeElement).toBe(document.body); expect(fakeWarn.mock.calls[0][0]).toBe( "There is no element to focus. Did you forget to add the ref to an element?" 
); console.warn = realWarn; }); }); }); describe("preserve", () => { describe("false (default)", () => { it("re-focuses for new response re-renders", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref, { preserve: false }); return ( <div id="test" tabIndex={-1} ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); let input = document.querySelector("input"); let wrapper = input.parentElement; let initialFocused = document.activeElement; expect(wrapper).toBe(initialFocused); // steal the focus input.focus(); let stolenFocus = document.activeElement; expect(input).toBe(stolenFocus); act(() => { // navigate and verify wrapper is re-focused let url = router.url({ name: "About" }); router.navigate({ url }); }); let postNavFocus = document.activeElement; expect(wrapper).toBe(postNavFocus); }); }); describe("true", () => { it("does not focus ref if something is already ", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref, { preserve: true }); return ( <div id="test" tabIndex={-1} ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); let input = document.querySelector("input"); let wrapper = input.parentElement; let initialFocused = document.activeElement; expect(wrapper).toBe(initialFocused); // steal the focus input.focus(); let stolenFocus = document.activeElement; expect(input).toBe(stolenFocus); act(() => { // navigate and verify wrapper is re-focused let url = router.url({ name: "About" }); router.navigate({ url }); }); let postNavFocus = document.activeElement; expect(postNavFocus).toBe(input); }); }); }); describe("preventScroll", () => { let realFocus = HTMLElement.prototype.focus; let fakeFocus; beforeEach(() => { fakeFocus = HTMLElement.prototype.focus = jest.fn(); }); afterEach(() => { fakeFocus.mockReset(); HTMLElement.prototype.focus = realFocus; }); it("calls focus({ 
preventScroll: false }} when not provided", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref); return ( <div id="test" tabIndex={-1} ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); expect(fakeFocus.mock.calls[0][0]).toMatchObject({ preventScroll: false }); }); it("calls focus({ preventScroll: true }} when preventScroll = true", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref, { preventScroll: true }); return ( <div id="test" tabIndex={-1} ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); expect(fakeFocus.mock.calls[0][0]).toMatchObject({ preventScroll: true }); }); it("calls focus({ preventScroll: false }} when preventScroll = false", () => { function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref, { preventScroll: false }); return ( <div id="test" tabIndex={-1} ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); expect(fakeFocus.mock.calls[0][0]).toMatchObject({ preventScroll: false }); }); }); describe("tabIndex", () => { it("warns when ref element does not have a tabIndex attribute", () => { let realWarn = console.warn; let fakeWarn = (console.warn = jest.fn()); function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref); return ( <div id="test" ref={ref}> <input type="text" /> </div> ); } act(() => { ReactDOM.render( <Router> <Focuser /> </Router>, node ); }); expect(fakeWarn.mock.calls.length).toBe(1); console.warn = realWarn; }); it("does not warn when ref element does not have a tabIndex attribute, but ele is already focusable", () => { let realWarn = console.warn; let fakeWarn = (console.warn = jest.fn()); function Focuser() { let ref = React.useRef(null); useNavigationFocus(ref); return ( <div id="test"> <input type="text" ref={ref} /> </div> ); } 
ReactDOM.render( <Router> <Focuser /> </Router>, node ); expect(fakeWarn.mock.calls.length).toBe(0); console.warn = realWarn; }); }); });
the_stack
//@ts-check ///<reference path="devkit.d.ts" /> declare namespace DevKit { namespace Formmsdyn_liveconversation_Information { interface Tabs { } interface Body { /** Subject associated with the conversation record */ msdyn_subject: DevKit.Controls.String; /** Unique identifier of the user or team who owns the conversation record. */ OwnerId: DevKit.Controls.Lookup; } } class Formmsdyn_liveconversation_Information extends DevKit.IForm { /** * DynamicsCrm.DevKit form msdyn_liveconversation_Information * @param executionContext the execution context * @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form msdyn_liveconversation_Information */ Body: DevKit.Formmsdyn_liveconversation_Information.Body; } namespace FormOngoing_Conversation_Main_Form { interface Tabs { } interface Body { /** Subject associated with the conversation record */ msdyn_subject: DevKit.Controls.String; /** Unique identifier of the user or team who owns the conversation record. */ OwnerId: DevKit.Controls.Lookup; } } class FormOngoing_Conversation_Main_Form extends DevKit.IForm { /** * DynamicsCrm.DevKit form Ongoing_Conversation_Main_Form * @param executionContext the execution context * @param defaultWebResourceName default resource name. 
E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form Ongoing_Conversation_Main_Form */ Body: DevKit.FormOngoing_Conversation_Main_Form.Body; } class msdyn_liveconversationApi { /** * DynamicsCrm.DevKit msdyn_liveconversationApi * @param entity The entity object */ constructor(entity?: any); /** * Get the value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedValue(alias: string, isMultiOptionSet?: boolean): any; /** * Get the formatted value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string; /** The entity object */ Entity: any; /** The entity name */ EntityName: string; /** The entity collection name */ EntityCollectionName: string; /** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */ "@odata.etag": string; /** Unique identifier of the user who created the Ongoing Conversation. */ CreatedBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when the Ongoing Conversation was created. */ CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Unique identifier of the delegate user who created the Ongoing Conversation. */ CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Sequence number of the import that created this record. */ ImportSequenceNumber: DevKit.WebApi.IntegerValue; /** Unique identifier of user who last modified the Ongoing Conversation. */ ModifiedBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when Ongoing Conversation was last modified. 
*/ ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Unique identifier of the delegate user who last modified the Ongoing Conversation. */ ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when last agent was assigned to the conversation */ msdyn_activeagentassignedon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Last agent assigned to the conversation */ msdyn_activeagentid: DevKit.WebApi.LookupValue; /** Unique identifier for Queue associated with Conversation. */ msdyn_cdsqueueid: DevKit.WebApi.LookupValue; /** The channel(s) in the conversation. */ msdyn_channel: DevKit.WebApi.MultiOptionSetValue; /** Date and time when conversation was closed */ msdyn_closedon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Date and time when conversation was created */ msdyn_createdon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; msdyn_customer_msdyn_liveconversation_account: DevKit.WebApi.LookupValue; msdyn_customer_msdyn_liveconversation_contact: DevKit.WebApi.LookupValue; /** Customer Sentiment Label powered by Sentiment Service */ msdyn_customersentimentlabel: DevKit.WebApi.OptionSetValue; /** Number of times conversation was escalated to Supervisor i.e. 
transferred to Supervisor */ msdyn_escalationcount: DevKit.WebApi.IntegerValue; /** Time when conversation was initiated */ msdyn_initiatedon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Id of this ongoing conversation record */ msdyn_liveconversationId: DevKit.WebApi.GuidValue; /** Work stream associated to the conversation */ msdyn_liveworkstreamid: DevKit.WebApi.LookupValue; /** Date and time when conversation was last modified */ msdyn_modifiedon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Last agent session */ msdyn_oclastsessionid: DevKit.WebApi.StringValue; /** Unique identifier for msdyn_omnichannelqueue associated with Conversation */ msdyn_queueid: DevKit.WebApi.LookupValue; /** Date and time when conversation was started */ msdyn_startedon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** State of the conversation record */ msdyn_statecode: DevKit.WebApi.OptionSetValue; /** Reason for the status of Ongoing conversation record */ msdyn_statuscode: DevKit.WebApi.OptionSetValue; /** Date and time when conversation status was last modified */ msdyn_statusupdatedon_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Subject associated with the conversation record */ msdyn_subject: DevKit.WebApi.StringValue; /** Conversation Title */ msdyn_title: DevKit.WebApi.StringValue; /** Number of times the conversation was transferred */ msdyn_transfercount: DevKit.WebApi.IntegerValue; /** Work distribution mode of the associated work stream */ msdyn_workstreamworkdistributionmode: DevKit.WebApi.OptionSetValue; /** Date and time that the record was migrated. */ OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */ OwnerId_systemuser: DevKit.WebApi.LookupValue; /** Enter the team who is assigned to manage the record. 
This field is updated every time the record is assigned to a different team */ OwnerId_team: DevKit.WebApi.LookupValue; /** Unique identifier of the business unit that owns the Ongoing Conversation. */ OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly; /** Unique identifier of the team that owns the Ongoing Conversation. */ OwningTeam: DevKit.WebApi.LookupValueReadonly; /** Unique identifier of the user that owns the Ongoing Conversation. */ OwningUser: DevKit.WebApi.LookupValueReadonly; /** Status of the Ongoing conversation record */ statecode: DevKit.WebApi.OptionSetValue; /** Reason for the status of Ongoing conversation record */ statuscode: DevKit.WebApi.OptionSetValue; /** For internal use only. */ TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue; /** Time zone code that was in use when the record was created. */ UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue; /** Version Number */ VersionNumber: DevKit.WebApi.BigIntValueReadonly; } } declare namespace OptionSet { namespace msdyn_liveconversation { enum msdyn_channel { /** 192390000 */ Co_browse, /** 192350002 */ Custom, /** 192350000 */ Entity_Records, /** 192330000 */ Facebook, /** 192310000 */ LINE, /** 192360000 */ Live_chat, /** 19241000 */ Microsoft_Teams, /** 192400000 */ Screen_sharing, /** 192340000 */ SMS, /** 192350001 */ Twitter, /** 192380000 */ Video, /** 192370000 */ Voice, /** 192320000 */ WeChat, /** 192300000 */ WhatsApp } enum msdyn_customersentimentlabel { /** 0 */ NA, /** 8 */ Negative, /** 10 */ Neutral, /** 12 */ Positive, /** 9 */ Slightly_negative, /** 11 */ Slightly_positive, /** 7 */ Very_negative, /** 13 */ Very_positive } enum msdyn_statecode { /** 1 */ Active, /** 3 */ Closed, /** 0 */ Open, /** 2 */ Waiting, /** 4 */ Wrap_up } enum msdyn_statuscode { /** 2 */ Active, /** 4 */ Closed, /** 1 */ Open, /** 3 */ Waiting, /** 5 */ Wrap_up } enum msdyn_workstreamworkdistributionmode { /** 192350001 */ Pick, /** 192350000 */ Push } enum statecode { /** 1 */ Active, /** 
3 */ Closed, /** 0 */ Open, /** 2 */ Waiting, /** 4 */ Wrap_up } enum statuscode { /** 2 */ Active, /** 4 */ Closed, /** 1 */ Open, /** 3 */ Waiting, /** 5 */ Wrap_up } enum RollupState { /** 0 - Attribute value is yet to be calculated */ NotCalculated, /** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */ Calculated, /** 2 - Attribute value calculation lead to overflow error */ OverflowError, /** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */ OtherError, /** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */ RetryLimitExceeded, /** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */ HierarchicalRecursionLimitReached, /** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */ LoopDetected } } } //{'JsForm':['Information','Main Form'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
the_stack
export type GLTFId = number; /** * Indices of those attributes that deviate from their initialization value. */ export interface AccessorSparseIndices { /** * The index of the bufferView with sparse indices. Referenced bufferView can't have ARRAY_BUFFER or ELEMENT_ARRAY_BUFFER target. */ bufferView: GLTFId; /** * The offset relative to the start of the bufferView in bytes. Must be aligned. */ byteOffset?: number; /** * The indices data type. */ componentType: 5121 | 5123 | 5125 | number; extensions?: any; extras?: any; // [k: string]: any; } /** * Array of size `accessor.sparse.count` times number of components storing the displaced accessor attributes pointed by `accessor.sparse.indices`. */ export interface AccessorSparseValues { /** * The index of the bufferView with sparse values. Referenced bufferView can't have ARRAY_BUFFER or ELEMENT_ARRAY_BUFFER target. */ bufferView: GLTFId; /** * The offset relative to the start of the bufferView in bytes. Must be aligned. */ byteOffset?: number; extensions?: any; extras?: any; // [k: string]: any; } /** * Sparse storage of attributes that deviate from their initialization value. */ export interface AccessorSparse { /** * Number of entries stored in the sparse array. */ count: number; /** * Index array of size `count` that points to those accessor attributes that deviate from their initialization value. Indices must strictly increase. */ indices: AccessorSparseIndices; /** * Array of size `count` times number of components, storing the displaced accessor attributes pointed by `indices`. Substituted values must have the same `componentType` and number of components as the base accessor. */ values: AccessorSparseValues; extensions?: any; extras?: any; // [k: string]: any; } /** * A typed view into a bufferView. A bufferView contains raw binary data. An accessor provides a typed view into a bufferView or a subset of a bufferView similar to how WebGL's `vertexAttribPointer()` defines an attribute in a buffer. 
*/ export interface Accessor { /** * The index of the bufferView. */ bufferView?: GLTFId; /** * The offset relative to the start of the bufferView in bytes. */ byteOffset?: number; /** * The datatype of components in the attribute. */ componentType: 5120 | 5121 | 5122 | 5123 | 5125 | 5126 | number; /** * Specifies whether integer data values should be normalized. */ normalized?: boolean; /** * The number of attributes referenced by this accessor. */ count: number; /** * Specifies if the attribute is a scalar, vector, or matrix. */ type: 'SCALAR' | 'VEC2' | 'VEC3' | 'VEC4' | 'MAT2' | 'MAT3' | 'MAT4' | string; /** * Maximum value of each component in this attribute. */ max?: number[]; /** * Minimum value of each component in this attribute. */ min?: number[]; /** * Sparse storage of attributes that deviate from their initialization value. */ sparse?: AccessorSparse; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * The index of the node and TRS property that an animation channel targets. */ export interface AnimationChannelTarget { /** * The index of the node to target. */ node?: GLTFId; /** * The name of the node's TRS property to modify, or the "weights" of the Morph Targets it instantiates. For the "translation" property, the values that are provided by the sampler are the translation along the x, y, and z axes. For the "rotation" property, the values are a quaternion in the order (x, y, z, w), where w is the scalar. For the "scale" property, the values are the scaling factors along the x, y, and z axes. */ path: 'translation' | 'rotation' | 'scale' | 'weights' | string; extensions?: any; extras?: any; // [k: string]: any; } /** * Targets an animation's sampler at a node's property. */ export interface AnimationChannel { /** * The index of a sampler in this animation used to compute the value for the target. */ sampler: GLTFId; /** * The index of the node and TRS property to target. 
*/ target: AnimationChannelTarget; extensions?: any; extras?: any; // [k: string]: any; } /** * Combines input and output accessors with an interpolation algorithm to define a keyframe graph (but not its target). */ export interface AnimationSampler { /** * The index of an accessor containing keyframe input values, e.g., time. */ input: GLTFId; /** * Interpolation algorithm. */ interpolation?: 'LINEAR' | 'STEP' | 'CUBICSPLINE' | string; /** * The index of an accessor, containing keyframe output values. */ output: GLTFId; extensions?: any; extras?: any; // [k: string]: any; } /** * A keyframe animation. */ export interface Animation { /** * An array of channels, each of which targets an animation's sampler at a node's property. Different channels of the same animation can't have equal targets. */ channels: AnimationChannel[]; /** * An array of samplers that combines input and output accessors with an interpolation algorithm to define a keyframe graph (but not its target). */ samplers: AnimationSampler[]; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * Metadata about the glTF asset. */ export interface Asset { /** * A copyright message suitable for display to credit the content creator. */ copyright?: string; /** * Tool that generated this glTF model. Useful for debugging. */ generator?: string; /** * The glTF version that this asset targets. */ version: string; /** * The minimum glTF version that this asset targets. */ minVersion?: string; extensions?: any; extras?: any; // [k: string]: any; } /** * A buffer points to binary geometry, animation, or skins. */ export interface Buffer { /** * The uri of the buffer. */ uri?: string; /** * The length of the buffer in bytes. */ byteLength: number; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * A view into a buffer generally representing a subset of the buffer. */ export interface BufferView { /** * The index of the buffer. 
*/ buffer: GLTFId; /** * The offset into the buffer in bytes. */ byteOffset?: number; /** * The length of the bufferView in bytes. */ byteLength: number; /** * The stride, in bytes. */ byteStride?: number; /** * The target that the GPU buffer should be bound to. */ target?: 34962 | 34963 | number; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * An orthographic camera containing properties to create an orthographic projection matrix. */ export interface CameraOrthographic { /** * The floating-point horizontal magnification of the view. Must not be zero. */ xmag: number; /** * The floating-point vertical magnification of the view. Must not be zero. */ ymag: number; /** * The floating-point distance to the far clipping plane. `zfar` must be greater than `znear`. */ zfar: number; /** * The floating-point distance to the near clipping plane. */ znear: number; extensions?: any; extras?: any; // [k: string]: any; } /** * A perspective camera containing properties to create a perspective projection matrix. */ export interface CameraPerspective { /** * The floating-point aspect ratio of the field of view. */ aspectRatio?: number; /** * The floating-point vertical field of view in radians. */ yfov: number; /** * The floating-point distance to the far clipping plane. */ zfar?: number; /** * The floating-point distance to the near clipping plane. */ znear: number; extensions?: any; extras?: any; // [k: string]: any; } /** * A camera's projection. A node can reference a camera to apply a transform to place the camera in the scene. */ export interface Camera { /** * An orthographic camera containing properties to create an orthographic projection matrix. */ orthographic?: CameraOrthographic; /** * A perspective camera containing properties to create a perspective projection matrix. */ perspective?: CameraPerspective; /** * Specifies if the camera uses a perspective or orthographic projection. 
*/ type: 'perspective' | 'orthographic' | string; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * Image data used to create a texture. Image can be referenced by URI or `bufferView` index. `mimeType` is required in the latter case. */ export interface Image { /** * The uri of the image. */ uri?: string; /** * The image's MIME type. */ mimeType?: 'image/jpeg' | 'image/png' | string; /** * The index of the bufferView that contains the image. Use this instead of the image's uri property. */ bufferView?: GLTFId; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * Reference to a texture. */ export interface TextureInfo { /** * The index of the texture. */ index: GLTFId; /** * The set index of texture's TEXCOORD attribute used for texture coordinate mapping. */ texCoord?: number; extensions?: any; extras?: any; // [k: string]: any; } /** * A set of parameter values that are used to define the metallic-roughness material model from Physically-Based Rendering (PBR) methodology. */ export interface MaterialPbrMetallicRoughness { /** * The material's base color factor. */ baseColorFactor?: number[]; /** * The base color texture. */ baseColorTexture?: TextureInfo; /** * The metalness of the material. */ metallicFactor?: number; /** * The roughness of the material. */ roughnessFactor?: number; /** * The metallic-roughness texture. */ metallicRoughnessTexture?: TextureInfo; extensions?: any; extras?: any; // [k: string]: any; } export interface MaterialNormalTextureInfo { index: any; texCoord?: any; /** * The scalar multiplier applied to each normal vector of the normal texture. */ scale?: number; extensions?: any; extras?: any; // [k: string]: any; } export interface MaterialOcclusionTextureInfo { index: any; texCoord?: any; /** * A scalar multiplier controlling the amount of occlusion applied. */ strength?: number; extensions?: any; extras?: any; // [k: string]: any; } /** * The material appearance of a primitive. 
*/ export interface Material { name?: any; extensions?: any; extras?: any; /** * A set of parameter values that are used to define the metallic-roughness material model from Physically-Based Rendering (PBR) methodology. When not specified, all the default values of `pbrMetallicRoughness` apply. */ pbrMetallicRoughness?: MaterialPbrMetallicRoughness; /** * The normal map texture. */ normalTexture?: MaterialNormalTextureInfo; /** * The occlusion map texture. */ occlusionTexture?: MaterialOcclusionTextureInfo; /** * The emissive map texture. */ emissiveTexture?: TextureInfo; /** * The emissive color of the material. */ emissiveFactor?: number[]; /** * The alpha rendering mode of the material. */ alphaMode?: 'OPAQUE' | 'MASK' | 'BLEND' | string; /** * The alpha cutoff value of the material. */ alphaCutoff?: number; /** * Specifies whether the material is double sided. */ doubleSided?: boolean; // [k: string]: any; } /** * Geometry to be rendered with the given material. */ export interface MeshPrimitive { /** * A dictionary object, where each key corresponds to mesh attribute semantic and each value is the index of the accessor containing attribute's data. */ attributes: { [k: string]: GLTFId; }; /** * The index of the accessor that contains the indices. */ indices?: GLTFId; /** * The index of the material to apply to this primitive when rendering. */ material?: GLTFId; /** * The type of primitives to render. */ mode?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | number; /** * An array of Morph Targets, each Morph Target is a dictionary mapping attributes (only `POSITION`, `NORMAL`, and `TANGENT` supported) to their deviations in the Morph Target. */ targets?: { [k: string]: GLTFId; }[]; extensions?: any; extras?: any; // [k: string]: any; } /** * A set of primitives to be rendered. A node can contain one mesh. A node's transform places the mesh in the scene. */ export interface Mesh { id?: string; /** * An array of primitives, each defining geometry to be rendered with a material. 
*/ primitives: MeshPrimitive[]; /** * Array of weights to be applied to the Morph Targets. */ weights?: number[]; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * A node in the node hierarchy. When the node contains `skin`, all `mesh.primitives` must contain `JOINTS_0` and `WEIGHTS_0` attributes. A node can have either a `matrix` or any combination of `translation`/`rotation`/`scale` (TRS) properties. TRS properties are converted to matrices and postmultiplied in the `T * R * S` order to compose the transformation matrix; first the scale is applied to the vertices, then the rotation, and then the translation. If none are provided, the transform is the identity. When a node is targeted for animation (referenced by an animation.channel.target), only TRS properties may be present; `matrix` will not be present. */ export interface Node { /** * The index of the camera referenced by this node. */ camera?: GLTFId; /** * The indices of this node's children. */ children?: GLTFId[]; /** * The index of the skin referenced by this node. */ skin?: GLTFId; /** * A floating-point 4x4 transformation matrix stored in column-major order. */ matrix?: number[]; /** * The index of the mesh in this node. */ mesh?: GLTFId; /** * The node's unit quaternion rotation in the order (x, y, z, w), where w is the scalar. */ rotation?: number[]; /** * The node's non-uniform scale, given as the scaling factors along the x, y, and z axes. */ scale?: number[]; /** * The node's translation along the x, y, and z axes. */ translation?: number[]; /** * The weights of the instantiated Morph Target. Number of elements must match number of Morph Targets of used mesh. */ weights?: number[]; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * Texture sampler properties for filtering and wrapping modes. */ export interface Sampler { /** * Magnification filter. */ magFilter?: 9728 | 9729 | number; /** * Minification filter. 
*/ minFilter?: 9728 | 9729 | 9984 | 9985 | 9986 | 9987 | number; /** * s wrapping mode. */ wrapS?: 33071 | 33648 | 10497 | number; /** * t wrapping mode. */ wrapT?: 33071 | 33648 | 10497 | number; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * The root nodes of a scene. */ export interface Scene { /** * The indices of each root node. */ nodes?: GLTFId[]; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * Joints and matrices defining a skin. */ export interface Skin { /** * The index of the accessor containing the floating-point 4x4 inverse-bind matrices. The default is that each matrix is a 4x4 identity matrix, which implies that inverse-bind matrices were pre-applied. */ inverseBindMatrices?: GLTFId; /** * The index of the node used as a skeleton root. When undefined, joints transforms resolve to scene root. */ skeleton?: GLTFId; /** * Indices of skeleton nodes, used as joints in this skin. */ joints: GLTFId[]; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * A texture and its sampler. */ export interface Texture { /** * The index of the sampler used by this texture. When undefined, a sampler with repeat wrapping and auto filtering should be used. */ sampler?: GLTFId; /** * The index of the image used by this texture. */ source?: GLTFId; name?: any; extensions?: any; extras?: any; // [k: string]: any; } /** * The root object for a glTF asset. */ export interface GLTF { /** * Names of glTF extensions used somewhere in this asset. */ extensionsUsed?: string[]; /** * Names of glTF extensions required to properly load this asset. */ extensionsRequired?: string[]; /** * An array of accessors. */ accessors?: Accessor[]; /** * An array of keyframe animations. */ animations?: Animation[]; /** * Metadata about the glTF asset. */ asset: Asset; /** * An array of buffers. */ buffers?: Buffer[]; /** * An array of bufferViews. */ bufferViews?: BufferView[]; /** * An array of cameras. 
*/ cameras?: Camera[]; /** * An array of images. */ images?: Image[]; /** * An array of materials. */ materials?: Material[]; /** * An array of meshes. */ meshes?: Mesh[]; /** * An array of nodes. */ nodes?: Node[]; /** * An array of samplers. */ samplers?: Sampler[]; /** * The index of the default scene. */ scene?: GLTFId; /** * An array of scenes. */ scenes?: Scene[]; /** * An array of skins. */ skins?: Skin[]; /** * An array of textures. */ textures?: Texture[]; extensions?: any; extras?: any; // [k: string]: any; } // GLTF Extensions /* eslint-disable camelcase */ /** * @see https://github.com/KhronosGroup/glTF/tree/master/extensions/1.0/Khronos/KHR_binary_glTF * TODO - this can be used on both images and shaders */ export type GLTF_KHR_binary_glTF = { bufferView: number; // required for images but not shaders mimeType?: string; height?: number; width?: number; extras?: any; }; /** * @see https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_draco_mesh_compression */ export type GLTF_KHR_draco_mesh_compression = { bufferView: GLTFId; attributes: {[name: string]: number}; extras?: any; }; /** * @see https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_texture_basisu */ export type GLTF_KHR_texture_basisu = { source: GLTFId; extras?: any; }; /** * @see https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/EXT_meshopt_compression * buffer: number; // The index of the buffer with compressed data. ✅ Required * byteOffset integer The offset into the buffer in bytes. Default: 0 * byteLength integer The length of the compressed data in bytes. ✅ Required * byteStride integer The stride, in bytes. ✅ Required * count integer The number of elements. ✅ Required * mode string The compression mode. ✅ Required * filter string The compression filter. 
Default: "NONE" */ export type GLTF_EXT_meshopt_compression = { buffer: number; byteOffset?: number; byteLength: number; byteStride: number; count: number; mode: 'ATTRIBUTES' | 'TRIANGLES' | 'INDICES'; filter?: 'NONE' | 'OCTAHEDRAL' | 'QUATERNION' | 'EXPONENTIAL'; extras?: any; }; /** * @see https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/EXT_texture_webp */ export type GLTF_EXT_texture_webp = { source: GLTFId; extras?: any; }; /** * @see https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/MSFT_texture_dds */ export type GLTF_MSFT_texture_dds = { source: GLTFId; extras?: any; };
the_stack
// Unit tests for the public component-instance proxy (the `this` exposed to
// options such as `mounted`): property resolution across setupState / data /
// props / ctx / globalProperties, `has`/`ownKeys`/`defineProperty` trap
// behavior, and warning output.
// NOTE(review): `toHaveBeenWarned` is a custom matcher — presumably registered
// in the suite's jest setup file; confirm before running in isolation.
import { h, render, getCurrentInstance, nodeOps, createApp, shallowReadonly } from '@vue/runtime-test'
import { ComponentInternalInstance, ComponentOptions } from '../src/component'

describe('component: proxy', () => {
  // Reads and writes through the proxy reach the component's data() state.
  test('data', () => {
    let instance: ComponentInternalInstance
    let instanceProxy: any
    const Comp = {
      data() {
        return { foo: 1 }
      },
      mounted() {
        instance = getCurrentInstance()!
        instanceProxy = this
      },
      render() {
        return null
      }
    }
    render(h(Comp), nodeOps.createElement('div'))
    expect(instanceProxy.foo).toBe(1)
    instanceProxy.foo = 2
    // write through the proxy is visible on the internal data object
    expect(instance!.data.foo).toBe(2)
  })

  // Same round-trip as above, but for state returned from setup().
  test('setupState', () => {
    let instance: ComponentInternalInstance
    let instanceProxy: any
    const Comp = {
      setup() {
        return { foo: 1 }
      },
      mounted() {
        instance = getCurrentInstance()!
        instanceProxy = this
      },
      render() {
        return null
      }
    }
    render(h(Comp), nodeOps.createElement('div'))
    expect(instanceProxy.foo).toBe(1)
    instanceProxy.foo = 2
    expect(instance!.setupState.foo).toBe(2)
  })

  // Attrs passed to a component without a matching `props` declaration must
  // not leak onto the public instance.
  test('should not expose non-declared props', () => {
    let instanceProxy: any
    const Comp = {
      setup() {
        return () => null
      },
      mounted() {
        instanceProxy = this
      }
    }
    render(h(Comp, { count: 1 }), nodeOps.createElement('div'))
    expect('count' in instanceProxy).toBe(false)
  })

  // `$`-prefixed public properties map onto the internal instance; `$data`
  // is read-only and `$nextTick` binds its callback to the proxy.
  test('public properties', async () => {
    let instance: ComponentInternalInstance
    let instanceProxy: any
    const Comp = {
      setup() {
        return () => null
      },
      mounted() {
        instance = getCurrentInstance()!
        instanceProxy = this
      }
    }
    render(h(Comp), nodeOps.createElement('div'))
    expect(instanceProxy.$data).toBe(instance!.data)
    // $props/$attrs/$slots/$refs are exposed as shallow-readonly views
    expect(instanceProxy.$props).toBe(shallowReadonly(instance!.props))
    expect(instanceProxy.$attrs).toBe(shallowReadonly(instance!.attrs))
    expect(instanceProxy.$slots).toBe(shallowReadonly(instance!.slots))
    expect(instanceProxy.$refs).toBe(shallowReadonly(instance!.refs))
    expect(instanceProxy.$parent).toBe(
      instance!.parent && instance!.parent.proxy
    )
    expect(instanceProxy.$root).toBe(instance!.root.proxy)
    expect(instanceProxy.$emit).toBe(instance!.emit)
    expect(instanceProxy.$el).toBe(instance!.vnode.el)
    expect(instanceProxy.$options).toBe(instance!.type as ComponentOptions)
    // mutating a public property both throws and warns
    expect(() => (instanceProxy.$data = {})).toThrow(TypeError)
    expect(`Attempting to mutate public property "$data"`).toHaveBeenWarned()

    // `this` inside a $nextTick callback must be the proxy itself
    const nextTickThis = await instanceProxy.$nextTick(function (this: any) {
      return this
    })
    expect(nextTickThis).toBe(instanceProxy)
  })

  // Ad-hoc properties set on the proxy land on instance.ctx, including
  // `$`-prefixed names that are not reserved public properties.
  test('user attached properties', async () => {
    let instance: ComponentInternalInstance
    let instanceProxy: any
    const Comp = {
      setup() {
        return () => null
      },
      mounted() {
        instance = getCurrentInstance()!
        instanceProxy = this
      }
    }
    render(h(Comp), nodeOps.createElement('div'))
    instanceProxy.foo = 1
    expect(instanceProxy.foo).toBe(1)
    expect(instance!.ctx.foo).toBe(1)

    // should also allow properties that start with $
    const obj = (instanceProxy.$store = {})
    expect(instanceProxy.$store).toBe(obj)
    expect(instance!.ctx.$store).toBe(obj)
  })

  // app.config.globalProperties are readable on the proxy; a local write
  // shadows the global value without mutating it.
  test('globalProperties', () => {
    let instance: ComponentInternalInstance
    let instanceProxy: any
    const Comp = {
      setup() {
        return () => null
      },
      mounted() {
        instance = getCurrentInstance()!
        instanceProxy = this
      }
    }
    const app = createApp(Comp)
    app.config.globalProperties.foo = 1
    app.mount(nodeOps.createElement('div'))
    expect(instanceProxy.foo).toBe(1)

    // set should overwrite globalProperties with local
    instanceProxy.foo = 2
    // expect(instanceProxy.foo).toBe(2)
    expect(instance!.ctx.foo).toBe(2)
    // should not affect global
    expect(app.config.globalProperties.foo).toBe(1)
  })

  // `in` operator (has trap) across every property source, plus the dev-mode
  // ownKeys trap used for console inspection.
  test('has check', () => {
    let instanceProxy: any
    const Comp = {
      render() {},
      props: {
        msg: String
      },
      data() {
        return { foo: 0 }
      },
      setup() {
        return { bar: 1 }
      },
      mounted() {
        instanceProxy = this
      }
    }
    const app = createApp(Comp, { msg: 'hello' })
    app.config.globalProperties.global = 1
    app.mount(nodeOps.createElement('div'))
    // props
    expect('msg' in instanceProxy).toBe(true)
    // data
    expect('foo' in instanceProxy).toBe(true)
    // ctx
    expect('bar' in instanceProxy).toBe(true)
    // public properties
    expect('$el' in instanceProxy).toBe(true)
    // global properties
    expect('global' in instanceProxy).toBe(true)
    // non-existent
    expect('$foobar' in instanceProxy).toBe(false)
    expect('baz' in instanceProxy).toBe(false)
    // #4962 triggering getter should not cause non-existent property to
    // pass the has check
    instanceProxy.baz
    expect('baz' in instanceProxy).toBe(false)
    // set non-existent (goes into proxyTarget sink)
    instanceProxy.baz = 1
    expect('baz' in instanceProxy).toBe(true)
    // dev mode ownKeys check for console inspection
    // should only expose own keys
    expect(Object.keys(instanceProxy)).toMatchObject([
      'msg',
      'bar',
      'foo',
      'baz'
    ])
  })

  // Object.defineProperty on the proxy must be honored for both value and
  // accessor descriptors.
  test('allow updating proxy with Object.defineProperty', () => {
    let instanceProxy: any
    const Comp = {
      render() {},
      setup() {
        return {
          isDisplayed: true
        }
      },
      mounted() {
        instanceProxy = this
      }
    }
    const app = createApp(Comp)
    app.mount(nodeOps.createElement('div'))

    Object.defineProperty(instanceProxy, 'isDisplayed', { value: false })
    expect(instanceProxy.isDisplayed).toBe(false)

    Object.defineProperty(instanceProxy, 'isDisplayed', { value: true })
    expect(instanceProxy.isDisplayed).toBe(true)

    Object.defineProperty(instanceProxy, 'isDisplayed', {
      get() {
        return false
      }
    })
    expect(instanceProxy.isDisplayed).toBe(false)

    Object.defineProperty(instanceProxy, 'isDisplayed', {
      get() {
        return true
      }
    })
    expect(instanceProxy.isDisplayed).toBe(true)
  })

  // Getter descriptors must not be cached by the proxy's key cache, so
  // jest.spyOn (which redefines the property) keeps working.
  test('allow jest spying on proxy methods with Object.defineProperty', () => {
    // #5417
    let instanceProxy: any
    const Comp = {
      render() {},
      setup() {
        return {
          toggle() {
            return 'a'
          }
        }
      },
      mounted() {
        instanceProxy = this
      }
    }
    const app = createApp(Comp)
    app.mount(nodeOps.createElement('div'))

    // access 'toggle' to ensure key is cached
    const v1 = instanceProxy.toggle()
    expect(v1).toEqual('a')

    // reconfigure "toggle" to be getter based.
    let getCalledTimes = 0
    Object.defineProperty(instanceProxy, 'toggle', {
      get() {
        getCalledTimes++
        return () => 'b'
      }
    })

    // getter should not be evaluated on initial definition
    expect(getCalledTimes).toEqual(0)

    // invoke "toggle" after "defineProperty"
    const v2 = instanceProxy.toggle()
    expect(v2).toEqual('b')
    expect(getCalledTimes).toEqual(1)

    // expect toggle getter not to be cached. it can't be
    instanceProxy.toggle()
    expect(getCalledTimes).toEqual(2)

    // attaching jest spy, triggers the getter once, cache it and override the property.
    // also uses Object.defineProperty
    const spy = jest.spyOn(instanceProxy, 'toggle')
    expect(getCalledTimes).toEqual(3)

    // expect getter to not evaluate the jest spy caches its value
    const v3 = instanceProxy.toggle()
    expect(v3).toEqual('b')
    expect(spy).toHaveBeenCalled()
    expect(getCalledTimes).toEqual(3)
  })

  // Value descriptors set via defineProperty must override setupState values,
  // including an explicit null.
  test('defineProperty on proxy property with value descriptor', () => {
    // #5417
    let instanceProxy: any
    const Comp = {
      render() {},
      setup() {
        return {
          toggle: 'a'
        }
      },
      mounted() {
        instanceProxy = this
      }
    }
    const app = createApp(Comp)
    app.mount(nodeOps.createElement('div'))

    const v1 = instanceProxy.toggle
    expect(v1).toEqual('a')

    Object.defineProperty(instanceProxy, 'toggle', {
      value: 'b'
    })
    const v2 = instanceProxy.toggle
    expect(v2).toEqual('b')

    // expect null to be a settable value
    Object.defineProperty(instanceProxy, 'toggle', {
      value: null
    })
    const v3 = instanceProxy.toggle
    expect(v3).toBeNull()
  })

  // defineProperty must override each property source the proxy can resolve
  // from: setup() state, data(), computed (ctx), and props.
  test('defineProperty on public instance proxy should work with SETUP,DATA,CONTEXT,PROPS', () => {
    // #5417
    let instanceProxy: any
    const Comp = {
      props: ['fromProp'],
      data() {
        return {
          name: 'data.name'
        }
      },
      computed: {
        greet() {
          return 'Hi ' + (this as any).name
        }
      },
      render() {},
      setup() {
        return {
          fromSetup: true
        }
      },
      mounted() {
        instanceProxy = this
      }
    }
    const app = createApp(Comp, { fromProp: true })
    app.mount(nodeOps.createElement('div'))

    expect(instanceProxy.greet).toEqual('Hi data.name')

    // define property on data
    Object.defineProperty(instanceProxy, 'name', {
      get() {
        return 'getter.name'
      }
    })
    // computed is same still cached
    expect(instanceProxy.greet).toEqual('Hi data.name')
    // trigger computed
    instanceProxy.name = ''
    // expect "greet" to evaluated and use name from context getter
    expect(instanceProxy.greet).toEqual('Hi getter.name')

    // defineProperty on computed ( context )
    Object.defineProperty(instanceProxy, 'greet', {
      get() {
        return 'Hi greet.getter.computed'
      }
    })
    expect(instanceProxy.greet).toEqual('Hi greet.getter.computed')

    // defineProperty on setupState
    expect(instanceProxy.fromSetup).toBe(true)
    Object.defineProperty(instanceProxy, 'fromSetup', {
      get() {
        return false
      }
    })
    expect(instanceProxy.fromSetup).toBe(false)

    // defineProperty on Props
    expect(instanceProxy.fromProp).toBe(true)
    Object.defineProperty(instanceProxy, 'fromProp', {
      get() {
        return false
      }
    })
    expect(instanceProxy.fromProp).toBe(false)
  })

  // #864
  // Accessing a declared prop that simply wasn't passed should not warn.
  test('should not warn declared but absent props', () => {
    const Comp = {
      props: ['test'],
      render(this: any) {
        return this.test
      }
    }
    render(h(Comp), nodeOps.createElement('div'))
    expect(
      `was accessed during render but is not defined`
    ).not.toHaveBeenWarned()
  })

  // Symbol keys (e.g. Symbol.unscopables) are accessible during render, but
  // an undefined symbol property still produces the dev warning.
  test('should allow symbol to access on render', () => {
    const Comp = {
      render() {
        if ((this as any)[Symbol.unscopables]) {
          return '1'
        }
        return '2'
      }
    }

    const app = createApp(Comp)
    app.mount(nodeOps.createElement('div'))

    expect(
      `Property ${JSON.stringify(
        Symbol.unscopables
      )} was accessed during render ` + `but is not defined on instance.`
    ).toHaveBeenWarned()
  })
})
the_stack
import http from "http"; import { URL } from "url"; import { sha256 } from "js-sha256"; import express from "express"; import bodyParser from "body-parser"; import { Report, Trace } from "apollo-engine-reporting-protobuf"; import { GraphQLSchema, GraphQLObjectType, GraphQLString, GraphQLError, ValidationContext, FieldDefinitionNode, getIntrospectionQuery } from "graphql"; import { PubSub } from "graphql-subscriptions"; import { SubscriptionClient } from "subscriptions-transport-ws"; import WebSocket from "ws"; import { execute } from "apollo-link"; import { createHttpLink } from "apollo-link-http"; import { createPersistedQueryLink as createPersistedQuery, VERSION } from "apollo-link-persisted-queries"; import { createApolloFetch, ApolloFetch, GraphQLRequest, ParsedResponse } from "apollo-fetch"; import { AuthenticationError, UserInputError, gql, Config, ApolloServerBase, PluginDefinition, GraphQLService, GraphQLExecutor } from "apollo-server-core"; import { Headers } from "apollo-server-env"; import { GraphQLExtension, GraphQLResponse } from "graphql-extensions"; import { TracingFormat } from "apollo-tracing"; import ApolloServerPluginResponseCache from "apollo-server-plugin-response-cache"; import { GraphQLRequestContext } from "apollo-server-types"; import { EngineReportingOptions } from "apollo-engine-reporting"; import { mockDate, unmockDate, advanceTimeBy } from "../../__mocks__/date"; export function createServerInfo<AS extends ApolloServerBase>(server: AS, httpServer: http.Server): ServerInfo<AS> { const serverInfo: any = { ...httpServer.address(), server, httpServer }; // Convert IPs which mean "any address" (IPv4 or IPv6) into localhost // corresponding loopback ip. Note that the url field we're setting is // primarily for consumption by our test suite. If this heuristic is // wrong for your use case, explicitly specify a frontend host (in the // `frontends.host` field in your engine config, or in the `host` // option to ApolloServer.listen). 
let hostForUrl = serverInfo.address; if (serverInfo.address === `` || serverInfo.address === `::`) { hostForUrl = `localhost`; } serverInfo.url = require(`url`).format({ protocol: `http`, hostname: hostForUrl, port: serverInfo.port, pathname: server.graphqlPath }); return serverInfo; } const INTROSPECTION_QUERY = ` { __schema { directives { name } } } `; const TEST_STRING_QUERY = ` { testString } `; const queryType = new GraphQLObjectType({ name: `QueryType`, fields: { testString: { type: GraphQLString, resolve() { return `test string`; } } } }); const schema = new GraphQLSchema({ query: queryType }); const makeGatewayMock = ({ optionsSpy = _options => {}, unsubscribeSpy = () => {}, executor = () => ({}) }: { optionsSpy?: (_options: any) => void; unsubscribeSpy?: () => void; executor?: GraphQLExecutor; } = {}) => { const eventuallyAssigned = { resolveLoad: null as unknown as ({ schema, executor }) => void, rejectLoad: null as unknown as (err: Error) => void, triggerSchemaChange: null as unknown as (newSchema) => void }; const mockedLoadResults = new Promise<{ schema: GraphQLSchema; executor: GraphQLExecutor; }>((resolve, reject) => { eventuallyAssigned.resolveLoad = ({ schema, executor }) => { resolve({ schema, executor }); }; eventuallyAssigned.rejectLoad = (err: Error) => { reject(err); }; }); const mockedGateway: GraphQLService = { executor, load: options => { optionsSpy(options); return mockedLoadResults; }, onSchemaChange: callback => { eventuallyAssigned.triggerSchemaChange = callback; return unsubscribeSpy; } }; return { gateway: mockedGateway, triggers: eventuallyAssigned }; }; export interface ServerInfo<AS extends ApolloServerBase> { address: string; family: string; url: string; port: number | string; server: AS; httpServer: http.Server; } export interface CreateServerFunc<AS extends ApolloServerBase> { (config: Config): Promise<ServerInfo<AS>>; } export interface StopServerFunc { (): Promise<void>; } export function testApolloServer<AS extends 
ApolloServerBase>( createApolloServer: CreateServerFunc<AS>, stopServer: StopServerFunc ) { describe(`ApolloServer`, () => { afterEach(stopServer); describe(`constructor`, () => { describe(`validation rules`, () => { it(`accepts additional rules`, async () => { const NoTestString = (context: ValidationContext) => ({ Field(node: FieldDefinitionNode) { if (node.name.value === `testString`) { context.reportError(new GraphQLError(`Not allowed to use`, [node])); } } }); const formatError = jest.fn(error => { expect(error instanceof Error).toBe(true); return error; }); const { url: uri } = await createApolloServer({ schema, validationRules: [NoTestString], introspection: false, formatError }); const apolloFetch = createApolloFetch({ uri }); const introspectionResult = await apolloFetch({ query: INTROSPECTION_QUERY }); expect(introspectionResult.data).toBeUndefined(); expect(introspectionResult.errors).toBeDefined(); expect(introspectionResult.errors[0].message).toMatch(/introspection/); expect(formatError.mock.calls).toHaveLength(introspectionResult.errors.length); const result = await apolloFetch({ query: TEST_STRING_QUERY }); expect(result.data).toBeUndefined(); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toMatch(/Not allowed/); expect(formatError.mock.calls).toHaveLength(introspectionResult.errors.length + result.errors.length); }); it(`allows introspection by default`, async () => { const nodeEnv = process.env.NODE_ENV; delete process.env.NODE_ENV; const { url: uri } = await createApolloServer({ schema }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: INTROSPECTION_QUERY }); expect(result.data).toBeDefined(); expect(result.errors).toBeUndefined(); process.env.NODE_ENV = nodeEnv; }); it(`prevents introspection by default during production`, async () => { const nodeEnv = process.env.NODE_ENV; process.env.NODE_ENV = `production`; const { url: uri } = await createApolloServer({ schema }); const 
apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: INTROSPECTION_QUERY }); expect(result.data).toBeUndefined(); expect(result.errors).toBeDefined(); expect(result.errors).toHaveLength(1); expect(result.errors[0].extensions.code).toEqual(`GRAPHQL_VALIDATION_FAILED`); process.env.NODE_ENV = nodeEnv; }); it(`allows introspection to be enabled explicitly`, async () => { const nodeEnv = process.env.NODE_ENV; process.env.NODE_ENV = `production`; const { url: uri } = await createApolloServer({ schema, introspection: true }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: INTROSPECTION_QUERY }); expect(result.data).toBeDefined(); expect(result.errors).toBeUndefined(); process.env.NODE_ENV = nodeEnv; }); it(`prohibits providing a gateway in addition to schema/typedefs/resolvers`, async () => { const { gateway } = makeGatewayMock(); const incompatibleArgsSpy = jest.fn(); await createApolloServer({ gateway, schema }).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[0][0]).toMatch(/Cannot define both/); await createApolloServer({ gateway, modules: {} as any }).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[1][0]).toMatch(/Cannot define both/); await createApolloServer({ gateway, typeDefs: {} as any }).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[2][0]).toMatch(/Cannot define both/); }); it(`prohibits providing a gateway in addition to subscription options`, async () => { const { gateway } = makeGatewayMock(); const expectedError = `Subscriptions are not yet compatible with the gateway`; const incompatibleArgsSpy = jest.fn(); await createApolloServer({ gateway, subscriptions: `pathToSubscriptions` }).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[0][0]).toMatch(expectedError); await createApolloServer({ gateway, subscriptions: true as any 
}).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[1][0]).toMatch(expectedError); await createApolloServer({ gateway, subscriptions: { path: `` } as any }).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[2][0]).toMatch(expectedError); await createApolloServer({ gateway }).catch(err => incompatibleArgsSpy(err.message)); expect(incompatibleArgsSpy.mock.calls[3][0]).toMatch(expectedError); }); }); describe(`schema creation`, () => { it(`accepts typeDefs and resolvers`, async () => { const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: () => `hi` } }; const { url: uri } = await createApolloServer({ typeDefs, resolvers }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{hello}` }); expect(result.data).toEqual({ hello: `hi` }); expect(result.errors).toBeUndefined(); }); it(`accepts a gateway's schema and calls its executor`, async () => { const executor = jest.fn(); executor.mockReturnValue(Promise.resolve({ data: { testString: `hi - but federated!` } })); const { gateway, triggers } = makeGatewayMock({ executor }); triggers.resolveLoad({ schema, executor }); const { url: uri } = await createApolloServer({ gateway, subscriptions: false }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{testString}` }); expect(result.data).toEqual({ testString: `hi - but federated!` }); expect(result.errors).toBeUndefined(); expect(executor).toHaveBeenCalled(); }); it(`rejected load promise acts as an error boundary`, async () => { const executor = jest.fn(); executor.mockResolvedValueOnce({ data: { testString: `should not get this` } }); executor.mockRejectedValueOnce({ errors: [{ errorWhichShouldNot: `ever be triggered` }] }); const consoleErrorSpy = jest.spyOn(console, `error`).mockImplementation(); const { gateway, triggers } = makeGatewayMock({ executor }); triggers.rejectLoad(new 
Error(`load error which should be masked`)); const { url: uri } = await createApolloServer({ gateway, subscriptions: false }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{testString}` }); expect(result.data).toBeUndefined(); expect(result.errors).toContainEqual( expect.objectContaining({ extensions: expect.objectContaining({ code: `INTERNAL_SERVER_ERROR` }), message: `This data graph is missing a valid configuration. ` + `More details may be available in the server logs.` }) ); expect(consoleErrorSpy).toHaveBeenCalledWith( `This data graph is missing a valid configuration. ` + `load error which should be masked` ); expect(executor).not.toHaveBeenCalled(); }); it(`uses schema over resolvers + typeDefs`, async () => { const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: () => `hi` } }; const { url: uri } = await createApolloServer({ typeDefs, resolvers, schema }); const apolloFetch = createApolloFetch({ uri }); const typeDefResult = await apolloFetch({ query: `{hello}` }); expect(typeDefResult.data).toBeUndefined(); expect(typeDefResult.errors).toBeDefined(); const result = await apolloFetch({ query: `{testString}` }); expect(result.data).toEqual({ testString: `test string` }); expect(result.errors).toBeUndefined(); }); it(`allows mocks as boolean`, async () => { const typeDefs = gql` type Query { hello: String } `; const { url: uri } = await createApolloServer({ typeDefs, mocks: true }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{hello}` }); expect(result.data).toEqual({ hello: `Hello World` }); expect(result.errors).toBeUndefined(); }); it(`allows mocks as an object`, async () => { const typeDefs = gql` type Query { hello: String } `; const { url: uri } = await createApolloServer({ typeDefs, mocks: { String: () => `mock city` } }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{hello}` }); 
expect(result.data).toEqual({ hello: `mock city` }); expect(result.errors).toBeUndefined(); }); it(`allows mocks as an object without overriding the existing resolvers`, async () => { const typeDefs = gql` type User { first: String last: String } type Query { user: User } `; const resolvers = { Query: { user: () => ({ first: `James`, last: `Heinlen` }) } }; const { url: uri } = await createApolloServer({ typeDefs, resolvers, mocks: { User: () => ({ last: () => `mock city` }) } }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{user{first last}}` }); expect(result.data).toEqual({ user: { first: `Hello World`, last: `mock city` } }); expect(result.errors).toBeUndefined(); }); // Need to fix bug in graphql-tools to enable mocks to override the existing resolvers it.skip(`allows mocks as an object with overriding the existing resolvers`, async () => { const typeDefs = gql` type User { first: String last: String } type Query { user: User } `; const resolvers = { Query: { user: () => ({ first: `James`, last: `Heinlen` }) } }; const { url: uri } = await createApolloServer({ typeDefs, resolvers, mocks: { User: () => ({ last: () => `mock city` }) }, mockEntireSchema: false }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{user{first last}}` }); expect(result.data).toEqual({ user: { first: `James`, last: `mock city` } }); expect(result.errors).toBeUndefined(); }); }); }); describe(`Plugins`, () => { let apolloFetch: ApolloFetch; let apolloFetchResponse: ParsedResponse; let serverInstance: ApolloServerBase; const setupApolloServerAndFetchPairForPlugins = async (plugins: PluginDefinition[] = []) => { const { url: uri, server } = await createApolloServer({ context: { customContext: true }, typeDefs: gql` type Query { justAField: String } `, plugins }); serverInstance = server; apolloFetch = createApolloFetch({ uri }) // Store the response so we can inspect it. 
.useAfter(({ response }, next) => { apolloFetchResponse = response; next(); }); }; // Test for https://github.com/apollographql/apollo-server/issues/4170 it(`works when using executeOperation`, async () => { const encounteredFields: any = []; const encounteredContext: any = []; await setupApolloServerAndFetchPairForPlugins([ { requestDidStart: () => ({ executionDidStart: () => ({ willResolveField({ info, context }) { encounteredFields.push(info.path); encounteredContext.push(context); } }) }) } ]); // The bug in 4170 (linked above) was occurring because of a failure // to clone context in `executeOperation` in the same way that occurs // in `runHttpQuery` prior to entering the request pipeline. That // resulted in the inability to attach a symbol to the context because // the symbol already existed on the context. Of course, a context // is only created after the first invocation, so we'll run this twice // to encounter the error where it was in the way when we tried to set // it the second time. While we could have tested for the property // before assigning to it, that is not the contract we have with the // context, which should have been copied on `executeOperation` (which // is meant to be used by testing, currently). await serverInstance.executeOperation({ query: `{ justAField }` }); await serverInstance.executeOperation({ query: `{ justAField }` }); expect(encounteredFields).toStrictEqual([ { key: `justAField`, prev: undefined }, // eslint-disable-line { key: `justAField`, prev: undefined } // eslint-disable-line ]); // This bit is just to ensure that nobody removes `context` from the // `setupApolloServerAndFetchPairForPlugins` thinking it's unimportant. // When a custom context is not provided, a new one is initialized // on each request. 
expect(encounteredContext).toStrictEqual([ expect.objectContaining({ customContext: true }), expect.objectContaining({ customContext: true }) ]); }); it(`returns correct status code for a normal operation`, async () => { await setupApolloServerAndFetchPairForPlugins(); const result = await apolloFetch({ query: `{ justAField }` }); expect(result.errors).toBeUndefined(); expect(apolloFetchResponse.status).toEqual(200); }); it(`allows setting a custom status code for an error`, async () => { await setupApolloServerAndFetchPairForPlugins([ { requestDidStart() { return { didResolveOperation() { throw new Error(`known_error`); }, willSendResponse({ response: { http, errors } }) { if (errors[0].message === `known_error`) { http.status = 403; } } }; } } ]); const result = await apolloFetch({ query: `{ justAField }` }); expect(result.errors).toBeDefined(); expect(apolloFetchResponse.status).toEqual(403); }); it(`preserves user-added "extensions" in the response when parsing errors occur`, async () => { await setupApolloServerAndFetchPairForPlugins([ { requestDidStart() { return { willSendResponse({ response }) { response.extensions = { myExtension: true }; } }; } } ]); const result = await apolloFetch({ query: `{ 🦠` }); expect(result.errors).toBeDefined(); expect(result.extensions).toEqual( expect.objectContaining({ myExtension: true }) ); }); it(`preserves user-added "extensions" in the response when validation errors occur`, async () => { await setupApolloServerAndFetchPairForPlugins([ { requestDidStart() { return { willSendResponse({ response }) { response.extensions = { myExtension: true }; } }; } } ]); const result = await apolloFetch({ query: `{ missingFieldWhichWillNotValidate }` }); expect(result.errors).toBeDefined(); expect(result.extensions).toEqual( expect.objectContaining({ myExtension: true }) ); }); }); describe(`formatError`, () => { it(`wraps thrown error from validation rules`, async () => { const throwError = jest.fn(() => { throw new Error(`nope`); }); 
const formatError = jest.fn(error => { expect(error instanceof Error).toBe(true); expect(error.constructor.name).toEqual(`Error`); return error; }); const { url: uri } = await createApolloServer({ schema, validationRules: [throwError], introspection: true, formatError }); const apolloFetch = createApolloFetch({ uri }); const introspectionResult = await apolloFetch({ query: INTROSPECTION_QUERY }); expect(introspectionResult.data).toBeUndefined(); expect(introspectionResult.errors).toBeDefined(); expect(formatError).toHaveBeenCalledTimes(1); expect(throwError).toHaveBeenCalledTimes(1); const result = await apolloFetch({ query: TEST_STRING_QUERY }); expect(result.data).toBeUndefined(); expect(result.errors).toBeDefined(); expect(formatError).toHaveBeenCalledTimes(2); expect(throwError).toHaveBeenCalledTimes(2); }); it(`works with errors similar to GraphQL errors, such as yup`, async () => { // https://npm.im/yup is a package that produces a particular type of // error that we test compatibility with. This test was first brought // with https://github.com/apollographql/apollo-server/pull/1288. We // used to use the actual `yup` package to generate the error, but we // don't need to actually bundle that dependency just to test // compatibility with that particular error shape. To be honest, it's // not clear from the original PR which attribute of this error need be // mocked, but for the sake not not breaking anything, all of yup's // error properties have been reproduced here. const throwError = jest.fn(async () => { // Intentionally `any` because this is a custom Error class with // various custom properties (like `value` and `params`). const yuppieError: any = new Error(`email must be a valid email`); yuppieError.name = `ValidationError`; // Set `message` to enumerable, which `yup` does and `Error` doesn't. Object.defineProperty(yuppieError, `message`, { enumerable: true }); // Set other properties which `yup` sets. 
yuppieError.path = `email`; yuppieError.type = undefined; // eslint-disable-line yuppieError.value = { email: `invalid-email` }; yuppieError.errors = [`email must be a valid email`]; yuppieError.inner = []; yuppieError.params = { path: `email`, value: `invalid-email`, originalValue: `invalid-email`, label: undefined, // eslint-disable-line regex: /@/ }; // This stack is fake, but roughly what `yup` generates! yuppieError.stack = [ `ValidationError: email must be a valid email`, ` at createError (yup/lib/util/createValidation.js:64:35)`, ` at yup/lib/util/createValidation.js:113:108`, ` at process._tickCallback (internal/process/next_tick.js:68:7)` ].join(`\n`); throw yuppieError; }); const formatError = jest.fn(error => { expect(error instanceof Error).toBe(true); expect(error.extensions.code).toEqual(`INTERNAL_SERVER_ERROR`); expect(error.extensions.exception.name).toEqual(`ValidationError`); expect(error.extensions.exception.message).toBeDefined(); const inputError = new UserInputError(`User Input Error`); return { message: inputError.message, extensions: inputError.extensions }; }); const { url: uri } = await createApolloServer({ typeDefs: gql` type Query { fieldWhichWillError: String } `, resolvers: { Query: { fieldWhichWillError: () => throwError() } }, introspection: true, debug: true, formatError }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors[0].extensions.code).toEqual(`BAD_USER_INPUT`); expect(result.errors[0].message).toEqual(`User Input Error`); expect(formatError).toHaveBeenCalledTimes(1); expect(throwError).toHaveBeenCalledTimes(1); }); }); describe(`lifecycle`, () => { describe(`for Apollo Graph Manager`, () => { let nodeEnv: string; let engineServer: EngineMockServer; class EngineMockServer { private app: express.Application; private server: http.Server; 
// Interior of EngineMockServer (the `class` header and the `app`/`server`
// field declarations are on the previous line). This mock stands in for the
// Apollo Graph Manager (Engine) trace-ingestion endpoint during tests.

// Every Report decoded from an incoming request body, in arrival order.
private reports: Report[] = [];
// Resolved with the accumulated reports once the first report arrives.
// (The resolver is re-invoked on later reports, but only the first
// resolution is observable to awaiters of this Promise.)
public readonly promiseOfReports: Promise<Report[]>;

constructor() {
  let reportResolver: (reports: Report[]) => void;
  this.promiseOfReports = new Promise<Report[]>(resolve => {
    reportResolver = resolve;
  });

  this.app = express();

  this.app.use((req, _res, next) => {
    // body parser requires a content-type
    req.headers[`content-type`] = `text/plain`;
    next();
  });

  this.app.use(
    bodyParser.raw({
      // Accept gzip/deflate-compressed report payloads.
      inflate: true,
      type: `*/*`
    })
  );

  // Terminal handler: decode the protobuf Report, record it, and wake
  // any test awaiting `promiseOfReports`.
  this.app.use((req, res) => {
    const report = Report.decode(req.body);
    this.reports.push(report);
    res.end();

    // Resolve any outstanding Promises with our new report data.
    reportResolver(this.reports);
  });
}

// Start listening on an OS-assigned ephemeral port; resolves with the
// underlying http.Server once it is accepting connections.
async listen(): Promise<http.Server> {
  return await new Promise(resolve => {
    const server = (this.server = this.app.listen(
      0,
      // Intentionally IPv4.
      `127.0.0.1`,
      () => {
        resolve(server);
      }
    ));
  });
}

// Close the server if it was ever started; resolves once fully closed.
// Safe to call when `listen` was never invoked (no-op in that case).
async stop(): Promise<void> {
  if (!this.server) {
    return;
  }

  return new Promise(resolve => {
    this.server && this.server.close(() => resolve());
  });
}

// Engine reporting options that point trace uploads at this mock server.
public engineOptions(): Partial<EngineReportingOptions<any>> {
  return {
    tracesEndpointUrl: this.getUrl()
  };
}

private getUrl(): string {
  if (!this.server) {
    throw new Error(`must listen before getting URL`);
  }
  // NOTE(review): `server.address()` is typed string | AddressInfo | null in
  // Node's typings; this destructuring assumes the AddressInfo shape — and on
  // newer Node versions `family` may be the number 4 rather than the string
  // `IPv4`. TODO confirm against the pinned @types/node version.
  const { family, address, port } = this.server.address();

  if (family !== `IPv4`) {
    throw new Error(`The family was unexpectedly ${family}.`);
  }
  return new URL(`http://${address}:${port}`).toString();
}
}

beforeEach(async () => {
  // Stash NODE_ENV and unset it so the server under test behaves as in
  // development (e.g. includes exception details); restored in afterEach.
  nodeEnv = process.env.NODE_ENV;
  delete process.env.NODE_ENV;
  engineServer = new EngineMockServer();
  return await engineServer.listen();
});

afterEach(done => {
  process.env.NODE_ENV = nodeEnv;
  // `stop()` always returns a Promise; the `|| Promise.resolve()` guard is
  // belt-and-braces in case `engineServer` was never constructed.
  (engineServer.stop() || Promise.resolve()).then(done);
});

describe(`extensions`, () => {
  // While it's been broken down quite a bit, this test is still
  // overloaded and is a prime candidate for de-composition!
it(`calls formatError and other overloaded client identity tests`, async () => { const throwError = jest.fn(() => { throw new Error(`nope`); }); const validationRule = jest.fn(() => { // formatError should be called after validation expect(formatError).not.toBeCalled(); // extension should be called after validation expect(willSendResponseInExtension).not.toBeCalled(); return true; }); const willSendResponseInExtension = jest.fn(); const formatError = jest.fn(error => { try { expect(error).toBeInstanceOf(Error); // extension should be called before formatError expect(willSendResponseInExtension).toHaveBeenCalledTimes(1); // validationRules should be called before formatError expect(validationRule).toHaveBeenCalledTimes(1); } finally { error.message = `masked`; return error; } }); class Extension<TContext = any> extends GraphQLExtension { willSendResponse(o: { graphqlResponse: GraphQLResponse; context: TContext }) { expect(o.graphqlResponse.errors).toHaveLength(1); // formatError should be called before willSendResponse expect(formatError).toHaveBeenCalledTimes(1); // validationRule should be called before willSendResponse expect(validationRule).toHaveBeenCalledTimes(1); willSendResponseInExtension(); } } const { url: uri } = await createApolloServer({ typeDefs: gql` type Query { fieldWhichWillError: String } `, resolvers: { Query: { fieldWhichWillError: () => { throwError(); } } }, validationRules: [validationRule], extensions: [() => new Extension()], engine: { ...engineServer.engineOptions(), apiKey: `service:my-app:secret`, maxUncompressedReportSize: 1, generateClientInfo: () => ({ clientName: `testing`, clientReferenceId: `1234`, clientVersion: `v1.0.1` }) }, formatError, debug: true }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toEqual(`masked`); 
expect(validationRule).toHaveBeenCalledTimes(1); expect(throwError).toHaveBeenCalledTimes(1); expect(formatError).toHaveBeenCalledTimes(1); expect(willSendResponseInExtension).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; expect(trace.clientReferenceId).toMatch(/1234/); expect(trace.clientName).toMatch(/testing/); expect(trace.clientVersion).toEqual(`v1.0.1`); expect(trace.root!.child![0].error![0].message).toMatch(/nope/); expect(trace.root!.child![0].error![0].message).not.toMatch(/masked/); }); }); describe(`traces`, () => { let throwError: jest.Mock; let apolloFetch: ApolloFetch; beforeEach(async () => { throwError = jest.fn(); }); const setupApolloServerAndFetchPair = async ( engineOptions: Partial<EngineReportingOptions<any>> = {}, constructorOptions: Partial<CreateServerFunc<AS>> = {} ) => { const { url: uri } = await createApolloServer({ typeDefs: gql` type Query { fieldWhichWillError: String justAField: String } `, resolvers: { Query: { fieldWhichWillError: () => { throwError(); }, justAField: () => `a string` } }, engine: { ...engineServer.engineOptions(), apiKey: `service:my-app:secret`, maxUncompressedReportSize: 1, ...engineOptions }, debug: true, ...constructorOptions }); apolloFetch = createApolloFetch({ uri }); }; it(`does not expose stack`, async () => { throwError.mockImplementationOnce(() => { throw new Error(`how do I stack up?`); }); await setupApolloServerAndFetchPair(); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); // The original error message should still be sent to the client. 
expect(result.errors[0].message).toEqual(`how do I stack up?`); expect(throwError).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; // There should be no error at the root, our error is a child. expect(trace.root.error).toStrictEqual([]); // There should only be one child. expect(trace.root.child).toHaveLength(1); // The error should not have the stack in it. expect(trace.root.child[0].error[0]).not.toHaveProperty(`stack`); expect(JSON.parse(trace.root.child[0].error[0].json)).not.toHaveProperty(`stack`); }); it(`sets the trace key to operationName when it is defined`, async () => { await setupApolloServerAndFetchPair(); const result = await apolloFetch({ query: `query AnOperationName {justAField}` }); expect(result.data).toEqual({ justAField: `a string` }); expect(result.errors).not.toBeDefined(); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); expect(Object.keys(reports[0].tracesPerQuery)[0]).toMatch(/^# AnOperationName\n/); }); it(`sets the trace key to "-" when operationName is undefined`, async () => { await setupApolloServerAndFetchPair(); const result = await apolloFetch({ query: `{justAField}` }); expect(result.data).toEqual({ justAField: `a string` }); expect(result.errors).not.toBeDefined(); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); expect(Object.keys(reports[0].tracesPerQuery)[0]).toMatch(/^# -\n/); }); it("doesn't resort to query body signature on `didResolveOperation` error", async () => { await setupApolloServerAndFetchPair(Object.create(null), { plugins: [ { requestDidStart() { return { didResolveOperation() { throw new Error(`known_error`); } }; } } ] }); const result = await apolloFetch({ query: `{ aliasedField: justAField }` }); expect(result.errors).toBeDefined(); expect(result.errors[0].extensions).toBeDefined(); 
expect(result.errors[0].message).toEqual(`known_error`); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); expect(Object.keys(reports[0].tracesPerQuery)[0]).not.toEqual(`# -\n{ aliasedField: justAField }`); }); it(`doesn't internal server error on an APQ`, async () => { await setupApolloServerAndFetchPair(); const TEST_STRING_QUERY = ` { onlyForThisApqTest${Math.random().toString().split(`.`)[1]}: justAField } `; const hash = sha256.create().update(TEST_STRING_QUERY).hex(); const result = await apolloFetch({ // @ts-ignore The `ApolloFetch` types don't allow `extensions` to be // passed in, in the same way as `variables`, with a request. This // is a typing omission in `apollo-fetch`, as can be seen here: // https://git.io/Jeb63 This will all be going away soon (and // that package is already archived and deprecated. extensions: { persistedQuery: { version: VERSION, sha256Hash: hash } } }); // Having a persisted query not found error is fine. expect(result.errors).toContainEqual( expect.objectContaining({ extensions: expect.objectContaining({ code: `PERSISTED_QUERY_NOT_FOUND` }) }) ); // However, having an internal server error is not okay! expect(result.errors).not.toContainEqual( expect.objectContaining({ extensions: expect.objectContaining({ code: `INTERNAL_SERVER_ERROR` }) }) ); }); describe(`error munging`, () => { describe(`rewriteError`, () => { it(`new error`, async () => { throwError.mockImplementationOnce(() => { throw new Error(`rewriteError nope`); }); await setupApolloServerAndFetchPair({ rewriteError: () => new GraphQLError(`rewritten as a new error`) }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); // The original error message should be sent to the client. 
expect(result.errors[0].message).toEqual(`rewriteError nope`); expect(throwError).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; // There should be no error at the root, our error is a child. expect(trace.root.error).toStrictEqual([]); // There should only be one child. expect(trace.root.child).toHaveLength(1); // The child should maintain the path, but have its message // rewritten. expect(trace.root.child[0].error).toMatchObject([ { json: `{"message":"rewritten as a new error","locations":[{"line":1,"column":2}],"path":["fieldWhichWillError"]}`, message: `rewritten as a new error`, location: [{ column: 2, line: 1 }] } ]); }); it(`modified error`, async () => { throwError.mockImplementationOnce(() => { throw new Error(`rewriteError mod nope`); }); await setupApolloServerAndFetchPair({ rewriteError: err => { err.message = `rewritten as a modified error`; return err; } }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toEqual(`rewriteError mod nope`); expect(throwError).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; // There should be no error at the root, our error is a child. expect(trace.root.error).toStrictEqual([]); // There should only be one child. expect(trace.root.child).toHaveLength(1); // The child should maintain the path, but have its message // rewritten. 
expect(trace.root.child[0].error).toMatchObject([ { json: `{"message":"rewritten as a modified error","locations":[{"line":1,"column":2}],"path":["fieldWhichWillError"]}`, message: `rewritten as a modified error`, location: [{ column: 2, line: 1 }] } ]); }); it(`nulled error`, async () => { throwError.mockImplementationOnce(() => { throw new Error(`rewriteError null nope`); }); await setupApolloServerAndFetchPair({ rewriteError: () => null }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toEqual(`rewriteError null nope`); expect(throwError).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; // There should be no error at the root, our error is a child. expect(trace.root.error).toStrictEqual([]); // There should only be one child. expect(trace.root.child).toHaveLength(1); // There should be no error in the trace for this property! expect(trace.root.child[0].error).toStrictEqual([]); }); }); it(`undefined error`, async () => { throwError.mockImplementationOnce(() => { throw new Error(`rewriteError undefined whoops`); }); await setupApolloServerAndFetchPair({ rewriteError: () => undefined }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toEqual(`rewriteError undefined whoops`); expect(throwError).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; // There should be no error at the root, our error is a child. expect(trace.root.error).toStrictEqual([]); // There should only be one child. 
expect(trace.root.child).toHaveLength(1); // The child should maintain the path, but have its message // rewritten. expect(trace.root.child[0].error).toMatchObject([ { json: `{"message":"rewriteError undefined whoops","locations":[{"line":1,"column":2}],"path":["fieldWhichWillError"]}`, message: `rewriteError undefined whoops`, location: [{ column: 2, line: 1 }] } ]); }); // This is deprecated, but we'll test it until it's removed in // Apollo Server 3.x. it(`maskErrorDetails (legacy)`, async () => { throwError.mockImplementationOnce(() => { throw new Error(`maskErrorDetails nope`); }); await setupApolloServerAndFetchPair({ maskErrorDetails: true }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toEqual(`maskErrorDetails nope`); expect(throwError).toHaveBeenCalledTimes(1); const reports = await engineServer.promiseOfReports; expect(reports).toHaveLength(1); const trace = Object.values(reports[0].tracesPerQuery)[0].trace[0]; expect(trace.root.child[0].error).toMatchObject([ { json: `{"message":"<masked>","locations":[{"line":1,"column":2}],"path":["fieldWhichWillError"]}`, message: `<masked>`, location: [{ line: 1, column: 2 }] } ]); }); }); }); }); it(`errors thrown in extensions call formatError and are wrapped`, async () => { const extension = jest.fn(() => { throw new Error(`nope`); }); const formatError = jest.fn(error => { expect(error instanceof Error).toBe(true); // extension should be called before formatError expect(extension).toHaveBeenCalledTimes(1); error.message = `masked`; return error; }); class Extension<TContext = any> extends GraphQLExtension { willSendResponse(_o: { graphqlResponse: GraphQLResponse; context: TContext }) { // formatError should be called after extensions expect(formatError).not.toBeCalled(); extension(); } } const { url: uri } = await createApolloServer({ typeDefs: gql` type 
Query { fieldWhichWillError: String } `, resolvers: { Query: { fieldWhichWillError: () => {} } }, extensions: [() => new Extension()], formatError, debug: true }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toBeUndefined(); expect(result.errors).toBeDefined(); expect(result.errors[0].message).toEqual(`masked`); expect(formatError).toHaveBeenCalledTimes(1); }); describe(`context field`, () => { const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: () => `hi` } }; it(`defers context eval with thunk until after options creation`, async () => { const uniqueContext = { key: `major` }; const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: (_parent: any, _args: any, context: any) => { expect(context).toEqual(Promise.resolve(uniqueContext)); return `hi`; } } }; const spy = jest.fn(() => ({})); const { url: uri } = await createApolloServer({ typeDefs, resolvers, context: spy }); const apolloFetch = createApolloFetch({ uri }); expect(spy).not.toBeCalled(); await apolloFetch({ query: `{hello}` }); expect(spy).toHaveBeenCalledTimes(1); await apolloFetch({ query: `{hello}` }); expect(spy).toHaveBeenCalledTimes(2); }); describe(`context cloning`, () => { it(`clones the context for request pipeline requests`, async () => { const uniqueContext = { key: `major` }; const spy = jest.fn(() => `hi`); const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: (_parent: any, _args: any, context: any) => { expect(context.key).toEqual(`major`); context.key = `minor`; return spy(); } } }; const { url: uri } = await createApolloServer({ typeDefs, resolvers, context: uniqueContext }); const apolloFetch = createApolloFetch({ uri }); expect(spy).not.toBeCalled(); await apolloFetch({ query: `{hello}` }); expect(spy).toHaveBeenCalledTimes(1); await apolloFetch({ query: `{hello}` }); 
expect(spy).toHaveBeenCalledTimes(2); }); // https://github.com/apollographql/apollo-server/issues/4170 it(`for every request with executeOperation`, async () => { const uniqueContext = { key: `major` }; const spy = jest.fn(() => `hi`); const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: (_parent: any, _args: any, context: any) => { expect(context.key).toEqual(`major`); context.key = `minor`; return spy(); } } }; const { server } = await createApolloServer({ typeDefs, resolvers, context: uniqueContext }); expect(spy).not.toBeCalled(); await server.executeOperation({ query: `{hello}` }); expect(spy).toHaveBeenCalledTimes(1); await server.executeOperation({ query: `{hello}` }); expect(spy).toHaveBeenCalledTimes(2); }); }); describe(`as a function`, () => { it(`can accept and return \`req\``, async () => { expect( await createApolloServer({ typeDefs, resolvers, context: ({ req }) => ({ req }) }) ).not.toThrow; }); it(`can accept nothing and return an empty object`, async () => { expect( await createApolloServer({ typeDefs, resolvers, context: () => ({}) }) ).not.toThrow; }); it(`can be an async function`, async () => { const uniqueContext = { key: `major` }; const spy = jest.fn(() => `hi`); const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: (_parent: any, _args: any, context: any) => { expect(context.key).toEqual(`major`); return spy(); } } }; const { url: uri } = await createApolloServer({ typeDefs, resolvers, context: async () => uniqueContext }); const apolloFetch = createApolloFetch({ uri }); expect(spy).not.toBeCalled(); await apolloFetch({ query: `{hello}` }); expect(spy).toHaveBeenCalledTimes(1); }); it(`returns thrown context error as a valid graphql result`, async () => { const nodeEnv = process.env.NODE_ENV; delete process.env.NODE_ENV; const typeDefs = gql` type Query { hello: String } `; const resolvers = { Query: { hello: () => { throw Error(`never get here`); } } }; const { url: 
uri } = await createApolloServer({ typeDefs, resolvers, context: () => { throw new AuthenticationError(`valid result`); } }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{hello}` }); expect(result.errors).toHaveLength(1); expect(result.data).toBeUndefined(); const e = result.errors[0]; expect(e.message).toMatch(`valid result`); expect(e.extensions).toBeDefined(); expect(e.extensions.code).toEqual(`UNAUTHENTICATED`); expect(e.extensions.exception.stacktrace).toBeDefined(); process.env.NODE_ENV = nodeEnv; }); }); describe(`as an object`, () => { it(`can be an empty object`, async () => { expect( await createApolloServer({ typeDefs, resolvers, context: {} }) ).not.toThrow; }); it(`can contain arbitrary values`, async () => { expect( await createApolloServer({ typeDefs, resolvers, context: { value: `arbitrary` } }) ).not.toThrow; }); }); }); it(`propagates error codes in production`, async () => { const nodeEnv = process.env.NODE_ENV; process.env.NODE_ENV = `production`; const { url: uri } = await createApolloServer({ typeDefs: gql` type Query { fieldWhichWillError: String } `, resolvers: { Query: { fieldWhichWillError: () => { throw new AuthenticationError(`we the best music`); } } } }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toBeDefined(); expect(result.data).toEqual({ fieldWhichWillError: null }); expect(result.errors).toBeDefined(); expect(result.errors).toHaveLength(1); expect(result.errors[0].extensions.code).toEqual(`UNAUTHENTICATED`); expect(result.errors[0].extensions.exception).toBeUndefined(); process.env.NODE_ENV = nodeEnv; }); it(`propagates error codes with null response in production`, async () => { const nodeEnv = process.env.NODE_ENV; process.env.NODE_ENV = `production`; const { url: uri } = await createApolloServer({ typeDefs: gql` type Query { fieldWhichWillError: String! 
} `, resolvers: { Query: { fieldWhichWillError: () => { throw new AuthenticationError(`we the best music`); } } } }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{fieldWhichWillError}` }); expect(result.data).toBeNull(); expect(result.errors).toBeDefined(); expect(result.errors).toHaveLength(1); expect(result.errors[0].extensions.code).toEqual(`UNAUTHENTICATED`); expect(result.errors[0].extensions.exception).toBeUndefined(); process.env.NODE_ENV = nodeEnv; }); }); describe(`subscriptions`, () => { const SOMETHING_CHANGED_TOPIC = `something_changed`; const pubsub = new PubSub(); let subscription: | { unsubscribe: () => void; } | undefined; function createEvent(num: number) { return setTimeout( () => pubsub.publish(SOMETHING_CHANGED_TOPIC, { num }), num + 10 ); } afterEach(async () => { if (subscription) { try { await subscription.unsubscribe(); } catch (e) {} subscription = null; } }); it(`enables subscriptions after creating subscriptions server`, done => { const typeDefs = gql` type Query { hi: String } type Subscription { num: Int } `; const query = ` subscription { num } `; const resolvers = { Query: { hi: () => `here to placate graphql-js` }, Subscription: { num: { subscribe: () => { createEvent(1); createEvent(2); createEvent(3); return pubsub.asyncIterator(SOMETHING_CHANGED_TOPIC); } } } }; createApolloServer({ typeDefs, resolvers }).then(({ port, server, httpServer }) => { server.installSubscriptionHandlers(httpServer); const client = new SubscriptionClient(`ws://localhost:${port}${server.subscriptionsPath}`, {}, WebSocket); const observable = client.request({ query }); let i = 1; subscription = observable.subscribe({ next: ({ data }) => { try { expect(data.num).toEqual(i); if (i === 3) { done(); } i++; } catch (e) { done.fail(e); } }, error: done.fail, complete: () => { done.fail(new Error(`should not complete`)); } }); }); }); it(`disables subscriptions when option set to false`, done => { const typeDefs = 
gql` type Query { "graphql-js forces there to be a query type" hi: String } type Subscription { num: Int } `; const query = ` subscription { num } `; const resolvers = { Query: { hi: () => `here to placate graphql-js` }, Subscription: { num: { subscribe: () => { createEvent(1); return pubsub.asyncIterator(SOMETHING_CHANGED_TOPIC); } } } }; createApolloServer({ typeDefs, resolvers, subscriptions: false }).then(({ port, server, httpServer }) => { try { server.installSubscriptionHandlers(httpServer); done.fail(`subscription server creation should fail, since subscriptions are disabled`); } catch (e) { expect(e.message).toMatch(/disabled/); } const client = new SubscriptionClient( `ws://localhost:${port}${server.subscriptionsPath || ``}`, {}, WebSocket ); const observable = client.request({ query }); subscription = observable.subscribe({ next: () => { done.fail(new Error(`should not call next`)); }, error: () => { done.fail(new Error(`should not notify of error`)); }, complete: () => { done.fail(new Error(`should not complete`)); } }); // Unfortunately the error connection is not propagated to the // observable. What should happen is we provide a default onError // function that notifies the returned observable and can customize // the behavior with an option in the client constructor. If you're // available to make a PR to the following please do! 
// https://github.com/apollographql/subscriptions-transport-ws/blob/master/src/client.ts client.onError((_: Error) => { done(); }); }); }); it(`accepts subscriptions configuration`, done => { const onConnect = jest.fn(connectionParams => ({ ...connectionParams })); const typeDefs = gql` type Query { hi: String } type Subscription { num: Int } `; const query = ` subscription { num } `; const resolvers = { Query: { hi: () => `here to placate graphql-js` }, Subscription: { num: { subscribe: () => { createEvent(1); createEvent(2); createEvent(3); return pubsub.asyncIterator(SOMETHING_CHANGED_TOPIC); } } } }; const path = `/sub`; createApolloServer({ typeDefs, resolvers, subscriptions: { onConnect, path } }) .then(({ port, server, httpServer }) => { server.installSubscriptionHandlers(httpServer); expect(onConnect).not.toBeCalled(); expect(server.subscriptionsPath).toEqual(path); const client = new SubscriptionClient(`ws://localhost:${port}${server.subscriptionsPath}`, {}, WebSocket); const observable = client.request({ query }); let i = 1; subscription = observable.subscribe({ next: ({ data }) => { try { expect(onConnect).toHaveBeenCalledTimes(1); expect(data.num).toEqual(i); if (i === 3) { done(); } i++; } catch (e) { done.fail(e); } }, error: done.fail, complete: () => { done.fail(new Error(`should not complete`)); } }); }) .catch(done.fail); }); it(`takes websocket server subscriptions configuration`, done => { const onConnect = jest.fn(connectionParams => ({ ...connectionParams })); const typeDefs = gql` type Query { hi: String } type Subscription { num: Int } `; const query = ` subscription { num } `; const resolvers = { Query: { hi: () => `here to placate graphql-js` }, Subscription: { num: { subscribe: () => { createEvent(1); createEvent(2); createEvent(3); return pubsub.asyncIterator(SOMETHING_CHANGED_TOPIC); } } } }; const path = `/sub`; createApolloServer({ typeDefs, resolvers, subscriptions: { onConnect, path } }) .then(({ port, server }) => { const subPort = 
(typeof port === `number` ? port : parseInt(port)) + 1; const websocketServer = new WebSocket.Server({ port: subPort }); server.installSubscriptionHandlers(websocketServer); expect(onConnect).not.toBeCalled(); expect(server.subscriptionsPath).toEqual(path); const client = new SubscriptionClient( `ws://localhost:${subPort}${server.subscriptionsPath}`, {}, WebSocket ); const observable = client.request({ query }); let i = 1; subscription = observable.subscribe({ next: ({ data }) => { try { expect(onConnect).toHaveBeenCalledTimes(1); expect(data.num).toEqual(i); if (i === 3) { done(); } i++; } catch (e) { done.fail(e); } }, error: done.fail, complete: () => { done.fail(new Error(`should not complete`)); } }); }) .catch(done.fail); }); it(`allows introspection when introspection is enabled on ApolloServer`, done => { const typeDefs = gql` type Query { hi: String } type Subscription { num: Int } `; const query = getIntrospectionQuery(); const resolvers = { Query: { hi: () => `here to placate graphql-js` }, Subscription: { num: { subscribe: () => { createEvent(1); createEvent(2); createEvent(3); return pubsub.asyncIterator(SOMETHING_CHANGED_TOPIC); } } } }; createApolloServer({ typeDefs, resolvers, introspection: true }).then(({ port, server, httpServer }) => { server.installSubscriptionHandlers(httpServer); const client = new SubscriptionClient(`ws://localhost:${port}${server.subscriptionsPath}`, {}, WebSocket); const observable = client.request({ query }); subscription = observable.subscribe({ next: ({ data }) => { try { expect(data).toMatchObject({ __schema: expect.any(Object) }); } catch (e) { done.fail(e); } done(); } }); }); }); it(`disallows introspection when it's disabled on ApolloServer`, done => { const typeDefs = gql` type Query { hi: String } type Subscription { num: Int } `; const query = getIntrospectionQuery(); const resolvers = { Query: { hi: () => `here to placate graphql-js` }, Subscription: { num: { subscribe: () => { createEvent(1); createEvent(2); 
createEvent(3); return pubsub.asyncIterator(SOMETHING_CHANGED_TOPIC); } } } }; createApolloServer({ typeDefs, resolvers, introspection: false }).then(({ port, server, httpServer }) => { server.installSubscriptionHandlers(httpServer); const client = new SubscriptionClient(`ws://localhost:${port}${server.subscriptionsPath}`, {}, WebSocket); const observable = client.request({ query }); subscription = observable.subscribe({ next: ({ data }) => { try { expect(data).toBeUndefined(); } catch (e) { done.fail(e); } done(); } }); }); }); }); describe(`Persisted Queries`, () => { let uri: string; const query = gql` ${TEST_STRING_QUERY} `; const hash = sha256.create().update(TEST_STRING_QUERY).hex(); const extensions = { persistedQuery: { version: VERSION, sha256Hash: hash } }; beforeEach(async () => { const serverInfo = await createApolloServer({ schema, introspection: false, persistedQueries: { cache: new Map<string, string>() as any } }); uri = serverInfo.url; }); it(`returns PersistedQueryNotFound on the first try`, async () => { const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ extensions } as any); expect(result.data).toBeUndefined(); expect(result.errors).toHaveLength(1); expect(result.errors[0].message).toEqual(`PersistedQueryNotFound`); expect(result.errors[0].extensions.code).toEqual(`PERSISTED_QUERY_NOT_FOUND`); }); it(`returns result on the second try`, async () => { const apolloFetch = createApolloFetch({ uri }); await apolloFetch({ extensions } as any); const result = await apolloFetch({ extensions, query: TEST_STRING_QUERY } as any); expect(result.data).toEqual({ testString: `test string` }); expect(result.errors).toBeUndefined(); }); it(`returns result on the persisted query`, async () => { const apolloFetch = createApolloFetch({ uri }); await apolloFetch({ extensions } as any); await apolloFetch({ extensions, query: TEST_STRING_QUERY } as any); const result = await apolloFetch({ extensions } as any); 
expect(result.data).toEqual({ testString: `test string` }); expect(result.errors).toBeUndefined(); }); // Apollo Fetch's result depends on the server implementation, if the // statusText of the error is unparsable, then we'll fall into the catch, // such as with express. If it is parsable, then we'll use the afterware it(`returns error when hash does not match`, async () => { const apolloFetch = createApolloFetch({ uri }).useAfter((res, next) => { expect(res.response.status).toEqual(400); expect(res.response.raw).toMatch(/does not match query/); next(); }); try { await apolloFetch({ extensions: { persistedQuery: { version: VERSION, sha: `aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa` } }, query: TEST_STRING_QUERY } as any); } catch (e) { expect(e.response).toBeDefined(); expect(e.response.status).toEqual(400); expect(e.response.raw).toMatch(/does not match query/); } }); it(`returns correct result for persisted query link`, done => { const variables = { id: 1 }; const link = createPersistedQuery().concat(createHttpLink({ uri, fetch } as any)); execute(link, { query, variables } as any).subscribe(result => { expect(result.data).toEqual({ testString: `test string` }); done(); }, done.fail); }); it(`returns correct result for persisted query link using get request`, done => { const variables = { id: 1 }; const link = createPersistedQuery({ useGETForHashedQueries: true }).concat(createHttpLink({ uri, fetch } as any)); execute(link, { query, variables } as any).subscribe(result => { expect(result.data).toEqual({ testString: `test string` }); done(); }, done.fail); }); }); describe(`apollo-engine-reporting`, () => { async function makeFakeTestableEngineServer({ status, waitWriteResponse = false }: { status: number; waitWriteResponse?: boolean; }) { let writeResponseResolve: () => void; const writeResponsePromise = new Promise(resolve => (writeResponseResolve = resolve)); const fakeEngineServer = http.createServer(async (_, res) => { await 
writeResponsePromise; res.writeHead(status); res.end(`Important text in the body`); }); await new Promise(resolve => { fakeEngineServer.listen(0, `127.0.0.1`, () => { resolve(); }); }); async function closeServer() { await new Promise(resolve => fakeEngineServer.close(() => resolve())); } const { family, address, port } = fakeEngineServer.address(); if (family !== `IPv4`) { throw new Error(`The family was unexpectedly ${family}.`); } const fakeEngineUrl = `http://${address}:${port}`; if (!waitWriteResponse) { writeResponseResolve(); } return { closeServer, fakeEngineServer, fakeEngineUrl, writeResponseResolve }; } describe(`graphql server functions even when Apollo servers are down`, () => { async function testWithStatus(status: number, expectedRequestCount: number) { const networkError = status === 0; const { closeServer, fakeEngineUrl, writeResponseResolve } = await makeFakeTestableEngineServer({ status, waitWriteResponse: true }); try { // To simulate a network error, we create and close the server. // This lets us still generate a port that is hopefully unused. if (networkError) { await closeServer(); } let requestCount = 0; const requestAgent = new http.Agent({ keepAlive: false }); const realCreateConnection = (requestAgent as any).createConnection; (requestAgent as any).createConnection = function () { requestCount++; return realCreateConnection.apply(this, arguments); }; let reportErrorPromiseResolve: (error: Error) => void; const reportErrorPromise = new Promise<Error>(resolve => (reportErrorPromiseResolve = resolve)); const { url: uri } = await createApolloServer({ typeDefs: gql` type Query { something: String! } `, resolvers: { Query: { something: () => `hello` } }, engine: { apiKey: `service:my-app:secret`, tracesEndpointUrl: fakeEngineUrl, reportIntervalMs: 1, maxAttempts: 3, requestAgent, reportErrorFunction(error: Error) { reportErrorPromiseResolve(error); } } }); const apolloFetch = createApolloFetch({ uri }); // Run a GraphQL query. 
Ensure that it returns successfully even // though reporting is going to fail. (Note that reporting can't // actually have failed yet (except in the network-error case) // because we haven't let writeResponsePromise resolve.) const result = await apolloFetch({ query: `{ something }` }); expect(result.data.something).toBe(`hello`); if (!networkError) { // Allow reporting to return its response (for every retry). writeResponseResolve(); } // Make sure we can get the error from reporting. const sendingError = await reportErrorPromise; expect(sendingError).toBeTruthy(); if (networkError) { expect(sendingError.message).toContain(`Error sending report to Apollo Engine servers`); expect(sendingError.message).toContain(`ECONNREFUSED`); } else { expect(sendingError.message).toBe( `Error sending report to Apollo Engine servers: HTTP status ${status}, Important text in the body` ); } expect(requestCount).toBe(expectedRequestCount); } finally { if (!networkError) { await closeServer(); } } } it(`with retryable error`, async () => { await testWithStatus(500, 3); }); it(`with network error`, async () => { await testWithStatus(0, 3); }); it(`with non-retryable error`, async () => { await testWithStatus(400, 1); }); }); }); describe(`Tracing`, () => { const typeDefs = gql` type Book { title: String author: String } type Movie { title: String } type Query { books: [Book] movies: [Movie] } `; const resolvers = { Query: { books: () => new Promise(resolve => setTimeout(() => resolve([{ title: `H`, author: `J` }]), 10)), movies: () => new Promise(resolve => setTimeout(() => resolve([{ title: `H` }]), 12)) } }; it(`reports a total duration that is longer than the duration of its resolvers`, async () => { const { url: uri } = await createApolloServer({ typeDefs, resolvers, tracing: true }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{ books { title author } }` }); const tracing: TracingFormat = result.extensions.tracing; const 
earliestStartOffset = tracing.execution.resolvers .map(resolver => resolver.startOffset) .reduce((currentEarliestOffset, nextOffset) => Math.min(currentEarliestOffset, nextOffset)); const latestEndOffset = tracing.execution.resolvers .map(resolver => resolver.startOffset + resolver.duration) .reduce((currentLatestEndOffset, nextEndOffset) => Math.max(currentLatestEndOffset, nextEndOffset)); const resolverDuration = latestEndOffset - earliestStartOffset; expect(resolverDuration).not.toBeGreaterThan(tracing.duration); }); }); describe(`Federated tracing`, () => { // Enable federated tracing by pretending to be federated. const federationTypeDefs = gql` type _Service { sdl: String } `; const baseTypeDefs = gql` type Book { title: String author: String } type Movie { title: String } type Query { books: [Book] movies: [Movie] error: String } `; const allTypeDefs = [federationTypeDefs, baseTypeDefs]; const resolvers = { Query: { books: () => new Promise(resolve => setTimeout(() => resolve([{ title: `H`, author: `J` }]), 10)), movies: () => new Promise(resolve => setTimeout(() => resolve([{ title: `H` }]), 12)), error: () => { throw new GraphQLError(`It broke`); } } }; function createApolloFetchAsIfFromGateway(uri: string): ApolloFetch { return createApolloFetch({ uri }).use(({ options }, next) => { options.headers = { "apollo-federation-include-trace": `ftv1` }; next(); }); } it(`doesn't include federated trace without the special header`, async () => { const { url: uri } = await createApolloServer({ typeDefs: allTypeDefs, resolvers }); const apolloFetch = createApolloFetch({ uri }); const result = await apolloFetch({ query: `{ books { title author } }` }); expect(result.extensions).toBeUndefined(); }); it(`doesn't include federated trace without _Service in the schema`, async () => { const { url: uri } = await createApolloServer({ typeDefs: baseTypeDefs, resolvers }); const apolloFetch = createApolloFetchAsIfFromGateway(uri); const result = await apolloFetch({ query: `{ 
books { title author } }` }); expect(result.extensions).toBeUndefined(); }); it(`reports a total duration that is longer than the duration of its resolvers`, async () => { const { url: uri } = await createApolloServer({ typeDefs: allTypeDefs, resolvers }); const apolloFetch = createApolloFetchAsIfFromGateway(uri); const result = await apolloFetch({ query: `{ books { title author } }` }); const ftv1: string = result.extensions.ftv1; expect(ftv1).toBeTruthy(); const encoded = Buffer.from(ftv1, `base64`); const trace = Trace.decode(encoded); let earliestStartOffset = Infinity; let latestEndOffset = -Infinity; function walk(node: Trace.INode) { if (node.startTime !== 0 && node.endTime !== 0) { earliestStartOffset = Math.min(earliestStartOffset, node.startTime); latestEndOffset = Math.max(latestEndOffset, node.endTime); } node.child.forEach(n => walk(n)); } walk(trace.root); expect(earliestStartOffset).toBeLessThan(Infinity); expect(latestEndOffset).toBeGreaterThan(-Infinity); const resolverDuration = latestEndOffset - earliestStartOffset; expect(resolverDuration).toBeGreaterThan(0); expect(trace.durationNs).toBeGreaterThanOrEqual(resolverDuration); expect(trace.startTime.seconds).toBeLessThanOrEqual(trace.endTime.seconds); if (trace.startTime.seconds === trace.endTime.seconds) { expect(trace.startTime.nanos).toBeLessThanOrEqual(trace.endTime.nanos); } }); it(`includes errors in federated trace`, async () => { const { url: uri } = await createApolloServer({ typeDefs: allTypeDefs, resolvers, formatError(err) { err.message = `Formatted: ${err.message}`; return err; }, engine: { rewriteError(err) { err.message = `Rewritten for Engine: ${err.message}`; return err; } } }); const apolloFetch = createApolloFetchAsIfFromGateway(uri); const result = await apolloFetch({ query: `{ error }` }); expect(result.data).toStrictEqual({ error: null }); expect(result.errors).toBeTruthy(); expect(result.errors).toHaveLength(1); expect(result.errors[0].message).toBe(`Formatted: It broke`); 
const ftv1: string = result.extensions.ftv1; expect(ftv1).toBeTruthy(); const encoded = Buffer.from(ftv1, `base64`); const trace = Trace.decode(encoded); expect(trace.root.child[0].error[0].message).toBe(`Rewritten for Engine: It broke`); }); }); describe(`Response caching`, () => { beforeAll(() => { mockDate(); }); afterAll(() => { unmockDate(); }); it(`basic caching`, async () => { const typeDefs = gql` type Query { cached: String @cacheControl(maxAge: 10) uncached: String private: String @cacheControl(maxAge: 9, scope: PRIVATE) } `; type FieldName = "cached" | "uncached" | "private"; const fieldNames: FieldName[] = [`cached`, `uncached`, `private`]; const resolverCallCount: Partial<Record<FieldName, number>> = {}; const expectedResolverCallCount: Partial<Record<FieldName, number>> = {}; const expectCacheHit = (fn: FieldName) => expect(resolverCallCount[fn]).toBe(expectedResolverCallCount[fn]); const expectCacheMiss = (fn: FieldName) => expect(resolverCallCount[fn]).toBe(++expectedResolverCallCount[fn]); const resolvers = { Query: {} }; fieldNames.forEach(name => { resolverCallCount[name] = 0; expectedResolverCallCount[name] = 0; resolvers.Query[name] = () => { resolverCallCount[name]++; return `value:${name}`; }; }); const { url: uri } = await createApolloServer({ typeDefs, resolvers, plugins: [ ApolloServerPluginResponseCache({ sessionId: (requestContext: GraphQLRequestContext<any>) => requestContext.request.http.headers.get(`session-id`) || null, extraCacheKeyData: (requestContext: GraphQLRequestContext<any>) => requestContext.request.http.headers.get(`extra-cache-key-data`) || null, shouldReadFromCache: (requestContext: GraphQLRequestContext<any>) => !requestContext.request.http.headers.get(`no-read-from-cache`), shouldWriteToCache: (requestContext: GraphQLRequestContext<any>) => !requestContext.request.http.headers.get(`no-write-to-cache`) }) ] }); const apolloFetch = createApolloFetch({ uri }); apolloFetch.use(({ request, options }, next) => { const headers 
= (request as any).headers; if (headers) { if (!options.headers) { options.headers = {}; } for (const k in headers) { options.headers[k] = headers[k]; } } next(); }); // Make HTTP response headers visible on the result next to 'data'. apolloFetch.useAfter(({ response }, next) => { response.parsed.httpHeaders = response.headers; next(); }); // Use 'any' because we're sneaking httpHeaders onto response.parsed. function httpHeader(result: any, header: string): string | null { const value = (result.httpHeaders as Headers).get(header); // hack: hapi sets cache-control: no-cache by default; make it // look to our tests like the other servers. if (header === `cache-control` && value === `no-cache`) { return null; } return value; } // Just for the typing. function doFetch(options: GraphQLRequest & { headers?: Record<string, string> }) { return apolloFetch(options as any); } const basicQuery = `{ cached }`; const fetch = async () => { const result = await doFetch({ query: basicQuery }); expect(result.data.cached).toBe(`value:cached`); return result; }; // Cache miss { const result = await fetch(); expectCacheMiss(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); expect(httpHeader(result, `age`)).toBeNull(); } // Cache hit { const result = await fetch(); expectCacheHit(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); expect(httpHeader(result, `age`)).toBe(`0`); } // Cache hit partway to ttl. advanceTimeBy(5 * 1000); { const result = await fetch(); expectCacheHit(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); expect(httpHeader(result, `age`)).toBe(`5`); } // Cache miss after ttl. advanceTimeBy(6 * 1000); { const result = await fetch(); expectCacheMiss(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); expect(httpHeader(result, `age`)).toBeNull(); } // Cache hit. 
{ const result = await fetch(); expectCacheHit(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); expect(httpHeader(result, `age`)).toBe(`0`); } // For now, caching is based on the original document text, not the AST, // so this should be a cache miss. { const result = await doFetch({ query: `{ cached }` }); expect(result.data.cached).toBe(`value:cached`); expectCacheMiss(`cached`); } // This definitely should be a cache miss because the output is different. { const result = await doFetch({ query: `{alias: cached}` }); expect(result.data.alias).toBe(`value:cached`); expectCacheMiss(`cached`); } // Reading both a cached and uncached data should not get cached (it's a // full response cache). { const result = await doFetch({ query: `{cached uncached}` }); expect(result.data.cached).toBe(`value:cached`); expect(result.data.uncached).toBe(`value:uncached`); expectCacheMiss(`cached`); expectCacheMiss(`uncached`); expect(httpHeader(result, `cache-control`)).toBeNull(); expect(httpHeader(result, `age`)).toBeNull(); } // Just double-checking that it didn't get cached. { const result = await doFetch({ query: `{cached uncached}` }); expect(result.data.cached).toBe(`value:cached`); expect(result.data.uncached).toBe(`value:uncached`); expectCacheMiss(`cached`); expectCacheMiss(`uncached`); expect(httpHeader(result, `cache-control`)).toBeNull(); expect(httpHeader(result, `age`)).toBeNull(); } // Let's just remind ourselves that the basic query is cacheable. { await doFetch({ query: basicQuery }); expectCacheHit(`cached`); } // But if we give it some extra cache key data, it'll be cached separately. { const result = await doFetch({ query: basicQuery, headers: { "extra-cache-key-data": `foo` } }); expect(result.data.cached).toBe(`value:cached`); expectCacheMiss(`cached`); } // But if we give it the same extra cache key data twice, it's a hit. 
{ const result = await doFetch({ query: basicQuery, headers: { "extra-cache-key-data": `foo` } }); expect(result.data.cached).toBe(`value:cached`); expectCacheHit(`cached`); } // Without a session ID, private fields won't be cached. { const result = await doFetch({ query: `{private}` }); expect(result.data.private).toBe(`value:private`); expectCacheMiss(`private`); // Note that the HTTP header calculator doesn't know about session // IDs, so it'll still tell HTTP-level caches to cache this, albeit // privately. expect(httpHeader(result, `cache-control`)).toBe(`max-age=9, private`); expect(httpHeader(result, `age`)).toBeNull(); } // See? { const result = await doFetch({ query: `{private}` }); expect(result.data.private).toBe(`value:private`); expectCacheMiss(`private`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=9, private`); } // OK, how about with a session ID. First try should be a miss. { const result = await doFetch({ query: `{private}`, headers: { "session-id": `foo` } }); expect(result.data.private).toBe(`value:private`); expectCacheMiss(`private`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=9, private`); } // But next try should be a hit. { const result = await doFetch({ query: `{private}`, headers: { "session-id": `foo` } }); expect(result.data.private).toBe(`value:private`); expectCacheHit(`private`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=9, private`); } // But a different session ID should be a miss again. { const result = await doFetch({ query: `{private}`, headers: { "session-id": `bar` } }); expect(result.data.private).toBe(`value:private`); expectCacheMiss(`private`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=9, private`); } // As should be no session. 
{ const result = await doFetch({ query: `{private}` }); expect(result.data.private).toBe(`value:private`); expectCacheMiss(`private`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=9, private`); } // Let's remind ourselves once again that the basic (public) query is *still* cached. { const result = await doFetch({ query: basicQuery }); expectCacheHit(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); } // If you're logged in, though, you get your own cache shared with all // other authenticated users (the "authenticated public" cache), so this // is a miss. It's still a public cache, though, for the HTTP header. // XXX Does that makes sense? Maybe this should be private, or maybe we // should drop the entire "authenticated public" concept. { const result = await doFetch({ query: basicQuery, headers: { "session-id": `bar` } }); expect(result.data.cached).toBe(`value:cached`); expectCacheMiss(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); } // See, this other session sees it! { const result = await doFetch({ query: basicQuery, headers: { "session-id": `baz` } }); expect(result.data.cached).toBe(`value:cached`); expectCacheHit(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); expect(httpHeader(result, `age`)).toBe(`0`); } // Let's continue to remind ourselves that the basic (public) query is *still* cached. { const result = await doFetch({ query: basicQuery }); expectCacheHit(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); } // But what if we specifically ask to not read from the cache? { const result = await doFetch({ query: basicQuery, headers: { "no-read-from-cache": `y` } }); expect(result.data.cached).toBe(`value:cached`); expectCacheMiss(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); } // Let's expire the cache, and run again, not writing to the cache. 
advanceTimeBy(15 * 1000); { const result = await doFetch({ query: basicQuery, headers: { "no-write-to-cache": `y` } }); expect(result.data.cached).toBe(`value:cached`); expectCacheMiss(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); } // And now verify that in fact we did not write! { const result = await doFetch({ query: basicQuery }); expect(result.data.cached).toBe(`value:cached`); expectCacheMiss(`cached`); expect(httpHeader(result, `cache-control`)).toBe(`max-age=10, public`); } }); }); describe(`Gateway`, () => { it(`receives schema updates from the gateway`, async () => { const makeQueryTypeWithField = fieldName => new GraphQLSchema({ query: new GraphQLObjectType({ name: `QueryType`, fields: { [fieldName]: { type: GraphQLString } } }) }); const executor = req => (req.source as string).match(/1/) ? Promise.resolve({ data: { testString1: `hello` } }) : Promise.resolve({ data: { testString2: `aloha` } }); const { gateway, triggers } = makeGatewayMock({ executor }); triggers.resolveLoad({ schema: makeQueryTypeWithField(`testString1`), executor }); const { url: uri } = await createApolloServer({ gateway, subscriptions: false }); const apolloFetch = createApolloFetch({ uri }); const result1 = await apolloFetch({ query: `{testString1}` }); expect(result1.data).toEqual({ testString1: `hello` }); expect(result1.errors).toBeUndefined(); triggers.triggerSchemaChange(makeQueryTypeWithField(`testString2`)); const result2 = await apolloFetch({ query: `{testString2}` }); expect(result2.data).toEqual({ testString2: `aloha` }); expect(result2.errors).toBeUndefined(); }); it(`passes engine data to the gateway`, async () => { const optionsSpy = jest.fn(); const { gateway, triggers } = makeGatewayMock({ optionsSpy }); triggers.resolveLoad({ schema, executor: () => {} }); await createApolloServer({ gateway, subscriptions: false, engine: { apiKey: `service:tester:1234abc`, schemaTag: `staging` } }); expect(optionsSpy).toHaveBeenLastCalledWith({ 
engine: { apiKeyHash: `0ca858e7fe8cffc01c5f1db917d2463b348b50d267427e54c1c8c99e557b242f4145930b949905ec430642467613610e471c40bb7a251b1e2248c399bb0498c4`, graphId: `tester`, graphVariant: `staging` } }); }); it(`unsubscribes from schema update on close`, async () => { const unsubscribeSpy = jest.fn(); const { gateway, triggers } = makeGatewayMock({ unsubscribeSpy }); triggers.resolveLoad({ schema, executor: () => {} }); await createApolloServer({ gateway, subscriptions: false }); expect(unsubscribeSpy).not.toHaveBeenCalled(); await stopServer(); expect(unsubscribeSpy).toHaveBeenCalled(); }); it(`waits until gateway has resolved a schema to respond to queries`, async () => { const wait = ms => new Promise(resolve => setTimeout(resolve, ms)); let resolveExecutor; const executor = () => new Promise(resolve => { resolveExecutor = () => { resolve({ data: { testString: `hi - but federated!` } }); }; }); const { gateway, triggers } = makeGatewayMock({ executor }); triggers.resolveLoad({ schema, executor }); const { url: uri } = await createApolloServer({ gateway, subscriptions: false }); const fetchComplete = jest.fn(); const apolloFetch = createApolloFetch({ uri }); const result = apolloFetch({ query: `{testString}` }).then(result => { fetchComplete(result); return result; }); expect(fetchComplete).not.toHaveBeenCalled(); await wait(100); //some bogus value to make sure we aren't returning early expect(fetchComplete).not.toHaveBeenCalled(); resolveExecutor(); const resolved = await result; expect(fetchComplete).toHaveBeenCalled(); expect(resolved.data).toEqual({ testString: `hi - but federated!` }); expect(resolved.errors).toBeUndefined(); }); it(`can serve multiple active schemas simultaneously during a schema rollover`, async () => { const wait = ms => new Promise(resolve => setTimeout(resolve, ms)); const makeQueryTypeWithField = fieldName => new GraphQLSchema({ query: new GraphQLObjectType({ name: `QueryType`, fields: { [fieldName]: { type: GraphQLString } } }) }); 
const makeEventuallyResolvingPromise = val => { let resolver; const promise = new Promise(resolve => (resolver = () => resolve(val))); return { resolver, promise }; }; const { resolver: r1, promise: p1 } = makeEventuallyResolvingPromise({ data: { testString1: `1` } }); const { resolver: r2, promise: p2 } = makeEventuallyResolvingPromise({ data: { testString2: `2` } }); const { resolver: r3, promise: p3 } = makeEventuallyResolvingPromise({ data: { testString3: `3` } }); const executor = req => ((req.source as string).match(/1/) ? p1 : (req.source as string).match(/2/) ? p2 : p3); const { gateway, triggers } = makeGatewayMock({ executor }); triggers.resolveLoad({ schema: makeQueryTypeWithField(`testString1`), executor }); const { url: uri } = await createApolloServer({ gateway, subscriptions: false }); // TODO: Remove these awaits... I think it may require the `onSchemaChange` to block? const apolloFetch = createApolloFetch({ uri }); const result1 = apolloFetch({ query: `{testString1}` }); await wait(100); triggers.triggerSchemaChange(makeQueryTypeWithField(`testString2`)); await wait(100); const result2 = apolloFetch({ query: `{testString2}` }); await wait(100); triggers.triggerSchemaChange(makeQueryTypeWithField(`testString3`)); await wait(100); const result3 = apolloFetch({ query: `{testString3}` }); await wait(100); r3(); await wait(100); r1(); await wait(100); r2(); await Promise.all([result1, result2, result3]).then(([v1, v2, v3]) => { expect(v1.errors).toBeUndefined(); expect(v2.errors).toBeUndefined(); expect(v3.errors).toBeUndefined(); expect(v1.data).toEqual({ testString1: `1` }); expect(v2.data).toEqual({ testString2: `2` }); expect(v3.data).toEqual({ testString3: `3` }); }); }); }); }); }
the_stack
import * as lsp from 'vscode-languageserver/node'; import tsp from 'typescript/lib/protocol'; import { URI } from 'vscode-uri'; import { LspDocuments } from './document'; const RE_PATHSEP_WINDOWS = /\\/g; export function uriToPath(stringUri: string): string | undefined { const uri = URI.parse(stringUri); if (uri.scheme !== 'file') { return undefined; } return normalizeFsPath(uri.fsPath); } function parsePathOrUri(filepath: string): URI { // handles valid URIs from yarn pnp, will error if doesn't have scheme // zipfile:/foo/bar/baz.zip::path/to/module if (filepath.startsWith('zipfile:')) { return URI.parse(filepath); } // handles valid filepaths from everything else /path/to/module return URI.file(filepath); } export function pathToUri(filepath: string, documents: LspDocuments | undefined): string { const fileUri = parsePathOrUri(filepath); const normalizedFilepath = normalizePath(fileUri.fsPath); const document = documents && documents.get(normalizedFilepath); return document ? document.uri : fileUri.toString(); } /** * Normalizes the file system path. * * On systems other than Windows it should be an no-op. * * On Windows, an input path in a format like "C:/path/file.ts" * will be normalized to "c:/path/file.ts". */ export function normalizePath(filePath: string): string { const fsPath = URI.file(filePath).fsPath; return normalizeFsPath(fsPath); } /** * Normalizes the path obtained through the "fsPath" property of the URI module. */ export function normalizeFsPath(fsPath: string): string { return fsPath.replace(RE_PATHSEP_WINDOWS, '/'); } function currentVersion(filepath: string, documents: LspDocuments | undefined): number | null { const fileUri = URI.file(filepath); const normalizedFilepath = normalizePath(fileUri.fsPath); const document = documents && documents.get(normalizedFilepath); return document ? 
document.version : null; } export function toPosition(location: tsp.Location): lsp.Position { return { line: location.line - 1, character: location.offset - 1 }; } export function toLocation(fileSpan: tsp.FileSpan, documents: LspDocuments | undefined): lsp.Location { return { uri: pathToUri(fileSpan.file, documents), range: { start: toPosition(fileSpan.start), end: toPosition(fileSpan.end) } }; } export function toFileRangeRequestArgs(file: string, range: lsp.Range): tsp.FileRangeRequestArgs { return { file, startLine: range.start.line + 1, startOffset: range.start.character + 1, endLine: range.end.line + 1, endOffset: range.end.character + 1 }; } const symbolKindsMapping: { [name: string]: lsp.SymbolKind; } = { 'enum member': lsp.SymbolKind.Constant, 'JSX attribute': lsp.SymbolKind.Property, 'local class': lsp.SymbolKind.Class, 'local function': lsp.SymbolKind.Function, 'local var': lsp.SymbolKind.Variable, 'type parameter': lsp.SymbolKind.Variable, alias: lsp.SymbolKind.Variable, class: lsp.SymbolKind.Class, const: lsp.SymbolKind.Constant, constructor: lsp.SymbolKind.Constructor, enum: lsp.SymbolKind.Enum, field: lsp.SymbolKind.Field, file: lsp.SymbolKind.File, function: lsp.SymbolKind.Function, getter: lsp.SymbolKind.Method, interface: lsp.SymbolKind.Interface, let: lsp.SymbolKind.Variable, method: lsp.SymbolKind.Method, module: lsp.SymbolKind.Module, parameter: lsp.SymbolKind.Variable, property: lsp.SymbolKind.Property, setter: lsp.SymbolKind.Method, var: lsp.SymbolKind.Variable }; export function toSymbolKind(tspKind: string): lsp.SymbolKind { return symbolKindsMapping[tspKind] || lsp.SymbolKind.Variable; } function toDiagnosticSeverity(category: string): lsp.DiagnosticSeverity { switch (category) { case 'error': return lsp.DiagnosticSeverity.Error; case 'warning': return lsp.DiagnosticSeverity.Warning; case 'suggestion': return lsp.DiagnosticSeverity.Hint; default: return lsp.DiagnosticSeverity.Error; } } export function toDiagnostic( diagnostic: 
tsp.Diagnostic, documents: LspDocuments | undefined, publishDiagnosticsCapabilities: lsp.TextDocumentClientCapabilities['publishDiagnostics'] ): lsp.Diagnostic { const lspDiagnostic: lsp.Diagnostic = { range: { start: toPosition(diagnostic.start), end: toPosition(diagnostic.end) }, message: diagnostic.text, severity: toDiagnosticSeverity(diagnostic.category), code: diagnostic.code, source: diagnostic.source || 'typescript', relatedInformation: asRelatedInformation(diagnostic.relatedInformation, documents) }; if (publishDiagnosticsCapabilities?.tagSupport) { lspDiagnostic.tags = getDiagnosticTags(diagnostic); } return lspDiagnostic; } function getDiagnosticTags(diagnostic: tsp.Diagnostic): lsp.DiagnosticTag[] { const tags: lsp.DiagnosticTag[] = []; if (diagnostic.reportsUnnecessary) { tags.push(lsp.DiagnosticTag.Unnecessary); } if (diagnostic.reportsDeprecated) { tags.push(lsp.DiagnosticTag.Deprecated); } return tags; } function asRelatedInformation(info: tsp.DiagnosticRelatedInformation[] | undefined, documents: LspDocuments | undefined): lsp.DiagnosticRelatedInformation[] | undefined { if (!info) { return undefined; } const result: lsp.DiagnosticRelatedInformation[] = []; for (const item of info) { const span = item.span; if (span) { result.push(lsp.DiagnosticRelatedInformation.create( toLocation(span, documents), item.message )); } } return result; } export function toTextEdit(edit: tsp.CodeEdit): lsp.TextEdit { return { range: { start: toPosition(edit.start), end: toPosition(edit.end) }, newText: edit.newText }; } export function toTextDocumentEdit(change: tsp.FileCodeEdits, documents: LspDocuments | undefined): lsp.TextDocumentEdit { return { textDocument: { uri: pathToUri(change.fileName, documents), version: currentVersion(change.fileName, documents) }, edits: change.textChanges.map(c => toTextEdit(c)) }; } export function toDocumentHighlight(item: tsp.DocumentHighlightsItem): lsp.DocumentHighlight[] { return item.highlightSpans.map(i => { return 
<lsp.DocumentHighlight>{ kind: toDocumentHighlightKind(i.kind), range: { start: toPosition(i.start), end: toPosition(i.end) } }; }); } // copied because the protocol module is not available at runtime (js version). enum HighlightSpanKind { none = 'none', definition = 'definition', reference = 'reference', writtenReference = 'writtenReference', } function toDocumentHighlightKind(kind: tsp.HighlightSpanKind): lsp.DocumentHighlightKind { switch (kind) { case HighlightSpanKind.definition: return lsp.DocumentHighlightKind.Write; case HighlightSpanKind.reference: case HighlightSpanKind.writtenReference: return lsp.DocumentHighlightKind.Read; default: return lsp.DocumentHighlightKind.Text; } } export function asRange(span: tsp.TextSpan): lsp.Range { return lsp.Range.create( Math.max(0, span.start.line - 1), Math.max(0, span.start.offset - 1), Math.max(0, span.end.line - 1), Math.max(0, span.end.offset - 1) ); } export function asDocumentation(data: { documentation?: tsp.SymbolDisplayPart[]; tags?: tsp.JSDocTagInfo[]; }): lsp.MarkupContent | undefined { let value = ''; if (data.documentation) { value += asPlainText(data.documentation); } if (data.tags) { const tagsDocumentation = asTagsDocumentation(data.tags); if (tagsDocumentation) { value += '\n\n' + tagsDocumentation; } } return value.length ? { kind: lsp.MarkupKind.Markdown, value } : undefined; } export function asTagsDocumentation(tags: tsp.JSDocTagInfo[]): string { return tags.map(asTagDocumentation).join(' \n\n'); } export function asTagDocumentation(tag: tsp.JSDocTagInfo): string { switch (tag.name) { case 'param': { if (!tag.text) { break; } const text = asPlainText(tag.text); const body = text.split(/^([\w.]+)\s*-?\s*/); if (body && body.length === 3) { const param = body[1]; const doc = body[2]; const label = `*@${tag.name}* \`${param}\``; if (!doc) { return label; } return label + (doc.match(/\r\n|\n/g) ? 
' \n' + doc : ` — ${doc}`); } break; } } // Generic tag const label = `*@${tag.name}*`; const text = asTagBodyText(tag); if (!text) { return label; } return label + (text.match(/\r\n|\n/g) ? ' \n' + text : ` — ${text}`); } export function asTagBodyText(tag: tsp.JSDocTagInfo): string | undefined { if (!tag.text) { return undefined; } const text = asPlainText(tag.text); switch (tag.name) { case 'example': case 'default': // Convert to markdown code block if it not already one if (text.match(/^\s*[~`]{3}/g)) { return text; } return '```\n' + text + '\n```'; } return text; } export function asPlainText(parts: string | tsp.SymbolDisplayPart[]): string { if (typeof parts === 'string') { return parts; } return parts.map(part => part.text).join(''); } namespace Position { export function Min(): undefined; export function Min(...positions: lsp.Position[]): lsp.Position; export function Min(...positions: lsp.Position[]): lsp.Position | undefined { if (!positions.length) { return undefined; } let result = positions.pop()!; for (const p of positions) { if (isBefore(p, result)) { result = p; } } return result; } export function isBefore(one: lsp.Position, other: lsp.Position): boolean { if (one.line < other.line) { return true; } if (other.line < one.line) { return false; } return one.character < other.character; } export function Max(): undefined; export function Max(...positions: lsp.Position[]): lsp.Position; export function Max(...positions: lsp.Position[]): lsp.Position | undefined { if (!positions.length) { return undefined; } let result = positions.pop()!; for (const p of positions) { if (isAfter(p, result)) { result = p; } } return result; } export function isAfter(one: lsp.Position, other: lsp.Position): boolean { return !isBeforeOrEqual(one, other); } export function isBeforeOrEqual(one: lsp.Position, other: lsp.Position): boolean { if (one.line < other.line) { return true; } if (other.line < one.line) { return false; } return one.character <= other.character; } } 
export namespace Range { export function intersection(one: lsp.Range, other: lsp.Range): lsp.Range | undefined { const start = Position.Max(other.start, one.start); const end = Position.Min(other.end, one.end); if (Position.isAfter(start, end)) { // this happens when there is no overlap: // |-----| // |----| return undefined; } return lsp.Range.create(start, end); } }
the_stack
import { Component, ElementRef, EventEmitter, Input, OnDestroy, Output } from '@angular/core';
import { Title, Meta } from '@angular/platform-browser';
import { asapScheduler, Observable, of, timer } from 'rxjs';
import { catchError, observeOn, switchMap, takeUntil, tap } from 'rxjs/operators';
import { EMPTY_HTML, unwrapHtmlForSink } from 'safevalues';

import { DocumentContents, FILE_NOT_FOUND_ID, FETCHING_ERROR_ID } from 'app/documents/document.service';
import { Logger } from 'app/shared/logger.service';
import { TocService } from 'app/shared/toc.service';
import { ElementsLoader } from 'app/custom-elements/elements-loader';
import { fromInnerHTML } from 'app/shared/security';

// Constants
export const NO_ANIMATIONS = 'no-animations';

// Initialization prevents flicker once pre-rendering is on
const initialDocViewerElement = document.querySelector('aio-doc-viewer');
const initialDocViewerContent = initialDocViewerElement ? fromInnerHTML(initialDocViewerElement) : EMPTY_HTML;

@Component({
  selector: 'aio-doc-viewer',
  template: ''
  // TODO(robwormald): shadow DOM and emulated don't work here (?!)
  // encapsulation: ViewEncapsulation.ShadowDom
})
export class DocViewerComponent implements OnDestroy {
  private hostElement: HTMLElement;

  // Single-emission observable used as the starting point of the RxJS pipelines below.
  private void$ = of<void>(undefined);
  private onDestroy$ = new EventEmitter<void>();
  private docContents$ = new EventEmitter<DocumentContents>();

  // Two containers are kept so the outgoing and incoming documents can coexist
  // during the cross-fade in `swapViews()`.
  protected currViewContainer: HTMLElement = document.createElement('div');
  protected nextViewContainer: HTMLElement = document.createElement('div');

  @Input()
  set doc(newDoc: DocumentContents) {
    // Ignore `undefined` values that could happen if the host component
    // does not initially specify a value for the `doc` input.
    if (newDoc) {
      this.docContents$.emit(newDoc);
    }
  }

  // The new document is ready to be inserted into the viewer.
  // (Embedded components have been loaded and instantiated, if necessary.)
  @Output() docReady = new EventEmitter<void>();

  // The previous document has been removed from the viewer.
  // (The leaving animation (if any) has been completed and the node has been removed from the DOM.)
  @Output() docRemoved = new EventEmitter<void>();

  // The new document has been inserted into the viewer.
  // (The node has been inserted into the DOM, but the entering animation may still be in progress.)
  @Output() docInserted = new EventEmitter<void>();

  // The new document has been fully rendered into the viewer.
  // (The entering animation has been completed.)
  @Output() docRendered = new EventEmitter<void>();

  constructor(
      elementRef: ElementRef, private logger: Logger, private titleService: Title,
      private metaService: Meta, private tocService: TocService, private elementsLoader: ElementsLoader) {
    this.hostElement = elementRef.nativeElement;
    // Security: the initialDocViewerContent comes from the prerendered DOM and is considered to be secure
    this.hostElement.innerHTML = unwrapHtmlForSink(initialDocViewerContent);

    if (this.hostElement.firstElementChild) {
      this.currViewContainer = this.hostElement.firstElementChild as HTMLElement;
    }

    // `switchMap` cancels an in-flight render when a newer document arrives.
    this.docContents$
        .pipe(
            observeOn(asapScheduler),
            switchMap(newDoc => this.render(newDoc)),
            takeUntil(this.onDestroy$),
        )
        .subscribe();
  }

  ngOnDestroy() {
    this.onDestroy$.emit();
  }

  /**
   * Prepare for setting the window title and ToC.
   * Return a function to actually set them.
   */
  protected prepareTitleAndToc(targetElem: HTMLElement, docId: string): () => void {
    const titleEl = targetElem.querySelector('h1');
    const needsToc = !!titleEl && !/no-?toc/i.test(titleEl.className);
    const embeddedToc = targetElem.querySelector('aio-toc.embedded');

    if (titleEl && titleEl.parentNode && needsToc && !embeddedToc) {
      // Add an embedded ToC if it's needed and there isn't one in the content already.
      const toc = document.createElement('aio-toc');
      toc.className = 'embedded';
      titleEl.parentNode.insertBefore(toc, titleEl.nextSibling);
    } else if (!needsToc && embeddedToc && embeddedToc.parentNode !== null) {
      // Remove the embedded Toc if it's there and not needed.
      // We cannot use ChildNode.remove() because of IE11
      embeddedToc.parentNode.removeChild(embeddedToc);
    }

    return () => {
      this.tocService.reset();
      let title: string|null = '';

      // Only create ToC for docs with an `<h1>` heading.
      // If you don't want a ToC, add "no-toc" class to `<h1>`.
      if (titleEl) {
        title = (typeof titleEl.innerText === 'string') ? titleEl.innerText : titleEl.textContent;

        if (needsToc) {
          this.tocService.genToc(targetElem, docId);
        }
      }

      this.titleService.setTitle(title ? `Angular - ${title}` : 'Angular');
    };
  }

  /**
   * Add doc content to host element and build it out with embedded components.
   */
  protected render(doc: DocumentContents): Observable<void> {
    let addTitleAndToc: () => void;

    this.setNoIndex(doc.id === FILE_NOT_FOUND_ID || doc.id === FETCHING_ERROR_ID);

    return this.void$.pipe(
        tap(() => {
          if (doc.contents === null) {
            this.nextViewContainer.textContent = '';
          } else {
            // Security: `doc.contents` is always authored by the documentation team
            //           and is considered to be safe.
            this.nextViewContainer.innerHTML = unwrapHtmlForSink(doc.contents);
          }
        }),
        tap(() => addTitleAndToc = this.prepareTitleAndToc(this.nextViewContainer, doc.id)),
        switchMap(() => this.elementsLoader.loadContainedCustomElements(this.nextViewContainer)),
        tap(() => this.docReady.emit()),
        switchMap(() => this.swapViews(addTitleAndToc)),
        tap(() => this.docRendered.emit()),
        // On any failure: log, clear the pending view, and mark the page no-index
        // so search engines do not index an error page.
        catchError(err => {
          const errorMessage = `${(err instanceof Error) ? err.stack : err}`;
          this.logger.error(new Error(`[DocViewer] Error preparing document '${doc.id}': ${errorMessage}`));
          this.nextViewContainer.textContent = '';
          this.setNoIndex(true);

          // TODO(gkalpak): Remove this once gathering debug info is no longer needed.
          if (/loading chunk \S+ failed/i.test(errorMessage)) {
            // Print some info to help with debugging.
            // (There is no reason to wait for this async call to complete before continuing.)
            printSwDebugInfo();
          }

          return this.void$;
        }),
    );
  }

  /**
   * Tell search engine crawlers whether to index this page
   */
  private setNoIndex(val: boolean) {
    if (val) {
      this.metaService.addTag({ name: 'robots', content: 'noindex' });
    } else {
      this.metaService.removeTag('name="robots"');
    }
  }

  /**
   * Swap the views, removing `currViewContainer` and inserting `nextViewContainer`.
   * (At this point all content should be ready, including having loaded and instantiated embedded
   * components.)
   *
   * Optionally, run a callback as soon as `nextViewContainer` has been inserted, but before the
   * entering animation has been completed. This is useful for work that needs to be done as soon as
   * the element has been attached to the DOM.
   */
  protected swapViews(onInsertedCb = () => {}): Observable<void> {
    // Emits once on the next animation frame; unsubscribing cancels the pending frame.
    const raf$ = new Observable<void>(subscriber => {
      const rafId = requestAnimationFrame(() => {
        subscriber.next();
        subscriber.complete();
      });
      return () => cancelAnimationFrame(rafId);
    });

    // Get the actual transition duration (taking global styles into account).
    // According to the [CSSOM spec](https://drafts.csswg.org/cssom/#serializing-css-values),
    // `time` values should be returned in seconds.
    const getActualDuration = (elem: HTMLElement) => {
      const cssValue = getComputedStyle(elem).transitionDuration || '';
      const seconds = Number(cssValue.replace(/s$/, ''));
      return 1000 * seconds;
    };

    // Some properties are not assignable and thus cannot be animated.
    // Example methods, readonly and CSS properties:
    // "length", "parentRule", "getPropertyPriority", "getPropertyValue", "item", "removeProperty", "setProperty"
    type StringValueCSSStyleDeclaration = Exclude<
      { [K in keyof CSSStyleDeclaration]: CSSStyleDeclaration[K] extends string ? K : never; }[keyof CSSStyleDeclaration],
      number
    >;
    const animateProp = (elem: HTMLElement, prop: StringValueCSSStyleDeclaration, from: string, to: string, duration = 200) => {
      const animationsDisabled = this.hostElement.classList.contains(NO_ANIMATIONS);
      elem.style.transition = '';
      return animationsDisabled
          ? this.void$.pipe(tap(() => elem.style[prop] = to))
          : this.void$.pipe(
              // In order to ensure that the `from` value will be applied immediately (i.e.
              // without transition) and that the `to` value will be affected by the
              // `transition` style, we need to ensure an animation frame has passed between
              // setting each style.
              switchMap(() => raf$), tap(() => elem.style[prop] = from),
              switchMap(() => raf$), tap(() => elem.style.transition = `all ${duration}ms ease-in-out`),
              switchMap(() => raf$), tap(() => elem.style[prop] = to),
              switchMap(() => timer(getActualDuration(elem))), switchMap(() => this.void$),
          );
    };

    const animateLeave = (elem: HTMLElement) => animateProp(elem, 'opacity', '1', '0.1');
    const animateEnter = (elem: HTMLElement) => animateProp(elem, 'opacity', '0.1', '1');

    let done$ = this.void$;

    if (this.currViewContainer.parentElement) {
      done$ = done$.pipe(
          // Remove the current view from the viewer.
          switchMap(() => animateLeave(this.currViewContainer)),
          tap(() => (this.currViewContainer.parentElement as HTMLElement).removeChild(this.currViewContainer)),
          tap(() => this.docRemoved.emit()),
      );
    }

    return done$.pipe(
        // Insert the next view into the viewer.
        tap(() => this.hostElement.appendChild(this.nextViewContainer)),
        tap(() => onInsertedCb()),
        tap(() => this.docInserted.emit()),
        switchMap(() => animateEnter(this.nextViewContainer)),
        // Update the view references and clean up unused nodes.
        tap(() => {
          const prevViewContainer = this.currViewContainer;
          this.currViewContainer = this.nextViewContainer;
          this.nextViewContainer = prevViewContainer;
          this.nextViewContainer.textContent = '';  // Empty to release memory.
        }),
    );
  }
}

// Helpers
/**
 * Print some info regarding the ServiceWorker and the caches contents to help debugging potential
 * issues with failing to find resources in the cache.
 * (See https://github.com/angular/angular/issues/28114.)
 */
async function printSwDebugInfo(): Promise<void> {
  const sep = '\n----------';
  const swState = navigator.serviceWorker?.controller?.state ?? 'N/A';

  console.log(`\nServiceWorker: ${swState}`);

  if (typeof caches === 'undefined') {
    console.log(`${sep}\nCaches: N/A`);
  } else {
    const allCacheNames = await caches.keys();
    const swCacheNames = allCacheNames.filter(name => name.startsWith('ngsw:/:'));

    await findCachesAndPrintEntries(swCacheNames, 'db:control', true, ['manifests']);
    await findCachesAndPrintEntries(swCacheNames, 'assets:app-shell:cache', false);
    await findCachesAndPrintEntries(swCacheNames, 'assets:app-shell:meta', true);
  }

  if (swState === 'activated') {
    console.log(sep);
    await fetchAndPrintSwInternalDebugInfo();
  }

  console.warn(
      `${sep}\nIf you see this error, please report an issue at ` +
      'https://github.com/angular/angular/issues/new?template=3-docs-bug.md including the above logs.');

  // Internal helpers
  async function fetchAndPrintSwInternalDebugInfo() {
    try {
      const res = await fetch('/ngsw/state');
      if (!res.ok) {
        throw new Error(`Response ${res.status} ${res.statusText}`);
      }
      console.log(await res.text());
    } catch (err) {
      console.log(`Failed to retrieve debug info from '/ngsw/state': ${err.message || err}`);
    }
  }

  async function findCachesAndPrintEntries(
      swCacheNames: string[], nameSuffix: string, includeValues: boolean,
      ignoredKeys: string[] = []): Promise<void> {
    const cacheNames = swCacheNames.filter(name => name.endsWith(nameSuffix));

    for (const cacheName of cacheNames) {
      const cacheEntries = await getCacheEntries(cacheName, includeValues, ignoredKeys);
      await printCacheEntries(cacheName, cacheEntries);
    }
  }

  async function getCacheEntries(
      name: string, includeValues: boolean,
      ignoredKeys: string[] = []): Promise<{key: string, value?: unknown}[]> {
    // Normalize ignored keys through `Request` so relative paths compare equal to full URLs.
    const ignoredUrls = new Set(ignoredKeys.map(key => new Request(key).url));

    const cache = await caches.open(name);
    const keys = (await cache.keys()).map(req => req.url).filter(url => !ignoredUrls.has(url));
    const entries = await Promise.all(keys.map(async key => ({
      key,
      value: !includeValues ? undefined : await (await cache.match(key))?.json(),
    })));

    return entries;
  }

  function printCacheEntries(name: string, entries: {key: string, value?: unknown}[]): void {
    const entriesStr = entries
        .map(({key, value}) => `  - ${key}${!value ? '' : `: ${JSON.stringify(value)}`}`)
        .join('\n');

    console.log(`\nCache: ${name} (${entries.length} entries)\n${entriesStr}`);
  }
}
the_stack
import * as ec2 from '@aws-cdk/aws-ec2'; import * as iam from '@aws-cdk/aws-iam'; import * as kms from '@aws-cdk/aws-kms'; import { ArnFormat, ConcreteDependable, IDependable, IResource, RemovalPolicy, Resource, Size, Stack, Tags } from '@aws-cdk/core'; // keep this import separate from other imports to reduce chance for merge conflicts with v2-main // eslint-disable-next-line no-duplicate-imports import { FeatureFlags } from '@aws-cdk/core'; import * as cxapi from '@aws-cdk/cx-api'; import { Construct } from 'constructs'; import { AccessPoint, AccessPointOptions } from './access-point'; import { CfnFileSystem, CfnMountTarget } from './efs.generated'; /** * EFS Lifecycle Policy, if a file is not accessed for given days, it will move to EFS Infrequent Access. * * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-efs-filesystem.html#cfn-elasticfilesystem-filesystem-lifecyclepolicies */ export enum LifecyclePolicy { /** * After 7 days of not being accessed. */ AFTER_7_DAYS = 'AFTER_7_DAYS', /** * After 14 days of not being accessed. */ AFTER_14_DAYS = 'AFTER_14_DAYS', /** * After 30 days of not being accessed. */ AFTER_30_DAYS = 'AFTER_30_DAYS', /** * After 60 days of not being accessed. */ AFTER_60_DAYS = 'AFTER_60_DAYS', /** * After 90 days of not being accessed. */ AFTER_90_DAYS = 'AFTER_90_DAYS' } /** * EFS Out Of Infrequent Access Policy, if a file is accessed given times, it will move back to primary * storage class. * * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-efs-filesystem-lifecyclepolicy.html#cfn-efs-filesystem-lifecyclepolicy-transitiontoprimarystorageclass */ export enum OutOfInfrequentAccessPolicy { /** * After 1 access */ AFTER_1_ACCESS = 'AFTER_1_ACCESS' } /** * EFS Performance mode. 
* * @see https://docs.aws.amazon.com/efs/latest/ug/performance.html#performancemodes */ export enum PerformanceMode { /** * General Purpose is ideal for latency-sensitive use cases, like web serving * environments, content management systems, home directories, and general file serving. * Recommended for the majority of Amazon EFS file systems. */ GENERAL_PURPOSE = 'generalPurpose', /** * File systems in the Max I/O mode can scale to higher levels of aggregate * throughput and operations per second. This scaling is done with a tradeoff * of slightly higher latencies for file metadata operations. * Highly parallelized applications and workloads, such as big data analysis, * media processing, and genomics analysis, can benefit from this mode. */ MAX_IO = 'maxIO' } /** * EFS Throughput mode. * * @see https://docs.aws.amazon.com/efs/latest/ug/performance.html#throughput-modes */ export enum ThroughputMode { /** * This mode on Amazon EFS scales as the size of the file system in the standard storage class grows. */ BURSTING = 'bursting', /** * This mode can instantly provision the throughput of the file system (in MiB/s) independent of the amount of data stored. */ PROVISIONED = 'provisioned' } /** * Represents an Amazon EFS file system */ export interface IFileSystem extends ec2.IConnectable, IResource { /** * The ID of the file system, assigned by Amazon EFS. * * @attribute */ readonly fileSystemId: string; /** * The ARN of the file system. * * @attribute */ readonly fileSystemArn: string; /** * Dependable that can be depended upon to ensure the mount targets of the filesystem are ready */ readonly mountTargetsAvailable: IDependable; /** * Grant the actions defined in actions to the given grantee * on this File System resource. */ grant(grantee: iam.IGrantable, ...actions: string[]): iam.Grant; } /** * Properties of EFS FileSystem. */ export interface FileSystemProps { /** * VPC to launch the file system in. 
*/ readonly vpc: ec2.IVpc; /** * Security Group to assign to this file system. * * @default - creates new security group which allows all outbound traffic */ readonly securityGroup?: ec2.ISecurityGroup; /** * Which subnets to place the mount target in the VPC. * * @default - the Vpc default strategy if not specified */ readonly vpcSubnets?: ec2.SubnetSelection; /** * Defines if the data at rest in the file system is encrypted or not. * * @default - If your application has the '@aws-cdk/aws-efs:defaultEncryptionAtRest' feature flag set, the default is true, otherwise, the default is false. * @link https://docs.aws.amazon.com/cdk/latest/guide/featureflags.html */ readonly encrypted?: boolean; /** * The file system's name. * * @default - CDK generated name */ readonly fileSystemName?: string; /** * The KMS key used for encryption. This is required to encrypt the data at rest if @encrypted is set to true. * * @default - if 'encrypted' is true, the default key for EFS (/aws/elasticfilesystem) is used */ readonly kmsKey?: kms.IKey; /** * A policy used by EFS lifecycle management to transition files to the Infrequent Access (IA) storage class. * * @default - None. EFS will not transition files to the IA storage class. */ readonly lifecyclePolicy?: LifecyclePolicy; /** * A policy used by EFS lifecycle management to transition files from Infrequent Access (IA) storage class to * primary storage class. * * @default - None. EFS will not transition files from IA storage to primary storage. */ readonly outOfInfrequentAccessPolicy?: OutOfInfrequentAccessPolicy; /** * The performance mode that the file system will operate under. * An Amazon EFS file system's performance mode can't be changed after the file system has been created. * Updating this property will replace the file system. * * @default PerformanceMode.GENERAL_PURPOSE */ readonly performanceMode?: PerformanceMode; /** * Enum to mention the throughput mode of the file system. 
* * @default ThroughputMode.BURSTING */ readonly throughputMode?: ThroughputMode; /** * Provisioned throughput for the file system. * This is a required property if the throughput mode is set to PROVISIONED. * Must be at least 1MiB/s. * * @default - none, errors out */ readonly provisionedThroughputPerSecond?: Size; /** * The removal policy to apply to the file system. * * @default RemovalPolicy.RETAIN */ readonly removalPolicy?: RemovalPolicy; /** * Whether to enable automatic backups for the file system. * * @default false */ readonly enableAutomaticBackups?: boolean; } /** * Properties that describe an existing EFS file system. */ export interface FileSystemAttributes { /** * The security group of the file system */ readonly securityGroup: ec2.ISecurityGroup; /** * The File System's ID. * * @default - determined based on fileSystemArn */ readonly fileSystemId?: string; /** * The File System's Arn. * * @default - determined based on fileSystemId */ readonly fileSystemArn?: string; } abstract class FileSystemBase extends Resource implements IFileSystem { /** * The security groups/rules used to allow network connections to the file system. */ public abstract readonly connections: ec2.Connections; /** * @attribute */ public abstract readonly fileSystemId: string; /** * @attribute */ public abstract readonly fileSystemArn: string; /** * Dependable that can be depended upon to ensure the mount targets of the filesystem are ready */ public abstract readonly mountTargetsAvailable: IDependable; /** * Grant the actions defined in actions to the given grantee * on this File System resource. * * @param grantee Principal to grant right to * @param actions The actions to grant */ public grant(grantee: iam.IGrantable, ...actions: string[]): iam.Grant { return iam.Grant.addToPrincipal({ grantee: grantee, actions: actions, resourceArns: [this.fileSystemArn], }); } } /** * The Elastic File System implementation of IFileSystem. 
* It creates a new, empty file system in Amazon Elastic File System (Amazon EFS). * It also creates mount target (AWS::EFS::MountTarget) implicitly to mount the * EFS file system on an Amazon Elastic Compute Cloud (Amazon EC2) instance or another resource. * * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-efs-filesystem.html * * @resource AWS::EFS::FileSystem */ export class FileSystem extends FileSystemBase { /** * The default port File System listens on. */ public static readonly DEFAULT_PORT: number = 2049; /** * Import an existing File System from the given properties. */ public static fromFileSystemAttributes(scope: Construct, id: string, attrs: FileSystemAttributes): IFileSystem { return new ImportedFileSystem(scope, id, attrs); } /** * The security groups/rules used to allow network connections to the file system. */ public readonly connections: ec2.Connections; /** * @attribute */ public readonly fileSystemId: string; /** * @attribute */ public readonly fileSystemArn: string; public readonly mountTargetsAvailable: IDependable; private readonly _mountTargetsAvailable = new ConcreteDependable(); /** * Constructor for creating a new EFS FileSystem. */ constructor(scope: Construct, id: string, props: FileSystemProps) { super(scope, id); if (props.throughputMode === ThroughputMode.PROVISIONED && props.provisionedThroughputPerSecond === undefined) { throw new Error('Property provisionedThroughputPerSecond is required when throughputMode is PROVISIONED'); } // we explictly use 'undefined' to represent 'false' to maintain backwards compatibility since // its considered an actual change in CloudFormations eyes, even though they have the same meaning. const encrypted = props.encrypted ?? (FeatureFlags.of(this).isEnabled( cxapi.EFS_DEFAULT_ENCRYPTION_AT_REST) ? 
true : undefined); const filesystem = new CfnFileSystem(this, 'Resource', { encrypted: encrypted, kmsKeyId: props.kmsKey?.keyArn, lifecyclePolicies: ( (props.lifecyclePolicy || props.outOfInfrequentAccessPolicy) ? [{ transitionToIa: props.lifecyclePolicy, transitionToPrimaryStorageClass: props.outOfInfrequentAccessPolicy, }] : undefined), performanceMode: props.performanceMode, throughputMode: props.throughputMode, provisionedThroughputInMibps: props.provisionedThroughputPerSecond?.toMebibytes(), backupPolicy: props.enableAutomaticBackups ? { status: 'ENABLED' } : undefined, }); filesystem.applyRemovalPolicy(props.removalPolicy); this.fileSystemId = filesystem.ref; this.fileSystemArn = filesystem.attrArn; Tags.of(this).add('Name', props.fileSystemName || this.node.path); const securityGroup = (props.securityGroup || new ec2.SecurityGroup(this, 'EfsSecurityGroup', { vpc: props.vpc, })); this.connections = new ec2.Connections({ securityGroups: [securityGroup], defaultPort: ec2.Port.tcp(FileSystem.DEFAULT_PORT), }); const subnets = props.vpc.selectSubnets(props.vpcSubnets ?? { onePerAz: true }); // We now have to create the mount target for each of the mentioned subnet let mountTargetCount = 0; this.mountTargetsAvailable = []; subnets.subnetIds.forEach((subnetId: string) => { const mountTarget = new CfnMountTarget(this, 'EfsMountTarget' + (++mountTargetCount), { fileSystemId: this.fileSystemId, securityGroups: Array.of(securityGroup.securityGroupId), subnetId, }); this._mountTargetsAvailable.add(mountTarget); }); this.mountTargetsAvailable = this._mountTargetsAvailable; } /** * create access point from this filesystem */ public addAccessPoint(id: string, accessPointOptions: AccessPointOptions = {}): AccessPoint { return new AccessPoint(this, id, { fileSystem: this, ...accessPointOptions, }); } } class ImportedFileSystem extends FileSystemBase { /** * The security groups/rules used to allow network connections to the file system. 
*/ public readonly connections: ec2.Connections; /** * @attribute */ public readonly fileSystemId: string; /** * @attribute */ public readonly fileSystemArn: string; /** * Dependable that can be depended upon to ensure the mount targets of the filesystem are ready */ public readonly mountTargetsAvailable: IDependable; constructor(scope: Construct, id: string, attrs: FileSystemAttributes) { super(scope, id); if (!!attrs.fileSystemId === !!attrs.fileSystemArn) { throw new Error('One of fileSystemId or fileSystemArn, but not both, must be provided.'); } this.fileSystemArn = attrs.fileSystemArn ?? Stack.of(scope).formatArn({ service: 'elasticfilesystem', resource: 'file-system', resourceName: attrs.fileSystemId, }); const parsedArn = Stack.of(scope).splitArn(this.fileSystemArn, ArnFormat.SLASH_RESOURCE_NAME); if (!parsedArn.resourceName) { throw new Error(`Invalid FileSystem Arn ${this.fileSystemArn}`); } this.fileSystemId = attrs.fileSystemId ?? parsedArn.resourceName; this.connections = new ec2.Connections({ securityGroups: [attrs.securityGroup], defaultPort: ec2.Port.tcp(FileSystem.DEFAULT_PORT), }); this.mountTargetsAvailable = new ConcreteDependable(); } }
the_stack
import {
  KeyboardEventHandler,
  MouseEventHandler,
  FocusEventHandler,
  Ref,
  RefCallback,
  TouchEventHandler,
  useCallback,
  useEffect,
  useRef,
  useState,
} from "react";
import { applyRef, useDir, useIsomorphicLayoutEffect } from "@react-md/utils";

import { DEFAULT_SLIDER_ANIMATION_TIME } from "./constants";
import {
  DefinedSliderValueOptions,
  SliderControls,
  SliderDragEvent,
  SliderDraggingBy,
  SliderEventHandlers,
  SliderPresentation,
  SliderThumbIndex,
  ThumbIndex,
} from "./types";
import {
  CombinedSliderControls,
  getDragPercentage,
  getDragValue,
  isMouseEvent,
  isRangeSlider,
  isTouchEvent,
  SliderDragValues,
} from "./utils";

/**
 * Options for the `useSliderControls` hook: the combined slider controls and
 * value options plus presentation flags, user event handlers, and optional
 * refs for the track and each thumb element.
 *
 * @internal
 * @remarks \@since 2.5.0
 */
export type SliderControlsOptions = CombinedSliderControls &
  SliderPresentation &
  SliderEventHandlers &
  DefinedSliderValueOptions & {
    ref?: Ref<HTMLSpanElement | null>;
    thumb1Ref?: Ref<HTMLSpanElement | null>;
    thumb2Ref?: Ref<HTMLSpanElement | null>;
    animationDuration?: number;
  };

// The only keyboard keys the slider reacts to; all other keys are ignored so
// normal tab/typing behavior is unaffected.
const VALID_KEYS = [
  "ArrowDown",
  "ArrowUp",
  "ArrowLeft",
  "ArrowRight",
  "Home",
  "End",
  "PageUp",
  "PageDown",
];

/**
 * The resolved props/handlers returned by `useSliderControls` to be spread
 * onto the track (`ref` + track handlers) and thumbs (`thumb*Ref`).
 *
 * @internal
 * @remarks \@since 2.5.0
 */
export interface SliderAndRangeSliderControls {
  thumb1Ref: RefCallback<HTMLSpanElement | null>;
  thumb1Value: number;
  thumb1Percentage: string;
  thumb2Ref: RefCallback<HTMLSpanElement | null>;
  thumb2Value?: number;
  thumb2Percentage?: string;
  dragging: boolean;
  draggingIndex: SliderThumbIndex;
  ref: RefCallback<HTMLSpanElement | null>;
  onBlur: FocusEventHandler<HTMLSpanElement>;
  onKeyDown: KeyboardEventHandler<HTMLSpanElement>;
  onMouseDown: MouseEventHandler<HTMLSpanElement>;
  onTouchStart: TouchEventHandler<HTMLSpanElement>;
}

/**
 * This hook provides all the logic for updating the slider's value when the
 * user interacts with the slider.
 *
 * @internal
 * @remarks \@since 2.5.0
 */
export function useSliderControls({
  ref,
  thumb1Ref: propThumb1Ref,
  thumb2Ref: propThumb2Ref,
  min,
  max,
  step,
  disabled = false,
  vertical = false,
  onBlur,
  onKeyDown,
  onMouseDown,
  onTouchStart,
  animationDuration = DEFAULT_SLIDER_ANIMATION_TIME,
  ...controls
}: SliderControlsOptions): SliderAndRangeSliderControls {
  // DOM refs for the track and both thumbs (thumb2 only exists for range
  // sliders).
  const trackRef = useRef<HTMLSpanElement | null>(null);
  const thumb1Ref = useRef<HTMLSpanElement | null>(null);
  const thumb2Ref = useRef<HTMLSpanElement | null>(null);
  const [dragging, setDragging] = useState(false);
  const [dragValue, setDragValue] = useState<number>(min);
  const [draggingBy, setDraggingBy] = useState<SliderDraggingBy>(null);
  const [draggingIndex, setDraggingIndex] = useState<SliderThumbIndex>(null);
  // Keep the latest controls in a ref so the window-level drag handlers do
  // not need to be re-created (and listeners re-attached) on each render.
  const controlsRef = useRef(controls);
  useIsomorphicLayoutEffect(() => {
    controlsRef.current = controls;
  });
  const { dir } = useDir();
  const isRtl = dir === "rtl";

  let thumb1Value: number;
  let thumb1Percentage: string;
  let thumb2Value: number | undefined;
  let thumb2Percentage: string | undefined;
  if (isRangeSlider(controls)) {
    [thumb1Value, thumb2Value] = controls.value;
    ({ thumb1Percentage, thumb2Percentage } = getDragPercentage({
      min,
      max,
      thumb1Value,
      thumb2Value,
      dragging,
      dragValue,
      draggingIndex,
    }));
  } else {
    thumb1Value = controls.value;
    ({ thumb1Percentage } = getDragPercentage({
      min,
      max,
      thumb1Value,
      dragging,
      dragValue,
      draggingIndex,
    }));
  }

  /**
   * The main handler for updating the value of the slider. To help keep the
   * drag experience smooth, some values are stored in refs to prevent the
   * `useEffect` from being run during renders which adds and removes the move
   * event handlers
   */
  const drag = useCallback(
    (event: SliderDragEvent) => {
      const track = trackRef.current;
      const slider1 = thumb1Ref.current;
      const slider2 = thumb2Ref.current;
      const { altKey, ctrlKey, metaKey, shiftKey } = event;
      // Bail out for modified clicks, disabled sliders, missing DOM nodes,
      // non-primary mouse buttons, and multi-touch gestures.
      if (
        altKey ||
        ctrlKey ||
        metaKey ||
        shiftKey ||
        disabled ||
        !track ||
        !slider1 ||
        (isMouseEvent(event) && event.button !== 0) ||
        (isTouchEvent(event) && event.changedTouches.length !== 1) ||
        (!isMouseEvent(event) && !isTouchEvent(event))
      ) {
        return;
      }

      // prevent text from being highlighted while dragging the slider
      // can't do it on touch events due to being passive events in later
      // versions of React
      if (!isTouchEvent(event)) {
        event.preventDefault();
      }
      event.stopPropagation();

      // get the current mouse/touch position to help determine how far the
      // slider is being dragged
      let clientX: number;
      let clientY: number;
      if (isMouseEvent(event)) {
        ({ clientX, clientY } = event);
      } else {
        const touch = event.changedTouches[0];
        ({ clientX, clientY } = touch);
      }

      let index: ThumbIndex = 0;
      let slider: HTMLSpanElement = slider1;
      if (slider2) {
        // if we aren't dragging yet, try to find the slider closest to the
        // mouse/touch position and use that one
        if (draggingIndex === null) {
          const x1 = slider1.getBoundingClientRect().x;
          const x2 = slider2.getBoundingClientRect().x;
          const y1 = slider1.getBoundingClientRect().y;
          const y2 = slider2.getBoundingClientRect().y;
          if (vertical) {
            index = Math.abs(clientY - y1) < Math.abs(clientY - y2) ? 0 : 1;
          } else {
            index = Math.abs(clientX - x1) < Math.abs(clientX - x2) ? 0 : 1;
          }
        } else {
          index = draggingIndex;
        }

        slider = index === 0 ? slider1 : slider2;
      }

      // if we aren't dragging yet, want to focus the slider element to make it
      // easier to switch between mouse dragging and keyboard "dragging"
      if (draggingIndex !== index) {
        slider.focus();
        setDraggingIndex(index);
      }
      setDraggingBy(isMouseEvent(event) ? "mouse" : "touch");
      const { left, top, height, width } = track.getBoundingClientRect();
      const options: SliderDragValues = {
        min,
        max,
        step,
        vertical,
        clientX,
        clientY,
        left,
        top,
        height,
        width,
        isRtl,
        minValue: min,
        maxValue: max,
      };

      const controls = controlsRef.current;
      if (isRangeSlider(controls)) {
        const [thumb1Value, thumb2Value] = controls.value;
        const { value, current } = getDragValue({
          ...options,
          // constrain each thumb by the other thumb (plus one step) so the
          // two thumbs can never cross
          minValue: index === 0 ? min : thumb1Value + step,
          maxValue: index === 1 ? max : thumb2Value - step,
        });
        setDragValue(current);
        controls.setValue(
          index === 0 ? [value, thumb2Value] : [thumb1Value, value]
        );
      } else {
        const { value, current } = getDragValue(options);
        setDragValue(current);
        controls.setValue(value);
      }
    },
    [disabled, isRtl, draggingIndex, max, min, step, vertical]
  );

  // Fired on mouseup/touchend: commit ("persist") the value and clear all
  // drag state.
  const stop = useCallback(() => {
    controlsRef.current.persist();
    setDragging(false);
    setDraggingIndex(null);
    setDraggingBy(null);
  }, []);

  // Attach window-level move/end listeners only while a drag is in progress,
  // and detach them again when the drag mode changes or the hook unmounts.
  useEffect(() => {
    if (draggingBy === null) {
      return;
    }

    if (draggingBy === "mouse") {
      window.addEventListener("mousemove", drag);
      window.addEventListener("mouseup", stop);
    } else {
      window.addEventListener("touchmove", drag);
      window.addEventListener("touchend", stop);
    }

    return () => {
      if (draggingBy === "mouse") {
        window.removeEventListener("mousemove", drag);
        window.removeEventListener("mouseup", stop);
      } else {
        window.removeEventListener("touchmove", drag);
        window.removeEventListener("touchend", stop);
      }
    };
  }, [draggingBy, drag, stop]);

  // Only flip the public `dragging` flag after `animationDuration` has
  // elapsed so that a quick click still animates the thumb into place.
  useEffect(() => {
    if (draggingIndex === null && draggingBy === null) {
      return;
    }

    // I don't know how to reach this flow.. so maybe can be removed?
    /* istanbul ignore if */
    if (draggingIndex === null) {
      setDragging(false);
      return;
    }

    const timeout = window.setTimeout(() => {
      setDragging(true);
    }, animationDuration);

    return () => {
      window.clearTimeout(timeout);
    };
  }, [draggingIndex, draggingBy, animationDuration]);

  // Persist the current value whenever the slider loses keyboard focus.
  const handleBlur = useCallback<FocusEventHandler<HTMLSpanElement>>(
    (event) => {
      if (onBlur) {
        onBlur(event);
      }

      controlsRef.current.persist();
    },
    [onBlur]
  );

  /**
   * Note: this should be attached to the `SliderTrack` component.
   */
  const handleMouseDown = useCallback<MouseEventHandler<HTMLSpanElement>>(
    (event) => {
      if (onMouseDown) {
        onMouseDown(event);
      }

      // only call drag again when the dragging by isn't null since it can cause
      // the "drag" events to be re-started if the mouse appears over the slider
      // thumb again
      if (draggingBy === null) {
        drag(event);
      }
    },
    [drag, draggingBy, onMouseDown]
  );

  /**
   * Note: this should be attached to the `SliderTrack` component.
   */
  const handleTouchStart = useCallback<TouchEventHandler<HTMLSpanElement>>(
    (event) => {
      if (onTouchStart) {
        onTouchStart(event);
      }

      // only call drag again when the dragging by isn't null since it can cause
      // the "drag" events to be re-started if the user's finger appears over
      // the slider thumb again
      if (draggingBy === null) {
        drag(event);
      }
    },
    [drag, draggingBy, onTouchStart]
  );

  /**
   * Note: this should be attached to each `SliderThumb` component.
   */
  const handleKeyDown = useCallback<KeyboardEventHandler<HTMLSpanElement>>(
    (event) => {
      if (onKeyDown) {
        onKeyDown(event);
      }

      const { key, altKey, ctrlKey, metaKey, shiftKey } = event;
      if (
        altKey ||
        ctrlKey ||
        metaKey ||
        shiftKey ||
        disabled ||
        !VALID_KEYS.includes(key)
      ) {
        return;
      }

      let controls: Omit<SliderControls, "setValue" | "value" | "persist">;
      if (isRangeSlider(controlsRef.current)) {
        const {
          increment,
          incrementJump,
          decrement,
          decrementJump,
          minimum,
          maximum,
        } = controlsRef.current;
        // For range sliders, bind each control to the thumb that received
        // the key event so the correct end of the range is updated.
        const index = event.currentTarget === thumb2Ref.current ? 1 : 0;
        controls = {
          increment: increment.bind(null, index),
          incrementJump: incrementJump.bind(null, index),
          decrement: decrement.bind(null, index),
          decrementJump: decrementJump.bind(null, index),
          minimum: minimum.bind(null, index),
          maximum: maximum.bind(null, index),
        };
      } else {
        controls = controlsRef.current;
      }

      const {
        increment,
        incrementJump,
        decrement,
        decrementJump,
        minimum,
        maximum,
      } = controls;

      event.preventDefault();
      event.stopPropagation();
      switch (key) {
        case "ArrowUp":
        case "ArrowRight":
          increment();
          break;
        case "ArrowDown":
        case "ArrowLeft":
          decrement();
          break;
        case "Home":
          minimum();
          break;
        case "End":
          maximum();
          break;
        case "PageUp":
          incrementJump();
          break;
        case "PageDown":
          decrementJump();
          break;
      }
    },
    [onKeyDown, disabled]
  );

  // Merge any forwarded refs with the internal DOM refs.
  const trackRefHandler = useCallback(
    (instance: HTMLSpanElement | null) => {
      applyRef(instance, ref);
      trackRef.current = instance;
    },
    [ref]
  );
  const thumb1RefHandler = useCallback(
    (instance: HTMLSpanElement | null) => {
      applyRef(instance, propThumb1Ref);
      thumb1Ref.current = instance;
    },
    [propThumb1Ref]
  );
  const thumb2RefHandler = useCallback(
    (instance: HTMLSpanElement | null) => {
      applyRef(instance, propThumb2Ref);
      thumb2Ref.current = instance;
    },
    [propThumb2Ref]
  );

  return {
    thumb1Ref: thumb1RefHandler,
    thumb1Value,
    thumb1Percentage,
    thumb2Ref: thumb2RefHandler,
    thumb2Value,
    thumb2Percentage,
    dragging,
    draggingIndex,
    ref: trackRefHandler,
    onBlur: handleBlur,
    onKeyDown: handleKeyDown,
    onMouseDown: handleMouseDown,
    onTouchStart: handleTouchStart,
  };
}
the_stack
import { User } from '../auth/user';
import { SnapshotVersion } from '../core/snapshot_version';
import { OnlineState, TargetId } from '../core/types';
import { LocalStore } from '../local/local_store';
import {
  localStoreGetLastRemoteSnapshotVersion,
  localStoreGetNextMutationBatch
} from '../local/local_store_impl';
import { isIndexedDbTransactionError } from '../local/simple_db';
import { TargetData, TargetPurpose } from '../local/target_data';
import { MutationResult } from '../model/mutation';
import { MutationBatch, MutationBatchResult } from '../model/mutation_batch';
import { debugAssert, debugCast } from '../util/assert';
import { AsyncQueue } from '../util/async_queue';
import { ByteString } from '../util/byte_string';
import { FirestoreError } from '../util/error';
import { logDebug } from '../util/log';
import { BATCHID_UNKNOWN } from '../util/types';
import { ConnectivityMonitor, NetworkStatus } from './connectivity_monitor';
import {
  Datastore,
  newPersistentWatchStream,
  newPersistentWriteStream
} from './datastore';
import { OnlineStateTracker } from './online_state_tracker';
import {
  PersistentListenStream,
  PersistentWriteStream
} from './persistent_stream';
import { RemoteSyncer } from './remote_syncer';
import { isPermanentWriteError } from './rpc_error';
import {
  DocumentWatchChange,
  ExistenceFilterChange,
  WatchChange,
  WatchChangeAggregator,
  WatchTargetChange,
  WatchTargetChangeState
} from './watch_change';

const LOG_TAG = 'RemoteStore';

// TODO(b/35853402): Negotiate this with the stream.
const MAX_PENDING_WRITES = 10;

/** Reasons for why the RemoteStore may be offline. */
const enum OfflineCause {
  /** The user has explicitly disabled the network (via `disableNetwork()`). */
  UserDisabled,
  /** An IndexedDb failure occurred while persisting a stream update. */
  IndexedDbFailed,
  /** The tab is not the primary tab (only relevant with multi-tab). */
  IsSecondary,
  /** We are restarting the streams due to an Auth credential change. */
  CredentialChange,
  /** The connectivity state of the environment has changed. */
  ConnectivityChange,
  /** The RemoteStore has been shut down. */
  Shutdown
}

/**
 * RemoteStore - An interface to remotely stored data, basically providing a
 * wrapper around the Datastore that is more reliable for the rest of the
 * system.
 *
 * RemoteStore is responsible for maintaining the connection to the server.
 * - maintaining a list of active listens.
 * - reconnecting when the connection is dropped.
 * - resuming all the active listens on reconnect.
 *
 * RemoteStore handles all incoming events from the Datastore.
 * - listening to the watch stream and repackaging the events as RemoteEvents
 * - notifying SyncEngine of any changes to the active listens.
 *
 * RemoteStore takes writes from other components and handles them reliably.
 * - pulling pending mutations from LocalStore and sending them to Datastore.
 * - retrying mutations that failed because of network problems.
 * - acking mutations to the SyncEngine once they are accepted or rejected.
 */
export interface RemoteStore {
  /**
   * SyncEngine to notify of watch and write events. This must be set
   * immediately after construction.
   */
  remoteSyncer: RemoteSyncer;
}

class RemoteStoreImpl implements RemoteStore {
  remoteSyncer: RemoteSyncer = {};

  /**
   * A list of up to MAX_PENDING_WRITES writes that we have fetched from the
   * LocalStore via fillWritePipeline() and have or will send to the write
   * stream.
   *
   * Whenever writePipeline.length > 0 the RemoteStore will attempt to start or
   * restart the write stream. When the stream is established the writes in the
   * pipeline will be sent in order.
   *
   * Writes remain in writePipeline until they are acknowledged by the backend
   * and thus will automatically be re-sent if the stream is interrupted /
   * restarted before they're acknowledged.
   *
   * Write responses from the backend are linked to their originating request
   * purely based on order, and so we can just shift() writes from the front of
   * the writePipeline as we receive responses.
   */
  writePipeline: MutationBatch[] = [];

  /**
   * A mapping of watched targets that the client cares about tracking and the
   * user has explicitly called a 'listen' for this target.
   *
   * These targets may or may not have been sent to or acknowledged by the
   * server. On re-establishing the listen stream, these targets should be sent
   * to the server. The targets removed with unlistens are removed eagerly
   * without waiting for confirmation from the listen stream.
   */
  listenTargets = new Map<TargetId, TargetData>();

  connectivityMonitor: ConnectivityMonitor;
  // Streams are created lazily (see onNetworkStatusChange note below), hence
  // the optional fields.
  watchStream?: PersistentListenStream;
  writeStream?: PersistentWriteStream;
  watchChangeAggregator?: WatchChangeAggregator;

  /**
   * A set of reasons for why the RemoteStore may be offline. If empty, the
   * RemoteStore may start its network connections.
   */
  offlineCauses = new Set<OfflineCause>();

  /**
   * Event handlers that get called when the network is disabled or enabled.
   *
   * PORTING NOTE: These functions are used on the Web client to create the
   * underlying streams (to support tree-shakeable streams). On Android and iOS,
   * the streams are created during construction of RemoteStore.
   */
  onNetworkStatusChange: Array<(enabled: boolean) => Promise<void>> = [];

  onlineStateTracker: OnlineStateTracker;

  constructor(
    /**
     * The local store, used to fill the write pipeline with outbound mutations.
     */
    readonly localStore: LocalStore,
    /** The client-side proxy for interacting with the backend. */
    readonly datastore: Datastore,
    readonly asyncQueue: AsyncQueue,
    onlineStateHandler: (onlineState: OnlineState) => void,
    connectivityMonitor: ConnectivityMonitor
  ) {
    this.connectivityMonitor = connectivityMonitor;
    this.connectivityMonitor.addCallback((_: NetworkStatus) => {
      asyncQueue.enqueueAndForget(async () => {
        // Porting Note: Unlike iOS, `restartNetwork()` is called even when the
        // network becomes unreachable as we don't have any other way to tear
        // down our streams.
        if (canUseNetwork(this)) {
          logDebug(
            LOG_TAG,
            'Restarting streams for network reachability change.'
          );
          await restartNetwork(this);
        }
      });
    });

    this.onlineStateTracker = new OnlineStateTracker(
      asyncQueue,
      onlineStateHandler
    );
  }
}

/** Factory for the tree-shakeable RemoteStore implementation. */
export function newRemoteStore(
  localStore: LocalStore,
  datastore: Datastore,
  asyncQueue: AsyncQueue,
  onlineStateHandler: (onlineState: OnlineState) => void,
  connectivityMonitor: ConnectivityMonitor
): RemoteStore {
  return new RemoteStoreImpl(
    localStore,
    datastore,
    asyncQueue,
    onlineStateHandler,
    connectivityMonitor
  );
}

/** Re-enables the network. Idempotent. */
export function remoteStoreEnableNetwork(
  remoteStore: RemoteStore
): Promise<void> {
  const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl);
  remoteStoreImpl.offlineCauses.delete(OfflineCause.UserDisabled);
  return enableNetworkInternal(remoteStoreImpl);
}

async function enableNetworkInternal(
  remoteStoreImpl: RemoteStoreImpl
): Promise<void> {
  // Only notify handlers when no other offline cause is still active.
  if (canUseNetwork(remoteStoreImpl)) {
    for (const networkStatusHandler of remoteStoreImpl.onNetworkStatusChange) {
      await networkStatusHandler(/* enabled= */ true);
    }
  }
}

/**
 * Temporarily disables the network. The network can be re-enabled using
 * enableNetwork().
*/ export async function remoteStoreDisableNetwork( remoteStore: RemoteStore ): Promise<void> { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); remoteStoreImpl.offlineCauses.add(OfflineCause.UserDisabled); await disableNetworkInternal(remoteStoreImpl); // Set the OnlineState to Offline so get()s return from cache, etc. remoteStoreImpl.onlineStateTracker.set(OnlineState.Offline); } async function disableNetworkInternal( remoteStoreImpl: RemoteStoreImpl ): Promise<void> { for (const networkStatusHandler of remoteStoreImpl.onNetworkStatusChange) { await networkStatusHandler(/* enabled= */ false); } } export async function remoteStoreShutdown( remoteStore: RemoteStore ): Promise<void> { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); logDebug(LOG_TAG, 'RemoteStore shutting down.'); remoteStoreImpl.offlineCauses.add(OfflineCause.Shutdown); await disableNetworkInternal(remoteStoreImpl); remoteStoreImpl.connectivityMonitor.shutdown(); // Set the OnlineState to Unknown (rather than Offline) to avoid potentially // triggering spurious listener events with cached data, etc. remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); } /** * Starts new listen for the given target. Uses resume token if provided. It * is a no-op if the target of given `TargetData` is already being listened to. */ export function remoteStoreListen( remoteStore: RemoteStore, targetData: TargetData ): void { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); if (remoteStoreImpl.listenTargets.has(targetData.targetId)) { return; } // Mark this as something the client is currently listening for. remoteStoreImpl.listenTargets.set(targetData.targetId, targetData); if (shouldStartWatchStream(remoteStoreImpl)) { // The listen will be sent in onWatchStreamOpen startWatchStream(remoteStoreImpl); } else if (ensureWatchStream(remoteStoreImpl).isOpen()) { sendWatchRequest(remoteStoreImpl, targetData); } } /** * Removes the listen from server. 
It is a no-op if the given target id is * not being listened to. */ export function remoteStoreUnlisten( remoteStore: RemoteStore, targetId: TargetId ): void { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); const watchStream = ensureWatchStream(remoteStoreImpl); debugAssert( remoteStoreImpl.listenTargets.has(targetId), `unlisten called on target no currently watched: ${targetId}` ); remoteStoreImpl.listenTargets.delete(targetId); if (watchStream.isOpen()) { sendUnwatchRequest(remoteStoreImpl, targetId); } if (remoteStoreImpl.listenTargets.size === 0) { if (watchStream.isOpen()) { watchStream.markIdle(); } else if (canUseNetwork(remoteStoreImpl)) { // Revert to OnlineState.Unknown if the watch stream is not open and we // have no listeners, since without any listens to send we cannot // confirm if the stream is healthy and upgrade to OnlineState.Online. remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); } } } /** * We need to increment the the expected number of pending responses we're due * from watch so we wait for the ack to process any messages from this target. */ function sendWatchRequest( remoteStoreImpl: RemoteStoreImpl, targetData: TargetData ): void { remoteStoreImpl.watchChangeAggregator!.recordPendingTargetRequest( targetData.targetId ); ensureWatchStream(remoteStoreImpl).watch(targetData); } /** * We need to increment the expected number of pending responses we're due * from watch so we wait for the removal on the server before we process any * messages from this target. */ function sendUnwatchRequest( remoteStoreImpl: RemoteStoreImpl, targetId: TargetId ): void { remoteStoreImpl.watchChangeAggregator!.recordPendingTargetRequest(targetId); ensureWatchStream(remoteStoreImpl).unwatch(targetId); } function startWatchStream(remoteStoreImpl: RemoteStoreImpl): void { debugAssert( shouldStartWatchStream(remoteStoreImpl), 'startWatchStream() called when shouldStartWatchStream() is false.' 
); debugAssert( !!remoteStoreImpl.remoteSyncer.getRemoteKeysForTarget, 'getRemoteKeysForTarget() not set' ); remoteStoreImpl.watchChangeAggregator = new WatchChangeAggregator({ getRemoteKeysForTarget: targetId => remoteStoreImpl.remoteSyncer.getRemoteKeysForTarget!(targetId), getTargetDataForTarget: targetId => remoteStoreImpl.listenTargets.get(targetId) || null }); ensureWatchStream(remoteStoreImpl).start(); remoteStoreImpl.onlineStateTracker.handleWatchStreamStart(); } /** * Returns whether the watch stream should be started because it's necessary * and has not yet been started. */ function shouldStartWatchStream(remoteStoreImpl: RemoteStoreImpl): boolean { return ( canUseNetwork(remoteStoreImpl) && !ensureWatchStream(remoteStoreImpl).isStarted() && remoteStoreImpl.listenTargets.size > 0 ); } export function canUseNetwork(remoteStore: RemoteStore): boolean { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); return remoteStoreImpl.offlineCauses.size === 0; } function cleanUpWatchStreamState(remoteStoreImpl: RemoteStoreImpl): void { remoteStoreImpl.watchChangeAggregator = undefined; } async function onWatchStreamOpen( remoteStoreImpl: RemoteStoreImpl ): Promise<void> { remoteStoreImpl.listenTargets.forEach((targetData, targetId) => { sendWatchRequest(remoteStoreImpl, targetData); }); } async function onWatchStreamClose( remoteStoreImpl: RemoteStoreImpl, error?: FirestoreError ): Promise<void> { if (error === undefined) { // Graceful stop (due to stop() or idle timeout). Make sure that's // desirable. debugAssert( !shouldStartWatchStream(remoteStoreImpl), 'Watch stream was stopped gracefully while still needed.' ); } cleanUpWatchStreamState(remoteStoreImpl); // If we still need the watch stream, retry the connection. if (shouldStartWatchStream(remoteStoreImpl)) { remoteStoreImpl.onlineStateTracker.handleWatchStreamFailure(error!); startWatchStream(remoteStoreImpl); } else { // No need to restart watch stream because there are no active targets. 
// The online state is set to unknown because there is no active attempt // at establishing a connection remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); } } async function onWatchStreamChange( remoteStoreImpl: RemoteStoreImpl, watchChange: WatchChange, snapshotVersion: SnapshotVersion ): Promise<void> { // Mark the client as online since we got a message from the server remoteStoreImpl.onlineStateTracker.set(OnlineState.Online); if ( watchChange instanceof WatchTargetChange && watchChange.state === WatchTargetChangeState.Removed && watchChange.cause ) { // There was an error on a target, don't wait for a consistent snapshot // to raise events try { await handleTargetError(remoteStoreImpl, watchChange); } catch (e) { logDebug( LOG_TAG, 'Failed to remove targets %s: %s ', watchChange.targetIds.join(','), e ); await disableNetworkUntilRecovery(remoteStoreImpl, e); } return; } if (watchChange instanceof DocumentWatchChange) { remoteStoreImpl.watchChangeAggregator!.handleDocumentChange(watchChange); } else if (watchChange instanceof ExistenceFilterChange) { remoteStoreImpl.watchChangeAggregator!.handleExistenceFilter(watchChange); } else { debugAssert( watchChange instanceof WatchTargetChange, 'Expected watchChange to be an instance of WatchTargetChange' ); remoteStoreImpl.watchChangeAggregator!.handleTargetChange(watchChange); } if (!snapshotVersion.isEqual(SnapshotVersion.min())) { try { const lastRemoteSnapshotVersion = await localStoreGetLastRemoteSnapshotVersion( remoteStoreImpl.localStore ); if (snapshotVersion.compareTo(lastRemoteSnapshotVersion) >= 0) { // We have received a target change with a global snapshot if the snapshot // version is not equal to SnapshotVersion.min(). 
await raiseWatchSnapshot(remoteStoreImpl, snapshotVersion); } } catch (e) { logDebug(LOG_TAG, 'Failed to raise snapshot:', e); await disableNetworkUntilRecovery(remoteStoreImpl, e); } } } /** * Recovery logic for IndexedDB errors that takes the network offline until * `op` succeeds. Retries are scheduled with backoff using * `enqueueRetryable()`. If `op()` is not provided, IndexedDB access is * validated via a generic operation. * * The returned Promise is resolved once the network is disabled and before * any retry attempt. */ async function disableNetworkUntilRecovery( remoteStoreImpl: RemoteStoreImpl, e: FirestoreError, op?: () => Promise<unknown> ): Promise<void> { if (isIndexedDbTransactionError(e)) { debugAssert( !remoteStoreImpl.offlineCauses.has(OfflineCause.IndexedDbFailed), 'Unexpected network event when IndexedDB was marked failed.' ); remoteStoreImpl.offlineCauses.add(OfflineCause.IndexedDbFailed); // Disable network and raise offline snapshots await disableNetworkInternal(remoteStoreImpl); remoteStoreImpl.onlineStateTracker.set(OnlineState.Offline); if (!op) { // Use a simple read operation to determine if IndexedDB recovered. // Ideally, we would expose a health check directly on SimpleDb, but // RemoteStore only has access to persistence through LocalStore. op = () => localStoreGetLastRemoteSnapshotVersion(remoteStoreImpl.localStore); } // Probe IndexedDB periodically and re-enable network remoteStoreImpl.asyncQueue.enqueueRetryable(async () => { logDebug(LOG_TAG, 'Retrying IndexedDB access'); await op!(); remoteStoreImpl.offlineCauses.delete(OfflineCause.IndexedDbFailed); await enableNetworkInternal(remoteStoreImpl); }); } else { throw e; } } /** * Executes `op`. If `op` fails, takes the network offline until `op` * succeeds. Returns after the first attempt. 
 */
function executeWithRecovery(
  remoteStoreImpl: RemoteStoreImpl,
  op: () => Promise<void>
): Promise<void> {
  // Any failure funnels through the IndexedDB recovery path, which retries
  // `op` with backoff once persistence is healthy again.
  return op().catch(e => disableNetworkUntilRecovery(remoteStoreImpl, e, op));
}

/**
 * Takes a batch of changes from the Datastore, repackages them as a
 * RemoteEvent, and passes that on to the listener, which is typically the
 * SyncEngine.
 */
function raiseWatchSnapshot(
  remoteStoreImpl: RemoteStoreImpl,
  snapshotVersion: SnapshotVersion
): Promise<void> {
  debugAssert(
    !snapshotVersion.isEqual(SnapshotVersion.min()),
    "Can't raise event for unknown SnapshotVersion"
  );
  const remoteEvent =
    remoteStoreImpl.watchChangeAggregator!.createRemoteEvent(snapshotVersion);

  // Update in-memory resume tokens. LocalStore will update the
  // persistent view of these when applying the completed RemoteEvent.
  remoteEvent.targetChanges.forEach((change, targetId) => {
    if (change.resumeToken.approximateByteSize() > 0) {
      const targetData = remoteStoreImpl.listenTargets.get(targetId);
      // A watched target might have been removed already.
      if (targetData) {
        remoteStoreImpl.listenTargets.set(
          targetId,
          targetData.withResumeToken(change.resumeToken, snapshotVersion)
        );
      }
    }
  });

  // Re-establish listens for the targets that have been invalidated by
  // existence filter mismatches.
  remoteEvent.targetMismatches.forEach(targetId => {
    const targetData = remoteStoreImpl.listenTargets.get(targetId);
    if (!targetData) {
      // A watched target might have been removed already.
      return;
    }

    // Clear the resume token for the target, since we're in a known mismatch
    // state.
    remoteStoreImpl.listenTargets.set(
      targetId,
      targetData.withResumeToken(
        ByteString.EMPTY_BYTE_STRING,
        targetData.snapshotVersion
      )
    );

    // Cause a hard reset by unwatching and rewatching immediately, but
    // deliberately don't send a resume token so that we get a full update.
    sendUnwatchRequest(remoteStoreImpl, targetId);

    // Mark the target we send as being on behalf of an existence filter
    // mismatch, but don't actually retain that in listenTargets. This ensures
    // that we flag the first re-listen this way without impacting future
    // listens of this target (that might happen e.g. on reconnect).
    const requestTargetData = new TargetData(
      targetData.target,
      targetId,
      TargetPurpose.ExistenceFilterMismatch,
      targetData.sequenceNumber
    );
    sendWatchRequest(remoteStoreImpl, requestTargetData);
  });

  // Finally raise remote event
  debugAssert(
    !!remoteStoreImpl.remoteSyncer.applyRemoteEvent,
    'applyRemoteEvent() not set'
  );
  return remoteStoreImpl.remoteSyncer.applyRemoteEvent(remoteEvent);
}

/** Handles an error on a target */
async function handleTargetError(
  remoteStoreImpl: RemoteStoreImpl,
  watchChange: WatchTargetChange
): Promise<void> {
  debugAssert(
    !!remoteStoreImpl.remoteSyncer.rejectListen,
    'rejectListen() not set'
  );
  debugAssert(!!watchChange.cause, 'Handling target error without a cause');
  const error = watchChange.cause!;
  for (const targetId of watchChange.targetIds) {
    // A watched target might have been removed already.
    if (remoteStoreImpl.listenTargets.has(targetId)) {
      await remoteStoreImpl.remoteSyncer.rejectListen(targetId, error);
      remoteStoreImpl.listenTargets.delete(targetId);
      remoteStoreImpl.watchChangeAggregator!.removeTarget(targetId);
    }
  }
}

/**
 * Attempts to fill our write pipeline with writes from the LocalStore.
 *
 * Called internally to bootstrap or refill the write pipeline and by
 * SyncEngine whenever there are new mutations to process.
 *
 * Starts the write stream if necessary.
 */
export async function fillWritePipeline(
  remoteStore: RemoteStore
): Promise<void> {
  const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl);
  const writeStream = ensureWriteStream(remoteStoreImpl);

  // Resume fetching after the last batch already in the pipeline, if any.
  let lastBatchIdRetrieved =
    remoteStoreImpl.writePipeline.length > 0
      ? remoteStoreImpl.writePipeline[remoteStoreImpl.writePipeline.length - 1]
          .batchId
      : BATCHID_UNKNOWN;

  while (canAddToWritePipeline(remoteStoreImpl)) {
    try {
      const batch = await localStoreGetNextMutationBatch(
        remoteStoreImpl.localStore,
        lastBatchIdRetrieved
      );

      if (batch === null) {
        // No more pending batches; idle the stream when nothing is in flight.
        if (remoteStoreImpl.writePipeline.length === 0) {
          writeStream.markIdle();
        }
        break;
      } else {
        lastBatchIdRetrieved = batch.batchId;
        addToWritePipeline(remoteStoreImpl, batch);
      }
    } catch (e) {
      await disableNetworkUntilRecovery(remoteStoreImpl, e);
    }
  }

  if (shouldStartWriteStream(remoteStoreImpl)) {
    startWriteStream(remoteStoreImpl);
  }
}

/**
 * Returns true if we can add to the write pipeline (i.e. the network is
 * enabled and the write pipeline is not full).
 */
function canAddToWritePipeline(remoteStoreImpl: RemoteStoreImpl): boolean {
  return (
    canUseNetwork(remoteStoreImpl) &&
    remoteStoreImpl.writePipeline.length < MAX_PENDING_WRITES
  );
}

// For testing
export function outstandingWrites(remoteStore: RemoteStore): number {
  const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl);
  return remoteStoreImpl.writePipeline.length;
}

/**
 * Queues additional writes to be sent to the write stream, sending them
 * immediately if the write stream is established.
*/ function addToWritePipeline( remoteStoreImpl: RemoteStoreImpl, batch: MutationBatch ): void { debugAssert( canAddToWritePipeline(remoteStoreImpl), 'addToWritePipeline called when pipeline is full' ); remoteStoreImpl.writePipeline.push(batch); const writeStream = ensureWriteStream(remoteStoreImpl); if (writeStream.isOpen() && writeStream.handshakeComplete) { writeStream.writeMutations(batch.mutations); } } function shouldStartWriteStream(remoteStoreImpl: RemoteStoreImpl): boolean { return ( canUseNetwork(remoteStoreImpl) && !ensureWriteStream(remoteStoreImpl).isStarted() && remoteStoreImpl.writePipeline.length > 0 ); } function startWriteStream(remoteStoreImpl: RemoteStoreImpl): void { debugAssert( shouldStartWriteStream(remoteStoreImpl), 'startWriteStream() called when shouldStartWriteStream() is false.' ); ensureWriteStream(remoteStoreImpl).start(); } async function onWriteStreamOpen( remoteStoreImpl: RemoteStoreImpl ): Promise<void> { ensureWriteStream(remoteStoreImpl).writeHandshake(); } async function onWriteHandshakeComplete( remoteStoreImpl: RemoteStoreImpl ): Promise<void> { const writeStream = ensureWriteStream(remoteStoreImpl); // Send the write pipeline now that the stream is established. for (const batch of remoteStoreImpl.writePipeline) { writeStream.writeMutations(batch.mutations); } } async function onMutationResult( remoteStoreImpl: RemoteStoreImpl, commitVersion: SnapshotVersion, results: MutationResult[] ): Promise<void> { // This is a response to a write containing mutations and should be // correlated to the first write in our write pipeline. 
debugAssert( remoteStoreImpl.writePipeline.length > 0, 'Got result for empty write pipeline' ); const batch = remoteStoreImpl.writePipeline.shift()!; const success = MutationBatchResult.from(batch, commitVersion, results); debugAssert( !!remoteStoreImpl.remoteSyncer.applySuccessfulWrite, 'applySuccessfulWrite() not set' ); await executeWithRecovery(remoteStoreImpl, () => remoteStoreImpl.remoteSyncer.applySuccessfulWrite!(success) ); // It's possible that with the completion of this mutation another // slot has freed up. await fillWritePipeline(remoteStoreImpl); } async function onWriteStreamClose( remoteStoreImpl: RemoteStoreImpl, error?: FirestoreError ): Promise<void> { if (error === undefined) { // Graceful stop (due to stop() or idle timeout). Make sure that's // desirable. debugAssert( !shouldStartWriteStream(remoteStoreImpl), 'Write stream was stopped gracefully while still needed.' ); } // If the write stream closed after the write handshake completes, a write // operation failed and we fail the pending operation. if (error && ensureWriteStream(remoteStoreImpl).handshakeComplete) { // This error affects the actual write. await handleWriteError(remoteStoreImpl, error!); } // The write stream might have been started by refilling the write // pipeline for failed writes if (shouldStartWriteStream(remoteStoreImpl)) { startWriteStream(remoteStoreImpl); } } async function handleWriteError( remoteStoreImpl: RemoteStoreImpl, error: FirestoreError ): Promise<void> { // Only handle permanent errors here. If it's transient, just let the retry // logic kick in. if (isPermanentWriteError(error.code)) { // This was a permanent error, the request itself was the problem // so it's not going to succeed if we resend it. const batch = remoteStoreImpl.writePipeline.shift()!; // In this case it's also unlikely that the server itself is melting // down -- this was just a bad request so inhibit backoff on the next // restart. 
ensureWriteStream(remoteStoreImpl).inhibitBackoff(); debugAssert( !!remoteStoreImpl.remoteSyncer.rejectFailedWrite, 'rejectFailedWrite() not set' ); await executeWithRecovery(remoteStoreImpl, () => remoteStoreImpl.remoteSyncer.rejectFailedWrite!(batch.batchId, error) ); // It's possible that with the completion of this mutation // another slot has freed up. await fillWritePipeline(remoteStoreImpl); } else { // Transient error, just let the retry logic kick in. } } async function restartNetwork(remoteStore: RemoteStore): Promise<void> { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); remoteStoreImpl.offlineCauses.add(OfflineCause.ConnectivityChange); await disableNetworkInternal(remoteStoreImpl); remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); remoteStoreImpl.offlineCauses.delete(OfflineCause.ConnectivityChange); await enableNetworkInternal(remoteStoreImpl); } export async function remoteStoreHandleCredentialChange( remoteStore: RemoteStore, user: User ): Promise<void> { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); remoteStoreImpl.asyncQueue.verifyOperationInProgress(); debugAssert( !!remoteStoreImpl.remoteSyncer.handleCredentialChange, 'handleCredentialChange() not set' ); logDebug(LOG_TAG, 'RemoteStore received new credentials'); const usesNetwork = canUseNetwork(remoteStoreImpl); // Tear down and re-create our network streams. This will ensure we get a // fresh auth token for the new user and re-fill the write pipeline with // new mutations from the LocalStore (since mutations are per-user). remoteStoreImpl.offlineCauses.add(OfflineCause.CredentialChange); await disableNetworkInternal(remoteStoreImpl); if (usesNetwork) { // Don't set the network status to Unknown if we are offline. 
remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); } await remoteStoreImpl.remoteSyncer.handleCredentialChange(user); remoteStoreImpl.offlineCauses.delete(OfflineCause.CredentialChange); await enableNetworkInternal(remoteStoreImpl); } /** * Toggles the network state when the client gains or loses its primary lease. */ export async function remoteStoreApplyPrimaryState( remoteStore: RemoteStore, isPrimary: boolean ): Promise<void> { const remoteStoreImpl = debugCast(remoteStore, RemoteStoreImpl); if (isPrimary) { remoteStoreImpl.offlineCauses.delete(OfflineCause.IsSecondary); await enableNetworkInternal(remoteStoreImpl); } else if (!isPrimary) { remoteStoreImpl.offlineCauses.add(OfflineCause.IsSecondary); await disableNetworkInternal(remoteStoreImpl); remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); } } /** * If not yet initialized, registers the WatchStream and its network state * callback with `remoteStoreImpl`. Returns the existing stream if one is * already available. * * PORTING NOTE: On iOS and Android, the WatchStream gets registered on startup. * This is not done on Web to allow it to be tree-shaken. */ function ensureWatchStream( remoteStoreImpl: RemoteStoreImpl ): PersistentListenStream { if (!remoteStoreImpl.watchStream) { // Create stream (but note that it is not started yet). 
remoteStoreImpl.watchStream = newPersistentWatchStream( remoteStoreImpl.datastore, remoteStoreImpl.asyncQueue, { onOpen: onWatchStreamOpen.bind(null, remoteStoreImpl), onClose: onWatchStreamClose.bind(null, remoteStoreImpl), onWatchChange: onWatchStreamChange.bind(null, remoteStoreImpl) } ); remoteStoreImpl.onNetworkStatusChange.push(async enabled => { if (enabled) { remoteStoreImpl.watchStream!.inhibitBackoff(); if (shouldStartWatchStream(remoteStoreImpl)) { startWatchStream(remoteStoreImpl); } else { remoteStoreImpl.onlineStateTracker.set(OnlineState.Unknown); } } else { await remoteStoreImpl.watchStream!.stop(); cleanUpWatchStreamState(remoteStoreImpl); } }); } return remoteStoreImpl.watchStream; } /** * If not yet initialized, registers the WriteStream and its network state * callback with `remoteStoreImpl`. Returns the existing stream if one is * already available. * * PORTING NOTE: On iOS and Android, the WriteStream gets registered on startup. * This is not done on Web to allow it to be tree-shaken. */ function ensureWriteStream( remoteStoreImpl: RemoteStoreImpl ): PersistentWriteStream { if (!remoteStoreImpl.writeStream) { debugAssert( remoteStoreImpl.writePipeline.length === 0, 'Should not issue writes before WriteStream is enabled' ); // Create stream (but note that it is not started yet). remoteStoreImpl.writeStream = newPersistentWriteStream( remoteStoreImpl.datastore, remoteStoreImpl.asyncQueue, { onOpen: onWriteStreamOpen.bind(null, remoteStoreImpl), onClose: onWriteStreamClose.bind(null, remoteStoreImpl), onHandshakeComplete: onWriteHandshakeComplete.bind( null, remoteStoreImpl ), onMutationResult: onMutationResult.bind(null, remoteStoreImpl) } ); remoteStoreImpl.onNetworkStatusChange.push(async enabled => { if (enabled) { remoteStoreImpl.writeStream!.inhibitBackoff(); // This will start the write stream if necessary. 
await fillWritePipeline(remoteStoreImpl); } else { await remoteStoreImpl.writeStream!.stop(); if (remoteStoreImpl.writePipeline.length > 0) { logDebug( LOG_TAG, `Stopping write stream with ${remoteStoreImpl.writePipeline.length} pending writes` ); remoteStoreImpl.writePipeline = []; } } }); } return remoteStoreImpl.writeStream; }
the_stack
* @module Toolbar */ import * as React from "react"; import { Logger } from "@itwin/core-bentley"; import { ActionButton, CommonToolbarItem, ConditionalBooleanValue, GroupButton, ToolbarItemsManager, ToolbarItemUtilities, ToolbarOrientation, ToolbarUsage, } from "@itwin/appui-abstract"; import { Orientation } from "@itwin/core-react"; import { ToolbarItem, ToolbarOpacitySetting, ToolbarWithOverflow } from "@itwin/components-react"; import { Direction, Toolbar, ToolbarPanelAlignment } from "@itwin/appui-layout-react"; import { FrontstageManager, ToolActivatedEventArgs } from "../frontstage/FrontstageManager"; import { useFrameworkVersion } from "../hooks/useFrameworkVersion"; import { SyncUiEventArgs, SyncUiEventDispatcher } from "../syncui/SyncUiEventDispatcher"; import { UiFramework } from "../UiFramework"; import { UiShowHideManager } from "../utils/UiShowHideManager"; import { ToolbarDragInteractionContext } from "./DragInteraction"; import { ToolbarHelper } from "./ToolbarHelper"; import { useDefaultToolbarItems } from "./useDefaultToolbarItems"; import { useUiItemsProviderToolbarItems } from "./useUiItemsProviderToolbarItems"; /** Private function to set up sync event monitoring of toolbar items */ function useToolbarItemSyncEffect(uiDataProvider: ToolbarItemsManager, syncIdsOfInterest: string[]) { React.useEffect(() => { const handleSyncUiEvent = (args: SyncUiEventArgs) => { if (0 === syncIdsOfInterest.length) return; // istanbul ignore else if (syncIdsOfInterest.some((value: string): boolean => args.eventIds.has(value.toLowerCase()))) { // process each item that has interest uiDataProvider.refreshAffectedItems(args.eventIds); } }; SyncUiEventDispatcher.onSyncUiEvent.addListener(handleSyncUiEvent); return () => { SyncUiEventDispatcher.onSyncUiEvent.removeListener(handleSyncUiEvent); }; }, [uiDataProvider, syncIdsOfInterest, uiDataProvider.items]); React.useEffect(() => { const handleToolActivatedEvent = ({ toolId }: ToolActivatedEventArgs) => { 
uiDataProvider.setActiveToolId(toolId); }; FrontstageManager.onToolActivatedEvent.addListener(handleToolActivatedEvent); return () => { FrontstageManager.onToolActivatedEvent.removeListener(handleToolActivatedEvent); }; }, [uiDataProvider, uiDataProvider.items]); } function nestedAddItemToSpecifiedParentGroup(items: ReadonlyArray<ActionButton | GroupButton>, groupChildren: Array<ActionButton | GroupButton>): Array<ActionButton | GroupButton> { const outItems: Array<ActionButton | GroupButton> = []; for (const toolbarItem of items) { if (!ToolbarItemUtilities.isGroupButton(toolbarItem)) { outItems.push(toolbarItem); continue; } const newChildren: Array<ActionButton | GroupButton> = nestedAddItemToSpecifiedParentGroup(toolbarItem.items, groupChildren); const foundIndices: number[] = []; groupChildren.forEach((entry, index) => { if (entry.parentToolGroupId === toolbarItem.id) { foundIndices.push(index); } }); // istanbul ignore else if (foundIndices.length) { // process in reverse order so groupChildren can be reduced as we find matches foundIndices.sort( // istanbul ignore next (a, b) => a - b, ).reverse().forEach((foundIndex) => { newChildren.push(groupChildren[foundIndex]); groupChildren.splice(foundIndex); }); } outItems.push({ ...toolbarItem, items: newChildren }); } return outItems; } function addItemToSpecifiedParentGroup(items: readonly CommonToolbarItem[], groupChildren: Array<ActionButton | GroupButton>): CommonToolbarItem[] { const outItems: CommonToolbarItem[] = []; for (const toolbarItem of items) { if (!ToolbarItemUtilities.isGroupButton(toolbarItem)) { outItems.push(toolbarItem); continue; } const newChildren: Array<ActionButton | GroupButton> = nestedAddItemToSpecifiedParentGroup(toolbarItem.items, groupChildren); const foundIndices: number[] = []; groupChildren.forEach((entry, index) => { if (entry.parentToolGroupId === toolbarItem.id) { foundIndices.push(index); } }); // istanbul ignore else if (foundIndices.length) { // process in reverse order so 
groupChildren can be reduced as we find matches foundIndices.sort( // istanbul ignore next (a, b) => a - b, ).reverse().forEach((foundIndex) => { newChildren.push(groupChildren[foundIndex]); groupChildren.splice(foundIndex); }); } outItems.push({ ...toolbarItem, items: newChildren }); } return outItems; } function cloneGroup(inGroup: GroupButton): GroupButton { const childItems: Array<ActionButton | GroupButton> = []; inGroup.items.forEach((item) => { if (ToolbarItemUtilities.isGroupButton(item)) childItems.push(cloneGroup(item)); else childItems.push(item); }); const clonedGroup = { ...inGroup, items: childItems }; return clonedGroup; } function getItemSortValue(item: ToolbarItem) { const groupValue = undefined === item.groupPriority ? 0 : /* istanbul ignore next */ item.groupPriority; return groupValue * 10000 + item.itemPriority; } function getSortedChildren(group: GroupButton): ReadonlyArray<ActionButton | GroupButton> { const sortedChildren = group.items .filter((item) => !(ConditionalBooleanValue.getValue(item.isHidden))) .sort((a, b) => getItemSortValue(a) - getItemSortValue(b)) .map((i) => { if (ToolbarItemUtilities.isGroupButton(i)) { return { ...i, items: getSortedChildren(i) }; } return i; }); return sortedChildren; } /** local function to combine items from Stage and from Extensions */ function combineItems(defaultItems: ReadonlyArray<CommonToolbarItem>, addonItems: ReadonlyArray<CommonToolbarItem>) { let items: CommonToolbarItem[] = []; const groupChildren: Array<ActionButton | GroupButton> = []; // istanbul ignore else if (defaultItems.length) { defaultItems.forEach((srcItem: CommonToolbarItem) => { // if the default item is a group that an addon may insert into copy it so we don't mess with original const toolbarItem = ToolbarItemUtilities.isGroupButton(srcItem) ? 
cloneGroup(srcItem) : srcItem; if (toolbarItem.parentToolGroupId && (ToolbarItemUtilities.isGroupButton(toolbarItem) || ToolbarItemUtilities.isActionButton(toolbarItem))) groupChildren.push(toolbarItem); else items.push(toolbarItem); }); } // istanbul ignore else if (addonItems.length) { addonItems.forEach((srcItem: CommonToolbarItem) => { // if the default item is a group that an addon may insert into copy it so we don't mess with original const toolbarItem = ToolbarItemUtilities.isGroupButton(srcItem) ? cloneGroup(srcItem) : srcItem; if (toolbarItem.parentToolGroupId && (ToolbarItemUtilities.isGroupButton(toolbarItem) || ToolbarItemUtilities.isActionButton(toolbarItem))) groupChildren.push(toolbarItem); else items.push(toolbarItem); }); } // if an item from an addon has specified a parent group then try to find it and insert it. If no parent is found, add item at root level. if (groupChildren.length) { items = addItemToSpecifiedParentGroup(items, groupChildren); if (groupChildren.length) { groupChildren.forEach((toolbarItem: ActionButton | GroupButton) => { Logger.logWarning("ToolbarComposer", `Requested Parent Group [${toolbarItem.parentToolGroupId!}] not found, so item [${toolbarItem.id}] is added directly to toolbar.`); items.push(toolbarItem); }); } } const availableItems = items .filter((item) => !(ConditionalBooleanValue.getValue(item.isHidden))) .sort((a, b) => getItemSortValue(a) - getItemSortValue(b)) .map((i) => { if (ToolbarItemUtilities.isGroupButton(i)) { return { ...i, items: getSortedChildren(i) }; } return i; }); return availableItems; } const useProximityOpacitySetting = () => { const [proximityOpacity, setProximityOpacity] = React.useState(UiShowHideManager.useProximityOpacity); React.useEffect(() => { // istanbul ignore next const handleUiVisibilityChanged = () => { setProximityOpacity(UiShowHideManager.useProximityOpacity); }; UiFramework.onUiVisibilityChanged.addListener(handleUiVisibilityChanged); return () => { 
UiFramework.onUiVisibilityChanged.removeListener(handleUiVisibilityChanged); }; }, []); return proximityOpacity; }; /** Properties for the [[ToolbarComposer]] React components * @public */ export interface ExtensibleToolbarProps { items: CommonToolbarItem[]; usage: ToolbarUsage; /** Toolbar orientation. */ orientation: ToolbarOrientation; } /** * Toolbar that is populated and maintained by item managers. * @public */ export function ToolbarComposer(props: ExtensibleToolbarProps) { const { usage, orientation } = props; const [defaultItemsManager] = React.useState(() => new ToolbarItemsManager(props.items)); const isInitialMount = React.useRef(true); React.useEffect(() => { if (isInitialMount.current) isInitialMount.current = false; else { defaultItemsManager.items = props.items; } }, [props.items]); // eslint-disable-line react-hooks/exhaustive-deps // process default items const defaultItems = useDefaultToolbarItems(defaultItemsManager); const syncIdsOfInterest = React.useMemo(() => ToolbarItemsManager.getSyncIdsOfInterest(defaultItems), [defaultItems]); useToolbarItemSyncEffect(defaultItemsManager, syncIdsOfInterest); // process items from addon UI providers const [addonItemsManager] = React.useState(() => new ToolbarItemsManager()); const addonItems = useUiItemsProviderToolbarItems(addonItemsManager, usage, orientation); const addonSyncIdsOfInterest = React.useMemo(() => ToolbarItemsManager.getSyncIdsOfInterest(addonItems), [addonItems]); useToolbarItemSyncEffect(addonItemsManager, addonSyncIdsOfInterest); const toolbarItems = React.useMemo(() => combineItems(defaultItems, addonItems), [defaultItems, addonItems]); const toolbarOrientation = orientation === ToolbarOrientation.Horizontal ? Orientation.Horizontal : Orientation.Vertical; const expandsTo = toolbarOrientation === Orientation.Horizontal ? Direction.Bottom : usage === ToolbarUsage.ViewNavigation ? 
Direction.Left : Direction.Right; const panelAlignment = (toolbarOrientation === Orientation.Horizontal && usage === ToolbarUsage.ViewNavigation) ? ToolbarPanelAlignment.End : ToolbarPanelAlignment.Start; const version = useFrameworkVersion(); const isDragEnabled = React.useContext(ToolbarDragInteractionContext); const useProximityOpacity = useProximityOpacitySetting(); if ("1" === version) { return ( <ToolbarUi1 items={toolbarItems} expandsTo={expandsTo} panelAlignment={panelAlignment} /> ); } return <ToolbarWithOverflow expandsTo={expandsTo} panelAlignment={panelAlignment} items={toolbarItems} useDragInteraction={isDragEnabled} toolbarOpacitySetting={useProximityOpacity && !UiFramework.isMobile() ? ToolbarOpacitySetting.Proximity : /* istanbul ignore next */ ToolbarOpacitySetting.Defaults} />; } interface ToolbarUi1Props { items: CommonToolbarItem[]; expandsTo: Direction; panelAlignment: ToolbarPanelAlignment; } /** Toolbar rendered in 1.0 mode. * @internal */ const ToolbarUi1 = React.memo<ToolbarUi1Props>(function ToolbarUi1({ items, expandsTo, panelAlignment, }) { const createReactNodes = (): React.ReactNode => { if (0 === items.length) return null; const createdNodes = items.map((item: CommonToolbarItem) => { return ToolbarHelper.createNodeForToolbarItem(item); }); return createdNodes; }; return <Toolbar expandsTo={expandsTo} panelAlignment={panelAlignment} items={ <> {createReactNodes()} </> } />; });
the_stack
import {connect} from 'react-redux';
import {ActionCreatorsMapObject, bindActionCreators, Dispatch} from 'redux';

import {GlobalState} from 'types/store/index.js';
import {Post} from 'mattermost-redux/types/posts.js';
import {FileInfo} from 'mattermost-redux/types/files.js';
import {ActionResult, GenericAction} from 'mattermost-redux/types/actions.js';
import {CommandArgs} from 'mattermost-redux/types/integrations.js';
import {PostDraft} from 'types/store/rhs.js';
import {ModalData} from 'types/actions.js';
import {getConfig, getLicense} from 'mattermost-redux/selectors/entities/general';
import {getCurrentTeamId} from 'mattermost-redux/selectors/entities/teams';
import {getCurrentChannel, getCurrentChannelStats, getChannelMemberCountsByGroup as selectChannelMemberCountsByGroup} from 'mattermost-redux/selectors/entities/channels';
import {getCurrentUserId, getStatusForUserId, getUser} from 'mattermost-redux/selectors/entities/users';
import {haveICurrentChannelPermission} from 'mattermost-redux/selectors/entities/roles';
import {getChannelTimezones, getChannelMemberCountsByGroup} from 'mattermost-redux/actions/channels';
import {get, getInt, getBool, getPrewrittenMessagesTreatment} from 'mattermost-redux/selectors/entities/preferences';
import {PreferenceType} from 'mattermost-redux/types/preferences';
import {savePreferences} from 'mattermost-redux/actions/preferences';
import {
    getCurrentUsersLatestPost,
    getLatestReplyablePostId,
    makeGetMessageInHistoryItem,
} from 'mattermost-redux/selectors/entities/posts';
import {getAssociatedGroupsForReferenceByMention} from 'mattermost-redux/selectors/entities/groups';
import {
    addMessageIntoHistory,
    moveHistoryIndexBack,
    moveHistoryIndexForward,
    removeReaction,
} from 'mattermost-redux/actions/posts';
import {Permissions, Posts, Preferences as PreferencesRedux} from 'mattermost-redux/constants';
import {connectionErrorCount} from 'selectors/views/system';
import {addReaction, createPost, setEditingPost, emitShortcutReactToLastPostFrom} from 'actions/post_actions.jsx';
import {scrollPostListToBottom} from 'actions/views/channel';
import {selectPostFromRightHandSideSearchByPostId} from 'actions/views/rhs';
import {setShowPreviewOnCreatePost} from 'actions/views/textbox';
import {executeCommand} from 'actions/command';
import {runMessageWillBePostedHooks, runSlashCommandWillBePostedHooks} from 'actions/hooks';
import {getPostDraft, getIsRhsExpanded} from 'selectors/rhs';
import {showPreviewOnCreatePost} from 'selectors/views/textbox';
import {getCurrentLocale} from 'selectors/i18n';
import {getEmojiMap, getShortcutReactToLastPostEmittedFrom} from 'selectors/emojis';
import {setGlobalItem, actionOnGlobalItemsWithPrefix} from 'actions/storage';
import {openModal, closeModal} from 'actions/views/modals';
import {Constants, Preferences, StoragePrefixes, TutorialSteps, UserStatuses} from 'utils/constants';
import {canUploadFiles} from 'utils/file_utils';
import {PrewrittenMessagesTreatments} from 'mattermost-redux/constants/config';

import CreatePost from './create_post';

/**
 * Builds the mapStateToProps function for the CreatePost container.
 * Uses the factory form so makeGetMessageInHistoryItem gets a memoized
 * selector instance per connected component.
 */
function makeMapStateToProps() {
    const getMessageInHistoryItem = makeGetMessageInHistoryItem(Posts.MESSAGE_TYPES.POST as any);

    return (state: GlobalState) => {
        const config = getConfig(state);
        const license = getLicense(state);
        const currentChannel = getCurrentChannel(state) || {};
        const currentChannelTeammateUsername = getUser(state, currentChannel.teammate_id || '')?.username;
        const draft = getPostDraft(state, StoragePrefixes.DRAFT, currentChannel.id);
        const latestReplyablePostId = getLatestReplyablePostId(state);

        // Hoisted: getCurrentChannelStats() and getCurrentUserId() were each
        // called twice per invocation; the selectors are pure, so caching the
        // result locally is behavior-identical.
        const currentChannelStats = getCurrentChannelStats(state);
        const currentChannelMembersCount = currentChannelStats ? currentChannelStats.member_count : 1;
        const currentUserId = getCurrentUserId(state);

        const tutorialStep = getInt(state, Preferences.TUTORIAL_STEP, currentUserId, TutorialSteps.FINISHED);
        const enableEmojiPicker = config.EnableEmojiPicker === 'true';
        const enableGifPicker = config.EnableGifPicker === 'true';
        const enableConfirmNotificationsToChannel = config.EnableConfirmNotificationsToChannel === 'true';
        const userIsOutOfOffice = getStatusForUserId(state, currentUserId) === UserStatuses.OUT_OF_OFFICE;
        const badConnection = connectionErrorCount(state) > 1;
        const isTimezoneEnabled = config.ExperimentalTimezone === 'true';
        const shortcutReactToLastPostEmittedFrom = getShortcutReactToLastPostEmittedFrom(state);
        const canPost = haveICurrentChannelPermission(state, Permissions.CREATE_POST);
        const useChannelMentions = haveICurrentChannelPermission(state, Permissions.USE_CHANNEL_MENTIONS);
        const isLDAPEnabled = license?.IsLicensed === 'true' && license?.LDAPGroups === 'true';
        const useGroupMentions = isLDAPEnabled && haveICurrentChannelPermission(state, Permissions.USE_GROUP_MENTIONS);
        const channelMemberCountsByGroup = selectChannelMemberCountsByGroup(state, currentChannel.id);
        const currentTeamId = getCurrentTeamId(state);
        const groupsWithAllowReference = useGroupMentions ? getAssociatedGroupsForReferenceByMention(state, currentTeamId, currentChannel.id) : null;
        const prewrittenMessages = getPrewrittenMessagesTreatment(state);
        const enableTutorial = config.EnableTutorial === 'true';
        const showTutorialTip = enableTutorial && tutorialStep === TutorialSteps.POST_POPOVER && prewrittenMessages !== PrewrittenMessagesTreatments.AROUND_INPUT;

        return {
            currentTeamId,
            currentChannel,
            currentChannelTeammateUsername,
            currentChannelMembersCount,
            currentUserId,

            // NOTE(review): codeBlockOnCtrlEnter reads the category constant from
            // mattermost-redux (PreferencesRedux) while ctrlSend uses the webapp's
            // Preferences — presumably both resolve to the same string; confirm.
            codeBlockOnCtrlEnter: getBool(state, PreferencesRedux.CATEGORY_ADVANCED_SETTINGS, 'code_block_ctrl_enter', true),
            ctrlSend: getBool(state, Preferences.CATEGORY_ADVANCED_SETTINGS, 'send_on_ctrl_enter'),
            fullWidthTextBox: get(state, Preferences.CATEGORY_DISPLAY_SETTINGS, Preferences.CHANNEL_DISPLAY_MODE, Preferences.CHANNEL_DISPLAY_MODE_DEFAULT) === Preferences.CHANNEL_DISPLAY_MODE_FULL_SCREEN,
            showTutorialTip,
            messageInHistoryItem: getMessageInHistoryItem(state),
            draft,
            latestReplyablePostId,
            locale: getCurrentLocale(state),
            currentUsersLatestPost: getCurrentUsersLatestPost(state, ''),
            canUploadFiles: canUploadFiles(config),
            enableEmojiPicker,
            enableGifPicker,
            enableConfirmNotificationsToChannel,
            maxPostSize: parseInt(config.MaxPostSize || '', 10) || Constants.DEFAULT_CHARACTER_LIMIT,
            userIsOutOfOffice,
            rhsExpanded: getIsRhsExpanded(state),
            emojiMap: getEmojiMap(state),
            badConnection,
            isTimezoneEnabled,
            shortcutReactToLastPostEmittedFrom,
            canPost,
            useChannelMentions,
            shouldShowPreview: showPreviewOnCreatePost(state),
            groupsWithAllowReference,
            useGroupMentions,
            channelMemberCountsByGroup,
            isLDAPEnabled,
            prewrittenMessages,
            tutorialStep,
        };
    };
}

/** Thunk wrapper so the container can submit a post with its attachments. */
function onSubmitPost(post: Post, fileInfos: FileInfo[]) {
    return (dispatch: Dispatch<GenericAction>) => {
        dispatch(createPost(post, fileInfos) as any);
    };
}

type Actions = {
    setShowPreview: (showPreview: boolean) => void;
    addMessageIntoHistory: (message: string) => void;
    moveHistoryIndexBack: (index: string) => Promise<void>;
    moveHistoryIndexForward: (index: string) => Promise<void>;
    addReaction: (postId: string, emojiName: string) => void;
    onSubmitPost: (post: Post, fileInfos: FileInfo[]) => void;
    removeReaction: (postId: string, emojiName: string) => void;
    clearDraftUploads: () => void;
    runMessageWillBePostedHooks: (originalPost: Post) => ActionResult;
    runSlashCommandWillBePostedHooks: (originalMessage: string, originalArgs: CommandArgs) => ActionResult;
    setDraft: (name: string, value: PostDraft | null) => void;
    setEditingPost: (postId?: string, refocusId?: string, title?: string, isRHS?: boolean) => void;
    selectPostFromRightHandSideSearchByPostId: (postId: string) => void;
    openModal: (modalData: ModalData) => void;
    closeModal: (modalId: string) => void;
    executeCommand: (message: string, args: CommandArgs) => ActionResult;
    getChannelTimezones: (channelId: string) => ActionResult;
    scrollPostListToBottom: () => void;
    emitShortcutReactToLastPostFrom: (emittedFrom: string) => void;
    getChannelMemberCountsByGroup: (channelId: string, includeTimezones: boolean) => void;
    savePreferences: (userId: string, preferences: PreferenceType[]) => ActionResult;
}

// Temporarily store draft manually in localStorage since the current version of redux-persist
// we're on will not save the draft quickly enough on page unload.
/**
 * Persists a draft synchronously to localStorage (so it survives page unload,
 * which redux-persist is too slow for) and mirrors it into global storage.
 *
 * @param key storage key (typically StoragePrefixes.DRAFT + channel id)
 * @param value the draft to save, or null to clear the stored draft
 *
 * The parameter is widened to `PostDraft | null`: the Actions map in this
 * file declares `setDraft: (name: string, value: PostDraft | null) => void`,
 * and the falsy branch below exists precisely to clear the stored draft —
 * with the old `PostDraft` type that branch was unreachable per the types.
 */
function setDraft(key: string, value: PostDraft | null) {
    if (value) {
        localStorage.setItem(key, JSON.stringify(value));
    } else {
        localStorage.removeItem(key);
    }

    return setGlobalItem(key, value);
}

/**
 * Returns an action that strips uploadsInProgress from every stored draft,
 * since in-flight uploads cannot survive a page reload.
 */
function clearDraftUploads() {
    return actionOnGlobalItemsWithPrefix(StoragePrefixes.DRAFT, (_key: string, draft: PostDraft) => {
        if (!draft || !draft.uploadsInProgress || draft.uploadsInProgress.length === 0) {
            return draft;
        }

        return {...draft, uploadsInProgress: []};
    });
}

/** Binds every action creator the CreatePost component dispatches. */
function mapDispatchToProps(dispatch: Dispatch<GenericAction>) {
    return {
        actions: bindActionCreators<ActionCreatorsMapObject<any>, Actions>({
            addMessageIntoHistory,
            onSubmitPost,
            moveHistoryIndexBack,
            moveHistoryIndexForward,
            addReaction,
            removeReaction,
            setDraft,
            clearDraftUploads,
            selectPostFromRightHandSideSearchByPostId,
            setEditingPost,
            emitShortcutReactToLastPostFrom,
            openModal,
            closeModal,
            executeCommand,
            getChannelTimezones,
            runMessageWillBePostedHooks,
            runSlashCommandWillBePostedHooks,
            scrollPostListToBottom,
            setShowPreview: setShowPreviewOnCreatePost,
            getChannelMemberCountsByGroup,
            savePreferences,
        }, dispatch),
    };
}

export default connect(makeMapStateToProps, mapDispatchToProps)(CreatePost);
the_stack
import type { JsonSQLite, JsonTable, JsonColumn, JsonIndex, JsonTrigger, JsonView, } from '../../../../src/definitions'; import { UtilsSQLite } from '../utilsSQLite'; import { UtilsJson } from './utilsJson'; export class ExportToJson { private _uSQLite: UtilsSQLite = new UtilsSQLite(); private _uJson: UtilsJson = new UtilsJson(); /** * CreateExportObject * @param mDB * @param sqlObj */ public async createExportObject( mDB: any, sqlObj: JsonSQLite, ): Promise<JsonSQLite> { const retObj: JsonSQLite = {} as JsonSQLite; let tables: JsonTable[] = []; let views: JsonView[] = []; let errmsg = ''; try { // get View's name views = await this.getViewsName(mDB); // get Table's name const resTables: any[] = await this.getTablesNameSQL(mDB); if (resTables.length === 0) { return Promise.reject("createExportObject: table's names failed"); } else { switch (sqlObj.mode) { case 'partial': { tables = await this.getTablesPartial(mDB, resTables); break; } case 'full': { tables = await this.getTablesFull(mDB, resTables); break; } default: { errmsg = 'createExportObject: expMode ' + sqlObj.mode + ' not defined'; break; } } if (errmsg.length > 0) { return Promise.reject(errmsg); } if (tables.length > 0) { retObj.database = sqlObj.database; retObj.version = sqlObj.version; retObj.encrypted = sqlObj.encrypted; retObj.mode = sqlObj.mode; retObj.tables = tables; if (views.length > 0) { retObj.views = views; } } return Promise.resolve(retObj); } } catch (err) { return Promise.reject('createExportObject: ' + err); } } /** * GetTablesNameSQL * @param mDb */ public async getTablesNameSQL(mDb: any): Promise<any[]> { let sql = 'SELECT name,sql FROM sqlite_master WHERE '; sql += "type='table' AND name NOT LIKE 'sync_table' "; sql += "AND name NOT LIKE '_temp_%' "; sql += "AND name NOT LIKE 'sqlite_%';"; let retQuery: any[] = []; try { retQuery = await this._uSQLite.queryAll(mDb, sql, []); return Promise.resolve(retQuery); } catch (err) { return Promise.reject(`getTablesNameSQL: ${err}`); } } /** * 
GetViewsNameSQL * @param mDb */ public async getViewsName(mDb: any): Promise<JsonView[]> { const views: JsonView[] = []; let sql = 'SELECT name,sql FROM sqlite_master WHERE '; sql += "type='view' AND name NOT LIKE 'sqlite_%';"; let retQuery: any[] = []; try { retQuery = await this._uSQLite.queryAll(mDb, sql, []); for (const query of retQuery) { const view: JsonView = {} as JsonView; view.name = query.name; view.value = query.sql.substring(query.sql.indexOf('AS ') + 3); views.push(view); } return Promise.resolve(views); } catch (err) { return Promise.reject(`getViewsName: ${err}`); } } /** * GetSyncDate * @param mDb */ public async getSyncDate(mDb: any): Promise<number> { return new Promise((resolve, reject) => { let retDate = -1; // get the last sync date const stmt = `SELECT sync_date FROM sync_table;`; mDb.get(stmt, [], (err: Error, row: any) => { // process the row here if (err) { reject(`GetSyncDate: ${err.message}`); } else { if (row != null) { const key: any = Object.keys(row)[0]; retDate = row[key]; resolve(retDate); } else { reject(`GetSyncDate: no syncDate`); } } }); }); } /** * GetTablesFull * @param mDb * @param resTables */ private async getTablesFull( mDb: any, resTables: any[], ): Promise<JsonTable[]> { const tables: JsonTable[] = []; let errmsg = ''; try { // Loop through the tables for (const rTable of resTables) { let tableName: string; let sqlStmt: string; if (rTable.name) { tableName = rTable.name; } else { errmsg = 'GetTablesFull: no name'; break; } if (rTable.sql) { sqlStmt = rTable.sql; } else { errmsg = 'GetTablesFull: no sql'; break; } const table: JsonTable = {} as JsonTable; // create Table's Schema const schema: JsonColumn[] = await this.getSchema(sqlStmt); if (schema.length === 0) { errmsg = 'GetTablesFull: no Schema returned'; break; } // check schema validity await this._uJson.checkSchemaValidity(schema); // create Table's indexes if any const indexes: JsonIndex[] = await this.getIndexes(mDb, tableName); if (indexes.length > 0) { // 
check indexes validity await this._uJson.checkIndexesValidity(indexes); } // create Table's triggers if any const triggers: JsonTrigger[] = await this.getTriggers(mDb, tableName); if (triggers.length > 0) { // check triggers validity await this._uJson.checkTriggersValidity(triggers); } // create Table's Data const query = `SELECT * FROM ${tableName};`; const values: any[] = await this.getValues(mDb, query, tableName); table.name = tableName; if (schema.length > 0) { table.schema = schema; } else { errmsg = `GetTablesFull: must contain schema`; break; } if (indexes.length > 0) { table.indexes = indexes; } if (triggers.length > 0) { table.triggers = triggers; } if (values.length > 0) { table.values = values; } if (Object.keys(table).length <= 1) { errmsg = `GetTablesFull: table ${tableName} is not a jsonTable`; break; } tables.push(table); } if (errmsg.length > 0) { return Promise.reject(errmsg); } return Promise.resolve(tables); } catch (err) { return Promise.reject(`GetTablesFull: ${err}`); } } /** * GetSchema * @param mDb * @param sqlStmt * @param tableName */ private async getSchema( sqlStmt: string /*,tableName: string,*/, ): Promise<JsonColumn[]> { const schema: JsonColumn[] = []; // take the substring between parenthesis const openPar: number = sqlStmt.indexOf('('); const closePar: number = sqlStmt.lastIndexOf(')'); let sstr: string = sqlStmt.substring(openPar + 1, closePar); // check if there is other parenthesis and replace the ',' by '§' try { sstr = await this.modEmbeddedParentheses(sstr); const sch: string[] = sstr.split(','); // for each element of the array split the // first word as key for (const sc of sch) { const row: string[] = []; const scht: string = sc.trim(); row[0] = scht.substring(0, scht.indexOf(' ')); row[1] = scht.substring(scht.indexOf(' ') + 1); const jsonRow: JsonColumn = {} as JsonColumn; if (row[0].toUpperCase() === 'FOREIGN') { const oPar: number = sc.indexOf('('); const cPar: number = sc.indexOf(')'); row[0] = sc.substring(oPar + 1, 
cPar); row[1] = sc.substring(cPar + 2); jsonRow['foreignkey'] = row[0]; } else if (row[0].toUpperCase() === 'CONSTRAINT') { const tRow: string[] = []; const row1t: string = row[1].trim(); tRow[0] = row1t.substring(0, row1t.indexOf(' ')); tRow[1] = row1t.substring(row1t.indexOf(' ') + 1); row[0] = tRow[0]; jsonRow['constraint'] = row[0]; row[1] = tRow[1]; } else { jsonRow['column'] = row[0]; } jsonRow['value'] = row[1].replace(/§/g, ','); schema.push(jsonRow); } return Promise.resolve(schema); } catch (err) { return Promise.reject(err); } } /** * GetIndexes * @param mDb * @param sqlStmt * @param tableName */ private async getIndexes(mDb: any, tableName: string): Promise<JsonIndex[]> { const indexes: JsonIndex[] = []; let errmsg = ''; try { let stmt = 'SELECT name,tbl_name,sql FROM sqlite_master WHERE '; stmt += `type = 'index' AND tbl_name = '${tableName}' `; stmt += `AND sql NOTNULL;`; const retIndexes = await this._uSQLite.queryAll(mDb, stmt, []); if (retIndexes.length > 0) { for (const rIndex of retIndexes) { const keys: string[] = Object.keys(rIndex); if (keys.length === 3) { if (rIndex['tbl_name'] === tableName) { const sql: string = rIndex['sql']; const mode: string = sql.includes('UNIQUE') ? 
'UNIQUE' : ''; const oPar: number = sql.lastIndexOf('('); const cPar: number = sql.lastIndexOf(')'); const index: JsonIndex = {} as JsonIndex; index.name = rIndex['name']; index.value = sql.slice(oPar + 1, cPar); if (mode.length > 0) index.mode = mode; indexes.push(index); } else { errmsg = `GetIndexes: Table ${tableName} doesn't match`; break; } } else { errmsg = `GetIndexes: Table ${tableName} creating indexes`; break; } } if (errmsg.length > 0) { return Promise.reject(errmsg); } } return Promise.resolve(indexes); } catch (err) { return Promise.reject(`GetIndexes: ${err}`); } } /** * GetTriggers * @param mDb * @param sqlStmt * @param tableName */ private async getTriggers( mDb: any, tableName: string, ): Promise<JsonTrigger[]> { const triggers: JsonTrigger[] = []; try { let stmt = 'SELECT name,tbl_name,sql FROM sqlite_master WHERE '; stmt += `type = 'trigger' AND tbl_name = '${tableName}' `; stmt += `AND sql NOT NULL;`; const retTriggers = await this._uSQLite.queryAll(mDb, stmt, []); if (retTriggers.length > 0) { for (const rTrg of retTriggers) { const keys: string[] = Object.keys(rTrg); if (keys.length === 3) { if (rTrg['tbl_name'] === tableName) { const sql: string = rTrg['sql']; const name: string = rTrg['name']; let sqlArr: string[] = sql.split(name); if (sqlArr.length != 2) { return Promise.reject( `GetTriggers: sql split name does not return 2 values`, ); } if (!sqlArr[1].includes(tableName)) { return Promise.reject( `GetTriggers: sql split does not contains ${tableName}`, ); } const timeEvent = sqlArr[1].split(tableName, 1)[0].trim(); sqlArr = sqlArr[1].split(timeEvent + ' ' + tableName); if (sqlArr.length != 2) { return Promise.reject( `GetTriggers: sql split tableName does not return 2 values`, ); } let condition = ''; let logic = ''; if (sqlArr[1].trim().substring(0, 5).toUpperCase() !== 'BEGIN') { sqlArr = sqlArr[1].trim().split('BEGIN'); if (sqlArr.length != 2) { return Promise.reject( `GetTriggers: sql split BEGIN does not return 2 values`, ); } 
condition = sqlArr[0].trim(); logic = 'BEGIN' + sqlArr[1]; } else { logic = sqlArr[1].trim(); } const trigger: JsonTrigger = {} as JsonTrigger; trigger.name = name; trigger.logic = logic; if (condition.length > 0) trigger.condition = condition; trigger.timeevent = timeEvent; triggers.push(trigger); } else { return Promise.reject( `GetTriggers: Table ${tableName} doesn't match`, ); } } else { return Promise.reject( `GetTriggers: Table ${tableName} creating indexes`, ); } } } return Promise.resolve(triggers); } catch (err) { return Promise.reject(`GetTriggers: ${err}`); } } /** * GetValues * @param mDb * @param query * @param tableName */ private async getValues( mDb: any, query: string, tableName: string, ): Promise<any[]> { const values: any[] = []; try { // get table column names and types const tableNamesTypes = await this._uJson.getTableColumnNamesTypes( mDb, tableName, ); let rowNames: string[] = []; if (Object.keys(tableNamesTypes).includes('names')) { rowNames = tableNamesTypes.names; } else { return Promise.reject(`GetValues: Table ${tableName} no names`); } const retValues = await this._uSQLite.queryAll(mDb, query, []); for (const rValue of retValues) { const row: any[] = []; for (const rName of rowNames) { if (Object.keys(rValue).includes(rName)) { row.push(rValue[rName]); } else { row.push('NULL'); } } values.push(row); } return Promise.resolve(values); } catch (err) { return Promise.reject(`GetValues: ${err}`); } } /** * GetTablesPartial * @param mDb * @param resTables */ private async getTablesPartial( mDb: any, resTables: any[], ): Promise<JsonTable[]> { const tables: JsonTable[] = []; let modTables: any = {}; let syncDate = 0; let modTablesKeys: string[] = []; let errmsg = ''; try { // Get the syncDate and the Modified Tables const partialModeData: any = await this.getPartialModeData( mDb, resTables, ); if (Object.keys(partialModeData).includes('syncDate')) { syncDate = partialModeData.syncDate; } if 
(Object.keys(partialModeData).includes('modTables')) { modTables = partialModeData.modTables; modTablesKeys = Object.keys(modTables); } // Loop trough tables for (const rTable of resTables) { let tableName = ''; let sqlStmt = ''; if (rTable.name) { tableName = rTable.name; } else { errmsg = 'GetTablesFull: no name'; break; } if (rTable.sql) { sqlStmt = rTable.sql; } else { errmsg = 'GetTablesFull: no sql'; break; } if ( modTablesKeys.length == 0 || modTablesKeys.indexOf(tableName) === -1 || modTables[tableName] == 'No' ) { continue; } const table: JsonTable = {} as JsonTable; let schema: JsonColumn[] = []; let indexes: JsonIndex[] = []; let triggers: JsonTrigger[] = []; table.name = rTable; if (modTables[table.name] === 'Create') { // create Table's Schema schema = await this.getSchema(sqlStmt); if (schema.length > 0) { // check schema validity await this._uJson.checkSchemaValidity(schema); } // create Table's indexes if any indexes = await this.getIndexes(mDb, tableName); if (indexes.length > 0) { // check indexes validity await this._uJson.checkIndexesValidity(indexes); } // create Table's triggers if any triggers = await this.getTriggers(mDb, tableName); if (triggers.length > 0) { // check triggers validity await this._uJson.checkTriggersValidity(triggers); } } // create Table's Data let query = ''; if (modTables[tableName] === 'Create') { query = `SELECT * FROM ${tableName};`; } else { query = `SELECT * FROM ${tableName} ` + `WHERE last_modified > ${syncDate};`; } const values: any[] = await this.getValues(mDb, query, tableName); // check the table object validity table.name = tableName; if (schema.length > 0) { table.schema = schema; } if (indexes.length > 0) { table.indexes = indexes; } if (triggers.length > 0) { table.triggers = triggers; } if (values.length > 0) { table.values = values; } if (Object.keys(table).length <= 1) { errmsg = `GetTablesPartial: table ${tableName} is not a jsonTable`; break; } tables.push(table); } if (errmsg.length > 0) { return 
Promise.reject(errmsg); } return Promise.resolve(tables); } catch (err) { return Promise.reject(`GetTablesPartial: ${err}`); } } /** * GetPartialModeData * @param mDb * @param resTables */ private async getPartialModeData(mDb: any, resTables: any[]): Promise<any> { const retData: any = {}; try { // get the synchronization date const syncDate: number = await this.getSyncDate(mDb); if (syncDate <= 0) { return Promise.reject(`GetPartialModeData: no syncDate`); } // get the tables which have been updated // since last synchronization const modTables: any = await this.getTablesModified( mDb, resTables, syncDate, ); if (modTables.length <= 0) { return Promise.reject(`GetPartialModeData: no modTables`); } retData.syncDate = syncDate; retData.modTables = modTables; return Promise.resolve(retData); } catch (err) { return Promise.reject(`GetPartialModeData: ${err}`); } } private async getTablesModified( db: any, tables: any[], syncDate: number, ): Promise<any> { let errmsg = ''; try { const retModified: any = {}; for (const rTable of tables) { let mode: string; // get total count of the table let stmt = 'SELECT count(*) AS tcount '; stmt += `FROM ${rTable.name};`; let retQuery: any[] = await this._uSQLite.queryAll(db, stmt, []); if (retQuery.length != 1) { errmsg = 'GetTableModified: total ' + 'count not returned'; break; } const totalCount: number = retQuery[0]['tcount']; // get total count of modified since last sync stmt = 'SELECT count(*) AS mcount FROM '; stmt += `${rTable.name} WHERE last_modified > `; stmt += `${syncDate};`; retQuery = await this._uSQLite.queryAll(db, stmt, []); if (retQuery.length != 1) break; const totalModifiedCount: number = retQuery[0]['mcount']; if (totalModifiedCount === 0) { mode = 'No'; } else if (totalCount === totalModifiedCount) { mode = 'Create'; } else { mode = 'Modified'; } const key: string = rTable.name; retModified[key] = mode; } if (errmsg.length > 0) { return Promise.reject(errmsg); } return Promise.resolve(retModified); } catch 
(err) { return Promise.reject(`GetTableModified: ${err}`); } } private async modEmbeddedParentheses(sstr: string): Promise<string> { const oParArray: number[] = this.indexOfChar(sstr, '('); const cParArray: number[] = this.indexOfChar(sstr, ')'); if (oParArray.length != cParArray.length) { return Promise.reject( "ModEmbeddedParentheses: Not same number of '(' & ')'", ); } if (oParArray.length === 0) { return Promise.resolve(sstr); } let resStmt = sstr.substring(0, oParArray[0] - 1); for (let i = 0; i < oParArray.length; i++) { let str: string; if (i < oParArray.length - 1) { if (oParArray[i + 1] < cParArray[i]) { str = sstr.substring(oParArray[i] - 1, cParArray[i + 1]); i++; } else { str = sstr.substring(oParArray[i] - 1, cParArray[i]); } } else { str = sstr.substring(oParArray[i] - 1, cParArray[i]); } const newS = str.replace(/,/g, '§'); resStmt += newS; if (i < oParArray.length - 1) { resStmt += sstr.substring(cParArray[i], oParArray[i + 1] - 1); } } resStmt += sstr.substring(cParArray[cParArray.length - 1], sstr.length); return Promise.resolve(resStmt); } private indexOfChar(str: string, char: string): number[] { const tmpArr: string[] = [...str]; char = char.toLowerCase(); return tmpArr.reduce( (results: number[], elem: string, idx: number) => elem.toLowerCase() === char ? [...results, idx] : results, [], ); } }
the_stack
import {Client4} from '@client/rest'; import {getUser} from '@components/autocomplete/slash_suggestion/app_command_parser/app_command_parser_dependencies'; import {ChannelCategoryTypes, ChannelTypes} from '@mm-redux/action_types'; import {General} from '@mm-redux/constants'; import {CategoryTypes} from '@mm-redux/constants/channel_categories'; import {getAllCategoriesByIds, getCategory, getCategoryIdsForTeam, getCategoryInTeamByType, getCategoryInTeamWithChannel} from '@mm-redux/selectors/entities/channel_categories'; import {getCurrentUserId} from '@mm-redux/selectors/entities/common'; import {getUser as selectUser, getUserIdsInChannels} from '@mm-redux/selectors/entities/users'; import {ActionFunc, batchActions, DispatchFunc, GetStateFunc} from '@mm-redux/types/actions'; import {CategorySorting, ChannelCategory, OrderedChannelCategories} from '@mm-redux/types/channel_categories'; import {Channel} from '@mm-redux/types/channels'; import {UserProfile} from '@mm-redux/types/users'; import {$ID, RelationOneToMany} from '@mm-redux/types/utilities'; import {insertMultipleWithoutDuplicates, insertWithoutDuplicates, removeItem} from '@mm-redux/utils/array_utils'; import {getUserIdFromChannelName} from '@mm-redux/utils/channel_utils'; import {favoriteChannel, getChannelMembersByIds, unfavoriteChannel} from './channels'; import {logError} from './errors'; import {forceLogoutIfNecessary} from './helpers'; export function expandCategory(categoryId: string) { return setCategoryCollapsed(categoryId, false); } export function collapseCategory(categoryId: string) { return setCategoryCollapsed(categoryId, true); } export function setCategoryCollapsed(categoryId: string, collapsed: boolean): ActionFunc { return async (dispatch: DispatchFunc, getState: GetStateFunc) => { const state = getState(); const category = getCategory(state, categoryId); const patchedCategory = { ...category, collapsed, }; dispatch({ type: ChannelCategoryTypes.RECEIVED_CATEGORY, data: patchedCategory, }); 
return {data: patchedCategory}; }; } export function setCategorySorting(categoryId: string, sorting: CategorySorting) { return patchCategory(categoryId, { sorting, }); } export function patchCategory(categoryId: string, patch: Partial<ChannelCategory>): ActionFunc { return async (dispatch: DispatchFunc, getState: GetStateFunc) => { const state = getState(); const currentUserId = getCurrentUserId(state); const category = getCategory(state, categoryId); const patchedCategory = { ...category, ...patch, }; dispatch({ type: ChannelCategoryTypes.RECEIVED_CATEGORY, data: patchedCategory, }); try { Client4.updateChannelCategory(currentUserId, category.team_id, patchedCategory); } catch (error) { dispatch({ type: ChannelCategoryTypes.RECEIVED_CATEGORY, data: category, }); forceLogoutIfNecessary(error, dispatch, getState); dispatch(logError(error)); return {error}; } return {data: patchedCategory}; }; } export function setCategoryMuted(categoryId: string, muted: boolean) { return async (dispatch: DispatchFunc, getState: GetStateFunc) => { const state = getState(); const category = getCategory(state, categoryId); const result = await dispatch(updateCategory({ ...category, muted, })); if ('error' in result) { return result; } const updated = result.data as ChannelCategory; return dispatch(batchActions([ { type: ChannelCategoryTypes.RECEIVED_CATEGORY, data: updated, }, ...(updated.channel_ids.map((channelId) => ({ type: ChannelTypes.SET_CHANNEL_MUTED, data: { channelId, muted, }, }))), ])); }; } function updateCategory(category: ChannelCategory) { return async (dispatch: DispatchFunc, getState: GetStateFunc) => { const state = getState(); const currentUserId = getCurrentUserId(state); let updatedCategory; try { updatedCategory = await Client4.updateChannelCategory(currentUserId, category.team_id, category); } catch (error) { forceLogoutIfNecessary(error, dispatch, getState); dispatch(logError(error)); return {error}; } // The updated category will be added to the state after 
receiving the corresponding websocket event. return {data: updatedCategory}; }; } export function fetchMyCategories(teamId: string) { return async (dispatch: DispatchFunc, getState: GetStateFunc) => { const currentUserId = getCurrentUserId(getState()); let data: OrderedChannelCategories; try { data = await Client4.getChannelCategories(currentUserId, teamId); } catch (error) { forceLogoutIfNecessary(error, dispatch, getState); dispatch(logError(error)); return {error}; } // Remove collapse state from server data data.categories = data.categories.map((cat) => { delete cat.collapsed; return cat; }); return dispatch(batchActions([ { type: ChannelCategoryTypes.RECEIVED_CATEGORIES, data: data.categories, }, { type: ChannelCategoryTypes.RECEIVED_CATEGORY_ORDER, data: { teamId, order: data.order, }, }, ])); }; } // addChannelToInitialCategory returns an action that can be dispatched to add a newly-joined or newly-created channel // to its either the Channels or Direct Messages category based on the type of channel. New DM and GM channels are // added to the Direct Messages category on each team. // // Unless setOnServer is true, this only affects the categories on this client. If it is set to true, this updates // categories on the server too. 
export function addChannelToInitialCategory(channel: Channel, setOnServer = false): ActionFunc {
    return async (dispatch: DispatchFunc, getState: GetStateFunc) => {
        const state = getState();
        const categories = Object.values(getAllCategoriesByIds(state));

        if (channel.type === General.DM_CHANNEL || channel.type === General.GM_CHANNEL) {
            const currentUserId = getCurrentUserId(state);

            if (channel.type === General.DM_CHANNEL) {
                // Make sure the DM partner's profile is loaded
                const otherUserId = getUserIdFromChannelName(currentUserId, channel.name);
                const otherUser = selectUser(state, otherUserId);
                if (!otherUser) {
                    dispatch(getUser(otherUserId));
                }
            }

            if (channel.type === General.GM_CHANNEL) {
                // Get the user ids in the channel
                const allUsersInChannels: RelationOneToMany<Channel, UserProfile> = getUserIdsInChannels(state);
                const allUsersInGMChannel = Array.from(allUsersInChannels[channel.id] || []);
                const usersInGMChannel: string[] = allUsersInGMChannel.filter((u: string) => u !== currentUserId);

                // Filter and see if there are any missing in our state
                const missingUsers = usersInGMChannel.filter((id) => {
                    if (selectUser(state, id)) {
                        return false;
                    }
                    return true;
                });

                // Fetch them if there are missing members
                if (missingUsers.length) {
                    dispatch(getChannelMembersByIds(channel.id, missingUsers));
                }
            }

            // DM/GM channels go into the Direct Messages category on every team
            const allDmCategories = categories.filter((category) => category.type === CategoryTypes.DIRECT_MESSAGES);

            // Get all the categories in which channel exists
            const channelInCategories = categories.filter((category) => {
                return category.channel_ids.findIndex((channelId) => channelId === channel.id) !== -1;
            });

            // Skip DM categories where channel already exists in a different category
            const dmCategories = allDmCategories.filter((dmCategory) => {
                return channelInCategories.findIndex((category) => dmCategory.team_id === category.team_id) === -1;
            });

            // Insert the channel at position 0 of each remaining DM category
            const data = dmCategories.map((category) => ({
                ...category,
                channel_ids: insertWithoutDuplicates(category.channel_ids, channel.id, 0),
            }));

            return dispatch({
                type: ChannelCategoryTypes.RECEIVED_CATEGORIES,
                data,
            });
        }

        // Add the new channel to the Channels category on the channel's team

        // Nothing to do if the channel is already categorized somewhere
        if (categories.some((category) => category.channel_ids.some((channelId) => channelId === channel.id))) {
            return {data: false};
        }

        const channelsCategory = getCategoryInTeamByType(state, channel.team_id, CategoryTypes.CHANNELS);

        if (!channelsCategory) {
            // No categories were found for this team, so the categories for this team haven't been loaded yet.
            // The channel will have been added to the category by the server, so we'll get it once the categories
            // are actually loaded.
            return {data: false};
        }

        if (setOnServer) {
            // Persist the move server-side as well
            return dispatch(addChannelToCategory(channelsCategory.id, channel.id));
        }

        return dispatch({
            type: ChannelCategoryTypes.RECEIVED_CATEGORY,
            data: {
                ...channelsCategory,
                channel_ids: insertWithoutDuplicates(channelsCategory.channel_ids, channel.id, 0),
            },
        });
    };
}

// addChannelToCategory returns an action that can be dispatched to add a channel to a given category without specifying
// its order. The channel will be removed from its previous category (if any) on the given category's team and it will be
// placed first in its new category.
export function addChannelToCategory(categoryId: string, channelId: string): ActionFunc {
    // Delegates to moveChannelToCategory with index 0 and without forcing
    // the category's sorting to manual.
    return moveChannelToCategory(categoryId, channelId, 0, false);
}

// moveChannelToCategory returns an action that moves a channel into a category and puts it at the given index at the
// category. The channel will also be removed from its previous category (if any) on that category's team. The category's
// order will also be set to manual by default.
export function moveChannelToCategory(categoryId: string, channelId: string, newIndex: number, setManualSorting = true) {
    return async (dispatch: DispatchFunc, getState: GetStateFunc) => {
        const state = getState();
        const targetCategory = getCategory(state, categoryId);
        const currentUserId = getCurrentUserId(state);

        // The default sorting needs to behave like alphabetical sorting until the point that the user rearranges their
        // channels at which point, it becomes manual. Other than that, we never change the sorting method automatically.
        let sorting = targetCategory.sorting;
        if (setManualSorting &&
            targetCategory.type !== CategoryTypes.DIRECT_MESSAGES &&
            targetCategory.sorting === CategorySorting.Default) {
            sorting = CategorySorting.Manual;
        }

        // Add the channel to the new category
        const categories = [{
            ...targetCategory,
            sorting,
            channel_ids: insertWithoutDuplicates(targetCategory.channel_ids, channelId, newIndex),
        }];

        // And remove it from the old category
        const sourceCategory = getCategoryInTeamWithChannel(getState(), targetCategory.team_id, channelId);
        if (sourceCategory && sourceCategory.id !== targetCategory.id) {
            categories.push({
                ...sourceCategory,
                channel_ids: removeItem(sourceCategory.channel_ids, channelId),
            });
        }

        // Optimistically apply the move locally before persisting
        const result = dispatch({
            type: ChannelCategoryTypes.RECEIVED_CATEGORIES,
            data: categories,
        });

        try {
            await Client4.updateChannelCategories(currentUserId, targetCategory.team_id, categories);
        } catch (error) {
            forceLogoutIfNecessary(error, dispatch, getState);
            dispatch(logError(error));

            // Roll back to the pre-move categories on failure
            const originalCategories = [targetCategory];
            if (sourceCategory && sourceCategory.id !== targetCategory.id) {
                originalCategories.push(sourceCategory);
            }

            dispatch({
                type: ChannelCategoryTypes.RECEIVED_CATEGORIES,
                data: originalCategories,
            });
            return {error};
        }

        // Update the favorite preferences locally on the client in case we have any logic relying on that
        if (targetCategory.type === CategoryTypes.FAVORITES) {
            await dispatch(favoriteChannel(channelId, false));
        } else if (sourceCategory && sourceCategory.type === CategoryTypes.FAVORITES) {
            await dispatch(unfavoriteChannel(channelId, false));
        }

        return result;
    };
}

// Bulk variant of moveChannelToCategory: moves several channels into a
// category at newIndex, removing each from its previous category.
export function moveChannelsToCategory(categoryId: string, channelIds: string[], newIndex: number, setManualSorting = true) {
    return async (dispatch: DispatchFunc, getState: GetStateFunc) => {
        const state = getState();
        const targetCategory = getCategory(state, categoryId);
        const currentUserId = getCurrentUserId(state);

        // The default sorting needs to behave like alphabetical sorting until the point that the user rearranges their
        // channels at which point, it becomes manual. Other than that, we never change the sorting method automatically.
        let sorting = targetCategory.sorting;
        if (setManualSorting &&
            targetCategory.type !== CategoryTypes.DIRECT_MESSAGES &&
            targetCategory.sorting === CategorySorting.Default) {
            sorting = CategorySorting.Manual;
        }

        // Add the channels to the new category
        let categories = {
            [targetCategory.id]: {
                ...targetCategory,
                sorting,
                channel_ids: insertMultipleWithoutDuplicates(targetCategory.channel_ids, channelIds, newIndex),
            },
        };

        // Needed if we have to revert categories and for checking for favourites
        let unmodifiedCategories = {[targetCategory.id]: targetCategory};

        // Maps channelId -> id of the category it came from
        let sourceCategories: Record<string, string> = {};

        // And remove it from the old categories
        channelIds.forEach((channelId) => {
            const sourceCategory = getCategoryInTeamWithChannel(getState(), targetCategory.team_id, channelId);
            if (sourceCategory && sourceCategory.id !== targetCategory.id) {
                unmodifiedCategories = {
                    ...unmodifiedCategories,
                    [sourceCategory.id]: sourceCategory,
                };
                sourceCategories = {...sourceCategories, [channelId]: sourceCategory.id};
                categories = {
                    ...categories,
                    [sourceCategory.id]: {
                        ...(categories[sourceCategory.id] || sourceCategory),
                        channel_ids: removeItem((categories[sourceCategory.id] || sourceCategory).channel_ids, channelId),
                    },
                };
            }
        });

        const categoriesArray = Object.values(categories).reduce((allCategories: ChannelCategory[], category) => {
            allCategories.push(category);
            return allCategories;
        }, []);

        // Optimistically apply the moves locally before persisting
        const result = dispatch({
            type: ChannelCategoryTypes.RECEIVED_CATEGORIES,
            data: categoriesArray,
        });

        try {
            await Client4.updateChannelCategories(currentUserId, targetCategory.team_id, categoriesArray);
        } catch (error) {
            forceLogoutIfNecessary(error, dispatch, getState);
            dispatch(logError(error));

            // Roll back every touched category on failure
            const originalCategories = Object.values(unmodifiedCategories).reduce((allCategories: ChannelCategory[], category) => {
                allCategories.push(category);
                return allCategories;
            }, []);

            dispatch({
                type: ChannelCategoryTypes.RECEIVED_CATEGORIES,
                data: originalCategories,
            });
            return {error};
        }

        // Update the favorite preferences locally on the client in case we have any logic relying on that
        await Promise.all(channelIds.map(async (channelId) => {
            const sourceCategory = unmodifiedCategories[sourceCategories[channelId]];
            if (targetCategory.type === CategoryTypes.FAVORITES) {
                await dispatch(favoriteChannel(channelId, false));
            } else if (sourceCategory && sourceCategory.type === CategoryTypes.FAVORITES) {
                await dispatch(unfavoriteChannel(channelId, false));
            }
        }));

        return result;
    };
}

// Moves a whole category to newIndex in the team's category order,
// optimistically updating the local order and reverting on failure.
export function moveCategory(teamId: string, categoryId: string, newIndex: number) {
    return async (dispatch: DispatchFunc, getState: GetStateFunc) => {
        const state = getState();
        const order = getCategoryIdsForTeam(state, teamId)!;
        const currentUserId = getCurrentUserId(state);

        const newOrder = insertWithoutDuplicates(order, categoryId, newIndex);

        // Optimistically update the category order
        const result = dispatch({
            type: ChannelCategoryTypes.RECEIVED_CATEGORY_ORDER,
            data: {
                teamId,
                order: newOrder,
            },
        });

        try {
            await Client4.updateChannelCategoryOrder(currentUserId, teamId, newOrder);
        } catch (error) {
            forceLogoutIfNecessary(error, dispatch, getState);
            dispatch(logError(error));

            // Restore original order
            dispatch({
                type: ChannelCategoryTypes.RECEIVED_CATEGORY_ORDER,
                data: {
                    teamId,
                    order,
                },
            });
            return {error};
        }

        return result;
    };
}

// Plain action creator: records a team's category order in the store.
export function receivedCategoryOrder(teamId: string, order: string[]) {
    return {
        type: ChannelCategoryTypes.RECEIVED_CATEGORY_ORDER,
        data: {
            teamId,
            order,
        },
    };
}

// Creates a category on the server; local state is updated later via the
// corresponding websocket event rather than dispatched here.
export function createCategory(teamId: string, displayName: string, channelIds: Array<$ID<Channel>> = []): ActionFunc {
    return async (dispatch: DispatchFunc, getState: GetStateFunc) => {
        const currentUserId = getCurrentUserId(getState());

        let newCategory;
        try {
            newCategory = await Client4.createChannelCategory(currentUserId, teamId, {
                team_id: teamId,
                user_id: currentUserId,
                display_name: displayName,
                channel_ids: channelIds,
            });
        } catch (error) {
            forceLogoutIfNecessary(error, dispatch, getState);
            dispatch(logError(error));
            return {error};
        }

        // The new category will be added to the state after receiving the corresponding websocket event.
        return {data: newCategory};
    };
}

export function renameCategory(categoryId: string, displayName: string): ActionFunc {
    return patchCategory(categoryId, {
        display_name: displayName,
    });
}

// Deletes a category on the server; removal from local state happens via
// the corresponding websocket event rather than being dispatched here.
export function deleteCategory(categoryId: string): ActionFunc {
    return async (dispatch: DispatchFunc, getState: GetStateFunc) => {
        const state = getState();
        const category = getCategory(state, categoryId);
        const currentUserId = getCurrentUserId(state);

        try {
            await Client4.deleteChannelCategory(currentUserId, category.team_id, category.id);
        } catch (error) {
            forceLogoutIfNecessary(error, dispatch, getState);
            dispatch(logError(error));
            return {error};
        }

        // The category will be deleted from the state after receiving the corresponding websocket event.
        return {data: true};
    };
}
the_stack
* @module Topology */ import { ConvexClipPlaneSet } from "../clipping/ConvexClipPlaneSet"; import { LineSegment3d } from "../curve/LineSegment3d"; import { LineString3d } from "../curve/LineString3d"; import { Geometry, PlaneAltitudeEvaluator } from "../Geometry"; import { GrowableXYZArray } from "../geometry3d/GrowableXYZArray"; import { Point3d, Vector3d } from "../geometry3d/Point3dVector3d"; import { HalfEdge, HalfEdgeGraph, HalfEdgeMask } from "./Graph"; import { HalfEdgeGraphOps } from "./Merging"; /** * interface containing various options appropriate to merging lines segments into chains. * @internal */ export interface ChainMergeContextOptions { /** * Tolerance for declaring points equal * * Default is `Geometry.smallMetricDistance` */ tolerance?: number; /** * Direction for primary sort. * * Default is `Vector3d.create(0.294234298, 0.72391399, 0.45234328798)` * * this vector should NOT be along a principal x,y,z direction. * * The internal form will be normalized. */ primarySortDirection?: Vector3d; } /** Internal form of ChainMergeContextOptions -- same field names, but required to have contents. */ class ChainMergeContextValidatedOptions { /** * Tolerance for declaring points equal. */ public tolerance: number; /** UNNORMALIZED base vector for sorting. * * Actual vector hoisted into an instance is normalized. */ private static readonly _defaultPrimarySortDirection = Vector3d.create(0.294234298, 0.72391399, 0.45234328798); /** * Direction for primary sort. This is normalized !!! */ public primarySortDirection: Vector3d; /** return a unit vector aligned with optional direction. 
* * Default return is into the first quadrant at a quirky angle so any perpendicular plane is unlikely to hit many points */ public static createPrimarySortVector(vector?: Vector3d): Vector3d { if (vector === undefined) { vector = this._defaultPrimarySortDirection.clone(); } return vector.normalizeWithDefault( ChainMergeContextValidatedOptions._defaultPrimarySortDirection.x, ChainMergeContextValidatedOptions._defaultPrimarySortDirection.y, ChainMergeContextValidatedOptions._defaultPrimarySortDirection.z); } /** * PRIVATE constructor -- assumes all inputs are validated in public create method !!!! * @param tolerance * @param unitVectorForPrimarySort */ private constructor(tolerance: number, unitVectorForPrimarySort: Vector3d) { this.tolerance = tolerance; this.primarySortDirection = unitVectorForPrimarySort; } /** return the default option set. */ public static createFromUnValidated(options?: ChainMergeContextOptions): ChainMergeContextValidatedOptions { const result = new ChainMergeContextValidatedOptions(Geometry.smallMetricDistance, ChainMergeContextValidatedOptions.createPrimarySortVector()); if (options !== undefined) { if (options.tolerance !== undefined) result.tolerance = options.tolerance; if (options.primarySortDirection !== undefined) result.primarySortDirection = ChainMergeContextValidatedOptions.createPrimarySortVector(); } return result; } /** Clone this context. */ public clone(): ChainMergeContextValidatedOptions { return new ChainMergeContextValidatedOptions(this.tolerance, this.primarySortDirection); } } /** * * Context for assembling line segments into chains. * * Use the context in phases: * * Create the context: `context = ChainMergeContext.create ()` * * Add line with any combination of: * * `context.addSegment(pointA, pointB)` * * `context.addLineSegment3dArray (segments)` * * Scan all coordinate data for common coordinates. 
Twist nodes together to form chains:
 *   * `context.clusterAndMergeVerticesXYZ ()`
 * * Collect the chains:
 *   * myLinestringArray = context.collectMaximalChains();
 * * The context carries an optional plane which is used by addSegmentsOnPlane
 * @internal
 */
export class ChainMergeContext {
  // Half-edge graph that accumulates all added segments (two half edges per segment).
  private _graph: HalfEdgeGraph;
  // Validated (defaults applied) merge options.
  private _options: ChainMergeContextValidatedOptions;
  // Optional plane consulted by addSegmentsOnPlane.
  private _plane?: PlaneAltitudeEvaluator;
  // Optional clipper; stored here for callers, not read by the methods in this class.
  private _convexClipper?: ConvexClipPlaneSet;
  private constructor(options: ChainMergeContextValidatedOptions) {
    this._graph = new HalfEdgeGraph();
    this._options = options;
  }
  /** Save a reference plane for later use, e.g. in addSegmentsOnPlane */
  public set plane(value: PlaneAltitudeEvaluator | undefined) {
    this._plane = value;
  }
  /** Property access for the reference plane. */
  public get plane(): PlaneAltitudeEvaluator | undefined {
    return this._plane;
  }
  /** Save a convex clipper for later use. */
  public set convexClipper(value: ConvexClipPlaneSet | undefined) {
    this._convexClipper = value;
  }
  /** Property access for the convex clipper. */
  public get convexClipper(): ConvexClipPlaneSet | undefined {
    return this._convexClipper;
  }
  /**
   * * Construct an empty chain merge graph.
   * * The options parameter may contain any combination of the options values.
   *   * tolerance = absolute tolerance for declaring points equal.
   *     * Default is `Geometry.smallMetricDistance`
   *   * primarySortDirection = direction for first sort.
   *     * To minimize clash among points on primary sort, this should NOT be perpendicular to any principal plane.
   *     * The default points into the first octant with non-obvious components.
   */
  public static create(options?: ChainMergeContextOptions): ChainMergeContext {
    const validatedOptions = ChainMergeContextValidatedOptions.createFromUnValidated(options);
    return new ChainMergeContext(validatedOptions);
  }
  /** Add a segment to the evolving graph.
   */
  public addSegment(pointA: Point3d, pointB: Point3d) {
    this._graph.createEdgeXYZXYZ(pointA.x, pointA.y, pointA.z, 0, pointB.x, pointB.y, pointB.z, 0);
  }
  /** Add all segments from an array to the graph. */
  public addLineSegment3dArray(data: LineSegment3d[]) {
    for (const segment of data) {
      this.addSegment(segment.point0Ref, segment.point1Ref);
    }
  }
  /** Add edges for all segments that are "on" the plane.
   * * No action if `this.plane` is undefined.
   * * addClosure also tests the wraparound segment (last point back to first).
   */
  public addSegmentsOnPlane(points: GrowableXYZArray, addClosure: boolean = false) {
    if (!this._plane)
      return;
    const plane = this._plane;
    let i0 = addClosure ? points.length - 1 : 0;
    let a0 = points.evaluateUncheckedIndexPlaneAltitude(i0, plane);
    let i1 = addClosure ? 0 : 1;
    let a1;
    // March consecutive index pairs (i0,i1); an edge is emitted only when BOTH
    // endpoint altitudes are within small metric distance of the plane.
    for (; i1 < points.length; i0 = i1++ , a0 = a1) {
      a1 = points.evaluateUncheckedIndexPlaneAltitude(i1, plane);
      if (Geometry.isSmallMetricDistance(a0) && Geometry.isSmallMetricDistance(a1))
        this._graph.createEdgeXYZXYZ(
          points.getXAtUncheckedPointIndex(i0), points.getYAtUncheckedPointIndex(i0), points.getZAtUncheckedPointIndex(i0), 0,
          points.getXAtUncheckedPointIndex(i1), points.getYAtUncheckedPointIndex(i1), points.getZAtUncheckedPointIndex(i1), 0);
    }
  }
  /**
   * Return a numeric value to be used for sorting, with little chance widely separated nodes will have identical key.
   * * Any single x,y,z component is a poor choice because horizontal and vertical alignment is common.
   * * Hence take dot product of x,y,z with non-trivial fraction values.
   * @param node node with x,y,z coordinates
   */
  private primarySortKey(node: HalfEdge): number {
    return this._options.primarySortDirection.dotProductXYZ(node.x, node.y, node.z);
  }
  /** Return difference of sortData members as sort comparison */
  private static nodeCompareSortData(nodeA: HalfEdge, nodeB: HalfEdge): number {
    return nodeA.sortData! - nodeB.sortData!;
  }
  /** test if nodeA is a dangling edge end (i.e. edges around vertex equal 1, but detect it without walking all the way around.
   */
  private static isIsolatedEnd(nodeA: HalfEdge): boolean {
    return nodeA.vertexSuccessor === nodeA;
  }
  /** test if nodeA is at a vertex with exactly 2 edges (but detect it without walking all the way around).
   */
  private static isChainInteriorVertex(nodeA: HalfEdge): boolean {
    const nodeB = nodeA.vertexSuccessor;
    return nodeB !== nodeA && nodeB.vertexSuccessor === nodeA;
  }
  /**
   * * isolate all edge ends.
   * * perform cluster analysis to determine nearly coincident points.
   * * pinch all edges together at each vertex.
   */
  public clusterAndMergeVerticesXYZ() {
    HalfEdgeGraphOps.isolateAllEdges(this._graph);
    // Assign each node its scalar sort key (dot product with the primary sort direction).
    for (const p of this._graph.allHalfEdges) {
      p.sortData = this.primarySortKey(p);
    }
    const sortArray = this._graph.allHalfEdges.slice();
    sortArray.sort(ChainMergeContext.nodeCompareSortData);
    const xyzTolerance = this._options.tolerance;
    // A node is unvisited if it is its own vertex successor !!!
    // otherwise the node has already been twisted into a base vertex.
    const n = sortArray.length;
    for (let i0 = 0; i0 < n; i0++) {
      const node0 = sortArray[i0];
      const qMin = node0.sortData!;
      // Only candidates whose sort key lies within [qMin, qMax] can be within
      // tolerance in 3d, so the inner loop can break early on sorted keys.
      const qMax = qMin + xyzTolerance;
      if (ChainMergeContext.isIsolatedEnd(node0)) {
        for (let i1 = i0 + 1; i1 < n; i1++) {
          const node1 = sortArray[i1];
          if (ChainMergeContext.isIsolatedEnd(node1)) {
            if (node1.sortData! > qMax)
              break;
            if (node0.distanceXYZ(node1) <= xyzTolerance) {
              HalfEdge.pinch(node0, node1);
              node1.setXYZFrom(node0);    // force true equal coordinates.
            }
          }
        }
      }
    }
  }
  /**
   * If node0 is not visited, creating a linestring with that starting edge and all successive edges along a chain.
   * @param chains growing array of chains.
   * @param node0 start node for search.
   */
  private collectMaximalLineString3dFromStartNode(chains: LineString3d[], node0: HalfEdge, visitMask: HalfEdgeMask) {
    if (!node0.isMaskSet(visitMask)) {
      const ls = LineString3d.create();
      ls.addPointXYZ(node0.x, node0.y, node0.z);
      for (; ;) {
        // Mark both sides of the edge visited, then step forward along the face.
        node0.setMask(visitMask);
        node0.edgeMate.setMask(visitMask);
        node0 = node0.faceSuccessor;
        ls.addPointXYZ(node0.x, node0.y, node0.z);
        // Stop at a previously visited edge (closed loop) or at a vertex that is
        // not a simple 2-edge chain interior (i.e. a chain end or junction).
        if (node0.isMaskSet(visitMask) || !ChainMergeContext.isChainInteriorVertex(node0))
          break;
      }
      chains.push(ls);
    }
  }
  /**
   * If node0 is not visited, creating a coordinate array with that starting edge and all successive edges along a chain.
   * @param result growing array of chains.
   * @param node0 start node for search.
   */
  private collectMaximalGrowableXYXArrayFromStartNode(result: GrowableXYZArray[], node0: HalfEdge, visitMask: HalfEdgeMask) {
    if (!node0.isMaskSet(visitMask)) {
      const points = new GrowableXYZArray();
      points.pushXYZ(node0.x, node0.y, node0.z);
      for (; ;) {
        node0.setMask(visitMask);
        node0.edgeMate.setMask(visitMask);
        node0 = node0.faceSuccessor;
        points.pushXYZ(node0.x, node0.y, node0.z);
        if (node0.isMaskSet(visitMask) || !ChainMergeContext.isChainInteriorVertex(node0))
          break;
      }
      if (points.length > 0)
        result.push(points);
    }
  }
  /**
   * * find edges with start, end in same vertex loop.
   * * pinch them away from the loop
   * * set mask on both sides.
   * * Return the number of excisions.
   */
  private exciseAndMarkSlingEdges(mask: HalfEdgeMask): number {
    let n = 0;
    for (const p of this._graph.allHalfEdges) {
      // Zero-length (within tolerance) edge whose two ends coincide: detach both
      // half edges from their vertex loops and mark them so chain collection skips them.
      if (p.distanceXYZ(p.edgeMate) < this._options.tolerance && !p.isMaskSet(mask)) {
        const q = p.edgeMate;
        HalfEdge.pinch(p, p.vertexPredecessor);
        HalfEdge.pinch(q, q.vertexPredecessor);
        p.setMask(mask);
        q.setMask(mask);
        n++;
      }
    }
    return n;
  }
  /** Collect chains which have maximum edge count, broken at any vertex with other than 2 edges.
   * * This is assumed to be preceded by a call to a vertex-cluster step such as `clusterAndMergeVerticesXYZ`
   */
  public collectMaximalChains(): LineString3d[] {
    const result: LineString3d[] = [];
    const visitMask = HalfEdgeMask.VISITED;
    // Pass 0: excise and mark zero-length edges.
    this.exciseAndMarkSlingEdges(visitMask);
    this._graph.clearMask(visitMask);
    // Pass 1: only start at non-interior edges -- vertices with one edge or more than 2 edges.
    //   (Note that collectMaximalChain checks the visit mask.)
    for (const node0 of this._graph.allHalfEdges) {
      if (!ChainMergeContext.isChainInteriorVertex(node0)) {
        this.collectMaximalLineString3dFromStartNode(result, node0, visitMask);
      }
    }
    // Pass 2: start anywhere in an unvisited loop.
    for (const node0 of this._graph.allHalfEdges) {
      this.collectMaximalLineString3dFromStartNode(result, node0, visitMask);
    }
    return result;
  }
  /** Collect maximal chains as raw coordinate arrays; same two-pass strategy as collectMaximalChains. */
  public collectMaximalGrowableXYZArrays(): GrowableXYZArray[] {
    const result: GrowableXYZArray[] = [];
    const visitMask = HalfEdgeMask.VISITED;
    // Pass 0: excise and mark zero-length edges.
    this.exciseAndMarkSlingEdges(visitMask);
    this._graph.clearMask(visitMask);
    // Pass 1: only start at non-interior edges -- vertices with one edge or more than 2 edges.
    //   (Note that collectMaximalChain checks the visit mask.)
    for (const node0 of this._graph.allHalfEdges) {
      if (!ChainMergeContext.isChainInteriorVertex(node0)) {
        this.collectMaximalGrowableXYXArrayFromStartNode(result, node0, visitMask);
      }
    }
    // Pass 2: start anywhere in an unvisited loop.
    for (const node0 of this._graph.allHalfEdges) {
      this.collectMaximalGrowableXYXArrayFromStartNode(result, node0, visitMask);
    }
    return result;
  }
}
the_stack
import React from 'react';
import TabBar from './TabBar'
import ColorPicker from './ColorPicker'
import '../css/GameModder.css';
import '../css/icons.css';
import '../css/SpriteEditor.css';
import { imageLiteralToBitmap, Bitmap } from '../sprite-editor/bitmap';
import { textToBitmap, createPngImg, updatePngImg, bitmapToBinHex, bitmapToText, isEmptyBitmap } from '../bitmap_helpers';
import { tickEvent } from '../telemetry/appinsights';
import { UserProject } from './util';
import { bunny_hop_bin_js } from '../games/bunny_hop/bin.js';
import { bunny_hop_main_ts } from '../games/bunny_hop/main.ts';
import { bunny_hop_main_blocks } from '../games/bunny_hop/main.blocks';
import { gameModderState } from '../App';
import { SpriteEditorComp } from './SpriteEditor';
import * as SE from '../sprite-editor/spriteEditor'
import { mkScreenshotAsync } from "./screenshot";
import { SpriteGalleryProps } from './SpriteGallery';
// import { bunnyHopBinJs } from '../../public/games/bunny_hop/bunny_hop_min.js.js';

// Props supplied by the app shell: a callback to launch the modded project,
// and a mode switcher for the play/share/mod views.
export interface GameModderProps {
    playHandler: (proj: UserProject) => void;
    changeMode: (mode: "play" | "share" | "mod") => void;
}

// One user-editable sprite slot:
//   default      - the game's shipped artwork for this slot
//   data         - the user's current drawing (may be empty)
//   name         - slot key (matches moddableImages keys)
//   callToAction - prompt shown above the editor for this slot
export interface UserImage {
    default: Bitmap,
    data: Bitmap,
    name: string,
    callToAction: string,
}

export interface GameModderState {
    userImages: UserImage[]
    currentImg: number,
    currentBackground: number,
    alertShown?: boolean;
    pulse?: boolean;
}

// Type guard: any object carrying a userImages field is treated as a
// previously saved GameModderState.
function IsGameModderState(s: any): s is GameModderState {
    return !!(s as GameModderState).userImages
}

// Builds an image-literal body of w columns by h rows, filled with "." (transparent).
function CreateEmptyImageText(w: number, h: number) {
    let res = "\n"
    for (let i = 0; i < h; i++)
        res += ".".repeat(w) + "\n"
    return res
}

// Measures an image literal: width from the first row (whitespace stripped),
// height from the row count.
function GetImageTextDimensions(s: string): { w: number, h: number } {
    s = s.trim()
    let lns = s.split("\n")
    let ln1 = lns[0].replace(/\s/g, "")
    return { w: ln1.length, h: lns.length }
}

// TODO: either we need binHexToBitmap or we need the original source code
// Builds a minimal pxt.json project manifest as a JSON string.
function mkPxtJson(): string {
    let json = {
        "name": "SampleIMages",
        "dependencies": {
            "device": "*"
        },
"description": "", "files": [ "main.blocks", "main.ts", "README.md" ], "preferredEditor": "blocksprj" } return JSON.stringify(json) } async function getTxtFile(url: string): Promise<string> { return new Promise((resolve, reject) => { var xhr = new XMLHttpRequest(); xhr.open('GET', url, true); xhr.responseType = 'text'; xhr.onload = function () { var status = xhr.status; if (status === 200) { resolve(xhr.response); } else { const err = new Error(`Error response (${status}) from '${url}'; content: ${xhr.response}`); reject(err) } }; xhr.send(); }); }; const moddableImages: { [k: string]: string } = { "character": ` . . . . . . . . . . . . . . . 1 1 . 1 1 . . . . . . . 1 3 . 1 3 . . . . . . . . 1 3 . 1 3 . . . . . . . 1 3 . 1 3 . . . . . . 1 1 1 1 1 1 . . . . . 1 1 1 1 1 1 1 1 . . . . 1 1 1 f 1 1 f 1 . . . . 1 1 1 1 1 1 1 1 . . . . 1 1 1 1 f f 1 1 . . . . . 1 1 1 1 1 1 . . . . 1 1 1 1 1 1 1 1 1 1 . . 1 1 1 1 1 1 1 1 1 1 . . . . . 1 1 1 1 . . . . . . . . 1 1 1 1 . . . . . . . . 1 1 1 1 . . . . . . . . 1 1 1 1 . . . . . . . . . 1 1 . . . . . . . . . . . 1 . . . . . . . . . . . . . . . . . . . . . . . . . . . . . `, "obstacle1": ` . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . 7 . . . . . . . . . . . . . . . . . . . . . 7 7 . . . . . . . . . . . . . . . . . . . 6 7 7 . . . . . . . . . . . . . . . . . . 6 6 7 7 7 . . . . . . . . . . . . . . . . . 6 6 7 7 7 . . . . . . . . . . . . . . . . . 6 6 7 7 7 7 . . . . . . . . . . . . . . . 6 6 7 7 7 7 7 . . . . . . . . . . . . . . 6 6 6 7 7 7 7 7 . . . . . . . . . . . . . . . . 6 6 6 6 6 . . . . . . . . . . . . . . . . . 6 6 6 6 . . . . . . . . . . . . . . . . . 6 6 6 6 6 7 7 . . . . . . . . . . . . . . 6 6 6 7 7 7 7 7 7 . . . . . . . . . . . . . 6 6 6 7 7 7 7 7 7 7 . . . . . . . . . . . . 6 6 7 7 7 7 7 7 7 7 7 . . . . . . . 6 6 6 6 6 7 7 7 7 7 7 7 7 7 7 7 7 . . . . . . 6 6 6 6 6 7 7 7 7 7 6 6 6 6 6 . . . . . . . . . . . . 6 6 6 6 7 7 . . . . . . . . . . . . . . . . 6 6 6 7 7 7 . . . . . . . . . . . . . . 
6 6 7 7 7 7 7 7 7 . . . . . . . . . . 6 6 6 7 7 7 7 7 7 7 7 7 7 7 . . . . . 6 6 6 6 7 7 7 7 7 7 7 7 7 7 7 7 7 7 . . . . 6 6 6 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 6 . . . 6 6 6 6 6 6 7 7 7 7 7 7 7 7 7 7 7 6 . . . . . . 6 6 6 6 e e e e 7 7 7 7 7 6 6 6 . . . . . . . . . . e e e e . . . . . . . . . . . . . . . . . . e e e e . . . . . . . . . . . . . . . . . . e e e e . . . . . . . . . . . . . . . . 6 . e e e e . . 6 . . . . . . . . . 6 6 6 . . . e e e e . 6 . . . . . . . . . . 6 . 6 . . . e e e e . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . `, "obstacle2": ` . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . e e e e e . . . . . . . . . e e b b b e e . . . . . . . e e b e e e b e e . . . . . . e e b e e b b e e . . . . . . . e e b b e e e . . . . . . . e b e e e e b b e . . . . . . e e b b b b e e e . . e . . . e e e e e e e e . . e . . . . e b e e b e b e . e . e . . . e b e e e e b e e . . . . . . e e e b e e e e . . . . . . e e b e b e b e e e . . . . e e e e e e e e e e e e . . . . . . . . e e . . . . . . . `, "background": ` . . . d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d . . . . . d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d . . . d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d . 
d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d b b b b d d b b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b d d d d d d d 
d d d d d d d d d d d d d d d d d d d d d d d d d b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b b b b b b b b d d d d d d d d d d d d d d d d d d d b b b b b b b b b b b b b b b b b b b b d d d d d d d d d d d d d d d d d d d b b b b b b b b b b b b b b b b d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b b d b d d d d d d d d d d d d d d d d d d d d d d d d d b b b b b b b b b b b b b b b b d d d d d d d d d d d d d d d d d b b b b b b b b b b b b b b b b b b d d d d d d d d d d d d d d d b b b b b b b b b b b b b b b b b b b d d d d d d d d d d d d d d d d d d d b b b b b d d d b b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b b b d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d d b d b d d d d d d d d d d d d d d d . d d d d d d d d d d d d d d d d d b d b d d d d d d d d d d d d d d . . . d d d d d d d d d d d d d d d d b b b d d d d d d d d d d d d d . . . . . d d d d d d d d d d d d d d d b b b d d d d d d d d d d d d . . . ` } const CALL_TO_ACTION: { [k: string]: string } = { "character": "Draw your character!", "obstacle1": "Draw an obstacle!", "obstacle2": "Draw another obstacle!", "background": "Choose your background!" 
} // TODO: // 15x32 stump // 22x32 tree function range(len: number): number[] { return new Array(len) .fill(undefined) .map((_, i) => i) } function img2Rows(imgTxt: string) { let rows = imgTxt.split("\n") .map(r => r.replace(/\s/g, "")) .filter(r => !!r) return rows } function rows2img(rows: string[]): string { return rows.join('\n') } function resizeUp(imgTxt: string, targW: number, targH: number): string { let rows = img2Rows(imgTxt) let oldW = rows[0].length let oldH = rows.length if (oldW > targW || oldH > targH) return imgTxt; const left2add = Math.floor((targW - oldW) / 2) const right2add = targW - oldW - left2add const top2add = targH - oldH let addCols = (numL: number, numR: number) => { let l = '.'.repeat(numL) let r = '.'.repeat(numR) rows = rows.map(o => l + o + r) } let addRows = (numT: number) => { let newR = range(numT) .map(_ => '.'.repeat(targW)) rows = [...newR, ...rows] } addCols(left2add, right2add) addRows(top2add) return rows2img(rows) } function resizeTo24x24(imgTxt: string) { return resizeUp(imgTxt, 24, 24) } function mirror(imgTxt: string): string { let rows = img2Rows(imgTxt) rows = rows.map(r => r.split("").reverse().join("")) return rows2img(rows) } const SAMPLE_CHARACTERS = [`.`, ` . . . . . . . . . . b 5 b . . . . . . . . . . . . b 5 b . . . . . . . . . . . . . b c . . . . . . . . . . . b b b b b b . . . . . . . . . b b 5 5 5 5 5 b . . . . . . . b b 5 d 1 f 5 5 d f . . . . . . b 5 5 1 f f 5 d 4 c . . . . . . b 5 5 d f b d d 4 4 . . b d d d b b d 5 5 5 4 4 4 4 4 b b b d 5 5 5 b 5 5 4 4 4 4 4 b . b d c 5 5 5 5 d 5 5 5 5 5 b . . c d d c d 5 5 b 5 5 5 5 5 5 b . c b d d c c b 5 5 5 5 5 5 5 b . . c d d d d d d 5 5 5 5 5 d b . . . c b d d d d d 5 5 5 b b . . . . . c c c c c c c c b b . . . `, mirror(` e e e . . . . e e e . . . . c d d c . . c d d c . . . . c b d d f f d d b c . . . . c 3 b d d b d b 3 c . . . . f b 3 d d d d 3 b f . . . . e d d d d d d d d e . . . . e d f d d d d f d e . b f b f d d f d d f d d f . f d f f b d d b b d d 2 f . 
f d f . f 2 2 2 2 2 2 b b f f d f . f b d d d d d d b b d b f . f d d d d d b d d f f f . . f d f f f d f f d f . . . . f f . . f f . . f f . . . `), mirror(` . . 4 4 4 . . . . 4 4 4 . . . . . 4 5 5 5 e . . e 5 5 5 4 . . . 4 5 5 5 5 5 e e 5 5 5 5 5 4 . . 4 5 5 4 4 5 5 5 5 4 4 5 5 4 . . e 5 4 4 5 5 5 5 5 5 4 4 5 e . . . e e 5 5 5 5 5 5 5 5 e e . . . . . e 5 f 5 5 5 5 f 5 e . . . . . . f 5 5 5 4 4 5 5 5 f . . f f . . f 4 5 5 f f 5 5 6 f . f 5 f . . . f 6 6 6 6 6 6 4 4 f 5 5 f . . . f 4 5 5 5 5 5 5 4 4 5 f . . . . f 5 5 5 5 5 4 5 5 f f . . . . . f 5 f f f 5 f f 5 f . . . . . . f f . . f f . . f f . . . `)].map(resizeTo24x24) const SAMPLE_OBSTACLES = [`.`, ` . . . . . . . . . c c 8 . . . . . . . . . . 8 c c c f 8 c c . . . . . c c 8 8 f c a f f f c c . . . c c c f f f c a a f f c c c 8 c c c f f f f c c a a c 8 c c c c c b f f f 8 a c c a a a c c c a a b b 8 a b c c c c c c c c a f c a a b b a c c c c c f f c a 8 f c a a c c a c a c f f f c c a 8 a a c c c c a a f f f 8 a . a c a a c f f a a b 8 f f c a . . c c b a f f f a b b c c 6 c . . . c b b a f f 6 6 a b 6 c . . . . c c b b b 6 6 a c c c c . . . . . c c a b b c c c . . . . . . . . . c c c c c c . . . . . `, ` . . . . . . b b b b . . . . . . . . . . . . b 4 4 4 b . . . . . . . . . . . b b 4 4 4 b . . . . . . . . . b 4 b b b 4 4 b . . . . . . . b d 5 5 5 4 b 4 4 b . . . . . . b 3 2 3 5 5 4 e 4 4 b . . . . b d 2 2 2 5 7 5 4 e 4 4 e . . . b 5 3 2 3 5 5 5 5 e e e e . . b d 7 5 5 5 3 2 3 5 5 e e e . . b 5 5 5 5 5 2 2 2 5 5 d e e . b 3 2 3 5 7 5 3 2 3 5 d d e 4 . b 2 2 2 5 5 5 5 5 5 d d e 4 . b d 3 2 d 5 5 5 d d d 4 4 . . . b 5 5 5 5 d d 4 4 4 4 . . . . . 4 d d d 4 4 4 . . . . . . . . . 4 4 4 4 . . . . . . . . . . . . `, ` . . . b b b b b b b b b b . . . . . b 1 1 1 1 1 1 1 1 1 1 b . . . b 1 1 1 1 1 1 1 1 1 1 1 1 b . . b 1 1 1 1 1 1 1 1 1 1 1 1 b . . b d d c c c c c c c c d d b . . b d c 6 6 6 6 6 6 6 6 c d b . . b d c 6 1 d 6 6 6 6 6 c d b . . b d c 6 d 6 6 6 6 6 6 c d b . . b d c 6 6 6 6 6 6 6 6 c d b . . 
b d c 6 6 6 6 6 6 6 6 c d b . . b d c 6 6 6 6 6 6 6 6 c d b . . b d d c c c c c c c c d d b . . c b b b b b b b b b b b b c . f c c c c c c c c c c c c c c f f b b b b b b b b b b b b b b f f b c d d d d d d d d d d d b f f b c b b b b b b b b b b c b f f b c b b b b b b b b b b c b f f b c c c c c c c c c c c c b f f b b b b b b b b b b b b b b f f b f f f f f f f f f f f f b f f f f f f f f f f f f f f f f f `].map(resizeTo24x24) const SAMPLE_OBSTACLES2 = [`.`, ` . . . . . . . 6 . . . . . . . . . . . . . . 8 6 6 . . . 6 8 . . . . . e e e 8 8 6 6 . 6 7 8 . . . . e 2 2 2 2 e 8 6 6 7 6 . . . . e 2 2 4 4 2 7 7 7 7 7 8 6 . . . e 2 4 4 2 6 7 7 7 6 7 6 8 8 . e 2 4 5 2 2 6 7 7 6 2 7 7 6 . . e 2 4 4 2 2 6 7 6 2 2 6 7 7 6 . e 2 4 2 2 2 6 6 2 2 2 e 7 7 6 . e 2 4 2 2 4 2 2 2 4 2 2 e 7 6 . e 2 4 2 2 2 2 2 2 2 2 2 e c 6 . e 2 2 2 2 2 2 2 4 e 2 e e c . . e e 2 e 2 2 4 2 2 e e e c . . . e e e e 2 e 2 2 e e e c . . . . e e e 2 e e c e c c c . . . . . . c c c c c c c . . . . . . . . `, ` . . . . . c c b b b . . . . . . . . . . c b d d d d b . . . . . . . . . c d d d d d d b b . . . . . . . c d d d d d d d d b . . . . . c b b d d d d d d d b . . . . . c b b d d d d d d d b . . . c c c c b b b b d d d b b b . . c d d b c b b b b b b b b d b c b b d d d b b b b b d d b d b c c b b d d d d d d d b b b d c c b c c c b b b b b b b d d c c c c b b c c c c b d d d b c c b . c c c c c c c c c c c b b b b . . c c c c c b b b b b b b c . . . . . . . c c b b b b c c . . . . . . . . . . c c c c . . . . `, ` . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . f f f . . . . . . . . . . . . . . . . 
. . . . f 2 f f f f f . . . . . . . . . . . . . . . f f 2 2 e e e e e f f . . . . . . . . . . . . f f 2 2 2 e e e e e e f f . . . . . . . . . . . f e e e e f f f e e e e f . . . . . . . . . . f e 2 2 2 2 e e e f f f f f . . . . . . . . . . f 2 e f f f f f 2 2 2 e f f f . . c c . . . . . f f f e e e f f f f f f f f f . . c d c c . . . f e e 4 4 f b b e 4 4 e f e f . . c c d d c c . . f e d d f b b 4 d 4 e e f . . . . . c d d d c e e f d d d d d 4 e e e f . . . . . . . c c d c d d e e 2 2 2 2 2 2 2 f . . . . . . . . . c c c d d 4 4 e 5 4 4 4 4 4 f . . . . . . . . . . . . e e e e f f f f f f f f . . . . . . . . . . . . . . . . f f . . . f f f . . . . `].map(resizeTo24x24) // console.dir(SAMPLE_CHARACTERS) // console.dir(SAMPLE_OBSTACLES) export class GameModder extends React.Component<GameModderProps, GameModderState> { protected playBtn: HTMLButtonElement | undefined; protected spriteEditor: SpriteEditorComp; protected header: HTMLHeadingElement | undefined; private tabImages: Bitmap[]; private scale: number = 1.0; private alertTimeout: any; constructor(props: GameModderProps) { super(props); if (IsGameModderState(gameModderState)) { // Loading previous modder state this.state = gameModderState } else { // Creating new modder state let imgs = Object.keys(moddableImages) .map((name) => { let def = moddableImages[name] // TODO: match the original dimensions? 
// One difficulty with this
            // is the sprite editor canvas can't handle this
            // let { w, h } = GetImageTextDimensions(moddableImages[name])
            let [w, h] = [24, 24]
            let blank = CreateEmptyImageText(w, h);
            return {
                data: imageLiteralToBitmap(blank),
                name: name,
                callToAction: CALL_TO_ACTION[name],
                default: textToBitmap(def)
            };
        })
        this.state = {
            userImages: imgs,
            currentImg: 0,
            currentBackground: 12
        }
        Object.assign(gameModderState, this.state)
    }

    // Tab thumbnails always show the shipped default art (moddableImages),
    // regardless of the user's edits.
    this.tabImages = Object.keys(moddableImages)
        .map(k => moddableImages[k])
        .map(textToBitmap)

    // On first visit only, schedule the "Play" nudge (alertPlay) after 5s.
    if (!(gameModderState as GameModderState).alertShown)
        this.alertTimeout = setTimeout(this.alertPlay, 5000);
}

// async renderExperiments() {
//     let tabBar = this.refs["tab-bar"] as TabBar
//     let dummyImg = createPngImg(20, 20, 64, 64)
//     tabBar.TabBarSvg.appendChild(dummyImg)
//     setInterval(() => {
//         updatePngImg(dummyImg, this.spriteEditor.bitmap().image)
//     }, 500)
//     function getImages(ts: string) {
//         let imgRegex = /img`([\d\s\.a-f]*)`/gm
//         let match = imgRegex.exec(ts);
//         let res: string[] = []
//         while (match != null) {
//             res.push(match[1])
//             match = imgRegex.exec(ts);
//         }
//         return res
//     }
//     // HACK:
//     let mainTs = bunny_hop_main_ts;
//     // let mainTs = await getTxtFile("games/bunny_hop/main.ts")
//     // TODO: find images
//     let imgs = getImages(mainTs)
//     // console.dir(imgs)
//     let imgsAsBmps = imgs.map(textToBitmap)
//     // console.dir(imgsAsBmps)
// }

// Timer callback for the first-visit nudge: persist current edits, remember the
// nudge was shown, and start the play-button pulse animation.
private alertPlay = () => {
    this.save();
    (gameModderState as GameModderState).alertShown = true;
    this.setState({ pulse: true });
}

// Cancels the pending nudge timer (called on unmount).
private clearTimers = () => {
    clearTimeout(this.alertTimeout);
}

// Replaces the bitmap of the currently selected slot in both component state
// and the module-level gameModderState cache.
private updateCurrentUserImage(bmp: Bitmap) {
    // TODO: set image bug somehow?
    function updateUserImage(old: UserImage, nw: Bitmap): UserImage {
        return {
            data: nw,
            name: old.name,
            callToAction: old.callToAction,
            default: old.default
        }
    }
    let newState = {
        userImages: this.state.userImages.map((m, i) =>
            i === this.state.currentImg ? updateUserImage(m, bmp) : m)
    }
    this.setState(newState)
    Object.assign(gameModderState, newState)
}

// Commits any in-progress sprite-editor drawing into the current slot.
private save() {
    if (this.spriteEditor && this.spriteEditor.editor) {
        this.spriteEditor.editor.commit()
        let newImg = this.spriteEditor.editor.bitmap().image
        this.updateCurrentUserImage(newImg)
    }
}

// Tab switch: save the outgoing tab's drawing first, then change selection.
onTabChange(idx: number) {
    this.save()
    this.setState({ currentImg: idx })
    if (IsGameModderState(gameModderState))
        gameModderState.currentImg = idx
    tickEvent("shareExperiment.mod.tabChange", { "tab": idx });
}

onBackgroundColorChanged(idx: number) {
    this.setState({ currentBackground: idx })
    tickEvent("shareExperiment.mod.changeBackground", { "color": idx });
    if (IsGameModderState(gameModderState))
        gameModderState.currentBackground = idx
}

// A gallery sample was clicked: adopt it as the current slot's image.
onSpriteGalleryPick(bmp: Bitmap, idx?: number) {
    tickEvent("shareExperiment.mod.galleryPick", { "tab": this.state.currentImg, "item": idx });
    this.updateCurrentUserImage(bmp)
}

render() {
    let currImg = this.state.userImages[this.state.currentImg]
    // Tab index 3 is the background slot; it gets a color picker instead of the sprite editor.
    let isBackgroundTab = this.state.currentImg === 3

    // Scale the editor to fit the viewport, preserving the reference aspect ratio.
    let body = document.getElementsByTagName('body')[0]
    // const MARGIN = 20
    const HEADER_HEIGHT = 50
    let actualWidth = body.clientWidth
    let actualHeight = body.clientHeight - HEADER_HEIGHT
    let refWidth = 539.0
    let refHeight = SE.TOTAL_HEIGHT
    let wScale = actualWidth / refWidth
    let hScale = actualHeight / refHeight
    this.scale = Math.min(wScale, hScale)
    const SPRITE_GALLERY_HEIGHT = 100
    let spriteGalleryHeight = SPRITE_GALLERY_HEIGHT * this.scale
    let colorPickerHeight = (SE.TOTAL_HEIGHT + SPRITE_GALLERY_HEIGHT) * this.scale

    // TODO
    // Gallery samples per tab; tabs without samples fall back to characters.
    let samples = [
        SAMPLE_CHARACTERS,
        SAMPLE_OBSTACLES,
        SAMPLE_OBSTACLES2
    ]
    let spriteGalleryOptions = (samples[this.state.currentImg] || SAMPLE_CHARACTERS)
        .map(i => imageLiteralToBitmap(i))
    let startImg = this.state.userImages[this.state.currentImg].data
    let galKey = `tab${this.state.currentImg}__` + spriteGalleryOptions.map(b => b.buf.toString()).join("_")
    let galProps: SpriteGalleryProps = {
        height: spriteGalleryHeight,
        options: spriteGalleryOptions,
        onClick: this.onSpriteGalleryPick.bind(this)
    }
    return (
        <div className="game-modder">
            <h1 ref="header" className="what-to-do-header">{currImg.callToAction}</h1>
            <TabBar ref="tab-bar" tabImages={this.tabImages}
                tabChange={this.onTabChange.bind(this)} startTab={this.state.currentImg} />
            {isBackgroundTab
                ? <ColorPicker selectionChanged={this.onBackgroundColorChanged.bind(this)}
                    selected={this.state.currentBackground} colors={SE.COLORS}
                    height={colorPickerHeight}></ColorPicker>
                : <SpriteEditorComp ref="sprite-editor" startImage={startImg}
                    onPlay={this.onPlay} scale={this.scale} galleryProps={galProps}></SpriteEditorComp>
            }
            {/* <div ref="sprite-gallery" className="sprite-gallery"> </div> */}
            <button ref="play-btn" className={`play-btn ${this.state.pulse ? "shake" : ""}`}>
                <span>Play</span>
                <i className="icon play"></i>
            </button>
        </div>
    )
}

async componentDidMount() {
    this.playBtn = this.refs["play-btn"] as HTMLButtonElement;
    this.spriteEditor = this.refs["sprite-editor"] as SpriteEditorComp;
    this.header = this.refs['header'] as HTMLHeadingElement

    // events
    this.playBtn.addEventListener('click', this.onPlay.bind(this))

    // HACK: Disable scrolling in iOS
    document.ontouchmove = function (e) {
        e.preventDefault();
    }
}

componentDidUpdate() {
    // The sprite-editor ref changes when render swaps between editor and color picker.
    this.spriteEditor = this.refs["sprite-editor"] as SpriteEditorComp;
}

componentWillUnmount() {
    this.playBtn = undefined;
    this.spriteEditor = undefined;
    this.header = undefined;
    this.clearTimers();
}

// Bakes the user's edits into the bunny-hop game sources/binary and hands the
// resulting project (plus a screenshot) to the play handler.
async onPlay() {
    this.save();
    (gameModderState as GameModderState).alertShown = true;

    // Only slots the user actually drew in are substituted into the game.
    const toReplace = this.state.userImages.filter(ui => !isEmptyBitmap(ui.data));

    // Patches the compiled JS: rewrites the emitted setBackgroundColor argument.
    function modBackground(bin: string, newColor: number): string {
        const originalColor = 13
        const template = (color: number) => `scene_setBackgroundColor__P935_mk(s);s.tmp_0.arg0=${color}`
        let old = template(originalColor)
        let newIdx = newColor + 1 // arcade function is 1-based b/c 0 is transparent
        let nw = template(newIdx)
        return bin.replace(old, nw)
    }
    // Same substitution, but against the TypeScript source form.
    function modBackgroundTs(bin: string, newColor: number): string {
        const originalColor = 13
        const template = (color: number) => `scene.setBackgroundColor(${color})`
        let old = template(originalColor)
        let newIdx = newColor + 1 // arcade function is 1-based b/c 0 is transparent
        let nw = template(newIdx)
        return bin.replace(old, nw)
    }
    // Swaps one default image's hex payload in the compiled JS for the user's bitmap.
    function modImg(bin: string, img: UserImage): string {
        // HACK: for some reason the compiler emits image prefixes that look like:
        //      8704100010000000
        // whereas ours look like:
        //      e4101000
        const MOD_PREFIX_LEN = "e4101000".length
        const BIN_PREFIX_LEN = "8704100010000000".length
        let newHex = bitmapToBinHex(img.data)
        // Search for the default image's hex WITHOUT its prefix, then back up to
        // include the binary's longer prefix in the replaced span.
        const oldToFind = bitmapToBinHex(img.default)
            .slice(MOD_PREFIX_LEN)
        let oldStartIncl = bin.indexOf(oldToFind) - BIN_PREFIX_LEN
        if (oldStartIncl < 0)
            return bin;
        // The hex literal ends at the closing quote.
        let oldEndExcl = bin.indexOf(`"`, oldStartIncl)
        let oldHex = bin.slice(oldStartIncl, oldEndExcl)
        return bin.replace(oldHex, newHex)
    }

    let gameBinJs = bunny_hop_bin_js
    let gameMainTs = bunny_hop_main_ts
    let gameMainBlocks = bunny_hop_main_blocks;

    for (let i of toReplace) {
        const def = bitmapToText(i.default);
        const user = bitmapToText(i.data);
        gameBinJs = modImg(gameBinJs, i)
        gameMainTs = replaceImages(gameMainTs, def, user);
        gameMainBlocks = replaceImages(gameMainBlocks, def, user);
    }
    gameBinJs = modBackground(gameBinJs, this.state.currentBackground)
    gameMainTs = modBackgroundTs(gameMainTs, this.state.currentBackground);

    // Screenshot uses the 1-based background color; empty slots fall back to default art.
    const screenshot = await mkScreenshotAsync(this.state.currentBackground + 1,
        this.state.userImages.map(u => isEmptyBitmap(u.data) ?
u.default : u.data)); this.props.playHandler({ binJs: gameBinJs, mainTs: gameMainTs, mainBlocks: gameMainBlocks, screenshot }); } } function replaceImages(sourceFile: string, toReplace: string, userImage: string) { const sourceLines = sourceFile.split(/\n/).map(l => l.trim()); const replaceLines = toReplace.split(/\n/).map(l => l.trim()).slice(1, -1); userImage = userImage.replace("img`", "").replace("`", ""); let foundMatch = false; for (let i = 0; i < sourceLines.length; i++) { if (sourceLines[i] === replaceLines[0]) { foundMatch = true; for (let j = 1; j < replaceLines.length; j++) { if (sourceLines[i + j] != replaceLines[j]) { foundMatch = false; break; } } if (foundMatch) { sourceLines.splice(i, replaceLines.length, userImage); } } } return sourceLines.join("\n"); } export default GameModder;
// ---- file boundary (concatenation artifact; original separator text: "the_stack") ----
// NOTE(review): the physical line structure of this module appears to have been
// collapsed by an extraction tool — several `//` line comments now swallow code
// that follows them on the same physical line, and some commented-out regions
// lost their `//` markers at line joins. Code tokens below are left byte-identical;
// only standalone review comments have been added. Restore the original line
// breaks before shipping.
//
// Module purpose (from the visible code): builds a G2 `Types.LegendCfg` from the
// widget-level `config.legend` options and applies it via `chart.legend(...)`,
// including position/padding mapping, name/value formatters, deprecation warnings,
// and custom legend-filter interactions.
'use strict'; import { Chart, ChartData, Types, G2Dependents } from "./types"; import { customFormatter, customFormatterConfig, merge } from './common'; import themes from '../themes'; import { pxToNumber } from './common'; import { warn } from './log'; // import { legendHtmlContainer, legendHtmlList, legendHtmlListItem, legendHtmlMarker, legendTextStyle } from './g2Theme'; /* * 提取渐变色中的第一个颜色,具体逻辑来自 G2 内部 * */ // // const regexTags = /[MLHVQTCSAZ]([^MLHVQTCSAZ]*)/ig; // // const regexDot = /[^\s\,]+/ig; // const regexLG = /^l\s*\(\s*([\d.]+)\s*\)\s*(.*)/i; // const regexRG = /^r\s*\(\s*([\d.]+)\s*,\s*([\d.]+)\s*,\s*([\d.]+)\s*\)\s*(.*)/i; // // const regexPR = /^p\s*\(\s*([axyn])\s*\)\s*(.*)/i; // const regexColorStop = /[\d.]+:(#[^\s]+|[^\)]+\))/ig; // // // 取匹配出来的第一个颜色 // function getFirstStop(steps) { // return steps.match(regexColorStop)[0].split(':')[1]; // } // function getColor(color) { // if (color[1] === '(' || color[2] === '(') { // if (color[0] === 'l') { // // 线性 regexLG.test(color) // return getFirstStop(regexLG.exec(color)[2]); // } else if (color[0] === 'r') { // // 径向 regexRG.test(color) // return getFirstStop(regexRG.exec(color)[4]); // } else if (color[0] === 'p') { // // regexPR.test(color) // // return parsePattern(color, self, context); // } // } // return color; // } type Position = 'top' | 'top-left' | 'top-right' | 'right' | 'right-top' | 'right-bottom' | 'left' | 'left-top' | 'left-bottom' | 'bottom' | 'bottom-left' | 'bottom-right'; export interface LegendConfig extends customFormatterConfig { visible?: boolean; autoCollapse?: boolean; /** @deprecated 暂时无法修改分页尺寸 */ collapseRow?: 'auto' | number; position?: Position; align?: string; padding?: [number, number, number, number]; nameFormatter?(text: string, item: G2Dependents.ListItem, index: number): string; valueFormatter?(value: string | number, item: G2Dependents.ListItem, index: number): string; showData?: boolean; marker?: Types.MarkerCfg; allowAllCanceled?: boolean; hoverable?: boolean;
// (continued) remaining LegendConfig members: the @deprecated event hooks below
// are kept for backward compatibility — the exported function still wires them up
// and emits a runtime `warn(...)` when they are used.
/** @deprecated config.legend.onHover 已废弃,请使用 chart.on('legend-item:mouseenter', onHover) */ onHover?: Types.EventCallback; clickable?: boolean; /** @deprecated config.legend.onClick 已废弃,请使用 chart.on('legend-item:click', onClick) */ onClick?: Types.EventCallback; /** @deprecated config.legend.defaultClickBehavior 已废弃,请使用 chart.on('legend-item:click', onClick) 绑定自定义点击事件 */ defaultClickBehavior?: boolean; customConfig?: Types.LegendCfg; } function getPosition(position?: string, align?: string): Position { const [p, a] = position.split('-'); if (!a && align) { if (align === 'center') { return p as Position; } return `${p}-${align}` as Position; } return position as Position; } function getPadding(position: string, base: number, userPadding?: number[], isPolar?: boolean) { if (userPadding) { return userPadding; } const len = base * 2 / 3; const [p] = position.split('-'); switch (p) { case 'bottom': return [len, 0, 0, 0]; case 'left': if (isPolar === true) { return [0, base, 0, 0]; } return [0, len, 0, 0]; case 'top': return [0, 0, len, 0]; case 'right': if (isPolar === true) { return [0, 0, 0, base]; } return [0, 0, 0, len]; } return [len, len, len, len]; } /** * rectLegend 直角坐标系legend配置。 * * @param {this} ctx 组件实例 this 指针 * @param {Chart} chart 图表实例 * @param {Object} config 配置项 * @param {Object} defaultConfig 组件的自定义配置 * @param {boolean} isOneDataGroup 数据是否为单组形式,类似饼图和漏斗图 * @param {string} field 数据映射字段 * @param {boolean} isPolar 是否极坐标系 * @param {function} itemFormatter 组件自定义的 item 格式函数 * */ export default function<T> ( ctx: T, chart: Chart, config: { legend?: LegendConfig | boolean; }, defaultConfig: Types.LegendCfg, isOneDataGroup: boolean, field?: string, isPolar?: boolean, itemFormatter?: (item: G2Dependents.ListItem, i: number) => G2Dependents.ListItem ) { // 设置图例 if (config.legend === false || (config.legend && typeof config.legend !== 'boolean' && config.legend.visible === false)) { chart.legend(false); } else { const { // 自动折叠图例 autoCollapse = true, collapseRow,
// (continued) destructured legend options with their defaults — see LegendConfig
// above. `config.legend === true` (or a falsy value) means "all defaults".
// NOTE(review): getPosition() above declares `position?` optional but calls
// `position.split('-')` unguarded; safe here only because this call site
// defaults `position` to 'top' — TODO confirm no other callers pass undefined.
// 图例位置 position = 'top', align = 'left', padding, // 格式化函数 nameFormatter, valueFormatter, showData, marker, // 交互相关 allowAllCanceled = false, hoverable = true, onHover, clickable = true, onClick, defaultClickBehavior, // 自定义配置 customConfig, // style = {}, } = (config.legend === true ? {} : (config.legend || {})) as LegendConfig; if (collapseRow) { warn('config.legend', `collapseRow 已废弃,暂时无法修改分页尺寸`); } const baseFontSizeNum = pxToNumber(themes['widgets-font-size-1']); const legendConfig: Types.LegendCfg = { ...defaultConfig, position: getPosition(position, align), flipPage: autoCollapse, itemName: { // formatter: nameFormatter, formatter: (text, item, index) => { if (nameFormatter) { return nameFormatter(text, itemFormatter ? itemFormatter(item, index) : item, index); } return text; } }, // background: { // padding: 0, // }, padding: getPadding(position, baseFontSizeNum, padding, isPolar), marker: marker || { // symbol: 'circle', style: { r: baseFontSizeNum / 3, // fill: styleSheet.legendMarkerColor, // lineCap: 'butt', lineAppendWidth: 0, fillOpacity: 1, }, } }; // legend hover 相关事件 if (!hoverable) { chart.removeInteraction('legend-active'); } if (onHover) { warn('config.legend', `onHover 属性已废弃,请使用通用事件方法 props.event = { 'legend-item:mouseenter': onHover }`); chart.on('legend-item:mouseenter', onHover); } // legend click 相关事件 // 去除默认图例行为 chart.removeInteraction('legend-filter'); if (clickable) { if (allowAllCanceled) { chart.interaction('legend-custom-filter'); } else { chart.interaction('legend-custom-filter-last'); } } if (onClick) { warn('config.legend', `onClick 属性已废弃,请使用通用事件方法 props.event = { 'legend-item:click': onClick }`); chart.on('legend-item:click', onClick); } if (defaultClickBehavior !== undefined) { warn('config.legend', `defaultClickBehavior 属性已废弃,取消默认点击效果只需要关闭 legend.clickable 即可`); } if (showData) { const customValueFormatter = customFormatter(config.legend === true ?
// (continued) showData branch: builds `itemValue` so each legend row also renders
// the series' last data value (resolved via getLastValue below).
{} : (config.legend || {})); legendConfig.itemValue = { style: { fill: themes['widgets-legend-text'], fontSize: baseFontSizeNum, lineHeight: baseFontSizeNum, fontFamily: themes['widgets-font-family-txd-m-number'], }, formatter: (text, item, index) => { // @ts-ignore const value = getLastValue(item.name, ctx.rawData, isOneDataGroup); if (valueFormatter) { return valueFormatter(value, itemFormatter ? itemFormatter(item, index) : item, index); } else if (customValueFormatter) { return customValueFormatter(value) } return value; }, }; } // // 因为图例项有下边距,所以bottom设置为0即可 // const legendStyle = Object.assign({}, legendHtmlContainer); // const legendItemStyle = Object.assign({}, legendHtmlListItem); // if (position === 'bottom') { // legendStyle.bottom = 0; // legendStyle.overflow = 'visible'; // // 置于下方时设置margin top // legendItemStyle.marginBottom = 0; // legendItemStyle.marginTop = themes['widgets-font-size-1'] // } else { // legendStyle.top = 0; // } // if (align === 'right') { // legendStyle.right = 0; // legendStyle.textAlign = 'right'; // } else if (align === 'left') { // legendStyle.left = 0; // } else if (align === 'center') { // legendStyle.left = 0; // legendStyle.width = '100%'; // legendStyle.textAlign = 'center'; // } else { // // 默认放到左边 // legendStyle.left = 0; // } // // const legendConfig = { // // 这些是widgets特有的属性 // autoCollapse, // collapseRow, // // 以下为g2的属性 // useHtml: true, // title: null, // position: position || 'top', // allowAllCanceled, // // 这个属性文档里没有,设置为false可以让图例不居中,再手动设置定位样式 // autoPosition: false, // hoverable, // onHover, // clickable, // onClick, // // 隐藏属性,设置了 onClick 时依然保留默认点击行为 // defaultClickHandlerEnabled: defaultClickBehavior, // itemTpl: (value, color, checked, index) => { // const item = getRawData(config, this.rawData, value, isOneDataGroup); // // const newName = nameFormatter ?
// (continued) legacy HTML-legend itemTpl implementation (commented out). NOTE(review):
// the `//` markers for parts of this region were lost in the line-collapse (the next
// physical line resumes the commented code without `//`) — see NOTE at top of file.
nameFormatter(value, { // ...item, // color, // checked, // }, index) : value; // // if (showData) { // const dataValue = getDataValue(item.data); // const newValue = valueFormatter ? valueFormatter(dataValue, { // ...item, // color, // checked, // }, index) : dataValue; // // return `${'<li class="g2-legend-list-item item-{index} {checked}" data-color="{originColor}" data-value="{originValue}">' + // `<i class="g2-legend-marker" style="background-color:${getColor(color)};"></i>` + // '<span class="g2-legend-text">'}${newName}</span><span class="g2-legend-value">${newValue}</span></li>`; // } // // return `${'<li class="g2-legend-list-item item-{index} {checked}" data-color="{originColor}" data-value="{originValue}">' + // `<i class="g2-legend-marker" style="background-color:${getColor(color)};"></i>` + // '<span class="g2-legend-text">'}${newName}</span></li>`; // }, // 'g2-legend': legendStyle, // 'g2-legend-list': Object.assign({}, legendHtmlList), // 'g2-legend-list-item': legendItemStyle, // 'g2-legend-marker': Object.assign({}, legendHtmlMarker), // textStyle: Object.assign({}, legendTextStyle), // }; // if (componentConfig) { // // 内部的componentConfig直接使用assign浅复制,方便覆盖。 // Object.assign(legendConfig, componentConfig); // } if (customConfig) { merge(legendConfig, customConfig); } // if (legendConfig['g2-legend'] && typeof legendConfig['g2-legend'] === 'object') { // Object.keys(style).forEach((key) => { // // 确保每一项有值设置了,如果是假值则忽略 // if (style[key]) { // // hack 字号转化为 px // if (key === 'fontSize' && !isInvalidNumber(style[key])) { // legendConfig['g2-legend'][key] = `${style[key]}px`; // } else { // legendConfig['g2-legend'][key] = style[key]; // } // // fix: 新版G2后,图例文字颜色的设置需要注入 textStyle 中才能生效。 // if (key === 'color' && legendConfig.textStyle && typeof legendConfig.textStyle === 'object') { // legendConfig.textStyle.fill = style[key]; // } // } // }); // // Object.assign(legendConfig['g2-legend'], style); // } if (field) { // fix: 修复 legend 设置了 field 后,内部
// (continued) apply the assembled config: scoped to `field` when provided,
// otherwise globally. getLastValue/getItemData below resolve the legend-row
// value from ctx.rawData for the showData branch above.
options 变为 { [field]: options },无法读取 onClick 的问题 // if (onClick) { // chart.legend({ // onClick, // // 隐藏属性,设置了 onClick 时依然保留默认点击行为 // defaultClickHandlerEnabled: defaultClickBehavior, // }); // } chart.legend(field, legendConfig); } else { chart.legend(legendConfig); } // if (this.afterRenderCallbacks && legendConfig.autoCollapse) { // const legendCollapseInfo = legendCollapse.call(this, legendConfig); // this.afterRenderCallbacks.push(legendCollapseInfo.render); // // if (this.unmountCallbacks) { // this.unmountCallbacks.push(legendCollapseInfo.unmount); // } // } } } function getLastValue(name: string, rawData: ChartData, isOneDataGroup: boolean) { const dataGroup = getItemData(name, rawData, isOneDataGroup); if (!dataGroup) { return ''; } if (isOneDataGroup) { if (Array.isArray(dataGroup)) { return dataGroup[1]; } if (typeof dataGroup === 'object') { return dataGroup.y; } } else if (!Array.isArray(dataGroup) && Array.isArray(dataGroup.data)) { const len = dataGroup.data.length; const lastItem = dataGroup.data[len - 1]; if (Array.isArray(lastItem)) { return lastItem[1]; } if (typeof lastItem === 'object') { return lastItem.y; } } return ''; } function getItemData(name: string, rawData: ChartData, isOneDataGroup: boolean): undefined | Types.LooseObject | (number | string)[] { if (!rawData) { return undefined; } if (isOneDataGroup) { const originData = rawData[0]; let result = undefined; originData && originData.data.some((r: any) => { if ((Array.isArray(r) && r[0] === name) || (typeof r === 'object' && r.x === name)) { result = r; return true; } return false; }); // if (Array.isArray(result)) { // result = { // data: result, // }; // } return result; } let originData = undefined; rawData.some((r: Types.LooseObject) => { if (r.data && r.name === name) { originData = r; return true; } return false; }); return originData; } // function getRawData(config, rawData, name, isOneDataGroup) { // if (!rawData) { // return {}; // } // // if (isOneDataGroup) { // const
// (continued) everything from here to end-of-file is commented-out legacy code
// (getRawData / getDataValue / legendCollapse) retained as history only.
originData = rawData[0] || {}; // let result = {}; // // originData.data.some((r) => { // if ((Array.isArray(r) && r[0] === name) || (typeof r === 'object' && r.x === name)) { // result = r; // return true; // } // return false; // }); // // if (Array.isArray(result)) { // result = { // data: result, // }; // } // // return result; // } // // let originData = {}; // if (config.dataType !== 'g2') { // rawData.some((r) => { // if (r.name === name) { // originData = r; // return true; // } // return false; // }); // } // // return originData; // } // // function getDataValue(data) { // if (!Array.isArray(data)) { // return '-'; // } // // for (let i = 0; i < data.length; i++) { // // 单组数据时,如饼图/漏斗图,data[i] 不是数组/对象 // if (typeof data[i] !== 'object' && i === 1) { // return data[i]; // } // if (i === data.length - 1) { // if (Array.isArray(data[i])) { // return data[i][1]; // } // if (typeof data[i] === 'object') { // return data[i].y; // } // } // } // } // // function legendCollapse(legendConfig) { // let { collapseRow = 2 } = legendConfig; // let collapseInstance = null; // return { // render(chart, config) { // if (config.legend !== false && this.chartDom) { // const legendWrapperDom = this.chartDom.querySelector('.g2-legend'); // const legendListDom = this.chartDom.querySelector('.g2-legend-list'); // const legendListItemDom = this.chartDom.querySelector('.g2-legend-list-item'); // // if (!legendWrapperDom || !legendListDom || !legendListItemDom) { // return; // } // // const itemStyle = window.getComputedStyle(legendListItemDom); // const marginTop = pxToNumber(itemStyle.getPropertyValue('margin-top')); // const marginBottom = pxToNumber(itemStyle.getPropertyValue('margin-bottom')); // const itemHeight = legendListItemDom.offsetHeight + marginTop + marginBottom; // const wrapperHeight = legendWrapperDom.offsetHeight; // // // 自动适配图例折叠高度 // if (collapseRow === 'auto') { // const chartHeight = this._size && this._size[1]; // if (chartHeight) { // //
// (continued) commented-out legendCollapse pagination logic.
行数最多占图表高度的三分之一,最小为2。 // collapseRow = Math.max(2, Math.round((chartHeight / itemHeight) / 3)); // } // } else { // collapseRow = Number(collapseRow); // } // // if (wrapperHeight > itemHeight * collapseRow) { // if (!collapseInstance) { // collapseInstance = new Collapse(legendWrapperDom, legendListDom, { // wrapperHeight, // itemHeight, // collapseRow, // collapseTop: marginTop, // collapseBottom: marginBottom, // }); // } else if (collapseInstance.dom !== legendWrapperDom || collapseInstance.listDom !== legendListDom) { // // 重新渲染后,dom节点可能已经改变,销毁重建 // collapseInstance.destroy(); // // collapseInstance = new Collapse(legendWrapperDom, legendListDom, { // wrapperHeight, // itemHeight, // collapseRow, // collapseTop: marginTop, // collapseBottom: marginBottom, // }); // } // // collapseInstance.start({ collapseRow }); // } else if (collapseInstance) { // collapseInstance.end(); // } // } // }, // unmount() { // if (collapseInstance) { // collapseInstance.destroy(); // } // }, // }; // } // // class Collapse { // constructor(dom, listDom, config) { // this.dom = dom; // this.listDom = listDom; // this.moveOffset = 0; // this.config = config; // this.handleClick = this.handleClick.bind(this); // // const { itemHeight, collapseRow, wrapperHeight, collapseTop, collapseBottom } = this.config; // // const collapseDom = document.createElement('div'); // collapseDom.className = 'widgets-legend-collapse'; // collapseDom.style.paddingTop = `${collapseTop}px`; // collapseDom.style.paddingBottom = `${collapseBottom}px`; // this.collapseDom = collapseDom; // // const collapseUpDom = document.createElement('div'); // collapseUpDom.className = 'legend-collapse-btn collapse-up'; // this.collapseUpDom = collapseUpDom; // const collapseDownDom = document.createElement('div'); // collapseDownDom.className = 'legend-collapse-btn collapse-down'; // this.collapseDownDom = collapseDownDom; // collapseDom.appendChild(collapseUpDom); // collapseDom.appendChild(collapseDownDom); // //
// (continued) commented-out Collapse class (manual legend paging for the old HTML legend).
collapseDom.addEventListener('click', this.handleClick); // collapseDom.addEventListener('mousemove', noopEvent); // collapseDom.addEventListener('mouseout', noopEvent); // // this.minOffset = -(wrapperHeight / itemHeight) + collapseRow; // this.maxOffset = 0; // } // // handleClick(e) { // e.stopPropagation(); // // if (!e.target.classList.contains('legend-collapse-btn') || e.target.classList.contains('disable')) { // return; // } // // let { moveOffset } = this; // // // 上一页 // if (e.target.classList.contains('collapse-up')) { // moveOffset += 1; // } // // 下一页 // if (e.target.classList.contains('collapse-down')) { // moveOffset -= 1; // } // // this.moveOffset = moveOffset; // // this.renderState(); // } // // renderState() { // const { itemHeight } = this.config; // // this.collapseUpDom.classList.remove('disable'); // this.collapseDownDom.classList.remove('disable'); // // // 不能向下 // if (this.moveOffset <= this.minOffset) { // this.moveOffset = this.minOffset; // this.collapseDownDom.classList.add('disable'); // } // // // 不能向上 // if (this.moveOffset >= this.maxOffset) { // this.moveOffset = this.maxOffset; // this.collapseUpDom.classList.add('disable'); // } // // this.listDom.style.transform = `translate(0, ${this.moveOffset * itemHeight}px)`; // } // // start({ collapseRow: newCollapseRow }) { // const { itemHeight, collapseRow: oldCollapseRow } = this.config; // const collapseRow = newCollapseRow || oldCollapseRow; // // this.dom.classList.add('has-collapse'); // // // 展示时重新获取高度 // // 修复因样式变化导致滚动范围改变所引起的问题。 // this.config.wrapperHeight = this.dom.offsetHeight; // this.minOffset = -(this.config.wrapperHeight / itemHeight) + collapseRow; // // this.dom.style.maxHeight = `${itemHeight * collapseRow}px`; // this.dom.appendChild(this.collapseDom); // // this.renderState(); // } // // end() { // this.dom.classList.remove('has-collapse'); // this.dom.style.maxHeight = ''; // // dom被g2重新渲染后已经不在原来的树中,需要额外判断 // if (this.collapseDom.parentNode === this.dom) { //
// (continued) commented-out Collapse teardown + noopEvent helper (end of file).
this.dom.removeChild(this.collapseDom); // } // } // // destroy() { // this.end(); // // this.collapseDom.removeEventListener('click', this.handleClick); // this.collapseDom.removeEventListener('mousemove', noopEvent); // this.collapseDom.removeEventListener('mouseout', noopEvent); // } // } // // function noopEvent(e) { // e.stopPropagation(); // }
// ---- file boundary (concatenation artifact; original separator text: "the_stack") ----
declare global { const device: Detox.Device; const detox: Detox.Detox; const element: Detox.Element; const waitFor: Detox.WaitFor; const expect: Detox.Expect<Detox.Expect<Promise<void>>>; const by: Detox.Matchers; namespace Detox { interface Detox { /** * The setup phase happens inside detox.init(). This is the phase where detox reads its configuration, starts a server, loads its expection library and starts a simulator * @param config * @param options * @example const config = require('../package.json').detox; * * before(async () => { * await detox.init(config); * }); */ init(config: any, options?: DetoxInitOptions): Promise<void>; /** * Artifacts currently include only logs from the app process before each task * @param args */ beforeEach(...args: any[]): Promise<void>; /** * Artifacts currently include only logs from the app process after each task * @param args */ afterEach(...args: any[]): Promise<void>; /** * The cleanup phase should happen after all the tests have finished. This is the phase where detox-server shuts down. * @example after(async () => { * await detox.cleanup(); * }); */ cleanup(): Promise<void>; } // Detox exports all methods from detox global and all of the global constants. interface DetoxExport extends Detox { device: Device; element: Element; waitFor: WaitFor; expect: Expect<Expect<Promise<void>>>; by: Matchers; } interface Device { /** * Launch the app * @param config * @example // Terminate the app and launch it again. If set to false, the simulator will try to bring app from background, * // if the app isn't running, it will launch a new instance. default is false * await device.launchApp({newInstance: true}); * // Grant or deny runtime permissions for your application. * await device.launchApp({permissions: {calendar: 'YES'}}); * // Mock opening the app from URL to test your app's deep link handling mechanism. 
* await device.launchApp({url: url}); */ launchApp(config: DeviceLanchAppConfig): Promise<void>; /** * By default, terminateApp() with no params will terminate the app * To terminate another app, specify its bundle id * @param bundle * @example await device.terminateApp('other.bundle.id'); */ terminateApp(bundle?: string): Promise<void>; /** * Send application to background by bringing com.apple.springboard to the foreground. * Combining sendToHome() with launchApp({newInstance: false}) will simulate app coming back from background. * @example await device.sendToHome(); * await device.launchApp({newInstance: false}); */ sendToHome(): Promise<void>; /** * If this is a React Native app, reload the React Native JS bundle. This action is much faster than device.launchApp(), and can be used if you just need to reset your React Native logic. * @example await device.reloadReactNative() */ reloadReactNative(): Promise<void>; /** * By default, installApp() with no params will install the app file defined in the current configuration. * To install another app, specify its path * @param path * @example await device.installApp('path/to/other/app'); */ installApp(path?: any): Promise<void>; /** * By default, uninstallApp() with no params will uninstall the app defined in the current configuration. * To uninstall another app, specify its bundle id * @param bundle * @example await device.installApp('other.bundle.id'); */ uninstallApp(bundle?: string): Promise<void>; /** * Mock opening the app from URL. sourceApp is an optional parameter to specify source application bundle id. * @param url */ openURL(url: { url: string; sourceApp?: string }): Promise<void>; /** * Mock handling of received user notification when app is in foreground. * @param params */ sendUserNotification(...params: any[]): Promise<void>; /** * Mock handling of received user activity when app is in foreground. 
* @param params */ sendUserActivity(...params: any[]): Promise<void>; /** * Takes "portrait" or "landscape" and rotates the device to the given orientation. Currently only available in the iOS Simulator. * @param orientation */ setOrientation(orientation: Orientation): Promise<void>; /** * Note: setLocation is dependent on fbsimctl. if fbsimctl is not installed, the command will fail, it must be installed. Sets the simulator location to the given latitude and longitude. * @param lat * @param lon * @example await device.setLocation(32.0853, 34.7818); */ setLocation(lat: number, lon: number): Promise<void>; /** * Disable EarlGrey's network synchronization mechanism on preffered endpoints. Usful if you want to on skip over synchronizing on certain URLs. * @param urls * @example await device.setURLBlacklist(['.*127.0.0.1.*']); */ setURLBlacklist(urls: string[]): Promise<void>; /** * Enable EarlGrey's synchronization mechanism (enabled by default). This is being reset on every new instance of the app. * @example * await device.enableSynchronization(); */ enableSynchronization(): Promise<void>; /** * Disable EarlGrey's synchronization mechanism (enabled by default) This is being reset on every new instance of the app. * @example * await device.disableSynchronization(); */ disableSynchronization(): Promise<void>; /** * Resets the Simulator to clean state (like the Simulator > Reset Content and Settings... menu item), especially removing previously set permissions. * @example * await device.resetContentAndSettings(); */ resetContentAndSettings(): Promise<void>; /** * Returns the current device, ios or android. * @example * if (device.getPlatform() === 'ios') { * await expect(loopSwitch).toHaveValue('1'); * } */ getPlatform(): 'ios' | 'android'; /** * Takes a screenshot on the device and schedules putting it to the artifacts folder upon completion of the current test. 
* @param text * @example * await device.takeScreenshot('tap on menu'); * * • If the test passes, the screenshot will be put to <artifacts-location>/✓ Menu items should have Logout/tap on menu.png. * • If the test fails, the screenshot will be put to <artifacts-location>/✗ Menu items should have Logout/tap on menu.png. * * > NOTE: At the moment, taking screenshots on-demand in --take-screenshots failing mode is not yet implemented. */ takeScreenshot(name: string): Promise<void>; /** * Simulate shake (iOS Only) */ shake(): Promise<void>; /** * Toggles device enrollment in biometric auth (TouchID or FaceID) (iOS Only) * @example * await device.setBiometricEnrollment(true); * // or * await device.setBiometricEnrollment(false); */ setBiometricEnrollment(enabled: true): Promise<void>; /** * Simulates the success of a face match via FaceID (iOS Only) */ matchFace(): Promise<void>; /** * Simulates the failure of a face match via FaceID (iOS Only) */ unmatchFace(): Promise<void>; /** * Simulates the success of a finger match via TouchID (iOS Only) */ matchFinger(): Promise<void>; /** * Simulates the failure of a finger match via TouchID (iOS Only) */ unmatchFinger(): Promise<void>; /** * Clears the simulator keychain (iOS Only) */ clearKeychain(): Promise<void>; /** * Simulate press back button (Android Only) * @example * await device.pressBack(); */ pressBack(): Promise<void>; /** * (Android Only) * Exposes UiAutomator's UiDevice API (https://developer.android.com/reference/android/support/test/uiautomator/UiDevice). * This is not a part of the official Detox API, * it may break and change whenever an update to UiDevice or UiAutomator gradle dependencies ('androidx.test.uiautomator:uiautomator') is introduced. 
* UIDevice's autogenerated code reference: https://github.com/wix/Detox/blob/master/detox/src/android/espressoapi/UIDevice.js */ getUiDevice(): Promise<void>; } type DetoxAny = Element & Actions<any> & WaitFor; interface Element { (by: Matchers): DetoxAny; /** * Choose from multiple elements matching the same matcher using index * @param index * @example await element(by.text('Product')).atIndex(2); */ atIndex(index: number): DetoxAny; } interface Matchers { (by: Matchers): Matchers; /** * by.id will match an id that is given to the view via testID prop. * @param id * @example // In a React Native component add testID like so: * <TouchableOpacity testID={'tap_me'}> * // Then match with by.id: * await element(by.id('tap_me')); */ id(id: string): Matchers; /** * Find an element by text, useful for text fields, buttons. * @param text * @example await element(by.text('Tap Me')); */ text(text: string): Matchers; /** * Find an element by accessibilityLabel on iOS, or by contentDescription on Android. * @param label * @example await element(by.label('Welcome')); */ label(label: string): Matchers; /** * Find an element by native view type. * @param nativeViewType * @example await element(by.type('RCTImageView')); */ type(nativeViewType: string): Matchers; /** * Find an element with an accessibility trait. 
(iOS only)
   * @example await element(by.traits(['button']));
   */
  traits(traits: string[]): Matchers;
  /**
   * Find an element by a matcher with a parent matcher.
   * @param parentBy matcher that must match an ancestor of the element
   * @example await element(by.id('Grandson883').withAncestor(by.id('Son883')));
   */
  withAncestor(parentBy: Matchers): Matchers;
  /**
   * Find an element by a matcher with a child matcher.
   * @param childBy matcher that must match a descendant of the element
   * @example await element(by.id('Son883').withDescendant(by.id('Grandson883')));
   */
  withDescendant(childBy: Matchers): Matchers;
  /**
   * Find an element by multiple matchers combined.
   * @param by additional matcher that must also match the same element
   * @example await element(by.text('Product').and(by.id('product_name')));
   */
  and(by: Matchers): Matchers;
}

/** Expectations that can be made about a matched element. `R` is the chained return type. */
interface Expect<R> {
  (element: Element): Expect<Promise<void>>;
  /**
   * Expect the view to be at least 75% visible.
   * @example await expect(element(by.id('UniqueId204'))).toBeVisible();
   */
  toBeVisible(): R;
  /**
   * Expect the view to not be visible.
   * @example await expect(element(by.id('UniqueId205'))).toBeNotVisible();
   */
  toBeNotVisible(): R;
  /**
   * Expect the view to exist in the UI hierarchy.
   * @example await expect(element(by.id('UniqueId205'))).toExist();
   */
  toExist(): R;
  /**
   * Expect the view to not exist in the UI hierarchy.
   * @example await expect(element(by.id('RandomJunk959'))).toNotExist();
   */
  toNotExist(): R;
  /**
   * In React Native apps, expect UI component of type <Text> to have text.
   * In native iOS apps, expect UI elements of type UIButton, UILabel, UITextField or UITextView to have inputText with text.
   * @param text the exact text expected on the element
   * @example await expect(element(by.id('UniqueId204'))).toHaveText('I contain some text');
   */
  toHaveText(text: string): R;
  /**
   * It searches by accessibilityLabel on iOS, or by contentDescription on Android.
   * In React Native it can be set for both platforms by defining an accessibilityLabel on the view.
   * @param label the expected accessibility label / content description
   * @example await expect(element(by.id('UniqueId204'))).toHaveLabel('Done');
   */
  toHaveLabel(label: string): R;
  /**
   * In React Native apps, expect UI component to have testID with that id.
   * In native iOS apps, expect UI element to have accessibilityIdentifier with that id.
   * @param id the expected test identifier
   * @example await expect(element(by.text('I contain some text'))).toHaveId('UniqueId204');
   */
  toHaveId(id: string): R;
  /**
   * Expect components like a Switch to have a value ('0' for off, '1' for on).
   * @param value the expected component value
   * @example await expect(element(by.id('UniqueId533'))).toHaveValue('0');
   */
  toHaveValue(value: any): R;
}

/** Entry point for polling-based synchronization on an expectation. */
interface WaitFor {
  /**
   * This API polls using the given expectation continuously until the expectation is met. Use manual synchronization with waitFor only as a last resort.
   * NOTE: Every waitFor call must set a timeout using withTimeout(). Calling waitFor without setting a timeout will do nothing.
   * @example await waitFor(element(by.id('UniqueId336'))).toExist().withTimeout(2000);
   */
  (element: Element): Expect<WaitFor>;
  /**
   * Waits for the condition to be met until the specified time (millis) have elapsed.
   * @param millis timeout in milliseconds
   * @example await waitFor(element(by.id('UniqueId336'))).toExist().withTimeout(2000);
   */
  withTimeout(millis: number): Promise<void>;
  /**
   * Performs the action repeatedly on the element until an expectation is met.
   * @param by matcher for the element on which the repeated action is performed
   * @example await waitFor(element(by.text('Text5'))).toBeVisible().whileElement(by.id('ScrollView630')).scroll(50, 'down');
   */
  whileElement(by: Matchers): DetoxAny;
}

/** User-interaction actions that can be simulated on a matched element. */
interface Actions<R> {
  /**
   * Simulate tap on an element.
   * @example
   * await element(by.id('tappable')).tap();
   */
  tap(): Promise<Actions<R>>;
  /**
   * Simulate long press on an element.
   * @example
   * await element(by.id('tappable')).longPress();
   */
  longPress(): Promise<Actions<R>>;
  /**
   * Simulate multiple taps on an element.
   * @param times number of times to tap
   * @example
   * await element(by.id('tappable')).multiTap(3);
   */
  multiTap(times: number): Promise<Actions<R>>;
  /**
   * Simulate tap at a specific point on an element.
   * Note: The point coordinates are relative to the matched element and the element size could changes on different devices or even when changing the device font size.
   * @param point coordinates relative to the element's origin
   * @example
   * await element(by.id('tappable')).tapAtPoint({ x:5, y:10 });
   */
  tapAtPoint(point: { x: number; y: number }): Promise<Actions<R>>;
  /**
   * Use the builtin keyboard to type text into a text field.
   * @param text text to type
   * @example
   * await element(by.id('textField')).typeText('passcode');
   */
  typeText(text: string): Promise<Actions<R>>;
  /**
   * Paste text into a text field (replaces the current content).
   * @param text replacement text
   * @example
   * await element(by.id('textField')).replaceText('passcode again');
   */
  replaceText(text: string): Promise<Actions<R>>;
  /**
   * Clear text from a text field.
   * @example
   * await element(by.id('textField')).clearText();
   */
  clearText(): Promise<Actions<R>>;
  /**
   * Taps the backspace key on the built-in keyboard.
   * @example
   * await element(by.id('textField')).tapBackspaceKey();
   */
  tapBackspaceKey(): Promise<Actions<R>>;
  /**
   * Taps the return key on the built-in keyboard.
   * @example
   * await element(by.id('textField')).tapReturnKey();
   */
  tapReturnKey(): Promise<Actions<R>>;
  /**
   * Scrolls a given amount of pixels in the provided direction, starting from the provided start positions.
   * @param pixels - independent device pixels
   * @param direction - left/right/up/down
   * @param startPositionX - optional X starting scroll position, in percentage; valid input: `[0.0, 1.0]`, `NaN`; default: `NaN`—choose the best value automatically
   * @param startPositionY - optional Y starting scroll position, in percentage; valid input: `[0.0, 1.0]`, `NaN`; default: `NaN`—choose the best value automatically
   * @example
   * await element(by.id('scrollView')).scroll(100, 'down', NaN, 0.85);
   * await element(by.id('scrollView')).scroll(100, 'up');
   */
  scroll(
    pixels: number,
    direction: Direction,
    startPositionX?: number,
    startPositionY?: number,
  ): Promise<Actions<R>>;
  /**
   * Scroll to edge.
   * @param edge edge to scroll to
   * @example
   * await element(by.id('scrollView')).scrollTo('bottom');
   * await element(by.id('scrollView')).scrollTo('top');
   */
  scrollTo(edge: Direction): Promise<Actions<R>>;
  /**
   * Swipes in the provided direction at the provided speed, started from percentage.
   * @param direction direction of the swipe
   * @param speed default: `fast`
   * @param percentage - optional screen percentage to swipe; valid input: `[0.0, 1.0]`
   * @example
   * await element(by.id('scrollView')).swipe('down');
   * await element(by.id('scrollView')).swipe('down', 'fast');
   * await element(by.id('scrollView')).swipe('down', 'fast', 0.5);
   */
  swipe(direction: Direction, speed?: Speed, percentage?: number): Promise<Actions<R>>;
  /**
   * Sets a picker view’s column to the given value. This function supports both date pickers and general picker views. (iOS Only)
   * @param column number of datepicker column (starts from 0)
   * @param value string value in the set column (must be correct)
   * @example
   * await expect(element(by.type('UIPickerView'))).toBeVisible();
   * await element(by.type('UIPickerView')).setColumnToValue(1,"6");
   * await element(by.type('UIPickerView')).setColumnToValue(2,"34");
   *
   * > Note: When working with date pickers, you should always set an explicit locale when launching your app in order to prevent flakiness from different date and time styles.
   * See [here](https://github.com/wix/Detox/blob/master/docs/APIRef.DeviceObjectAPI.md#9-launch-with-a-specific-language-ios-only) for more information.
   */
  setColumnToValue(column: number, value: string): Promise<Actions<R>>;
  /**
   * Sets the date of a date picker to a date generated from the provided string and date format. (iOS only)
   * @param dateString string representing a date in the supplied `dateFormat`
   * @param dateFormat format for the `dateString` supplied
   * @example
   * await expect(element(by.id('datePicker'))).toBeVisible();
   * await element(by.id('datePicker')).setDatePickerDate('2019-02-06T05:10:00-08:00', "yyyy-MM-dd'T'HH:mm:ssZZZZZ");
   */
  setDatePickerDate(dateString: string, dateFormat: string): Promise<Actions<R>>;
  /**
   * Pinches in the given direction with speed and angle. (iOS only)
   * @param direction inward/outward pinch direction
   * @param speed pinch speed
   * @param angle value in radians, default is `0`
   * @example
   * await expect(element(by.id('PinchableScrollView'))).toBeVisible();
   * await element(by.id('PinchableScrollView')).pinchWithAngle('outward', 'slow', 0);
   */
  pinchWithAngle(direction: Direction, speed: Speed, angle: number): Promise<Actions<R>>;
}

// Scroll/swipe directions accepted by the actions above.
type Direction = 'left' | 'right' | 'top' | 'bottom' | 'up' | 'down';
// Device orientation values.
type Orientation = 'portrait' | 'landscape';
// Gesture speeds.
type Speed = 'fast' | 'slow';

/** Language/locale pair used when launching the app. */
interface LanguageAndLocale {
  language?: string;
  locale?: string;
}

/** Options accepted by `detox.init(config, options)`. */
interface DetoxInitOptions {
  /**
   * Detox exports device, expect, element, by and waitFor as globals by default, if you want to control their initialization manually, set init detox with initGlobals set to false.
   * This is useful when during E2E tests you also need to run regular expectations in node. jest Expect for instance, will not be overridden by Detox when this option is used.
   */
  initGlobals?: boolean;
  /**
   * By default await detox.init(config); will launch the installed app. If you wish to control when your app is launched, add {launchApp: false} param to your init.
   */
  launchApp?: boolean;
  /**
   * By default await detox.init(config); will uninstall and install the app. If you wish to reuse the existing app for a faster run, add {reuse: true} param to your init.
   */
  reuse?: boolean;
}

/**
 * Source for string definitions is https://github.com/wix/AppleSimulatorUtils
 */
interface DevicePermissions {
  location?: LocationPermission;
  notifications?: NotificationsPermission;
  calendar?: CalendarPermission;
  camera?: CameraPermission;
  contacts?: ContactsPermission;
  health?: HealthPermission;
  homekit?: HomekitPermission;
  medialibrary?: MediaLibraryPermission;
  microphone?: MicrophonePermission;
  motion?: MotionPermission;
  photos?: PhotosPermission;
  reminders?: RemindersPermission;
  siri?: SiriPermission;
  speech?: SpeechPermission;
}

// Location is the only permission with its own value set; all others share PermissionState.
type LocationPermission = 'always' | 'inuse' | 'never' | 'unset';
type PermissionState = 'YES' | 'NO' | 'unset';
type CameraPermission = PermissionState;
type ContactsPermission = PermissionState;
type CalendarPermission = PermissionState;
type HealthPermission = PermissionState;
type HomekitPermission = PermissionState;
type MediaLibraryPermission = PermissionState;
type MicrophonePermission = PermissionState;
type MotionPermission = PermissionState;
type PhotosPermission = PermissionState;
type RemindersPermission = PermissionState;
type SiriPermission = PermissionState;
type SpeechPermission = PermissionState;
type NotificationsPermission = PermissionState;

// NOTE(review): interface name is misspelled ("Lanch" instead of "Launch") but is
// kept as-is — renaming a public declaration would break existing consumers.
interface DeviceLanchAppConfig {
  /**
   * Restart the app
   * Terminate the app and launch it again. If set to false, the simulator will try to bring app from background, if the app isn't running, it will launch a new instance. default is false
   */
  newInstance?: boolean;
  /**
   * Set runtime permissions
   * Grant or deny runtime permissions for your application.
   */
  permissions?: DevicePermissions;
  /**
   * Launch from URL
   * Mock opening the app from URL to test your app's deep link handling mechanism.
   */
  url?: any;
  /**
   * Launch with user notifications
   */
  userNotification?: any;
  /**
   * Launch with user activity
   */
  userActivity?: any;
  /**
   * Launch into a fresh installation
   * A flag that enables relaunching into a fresh installation of the app (it will uninstall and install the binary again), default is false.
   */
  delete?: boolean;
  /**
   * Detox can start the app with additional launch arguments
   * The added launchArgs will be passed through the launch command to the device and be accessible via [[NSProcessInfo processInfo] arguments]
   */
  launchArgs?: any;
  /**
   * Launch config for specifying the native language and locale
   */
  languageAndLocale?: LanguageAndLocale;
}
}
}

declare const detoxExport: Detox.DetoxExport;
export = detoxExport;
the_stack
import * as cdk from '@aws-cdk/core';
import * as cfn_parse from '@aws-cdk/core/lib/cfn-parse';

// Auto-generated L1 (CloudFormation-level) declarations for the AWS::SQS resources.
// The @see links point at the authoritative CloudFormation property documentation.

/**
 * Properties for defining a `AWS::SQS::Queue`.
 *
 * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html
 * @external
 */
export interface CfnQueueProps {
    /**
     * `AWS::SQS::Queue.ContentBasedDeduplication`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-contentbaseddeduplication
     * @external
     */
    readonly contentBasedDeduplication?: boolean | cdk.IResolvable;
    /**
     * `AWS::SQS::Queue.DelaySeconds`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-delayseconds
     * @external
     */
    readonly delaySeconds?: number;
    /**
     * `AWS::SQS::Queue.FifoQueue`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-fifoqueue
     * @external
     */
    readonly fifoQueue?: boolean | cdk.IResolvable;
    /**
     * `AWS::SQS::Queue.KmsDataKeyReusePeriodSeconds`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-kmsdatakeyreuseperiodseconds
     * @external
     */
    readonly kmsDataKeyReusePeriodSeconds?: number;
    /**
     * `AWS::SQS::Queue.KmsMasterKeyId`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-kmsmasterkeyid
     * @external
     */
    readonly kmsMasterKeyId?: string;
    /**
     * `AWS::SQS::Queue.MaximumMessageSize`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-maxmesgsize
     * @external
     */
    readonly maximumMessageSize?: number;
    /**
     * `AWS::SQS::Queue.MessageRetentionPeriod`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-msgretentionperiod
     * @external
     */
    readonly messageRetentionPeriod?: number;
    /**
     * `AWS::SQS::Queue.QueueName`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-name
     * @external
     */
    readonly queueName?: string;
    /**
     * `AWS::SQS::Queue.ReceiveMessageWaitTimeSeconds`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-receivemsgwaittime
     * @external
     */
    readonly receiveMessageWaitTimeSeconds?: number;
    /**
     * `AWS::SQS::Queue.RedrivePolicy`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-redrive
     * @external
     */
    readonly redrivePolicy?: any | cdk.IResolvable;
    /**
     * `AWS::SQS::Queue.Tags`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#cfn-sqs-queue-tags
     * @external
     */
    readonly tags?: cdk.CfnTag[];
    /**
     * `AWS::SQS::Queue.VisibilityTimeout`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-visiblitytimeout
     * @external
     */
    readonly visibilityTimeout?: number;
}

/**
 * A CloudFormation `AWS::SQS::Queue`.
 *
 * Each writable instance property mirrors the CloudFormation property of the
 * same name in `CfnQueueProps`; `attr*` fields expose resource attributes
 * (`Fn::GetAtt` values) after deployment.
 *
 * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html
 * @external
 * @cloudformationResource AWS::SQS::Queue
 */
export declare class CfnQueue extends cdk.CfnResource implements cdk.IInspectable {
    /**
     * The CloudFormation resource type name for this resource class.
     *
     * @external
     */
    static readonly CFN_RESOURCE_TYPE_NAME = "AWS::SQS::Queue";
    /**
     * A factory method that creates a new instance of this class from an object
     * containing the CloudFormation properties of this resource.
     * Used in the @aws-cdk/cloudformation-include module.
     *
     * @internal
     */
    static _fromCloudFormation(scope: cdk.Construct, id: string, resourceAttributes: any, options: cfn_parse.FromCloudFormationOptions): CfnQueue;
    /**
     * @external
     * @cloudformationAttribute Arn
     */
    readonly attrArn: string;
    /**
     * @external
     * @cloudformationAttribute QueueName
     */
    readonly attrQueueName: string;
    /**
     * `AWS::SQS::Queue.ContentBasedDeduplication`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-contentbaseddeduplication
     * @external
     */
    contentBasedDeduplication: boolean | cdk.IResolvable | undefined;
    /**
     * `AWS::SQS::Queue.DelaySeconds`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-delayseconds
     * @external
     */
    delaySeconds: number | undefined;
    /**
     * `AWS::SQS::Queue.FifoQueue`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-fifoqueue
     * @external
     */
    fifoQueue: boolean | cdk.IResolvable | undefined;
    /**
     * `AWS::SQS::Queue.KmsDataKeyReusePeriodSeconds`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-kmsdatakeyreuseperiodseconds
     * @external
     */
    kmsDataKeyReusePeriodSeconds: number | undefined;
    /**
     * `AWS::SQS::Queue.KmsMasterKeyId`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-kmsmasterkeyid
     * @external
     */
    kmsMasterKeyId: string | undefined;
    /**
     * `AWS::SQS::Queue.MaximumMessageSize`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-maxmesgsize
     * @external
     */
    maximumMessageSize: number | undefined;
    /**
     * `AWS::SQS::Queue.MessageRetentionPeriod`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-msgretentionperiod
     * @external
     */
    messageRetentionPeriod: number | undefined;
    /**
     * `AWS::SQS::Queue.QueueName`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-name
     * @external
     */
    queueName: string | undefined;
    /**
     * `AWS::SQS::Queue.ReceiveMessageWaitTimeSeconds`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-receivemsgwaittime
     * @external
     */
    receiveMessageWaitTimeSeconds: number | undefined;
    /**
     * `AWS::SQS::Queue.RedrivePolicy`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-redrive
     * @external
     */
    redrivePolicy: any | cdk.IResolvable | undefined;
    /**
     * `AWS::SQS::Queue.Tags`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#cfn-sqs-queue-tags
     * @external
     */
    readonly tags: cdk.TagManager;
    /**
     * `AWS::SQS::Queue.VisibilityTimeout`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-queues.html#aws-sqs-queue-visiblitytimeout
     * @external
     */
    visibilityTimeout: number | undefined;
    /**
     * Create a new `AWS::SQS::Queue`.
     *
     * @param scope - scope in which this resource is defined.
     * @param id - scoped id of the resource.
     * @param props - resource properties.
     * @external
     */
    constructor(scope: cdk.Construct, id: string, props?: CfnQueueProps);
    /**
     * (experimental) Examines the CloudFormation resource and discloses attributes.
     *
     * @param inspector - tree inspector to collect and process attributes.
     * @experimental
     */
    inspect(inspector: cdk.TreeInspector): void;
    /**
     * @external
     */
    protected get cfnProperties(): {
        [key: string]: any;
    };
    /**
     * @external
     */
    protected renderProperties(props: {
        [key: string]: any;
    }): {
        [key: string]: any;
    };
}

/**
 * Properties for defining a `AWS::SQS::QueuePolicy`.
 *
 * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-policy.html
 * @external
 */
export interface CfnQueuePolicyProps {
    /**
     * `AWS::SQS::QueuePolicy.PolicyDocument`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-policy.html#cfn-sqs-queuepolicy-policydoc
     * @external
     */
    readonly policyDocument: any | cdk.IResolvable;
    /**
     * `AWS::SQS::QueuePolicy.Queues`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-policy.html#cfn-sqs-queuepolicy-queues
     * @external
     */
    readonly queues: string[];
}

/**
 * A CloudFormation `AWS::SQS::QueuePolicy`.
 *
 * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-policy.html
 * @external
 * @cloudformationResource AWS::SQS::QueuePolicy
 */
export declare class CfnQueuePolicy extends cdk.CfnResource implements cdk.IInspectable {
    /**
     * The CloudFormation resource type name for this resource class.
     *
     * @external
     */
    static readonly CFN_RESOURCE_TYPE_NAME = "AWS::SQS::QueuePolicy";
    /**
     * A factory method that creates a new instance of this class from an object
     * containing the CloudFormation properties of this resource.
     * Used in the @aws-cdk/cloudformation-include module.
     *
     * @internal
     */
    static _fromCloudFormation(scope: cdk.Construct, id: string, resourceAttributes: any, options: cfn_parse.FromCloudFormationOptions): CfnQueuePolicy;
    /**
     * `AWS::SQS::QueuePolicy.PolicyDocument`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-policy.html#cfn-sqs-queuepolicy-policydoc
     * @external
     */
    policyDocument: any | cdk.IResolvable;
    /**
     * `AWS::SQS::QueuePolicy.Queues`.
     *
     * @see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-sqs-policy.html#cfn-sqs-queuepolicy-queues
     * @external
     */
    queues: string[];
    /**
     * Create a new `AWS::SQS::QueuePolicy`.
     *
     * @param scope - scope in which this resource is defined.
     * @param id - scoped id of the resource.
     * @param props - resource properties.
     * @external
     */
    constructor(scope: cdk.Construct, id: string, props: CfnQueuePolicyProps);
    /**
     * (experimental) Examines the CloudFormation resource and discloses attributes.
     *
     * @param inspector - tree inspector to collect and process attributes.
     * @experimental
     */
    inspect(inspector: cdk.TreeInspector): void;
    /**
     * @external
     */
    protected get cfnProperties(): {
        [key: string]: any;
    };
    /**
     * @external
     */
    protected renderProperties(props: {
        [key: string]: any;
    }): {
        [key: string]: any;
    };
}
the_stack
import React from "react";
import { pubkeyToString } from "utils";
import { PublicKey, Connection, StakeActivationData } from "@solana/web3.js";
import { useCluster, Cluster } from "../cluster";
import { HistoryProvider } from "./history";
import { TokensProvider } from "./tokens";
import { create } from "superstruct";
import { ParsedInfo } from "validators";
import { StakeAccount } from "validators/accounts/stake";
import {
  TokenAccount,
  MintAccountInfo,
  TokenAccountInfo,
} from "validators/accounts/token";
import * as Cache from "providers/cache";
import { ActionType, FetchStatus } from "providers/cache";
import { reportError } from "utils/sentry";
import { VoteAccount } from "validators/accounts/vote";
import { NonceAccount } from "validators/accounts/nonce";
import { SysvarAccount } from "validators/accounts/sysvar";
import { ConfigAccount } from "validators/accounts/config";
import { FlaggedAccountsProvider } from "./flagged-accounts";
import {
  ProgramDataAccount,
  ProgramDataAccountInfo,
  UpgradeableLoaderAccount,
} from "validators/accounts/upgradeable-program";
import { RewardsProvider } from "./rewards";
import { programs, MetadataJson } from "@metaplex/js";
import getEditionInfo, { EditionInfo } from "./utils/getEditionInfo";
export { useAccountHistory } from "./history";

const Metadata = programs.metadata.Metadata;

// Per-program decoded account payloads. The `program` field is the
// discriminant tag used throughout the app to narrow `ProgramData`.
export type StakeProgramData = {
  program: "stake";
  parsed: StakeAccount;
  activation?: StakeActivationData;
};
export type UpgradeableLoaderAccountData = {
  program: "bpf-upgradeable-loader";
  parsed: UpgradeableLoaderAccount;
  programData?: ProgramDataAccountInfo;
};
export type NFTData = {
  metadata: programs.metadata.MetadataData;
  json: MetadataJson | undefined;
  editionInfo: EditionInfo;
};
export type TokenProgramData = {
  program: "spl-token";
  parsed: TokenAccount;
  nftData?: NFTData;
};
export type VoteProgramData = {
  program: "vote";
  parsed: VoteAccount;
};
export type NonceProgramData = {
  program: "nonce";
  parsed: NonceAccount;
};
export type SysvarProgramData = {
  program: "sysvar";
  parsed: SysvarAccount;
};
export type ConfigProgramData = {
  program: "config";
  parsed: ConfigAccount;
};
// Discriminated union of every decodable program account.
export type ProgramData =
  | UpgradeableLoaderAccountData
  | StakeProgramData
  | TokenProgramData
  | VoteProgramData
  | NonceProgramData
  | SysvarProgramData
  | ConfigProgramData;

export interface Details {
  executable: boolean;
  owner: PublicKey;
  space: number;
  data?: ProgramData;
}

export interface Account {
  pubkey: PublicKey;
  lamports: number;
  details?: Details;
}

type State = Cache.State<Account>;
type Dispatch = Cache.Dispatch<Account>;

const StateContext = React.createContext<State | undefined>(undefined);
const DispatchContext = React.createContext<Dispatch | undefined>(undefined);

type AccountsProviderProps = { children: React.ReactNode };

// Context provider for the account cache; nests the other account-related
// providers so children can use all of the account hooks below.
export function AccountsProvider({ children }: AccountsProviderProps) {
  const { url } = useCluster();
  const [state, dispatch] = Cache.useReducer<Account>(url);

  // Clear accounts cache whenever cluster is changed
  React.useEffect(() => {
    dispatch({ type: ActionType.Clear, url });
  }, [dispatch, url]);

  return (
    <StateContext.Provider value={state}>
      <DispatchContext.Provider value={dispatch}>
        <TokensProvider>
          <HistoryProvider>
            <RewardsProvider>
              <FlaggedAccountsProvider>{children}</FlaggedAccountsProvider>
            </RewardsProvider>
          </HistoryProvider>
        </TokensProvider>
      </DispatchContext.Provider>
    </StateContext.Provider>
  );
}

// Fetches and decodes one account, publishing Fetching/Fetched/FetchFailed
// states to the cache via `dispatch`. Decoding failures are reported but do
// not fail the fetch — the account is cached without `details.data`.
async function fetchAccountInfo(
  dispatch: Dispatch,
  pubkey: PublicKey,
  cluster: Cluster,
  url: string
) {
  dispatch({
    type: ActionType.Update,
    key: pubkey.toBase58(),
    status: Cache.FetchStatus.Fetching,
    url,
  });

  let data;
  let fetchStatus;
  try {
    const connection = new Connection(url, "confirmed");
    const result = (await connection.getParsedAccountInfo(pubkey)).value;

    let lamports, details;
    if (result === null) {
      // Account does not exist on this cluster.
      lamports = 0;
    } else {
      lamports = result.lamports;

      // Only save data in memory if we can decode it
      let space: number;
      if (!("parsed" in result.data)) {
        space = result.data.length;
      } else {
        space = result.data.space;
      }

      let data: ProgramData | undefined;
      if ("parsed" in result.data) {
        try {
          const info = create(result.data.parsed, ParsedInfo);
          switch (result.data.program) {
            case "bpf-upgradeable-loader": {
              const parsed = create(info, UpgradeableLoaderAccount);

              // Fetch program data to get program upgradeability info
              let programData: ProgramDataAccountInfo | undefined;
              if (parsed.type === "program") {
                // NOTE: intentionally shadows the outer `result`/`info` for the
                // nested program-data account lookup.
                const result = (
                  await connection.getParsedAccountInfo(parsed.info.programData)
                ).value;
                if (
                  result &&
                  "parsed" in result.data &&
                  result.data.program === "bpf-upgradeable-loader"
                ) {
                  const info = create(result.data.parsed, ParsedInfo);
                  programData = create(info, ProgramDataAccount).info;
                } else {
                  throw new Error(
                    `invalid program data account for program: ${pubkey.toBase58()}`
                  );
                }
              }

              data = {
                program: result.data.program,
                parsed,
                programData,
              };

              break;
            }
            case "stake": {
              const parsed = create(info, StakeAccount);
              const isDelegated = parsed.type === "delegated";
              const activation = isDelegated
                ? await connection.getStakeActivation(pubkey)
                : undefined;

              data = {
                program: result.data.program,
                parsed,
                activation,
              };
              break;
            }
            case "vote":
              data = {
                program: result.data.program,
                parsed: create(info, VoteAccount),
              };
              break;
            case "nonce":
              data = {
                program: result.data.program,
                parsed: create(info, NonceAccount),
              };
              break;
            case "sysvar":
              data = {
                program: result.data.program,
                parsed: create(info, SysvarAccount),
              };
              break;
            case "config":
              data = {
                program: result.data.program,
                parsed: create(info, ConfigAccount),
              };
              break;
            case "spl-token":
              const parsed = create(info, TokenAccount);
              let nftData;
              try {
                // Generate a PDA and check for a Metadata Account
                if (parsed.type === "mint") {
                  const metadata = await Metadata.load(
                    connection,
                    await Metadata.getPDA(pubkey)
                  );
                  if (metadata) {
                    // We have a valid Metadata account. Try and pull edition data.
                    const editionInfo = await getEditionInfo(
                      metadata,
                      connection
                    );
                    const id = pubkeyToString(pubkey);
                    const metadataJSON = await getMetaDataJSON(
                      id,
                      metadata.data
                    );
                    nftData = {
                      metadata: metadata.data,
                      json: metadataJSON,
                      editionInfo,
                    };
                  }
                }
              } catch (error) {
                // unable to find NFT metadata account
              }
              data = {
                program: result.data.program,
                parsed,
                nftData,
              };
              break;
            default:
              // Unknown program — cache the account without decoded data.
              data = undefined;
          }
        } catch (error) {
          reportError(error, { url, address: pubkey.toBase58() });
        }
      }

      details = {
        space,
        executable: result.executable,
        owner: result.owner,
        data,
      };
    }
    data = { pubkey, lamports, details };
    fetchStatus = FetchStatus.Fetched;
  } catch (error) {
    if (cluster !== Cluster.Custom) {
      reportError(error, { url });
    }
    fetchStatus = FetchStatus.FetchFailed;
  }
  dispatch({
    type: ActionType.Update,
    status: fetchStatus,
    data,
    key: pubkey.toBase58(),
    url,
  });
}

// Fetches the off-chain JSON pointed to by an NFT metadata account. Always
// resolves (never rejects) — any failure yields `undefined`. The fetched
// JSON is also best-effort cached into localStorage keyed by its URI.
const getMetaDataJSON = async (
  id: string,
  metadata: programs.metadata.MetadataData
): Promise<MetadataJson | undefined> => {
  return new Promise(async (resolve, reject) => {
    const uri = metadata.data.uri;
    if (!uri) return resolve(undefined);

    // Normalizes the payload: rejects empty file lists and resolves a
    // relative image path against the metadata URI.
    const processJson = (extended: any) => {
      if (!extended || extended?.properties?.files?.length === 0) {
        return;
      }

      if (extended?.image) {
        extended.image = extended.image.startsWith("http")
          ? extended.image
          : `${metadata.data.uri}/${extended.image}`;
      }

      return extended;
    };

    try {
      fetch(uri)
        .then(async (_) => {
          try {
            const data = await _.json();
            try {
              localStorage.setItem(uri, JSON.stringify(data));
            } catch {
              // ignore
            }
            resolve(processJson(data));
          } catch {
            resolve(undefined);
          }
        })
        .catch(() => {
          resolve(undefined);
        });
    } catch (ex) {
      console.error(ex);
      resolve(undefined);
    }
  });
};

// Returns the full account cache; must be used under <AccountsProvider>.
export function useAccounts() {
  const context = React.useContext(StateContext);
  if (!context) {
    throw new Error(`useAccounts must be used within a AccountsProvider`);
  }
  return context.entries;
}

// Returns the cached entry for one address, or undefined when the address is
// not given or not yet fetched.
export function useAccountInfo(
  address: string | undefined
): Cache.CacheEntry<Account> | undefined {
  const context = React.useContext(StateContext);

  if (!context) {
    throw new Error(`useAccountInfo must be used within a AccountsProvider`);
  }
  if (address === undefined) return;
  return context.entries[address];
}

// Narrows a cached account to SPL token mint info, if it is one.
export function useMintAccountInfo(
  address: string | undefined
): MintAccountInfo | undefined {
  const accountInfo = useAccountInfo(address);
  return React.useMemo(() => {
    if (address === undefined) return;

    try {
      const data = accountInfo?.data?.details?.data;
      if (!data) return;
      if (data.program !== "spl-token" || data.parsed.type !== "mint") {
        return;
      }

      return create(data.parsed.info, MintAccountInfo);
    } catch (err) {
      reportError(err, { address });
    }
  }, [address, accountInfo]);
}

// Narrows a cached account to SPL token account info, if it is one.
export function useTokenAccountInfo(
  address: string | undefined
): TokenAccountInfo | undefined {
  const accountInfo = useAccountInfo(address);
  if (address === undefined) return;

  try {
    const data = accountInfo?.data?.details?.data;
    if (!data) return;
    if (data.program !== "spl-token" || data.parsed.type !== "account") {
      return;
    }

    return create(data.parsed.info, TokenAccountInfo);
  } catch (err) {
    reportError(err, { address });
  }
}

// Returns a stable callback that (re)fetches an account for the current
// cluster; must be used under <AccountsProvider>.
export function useFetchAccountInfo() {
  const dispatch = React.useContext(DispatchContext);
  if (!dispatch) {
    throw new Error(
      `useFetchAccountInfo must be used within a 
AccountsProvider`
    );
  }

  const { cluster, url } = useCluster();
  return React.useCallback(
    (pubkey: PublicKey) => {
      fetchAccountInfo(dispatch, pubkey, cluster, url);
    },
    [dispatch, cluster, url]
  );
}
the_stack
import { ReactPeg } from "react-peg";
import { _, Text } from "../common";
import { String, IBasicType, BasicType, ITypeReference, TypeReference, IIdentifier, Identifier, IArrayType, ArrayType, ITupleType, TupleType, RestType, Source } from "../basic";
import { ITypeForInStatement, ITypeIfStatement, TypeForInStatement, TypeIfStatement } from "../statement";
import { FunctionType, IFunctionType } from "../function";

// AST node union for every kind of type-level expression this grammar can parse.
export type ITypeExpression =
    | IBasicType
    | ITypeReference
    | ITypeCallExpression
    | IInferType
    | IConditionalTypeExpression
    | IMappedTypeExpression
    | IUnionType
    | IIntersectionType
    | IOperatorType
    | IIndexType
    | IFunctionType
    | IParenthesizedType
    | IContextType

/**
 * Top-level type-expression rule: an ordered-choice over all alternatives.
 * NOTE(review): the order of children in <or> defines parse precedence
 * (e.g. IndexType must be tried before its own head alternatives) — do not
 * reorder without re-running the grammar tests.
 */
export function TypeExpression() {
    return (
        <or>
            <ContextType />
            <ParenthesizedType />
            <FunctionType />
            <OperatorType />
            <IndexType />
            <UnionType />
            <IntersectionType />
            <ConditionalTypeExpression />
            <MappedTypeExpression />
            <TypeCallExpression />
            <BasicType />
            <InferType />
            <TypeReference />
        </or>
    )
}

export interface IParenthesizedType {
    kind: "ParenthesizedType"
    param: ITypeExpression
}

// Parses "( <type> )" into a ParenthesizedType node wrapping the inner type.
export function ParenthesizedType() {
    const action = ({ param }): IParenthesizedType => {
        return {
            kind: "ParenthesizedType",
            param
        }
    }
    return (
        <pattern action={action}>
            {Text("(")}
            <TypeExpression label="param" />
            {Text(")")}
        </pattern>
    )
}

export interface ITypeArrowFunctionExpression {
    kind: "TypeArrowFunctionExpression"
    params: IParamList
    // NOTE(review): the action below accepts any TypeExpression as body,
    // so this declared type looks narrower than what is actually produced.
    body: IConditionalTypeExpression
}

/**
 * Parses "(<params>) => <type>" into a TypeArrowFunctionExpression node.
 * TODO: add more expressions? use block statement?
 */
export function TypeArrowFunctionExpression() {
    const action = ({ params, body }) => {
        return {
            kind: "TypeArrowFunctionExpression",
            params,
            body
        }
    }
    return (
        <pattern action={action}>
            {Text("(")}
            <ParamList label="params" />
            {Text(")")}
            {Text("=>")}
            {/* <or label="body"> */}
            <TypeExpression label="body" />
            {/* <TypeBlockStatement /> </or> */}
        </pattern>
    )
}

export interface IConditionalTypeExpression {
    kind: "ConditionalTypeExpression"
    body: ITypeIfStatement
}

// Parses "^{ <if-statement> }" — the language's conditional-type block form.
export function ConditionalTypeExpression() {
    const action = ({ statement }): IConditionalTypeExpression => {
        return {
            kind: "ConditionalTypeExpression",
            body: statement
        }
    }
    return (
        <pattern action={action}>
            {Text("^{")}
            <TypeIfStatement label="statement" />
            {Text("}")}
        </pattern>
    )
}

export interface ITypeCallExpression {
    kind: "TypeCallExpression"
    callee: ITypeReference
    params: IParamList
}

// Parses a generic instantiation "Name<params>" into a TypeCallExpression.
export function TypeCallExpression() {
    const action = ({ callee, params }): ITypeCallExpression => {
        return {
            kind: "TypeCallExpression",
            callee,
            params
        }
    }
    return (
        <pattern action={action}>
            <_ />
            <TypeReference label="callee" />
            {Text("<")}
            <ParamList label="params" />
            {Text(">")}
            <_ />
        </pattern>
    )
}

export type ITypeExpressionList = ITypeExpression[];

/**
 * Parses a comma-separated list of type expressions (rest elements allowed).
 * An empty match produces an empty array.
 */
export function TypeExpressionList() {
    return (
        <pattern action={({ head, tail }): ITypeExpressionList => head ? [head, ...tail] : []}>
            <opt label="head">
                <or>
                    <TypeExpression />
                    <RestType />
                </or>
            </opt>
            <repeat type="*" label="tail">
                <pattern action={({ param }) => param}>
                    {Text(",")}
                    <or label="param">
                        <RestType />
                        <TypeExpression />
                    </or>
                </pattern>
            </repeat>
        </pattern>
    )
}

// A list element optionally carrying "extends" constraint and "=" default.
export type ITypeExpressionParam = ITypeExpression & { constraint?: ITypeExpression, default?: ITypeExpression }

export type IParamList = ITypeExpressionParam[]

/**
 * Parses a generic-parameter list: "T extends U = V, ...".
 * Constraint/default, when present, are attached onto the param node itself.
 */
export function ParamList() {
    const action = ({ head, tail }): IParamList => head ? [head, ...tail] : [];
    // NOTE(review): mutates the matched param node in place to attach
    // constraint/default rather than building a new object.
    const actionReturnParam = ({ param, constraint, _default }): ITypeExpressionParam => {
        const _param: ITypeExpressionParam = param;
        if (constraint) {
            _param.constraint = constraint
        }
        if (_default) {
            _param.default = _default
        }
        return _param
    }
    // Shared sub-grammar: "<type> [extends <type>] [= <type>]".
    const paramWithConstraint = () => {
        return (
            <list>
                <TypeExpression label="param" />
                <opt label="constraint">
                    <pattern action={({ expression }) => expression}>
                        {Text("extends")}
                        <TypeExpression label="expression" />
                    </pattern>
                </opt>
                <opt label="_default">
                    <pattern action={({ expression }) => expression}>
                        {Text("=")}
                        <TypeExpression label="expression" />
                    </pattern>
                </opt>
            </list>
        )
    }
    return (
        <pattern action={action}>
            <opt label="head">
                <pattern action={actionReturnParam}>
                    {paramWithConstraint()}
                </pattern>
            </opt>
            <repeat type="*" label="tail">
                <pattern action={actionReturnParam}>
                    {Text(",")}
                    {paramWithConstraint()}
                </pattern>
            </repeat>
        </pattern>
    )
}

export interface IInferType {
    kind: "InferType"
    typeName: IIdentifier
}

// Parses "infer <identifier>" into an InferType node.
export function InferType() {
    const action = ({ typeName }): IInferType => {
        return {
            kind: "InferType",
            typeName
        }
    }
    return (
        <pattern action={action}>
            {Text("infer")}
            <Identifier label="typeName" />
        </pattern>
    )
}

export interface IUnionType {
    kind: "UnionType"
    types: ITypeExpression[]
}

// Parses "|[a, b, ...]" or the keyword form "union[a, b, ...]".
export function UnionType() {
    const action = ({ types }): IUnionType => {
        return {
            kind: "UnionType",
            types
        }
    }
    return (
        <pattern action={action}>
            <or>
                {Text("|")}
                {Text("union")}
            </or>
            {Text("[")}
            <TypeExpressionList label="types" />
            {Text("]")}
        </pattern>
    )
}

export interface IIntersectionType {
    kind: "IntersectionType"
    types: ITypeExpression[]
}

/**
 * An intersection type combines multiple types into one.
 * Parses "&[a, b, ...]" or the keyword form "combine[a, b, ...]".
 */
export function IntersectionType() {
    const action = ({ types }): IIntersectionType => {
        return {
            kind: "IntersectionType",
            types
        }
    }
    return (
        <pattern action={action}>
            <or>
                {Text("&")}
                {Text("combine")}
            </or>
            {Text("[")}
            <TypeExpressionList label="types" />
            {Text("]")}
        </pattern>
    )
}

export type IOperatorType = IKeyOfType | IReadonlyArray | IReadonlyTuple

// Ordered-choice over the prefix type operators (keyof / readonly forms).
export function OperatorType() {
    return (
        <or>
            <KeyOfType />
            <ReadonlyArray />
            <ReadonlyTuple />
        </or>
    )
}

export interface IReadonlyTuple {
    kind: "ReadonlyTuple"
    operand: ITupleType
}

// Parses "readonly <tuple-type>".
export function ReadonlyTuple() {
    const action = ({ operand }): IReadonlyTuple => {
        return {
            kind: "ReadonlyTuple",
            operand
        }
    }
    return (
        <pattern action={action}>
            {Text("readonly")}
            <TupleType label="operand" />
        </pattern>
    )
}

export interface IReadonlyArray {
    kind: "ReadonlyArray"
    operand: IArrayType
}

// Parses "readonly <array-type>".
export function ReadonlyArray() {
    const action = ({ operand }): IReadonlyArray => {
        return {
            kind: "ReadonlyArray",
            operand
        }
    }
    return (
        <pattern action={action}>
            {Text("readonly")}
            <ArrayType label="operand" />
        </pattern>
    )
}

export interface IKeyOfType {
    kind: "KeyOfType"
    operand: ITypeExpression
}

// Parses "keyof <type>".
export function KeyOfType() {
    const action = ({ operand }): IKeyOfType => {
        return {
            kind: "KeyOfType",
            operand
        }
    }
    return (
        <pattern action={action}>
            {Text("keyof")}
            <TypeExpression label="operand" />
        </pattern>
    )
}

export interface IIndexType {
    kind: "IndexType"
    head: ITypeExpression
    members: ITypeExpression[]
}

/**
 * The head of an indexed access must not itself be an IndexType, or the rule
 * would left-recurse; this clones the TypeExpression rule with the IndexType
 * alternative filtered out of its children.
 */
function IndexTypeHead() {
    const typeExpression = TypeExpression();
    typeExpression.children = typeExpression.children.filter((child: { rule: Function }) => child.rule !== IndexType);
    return typeExpression;
}

// Parses indexed access "Head[K1][K2]..." (one or more index members).
export function IndexType() {
    const action = ({ head, members }): IIndexType => {
        return {
            kind: "IndexType",
            head,
            members
        }
    }
    return (
        <pattern action={action}>
            <IndexTypeHead label="head" />
            <repeat type="+" label="members">
                <pattern action={({ indexType }) => indexType}>
                    {Text("[")}
                    <TypeExpression label="indexType" />
                    {Text("]")}
                </pattern>
            </repeat>
        </pattern>
    )
}

export interface IMappedTypeExpression {
    kind: "MappedTypeExpression"
    body: ITypeForInStatement
}

// Parses "^{ <for-in-statement> }" — the mapped-type block form.
export function MappedTypeExpression() {
    const action = ({ statement }): IMappedTypeExpression => {
        return {
            kind: "MappedTypeExpression",
            body: statement
        }
    }
    return (
        <pattern action={action}>
            {Text("^{")}
            <TypeForInStatement label="statement" />
            {Text("}")}
        </pattern>
    )
}

export interface IContextType {
    kind: "ContextType"
    body: {
        context: string
        source: string
    }
}

/**
 * Parses a fenced foreign-context block: ``` "<context>" <source> ```.
 * If the parser's globalContext supplies resolveContextType, the raw node is
 * passed through it; otherwise the raw node is returned as-is.
 */
export function ContextType() {
    const action = ({ context, source, globalContext }): IContextType => {
        const ast: IContextType = {
            kind: "ContextType",
            body: {
                context,
                source
            }
        };
        return globalContext.resolveContextType ? globalContext.resolveContextType(ast) : ast;
    }
    return (
        <pattern action={action}>
            {Text("```")}
            <String label="context" />
            <Source label="source" />
            {Text("```")}
        </pattern>
    )
}
the_stack
import * as msal from '@azure/msal-node'; import * as MicrosoftGraph from '@microsoft/microsoft-graph-types'; import fetch, { Response } from 'node-fetch'; import qs from 'qs'; import { MicrosoftGraphProviderConfig } from './config'; /** * OData (Open Data Protocol) Query * * {@link https://docs.microsoft.com/en-us/odata/concepts/queryoptions-overview} * {@link https://docs.microsoft.com/en-us/graph/query-parameters} * @public */ export type ODataQuery = { /** * search resources within a collection matching a free-text search expression. */ search?: string; /** * filter a collection of resources */ filter?: string; /** * specifies the related resources or media streams to be included in line with retrieved resources */ expand?: string; /** * request a specific set of properties for each entity or complex type */ select?: string[]; /** * Retrieves the total count of matching resources. */ count?: boolean; }; /** * Extends the base msgraph types to include the odata type. * * @public */ export type GroupMember = | (MicrosoftGraph.Group & { '@odata.type': '#microsoft.graph.user' }) | (MicrosoftGraph.User & { '@odata.type': '#microsoft.graph.group' }); /** * A HTTP Client that communicates with Microsoft Graph API. 
* Simplify Authentication and API calls to get `User` and `Group` from Azure Active Directory * * Uses `msal-node` for authentication * * @public */ export class MicrosoftGraphClient { /** * Factory method that instantiate `msal` client and return * an instance of `MicrosoftGraphClient` * * @public * * @param config - Configuration for Interacting with Graph API */ static create(config: MicrosoftGraphProviderConfig): MicrosoftGraphClient { const clientConfig: msal.Configuration = { auth: { clientId: config.clientId, clientSecret: config.clientSecret, authority: `${config.authority}/${config.tenantId}`, }, }; const pca = new msal.ConfidentialClientApplication(clientConfig); return new MicrosoftGraphClient(config.target, pca); } /** * @param baseUrl - baseUrl of Graph API {@link MicrosoftGraphProviderConfig.target} * @param pca - instance of `msal.ConfidentialClientApplication` that is used to acquire token for Graph API calls * */ constructor( private readonly baseUrl: string, private readonly pca: msal.ConfidentialClientApplication, ) {} /** * Get a collection of resource from Graph API and * return an `AsyncIterable` of that resource * * @public * @param path - Resource in Microsoft Graph * @param query - OData Query {@link ODataQuery} * @param queryMode - Mode to use while querying. Some features are only available at "advanced". */ async *requestCollection<T>( path: string, query?: ODataQuery, queryMode?: 'basic' | 'advanced', ): AsyncIterable<T> { // upgrade to advanced query mode transparently when "search" is used // to stay backwards compatible. const appliedQueryMode = query?.search ? 'advanced' : queryMode ?? 
'basic'; // not needed for "search" // as of https://docs.microsoft.com/en-us/graph/aad-advanced-queries?tabs=http // even though a few other places say the opposite // - https://docs.microsoft.com/en-us/graph/api/user-list?view=graph-rest-1.0&tabs=http#request-headers // - https://docs.microsoft.com/en-us/graph/api/resources/group?view=graph-rest-1.0#properties if (appliedQueryMode === 'advanced' && (query?.filter || query?.select)) { query.count = true; } const headers: Record<string, string> = appliedQueryMode === 'advanced' ? { // Eventual consistency is required for advanced querying capabilities // like "$search" or parts of "$filter". // If a new user/group is not found, it'll eventually be imported on a subsequent read ConsistencyLevel: 'eventual', } : {}; let response = await this.requestApi(path, query, headers); for (;;) { if (response.status !== 200) { await this.handleError(path, response); } const result = await response.json(); // Graph API return array of collections const elements: T[] = result.value; yield* elements; // Follow cursor to the next page if one is available if (!result['@odata.nextLink']) { return; } response = await this.requestRaw(result['@odata.nextLink'], headers); } } /** * Abstract on top of {@link MicrosoftGraphClient.requestRaw} * * @public * @param path - Resource in Microsoft Graph * @param query - OData Query {@link ODataQuery} * @param headers - optional HTTP headers */ async requestApi( path: string, query?: ODataQuery, headers?: Record<string, string>, ): Promise<Response> { const queryString = qs.stringify( { $search: query?.search, $filter: query?.filter, $select: query?.select?.join(','), $expand: query?.expand, $count: query?.count, }, { addQueryPrefix: true, // Microsoft Graph doesn't like an encoded query string encode: false, }, ); return await this.requestRaw( `${this.baseUrl}/${path}${queryString}`, headers, ); } /** * Makes a HTTP call to Graph API with token * * @param url - HTTP Endpoint of Graph API * @param 
headers - optional HTTP headers */ async requestRaw( url: string, headers?: Record<string, string>, ): Promise<Response> { // Make sure that we always have a valid access token (might be cached) const token = await this.pca.acquireTokenByClientCredential({ scopes: ['https://graph.microsoft.com/.default'], }); if (!token) { throw new Error('Error while requesting token for Microsoft Graph'); } return await fetch(url, { headers: { ...headers, Authorization: `Bearer ${token.accessToken}`, }, }); } /** * Get {@link https://docs.microsoft.com/en-us/graph/api/resources/user | User} * from Graph API * * @public * @param userId - The unique identifier for the `User` resource * @param query - OData Query {@link ODataQuery} * */ async getUserProfile( userId: string, query?: ODataQuery, ): Promise<MicrosoftGraph.User> { const response = await this.requestApi(`users/${userId}`, query); if (response.status !== 200) { await this.handleError('user profile', response); } return await response.json(); } /** * Get {@link https://docs.microsoft.com/en-us/graph/api/resources/profilephoto | profilePhoto} * of `User` from Graph API with size limit * * @param userId - The unique identifier for the `User` resource * @param maxSize - Maximum pixel height of the photo * */ async getUserPhotoWithSizeLimit( userId: string, maxSize: number, ): Promise<string | undefined> { return await this.getPhotoWithSizeLimit('users', userId, maxSize); } async getUserPhoto( userId: string, sizeId?: string, ): Promise<string | undefined> { return await this.getPhoto('users', userId, sizeId); } /** * Get a collection of * {@link https://docs.microsoft.com/en-us/graph/api/resources/user | User} * from Graph API and return as `AsyncIterable` * * @public * @param query - OData Query {@link ODataQuery} * @param queryMode - Mode to use while querying. Some features are only available at "advanced". 
*/ async *getUsers( query?: ODataQuery, queryMode?: 'basic' | 'advanced', ): AsyncIterable<MicrosoftGraph.User> { yield* this.requestCollection<MicrosoftGraph.User>( `users`, query, queryMode, ); } /** * Get {@link https://docs.microsoft.com/en-us/graph/api/resources/profilephoto | profilePhoto} * of `Group` from Graph API with size limit * * @param groupId - The unique identifier for the `Group` resource * @param maxSize - Maximum pixel height of the photo * */ async getGroupPhotoWithSizeLimit( groupId: string, maxSize: number, ): Promise<string | undefined> { return await this.getPhotoWithSizeLimit('groups', groupId, maxSize); } async getGroupPhoto( groupId: string, sizeId?: string, ): Promise<string | undefined> { return await this.getPhoto('groups', groupId, sizeId); } /** * Get a collection of * {@link https://docs.microsoft.com/en-us/graph/api/resources/group | Group} * from Graph API and return as `AsyncIterable` * * @public * @param query - OData Query {@link ODataQuery} * @param queryMode - Mode to use while querying. Some features are only available at "advanced". 
*/ async *getGroups( query?: ODataQuery, queryMode?: 'basic' | 'advanced', ): AsyncIterable<MicrosoftGraph.Group> { yield* this.requestCollection<MicrosoftGraph.Group>( `groups`, query, queryMode, ); } /** * Get a collection of * {@link https://docs.microsoft.com/en-us/graph/api/resources/user | User} * belonging to a `Group` from Graph API and return as `AsyncIterable` * @public * @param groupId - The unique identifier for the `Group` resource * */ async *getGroupMembers(groupId: string): AsyncIterable<GroupMember> { yield* this.requestCollection<GroupMember>(`groups/${groupId}/members`); } /** * Get {@link https://docs.microsoft.com/en-us/graph/api/resources/organization | Organization} * from Graph API * @public * @param tenantId - The unique identifier for the `Organization` resource * */ async getOrganization( tenantId: string, ): Promise<MicrosoftGraph.Organization> { const response = await this.requestApi(`organization/${tenantId}`); if (response.status !== 200) { await this.handleError(`organization/${tenantId}`, response); } return await response.json(); } /** * Get {@link https://docs.microsoft.com/en-us/graph/api/resources/profilephoto | profilePhoto} * from Graph API * * @param entityName - type of parent resource, either `User` or `Group` * @param id - The unique identifier for the {@link entityName | entityName} resource * @param maxSize - Maximum pixel height of the photo * */ private async getPhotoWithSizeLimit( entityName: string, id: string, maxSize: number, ): Promise<string | undefined> { const response = await this.requestApi(`${entityName}/${id}/photos`); if (response.status === 404) { return undefined; } else if (response.status !== 200) { await this.handleError(`${entityName} photos`, response); } const result = await response.json(); const photos = result.value as MicrosoftGraph.ProfilePhoto[]; let selectedPhoto: MicrosoftGraph.ProfilePhoto | undefined = undefined; // Find the biggest picture that is smaller than the max size for (const p 
of photos) { if ( !selectedPhoto || (p.height! >= selectedPhoto.height! && p.height! <= maxSize) ) { selectedPhoto = p; } } if (!selectedPhoto) { return undefined; } return await this.getPhoto(entityName, id, selectedPhoto.id!); } private async getPhoto( entityName: string, id: string, sizeId?: string, ): Promise<string | undefined> { const path = sizeId ? `${entityName}/${id}/photos/${sizeId}/$value` : `${entityName}/${id}/photo/$value`; const response = await this.requestApi(path); if (response.status === 404) { return undefined; } else if (response.status !== 200) { await this.handleError('photo', response); } return `data:image/jpeg;base64,${Buffer.from( await response.arrayBuffer(), ).toString('base64')}`; } private async handleError(path: string, response: Response): Promise<void> { const result = await response.json(); const error = result.error as MicrosoftGraph.PublicError; throw new Error( `Error while reading ${path} from Microsoft Graph: ${error.code} - ${error.message}`, ); } }
the_stack
/// <reference types="activex-shdocvw" /> declare namespace Shell32 { // tslint:disable-next-line no-const-enum const enum BrowseInfoFlags { /** * Allow folder junctions such as a library or a compressed file with a .zip file name extension to be browsed. * * _Windows 7 and later._ */ BIF_BROWSEFILEJUNCTIONS = 0x00010000, /** Only return computers. If the user selects anything other than a computer, the **OK** button is grayed. */ BIF_BROWSEFORCOMPUTER = 0x00001000, /** * Only allow the selection of printers. If the user selects anything other than a printer, the **OK** button is grayed. * * In Windows XP and later systems, the best practice is to use a Windows XP-style dialog, setting the root of the dialog to the **Printers and Faxes** folder (`CSIDL_PRINTERS`). */ BIF_BROWSEFORPRINTER = 0x00002000, /** * The browse dialog box displays files as well as folders. * * _[Version 4.7.1](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_BROWSEINCLUDEFILES = 0x00004000, /** * The browse dialog box can display URLs. The `BIF_USENEWUI` and `BIF_BROWSEINCLUDEFILES` flags must also be set. If any of these three flags are not set, the browser dialog box rejects URLs. * Even when these flags are set, the browse dialog box displays URLs only if the folder that contains the selected item supports URLs. * * When the folder's [IShellFolder::GetAttributesOf](https://msdn.microsoft.com/en-us/library/windows/desktop/bb775068.aspx) method is called to request the selected item's attributes, the * folder must set the `SFGAO_FOLDER` attribute flag. Otherwise, the browse dialog box will not display the URL. * * _[Version 5.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_BROWSEINCLUDEURLS = 0x00000080, /** Do not include network folders below the domain level in the dialog box's tree view control. 
*/ BIF_DONTGOBELOWDOMAIN = 0x00000002, /** * Include an edit control in the browse dialog box that allows the user to type the name of an item. * * _[Version 4.7.1](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_EDITBOX = 0x00000010, /** * Use the new user interface. Setting this flag provides the user with a larger dialog box that can be resized. The dialog box has several new capabilities, including: drag-and-drop * capability within the dialog box, reordering, shortcut menus, new folders, delete, and other shortcut menu commands. * * _[Version 5.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_NEWDIALOGSTYLE = 0x00000040, /** * Do not include the **New Folder** button in the browse dialog box. * * _[Version 6.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_NONEWFOLDERBUTTON = 0x00000200, /** * When the selected item is a shortcut, return the PIDL of the shortcut itself rather than its target. * * _[Version 6.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_NOTRANSLATETARGETS = 0x00000400, /** * Only return file system ancestors. An ancestor is a subfolder that is beneath the root folder in the namespace hierarchy. If the user selects an ancestor of the root folder that is not * part of the file system, the **OK** button is grayed. */ BIF_RETURNFSANCESTORS = 0x00000008, /** * Only return file system directories. If the user selects folders that are not part of the file system, the **OK** button is grayed. * * **Note** The **OK** button remains enabled for `\\server` items, as well as `\\server\share` and directory items. However, if the user selects a `\\server` item, passing the PIDL * returned by [SHBrowseForFolder](https://msdn.microsoft.com/en-us/library/windows/desktop/bb762115.aspx) to * [SHGetPathFromIDList](https://msdn.microsoft.com/en-us/library/windows/desktop/bb762194.aspx) fails. 
*/ BIF_RETURNONLYFSDIRS = 0x00000001, /** * The browse dialog box can display sharable resources on remote systems. This is intended for applications that want to expose remote shares on a local system. The BIF_NEWDIALOGSTYLE flag * must also be set. * * _[Version 5.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_SHAREABLE = 0x00008000, /** * Include a status area in the dialog box. The callback function can set the status text by sending messages to the dialog box. This flag is not supported when BIF_NEWDIALOGSTYLE * is specified. */ BIF_STATUSTEXT = 0x00000004, /** * When combined with `BIF_NEWDIALOGSTYLE`, adds a usage hint to the dialog box, in place of the edit box. `BIF_EDITBOX` overrides this flag. * * _[Version 6.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_UAHINT = 0x00000100, /** * Use the new user interface, including an edit box. This flag is equivalent to `BIF_EDITBOX | BIF_NEWDIALOGSTYLE`. * * _[Version 5.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ BIF_USENEWUI = 0x00000090, /** * If the user types an invalid name into the edit box, the browse dialog box calls the application's * [BrowseCallbackProc](https://msdn.microsoft.com/en-us/library/windows/desktop/bb762598.aspx) with the **BFFM_VALIDATEFAILED** message. This flag is ignored if BIF_EDITBOX is not specified. * * _[Version 4.71](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779)_ */ BIF_VALIDATE = 0x00000020, } // tslint:disable-next-line no-const-enum const enum ExplorerBarCLSID { Favorites = '{EFA24E61-B078-11d0-89E4-00C04FC9E26E}', Folders = '{EFA24E64-B078-11d0-89E4-00C04FC9E26E}', History = '{EFA24E62-B078-11d0-89E4-00C04FC9E26E}', Search = '{30D02401-6A81-11d0-8274-00C04FD5AE38}', } // tslint:disable-next-line no-const-enum const enum FileOperationFlag { /** Preserve undo information, if possible. 
*/ FOF_ALLOWUNDO = 64, /** Perform the operation on files only if a wildcard file name (*.*) is specified. */ FOF_FILESONLY = 128, /** Respond with "Yes to All" for any dialog box that is displayed. */ FOF_NOCONFIRMATION = 16, /** Do not confirm the creation of a new directory if the operation requires one to be created. */ FOF_NOCONFIRMMKDIR = 512, /** * Do not copy connected files as a group. Only copy the specified files. * * _[Version 5.0](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ FOF_NO_CONNECTED_ELEMENTS = 8192, /** * Do not copy the security attributes of the file. * * _[Version 4.7.1](https://msdn.microsoft.com/en-us/library/windows/desktop/bb776779.aspx)_ */ FOF_NOCOPYSECURITYATTRIBS = 2048, /** Do not display a user interface if an error occurs. */ FOF_NOERRORUI = 1024, /** Only operate in the local directory. Do not operate recursively into subdirectories. */ FOF_NORECURSION = 4096, /** Give the file being operated on a new name in a move, copy, or rename operation if a file with the target name already exists. */ FOF_RENAMEONCOLLISION = 8, /** Do not display a progress dialog box. */ FOF_SILENT = 4, /** Display a progress dialog box but do not show the file names. */ FOF_SIMPLEPROGRESS = 256, } // tslint:disable-next-line no-const-enum const enum FileSystemDetails { Name = 0, Size = 1, Type = 2, LastModified = 3, Attributes = 4, InfoTip = -1, } // tslint:disable-next-line no-const-enum const enum HotkeyModifiers { SHIFT = 1, CTRL = 2, ALT = 4, Extended = 8 } // tslint:disable-next-line no-const-enum const enum LinkShowWindowState { /** Activates and displays a window. If the window is minimized or maximized, the system restores it to its original size and position. */ Normal = 1, /** Activates the window and displays it as a minimized window. */ Minimized = 2, /** Activates the window and displays it as a maximized window. 
*/ Maximized = 3 } /** Constants for Folder2.OfflineStatus */ // tslint:disable-next-line no-const-enum const enum OfflineFolderStatus { OFS_DIRTYCACHE = 3, OFS_INACTIVE = -1, OFS_OFFLINE = 1, OFS_ONLINE = 0, OFS_SERVERBACK = 2, } // tslint:disable-next-line no-const-enum const enum ShellFolderEnumerationFlags { /** **Windows 7 and later** The calling application is checking for the existence of child items in the folder. */ SHCONTF_CHECKING_FOR_CHILDREN = 0x00010, /** Include items that are folders in the enumeration. */ SHCONTF_FOLDERS = 0x00020, /** Include items that are not folders in the enumeration. */ SHCONTF_NONFOLDERS = 0x00040, /** Include hidden items in the enumeration. This does not include hidden system items. (To include hidden system items, use SHCONTF_INCLUDESUPERHIDDEN.) */ SHCONTF_INCLUDEHIDDEN = 0x00080, /** The calling application is looking for printer objects. */ SHCONTF_NETPRINTERSRCH = 0x00200, /** The calling application is looking for resources that can be shared. */ SHCONTF_SHAREABLE = 0x00400, /** Include items with accessible storage and their ancestors, including hidden items. */ SHCONTF_STORAGE = 0x00800, /** **Windows 7 and later**. Child folders should provide a navigation enumeration. */ SHCONTF_NAVIGATION_ENUM = 0x01000, /** **Windows Vista and later**. The calling application is looking for resources that can be enumerated quickly. */ SHCONTF_FASTITEMS = 0x02000, /** **Windows Vista and later**. Enumerate items as a simple list even if the folder itself is not structured in that way. */ SHCONTF_FLATLIST = 0x04000, /** * **Windows Vista and later**. The calling application is monitoring for change notifications.This means that the enumerator does not have to return all results. * Items can be reported through change notifications. */ SHCONTF_ENABLE_ASYNC = 0x08000, /** * **Windows 7 and later**. Include hidden system items in the enumeration. This value does not include hidden non-system items. 
* (To include hidden non-system items, use SHCONTF_INCLUDEHIDDEN.) */ SHCONTF_INCLUDESUPERHIDDEN = 0x10000, } // tslint:disable-next-line no-const-enum const enum ShellFolderViewOptions { /** The **Show All Files** option is enabled. */ SFVVO_SHOWALLOBJECTS = 0x00000001, /** The **Hide extensions for known file types** option is disabled. */ SFVVO_SHOWEXTENSIONS = 0x00000002, /** The **Display Compressed Files and Folders with Alternate Color** option is enabled. */ SFVVO_SHOWCOMPCOLOR = 0x00000008, /** The **Do Not Show Hidden Files** option is enabled. */ SFVVO_SHOWSYSFILES = 0x00000020, /** The **Classic Style** option is enabled. */ SFVVO_WIN95CLASSIC = 0x00000040, /** The **Double-Click to Open an Item** option is enabled. */ SFVVO_DOUBLECLICKINWEBVIEW = 0x00000080, /** The **Active Desktop – View as Web Page** option is enabled. */ SFVVO_DESKTOPHTML = 0x00000200 } // tslint:disable-next-line no-const-enum const enum ShellFolderViewSelectItem { Deselect = 0, Select = 1, EditMode = 3, DeselectAllButThis = 4, ScrollIntoView = 8, Focus = 16 } // tslint:disable-next-line no-const-enum const enum ShellLinkResolveFlags { /** * Do not display a dialog box if the link cannot be resolved. When this flag is set, the high-order word of _fFlags_ specifies a time-out duration, in milliseconds. * The method returns if the link cannot be resolved within the time-out duration. If the high-order word is set to zero, the time-out duration defaults to 3000 milliseconds (3 seconds). */ NoUI = 1, /** If the link has changed, update its path and list of identifiers. */ Update = 4, /** Do not update the link information. */ NoUpdate = 8, /** Do not execute the search heuristics. */ NoSearch = 16, /** Do not use distributed link tracking. */ NoTrack = 32, /** * Disable distributed link tracking. By default, distributed link tracking tracks removable media across multiple devices based on the volume name. 
* It also uses the UNC path to track remote file systems whose drive letter has changed. Setting this flag disables both types of tracking. */ NoLinkInfo = 64, /** Call the Windows Installer. */ InvokeMSI = 128 } // tslint:disable-next-line no-const-enum const enum SettingKey { /** * The state of the **Use check boxes to select items** option. This option is enabled automatically when the system has a pen input device configured. * * _Windows Vista and later_ */ SSF_AUTOCHECKSELECT = 0x00800000, /** Not used. */ SSF_DESKTOPHTML = 0x00000200, /** The state of the **Allow all uppercase names** option. As of Windows Vista, this folder option is no longer available. */ SSF_DONTPRETTYPATH = 0x00000800, /** The state of the **Double-click to open an item (single-click to select)** option. */ SSF_DOUBLECLICKINWEBVIEW = 0x00000080, /** Not used. */ SSF_FILTER = 0x00010000, /** Not used. */ SSF_HIDDENFILEEXTS = 0x00000004, /** The state of icon display in the Windows Explorer list view. If this option is active, no icons are displayed in the list view. */ SSF_HIDEICONS = 0x00004000, /** * The state of display name display in the Windows Explorer list view. If this option is active, icons are displayed in the list view, but display names are not. * * _Windows Vista and later_ */ SSF_ICONSONLY = 0x01000000, /** The state of the **Show map network drive button in toolbar** option. As of Windows Vista, this option is no longer available. */ SSF_MAPNETDRVBUTTON = 0x00001000, /** The state of the Recycle Bin's **Display delete confirmation dialog** option. */ SSF_NOCONFIRMRECYCLE = 0x00008000, /** The state of the **Automatically search for network folders and printers** option. As of Windows Vista, this option is no longer available. */ SSF_NONETCRAWLING = 0x00100000, /** The state of the **Launch folder windows in a separate process** option. */ SSF_SEPPROCESS = 0x00080000, /** Not used. 
*/ SSF_SERVERADMINUI = 0x00000004, /** The state of the **Hidden files and folders** option. */ SSF_SHOWALLOBJECTS = 0x00000001, /** The state of the **Show File Attributes in Detail View** option. As of Windows Vista, this option is no longer available. */ SSF_SHOWATTRIBCOL = 0x00000100, /** The state of the **Show encrypted or compressed NTFS files in color** option. */ SSF_SHOWCOMPCOLOR = 0x00000008, /** The state of the **Hide extensions for known file types** option. */ SSF_SHOWEXTENSIONS = 0x00000002, /** The state of the **Show pop-up description for folder and desktop items** option. */ SSF_SHOWINFOTIP = 0x00002000, /** Not used. */ SSF_SHOWSTARTPAGE = 0x00400000, /** The state of the **Hide protected operating system files** option. */ SSF_SHOWSUPERHIDDEN = 0x00040000, /** * The state of the **Hidden files and folders** option. In Windows Vista and later, this is equivalent to `SSF_SHOWALLOBJECTS`. In versions of Windows before Windows Vista, this value * referred to the state of the **Do not show hidden files and folders** option. */ SSF_SHOWSYSFILES = 0x00000020, /** * The state of the **Display file icon on thumbnails** option. If this option is active, a file type overlay is applied when a file supplies a thumbnail representation. * * _Windows Vista and later_ */ SSF_SHOWTYPEOVERLAY = 0x02000000, /** Not used. */ SSF_SORTCOLUMNS = 0x00000010, /** The state of the Windows XP display option, which selects between the Windows XP style and the classic style. As of Windows Vista, this option is no longer available. */ SSF_STARTPANELON = 0x00200000, /** The state of the **Display as a web view option**. As of Windows Vista, this option is no longer available. */ SSF_WEBVIEW = 0x00020000, /** The state of the **Classic Style** option. As of Windows Vista, this option is no longer available. */ SSF_WIN95CLASSIC = 0x00000400, } // tslint:disable-next-line no-const-enum const enum ShellExecuteShow { /** Open the application with a hidden window. 
*/ Hidden = 0, /** Open the application with a normal window. If the window is minimized or maximized, the system restores it to its original size and position. */ Normal = 1, /** Open the application with a minimized window. */ Minimized = 2, /** Open the application with a maximized window. */ Maximized = 3, /** Open the application with its window at its most recent size and position. The active window remains active. */ Last = 4, /** Open the application with its window at its current size and position. */ Current = 5, /** Open the application with a minimized window. The active window remains active. */ MinimizedNotActivated = 7, /** Open the application with its window in the default state specified by the application. */ Application = 10, } /** Constants for Special Folders for open/Explore */ // tslint:disable-next-line no-const-enum const enum ShellSpecialFolderConstants { ssfALTSTARTUP = 29, ssfAPPDATA = 26, ssfBITBUCKET = 10, ssfCOMMONALTSTARTUP = 30, ssfCOMMONAPPDATA = 35, ssfCOMMONDESKTOPDIR = 25, ssfCOMMONFAVORITES = 31, ssfCOMMONPROGRAMS = 23, ssfCOMMONSTARTMENU = 22, ssfCOMMONSTARTUP = 24, ssfCONTROLS = 3, ssfCOOKIES = 33, ssfDESKTOP = 0, ssfDESKTOPDIRECTORY = 16, ssfDRIVES = 17, ssfFAVORITES = 6, ssfFONTS = 20, ssfHISTORY = 34, ssfINTERNETCACHE = 32, ssfLOCALAPPDATA = 28, ssfMYPICTURES = 39, ssfNETHOOD = 19, ssfNETWORK = 18, ssfPERSONAL = 5, ssfPRINTERS = 4, ssfPRINTHOOD = 27, ssfPROFILE = 40, ssfPROGRAMFILES = 38, ssfPROGRAMFILESx86 = 48, ssfPROGRAMS = 2, ssfRECENT = 8, ssfSENDTO = 9, ssfSTARTMENU = 11, ssfSTARTUP = 7, ssfSYSTEM = 37, ssfSYSTEMx86 = 41, ssfTEMPLATES = 21, ssfWINDOWS = 36, } /** FileSearchBand Class */ class FileSearchBand { private 'Shell32.FileSearchBand_typekey': FileSearchBand; private constructor(); /** Retrieve the file from which the search was restored. */ readonly QueryFile: any; /** Get the search scope */ readonly Scope: any; /** Retrieve the guid of the currently active search. 
*/ readonly SearchID: string; /** method SetFocus */ SetFocus(): void; /** method SetSearchParameters */ SetSearchParameters(pbstrSearchID: string, bNavToResults: boolean, pvarScope?: any, pvarQueryFile?: any): void; } /** Definition of interface Folder version 3 */ class Folder3 { private 'Shell32.Folder3_typekey': Folder3; private constructor(); /** Get Application object */ readonly Application: any; /** Copy Items to this folder. */ CopyHere(vItem: string | ShellFolderItem | FolderItems3, vOptions?: FileOperationFlag): void; /** Call this after the WebView barricade is dismissed by the user */ DismissedWebViewBarricade(): void; /** * Get the details about an item. * @param vItem The item for which to retrieve the information. * @param iColumn An integer value that specifies the information to be retrieved. The information available for an item depends on the folder in which it is displayed. This value * corresponds to the zero-based column number that is displayed in a Shell view. */ GetDetailsOf(vItem: ShellFolderItem, iColumn: number): string; /** Should the WebView barricade be shown? */ readonly HaveToShowWebViewBarricade: boolean; /** The collection of Items in folder */ Items(): FolderItems3; /** Move Items to this folder. */ MoveHere(vItem: string | ShellFolderItem | FolderItems3, vOptions?: FileOperationFlag): void; /** Create a new sub folder in this folder. */ NewFolder(bName: string): void; /** Offline status of the server? */ readonly OfflineStatus: OfflineFolderStatus; /** Get Parent object */ readonly ParentFolder: Folder3; /** Parse the name to get an item. 
*/ ParseName(bName: string): ShellFolderItem | null; /** Folder's FolderItem interface */ readonly Self: ShellFolderItem; /** Ask if the WebView barricade should be shown or not */ ShowWebViewBarricade: boolean; /** Synchronize all offline files */ Synchronize(): void; /** Get the display name for the window */ readonly Title: string; } /** Definition of interface FolderItems3 */ class FolderItems3 { private 'Shell32.FolderItems3_typekey': FolderItems3; private constructor(); /** Get Application object */ readonly Application: any; /** Get count of items in the folder */ readonly Count: number; /** Set a wildcard filter to apply to the items returned */ Filter(grfFlags: ShellFolderEnumerationFlags, bstrFileSpec: string): void; /** * Executes a verb on a collection of `FolderItem` objects * @param vVerb String that corresponds to the command to be executed. If no verb is specified, the default verb is executed. * @param vArgs String with one or more arguments to the command specified by vVerb. The format of this string depends on the particular verb. 
*/ InvokeVerbEx(vVerb?: string, vArgs?: string): void; /** Return the figure for the given index */ Item(index?: any): ShellFolderItem; /** Get the list of verbs common to all the items */ readonly Verbs: FolderItemVerbs; } /** Definition of interface FolderItemVerb */ class FolderItemVerb { private 'Shell32.FolderItemVerb_typekey': FolderItemVerb; private constructor(); /** Execute the verb */ DoIt(): void; /** Get display name for item */ readonly Name: string; } /** Definition of interface FolderItemVerbs */ class FolderItemVerbs { private 'Shell32.FolderItemVerbs_typekey': FolderItemVerbs; private constructor(); /** Get count of open folder windows */ readonly Count: number; /** Return the specified verb */ Item(index?: any): FolderItemVerb; } /** Shell Object Type Information */ class Shell { private 'Shell32.Shell_typekey': Shell; private constructor(); /** Add an object to the Recent Docuements */ AddToRecent(varFile: string | null, bstrCategory?: string): void; /** Get Application object */ readonly Application: any; /** Browse the name space for a Folder */ BrowseForFolder(Hwnd: number, Title: string, Options: number | BrowseInfoFlags, RootFolder?: string | ShellSpecialFolderConstants): Folder3; /** Determine if the current user can start/stop the named service. */ CanStartStopService(ServiceName: string): boolean; /** Cascade Windows */ CascadeWindows(): void; /* * Runs the specified Control Panel (*.cpl) application. If the application is already open, it will activate the running instance. * * **Note** As of Windows Vista, most Control Panel applications are Shell items and cannot be opened with this function. To open those Control Panel applications, pass the canonical name to * `control.exe`. 
For example: * * `control.exe /name Microsoft.Personalization` */ ControlPanelItem(bstrDir: string): void; /** Eject the pc */ EjectPC(): void; /** Explore a folder */ Explore(vDir: string | ShellSpecialFolderConstants): void; /** * Return explorer policy value * * The specified value name must be within the **HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Policies\Explorer** subkey. * If the value name does not exist then the method returns null. */ ExplorerPolicy(bstrPolicyName: string): any; /** Bring up the file run dialog box */ FileRun(): void; /** Find a computer */ FindComputer(): void; /** Find Files */ FindFiles(): void; /** Find a Printer in the Directory Service */ FindPrinter(Name?: string, location?: string, model?: string): void; /** Return shell global setting */ GetSetting(lSetting: number | SettingKey): boolean; /** * `DirectoryServiceAvailable` -- Returns **true** if the directory service is available * * `IsOS_DomainMember` -- Returns **true** if the computer is a member of a domain (_Windows XP and later_) * * `IsOS_Personal` -- Returns **true** if the operating system is Windows XP Home Edition (_Windows XP only_) * * `IsOS_Professional` -- Returns **true** if the operating system is Windows XP Professional Edition (_Windows XP only_) */ GetSystemInformation(Name: 'DirectoryServiceAvailable' | 'IsOS_DomainMember' | 'IsOS_Personal' | 'IsOS_Professional'): boolean; /** * `DoubleClickTime` -- The double-click time, in milliseconds * * `PhysicalMemoryInstalled` -- The amount of physical memory installed, in bytes * * `ProcessorArchitecture` -- The processor architecture. For details, see the discussion of the **wProcessorArchitecture** member of the * [`SYSTEM_INFO`](https://msdn.microsoft.com/en-us/library/windows/desktop/ms724958.aspx) structure * * `ProcessorLevel` -- The processor level. 
Returns 3, 4, or 5, for x386, x486, and Pentium-level processors, respectively (_Windows Vista and later_) * * `ProcessorSpeed` -- The processor speed, in megahertz (MHz) */ GetSystemInformation(Name: 'DoubleClickTime' | 'PhysicalMemoryInstalled' | 'ProcessorArchitecture' | 'ProcessorLevel' | 'ProcessorSpeed'): number; /** Display shell help */ Help(): void; /** get restriction settings */ IsRestricted(Group: string, Restriction: string): number; /** Determine if a service is running by name. */ IsServiceRunning(ServiceName: string): any; /** Minimize all windows */ MinimizeAll(): void; /** Get special folder from ShellSpecialFolderConstants */ NameSpace(vDir: string | ShellSpecialFolderConstants): Folder3 | null; /** Open a folder */ Open(vDir: string | ShellSpecialFolderConstants): void; /** Get Parent object */ readonly Parent: any; /** Refresh the menu */ RefreshMenu(): void; /** Immersive Search */ SearchCommand(): void; /** * Start a service by name, and optionally set it to autostart. * * The method returns `false` if the service has already been started. Before calling this method, you can call * [Shell.IsServiceRunning](https://msdn.microsoft.com/en-us/library/windows/desktop/gg537742.aspx) to ascertain the status of the service. */ ServiceStart(ServiceName: string, Persistent?: boolean): boolean; /** * Stop a service by name, and optionally disable autostart. * * The method returns `false` if the service has already been stopped. Before calling this method, you can call * [Shell.IsServiceRunning](https://msdn.microsoft.com/en-us/library/windows/desktop/gg537742.aspx) to ascertain the status of the service. */ ServiceStop(ServiceName: string, Persistent?: boolean): boolean; /** Displays the **Date and Time Properties** dialog box. */ SetTime(): void; /** * Execute generic command * @param sFile A string that contains the name of the file on which `ShellExecute` will perform the action specified by `vOperation`. 
* @param [vArgs] A string that contains parameter values for the operation. * @param [vDir] The fully qualified path of the directory that contains the file specified by `sFile`. If this parameter is not specified, the current working directory is used. * @param [vOperation] The operation to be performed. This value is set to one of the verb strings that is supported by the file. If this parameter is not specified, the default operation is * performed. * @param [vShow] A recommendation as to how the application window should be displayed initially. The application can ignore this recommendation. */ ShellExecute(File: string, vArgs?: string, vDir?: string, vOperation?: string, vShow?: ShellExecuteShow): void; /** Show/Hide browser bar. */ ShowBrowserBar(bstrClsid: ExplorerBarCLSID, bShow: boolean): any; /** Exit Windows */ ShutdownWindows(): void; /** Tile windows horizontally */ TileHorizontally(): void; /** Tile windows vertically */ TileVertically(): void; /** Raise/lower the desktop */ ToggleDesktop(): void; /** Displays the **Taskbar and Start Menu Properties** dialog box. */ TrayProperties(): void; /** Undo Minimize All */ UndoMinimizeALL(): void; /** The collection of open folder windows */ Windows(): ShellWindows; /** Displays the **Windows Security** dialog box. */ WindowsSecurity(): void; /** Displays your open windows in a 3D stack that you can flip through. 
*/ WindowSwitcher(): void; } /** ShellDispatch Load in Shell Context */ class ShellDispatchInproc { private 'Shell32.ShellDispatchInproc_typekey': ShellDispatchInproc; private constructor(); } /** Shell Folder Item */ class ShellFolderItem { private 'Shell32.ShellFolderItem_typekey': ShellFolderItem; private constructor(); /** Get Application object */ readonly Application: any; /** Access an extended property */ ExtendedProperty(bstrPropName: string): any; /** If item is a folder return folder object */ readonly GetFolder: Folder3 | null; /** If item is link return link object */ readonly GetLink: ShellLinkObject | null; /** * Execute a command on the item. * Must be one of the values returned by the item's `FolderItemVerb.Name` property. * If no verb is specified, the default verb will be invoked. */ InvokeVerb(vVerb?: string): void; /** * Extended version of InvokeVerb * @param vVerb String that corresponds to the command to be executed. If no verb is specified, the default verb is executed. * @param vArgs String with one or more arguments to the command specified by vVerb. The format of this string depends on the particular verb. */ InvokeVerbEx(vVerb?: string, vArgs?: string): void; /** Indicates if the item can be hosted inside a browser or Windows Explorer frame. */ readonly IsBrowsable: boolean; /** Is the item a file system object? */ readonly IsFileSystem: boolean; /** Is the item a Folder? */ readonly IsFolder: boolean; /** Is the item a link? */ readonly IsLink: boolean; /** Modification Date? 
*/ ModifyDate: VarDate; /** Get display name for item */ Name: string; /** Get Parent object */ readonly Parent: any; /** Get the pathname to the item */ readonly Path: string; /** Size */ readonly Size: number; /** Contains a string representation of the item's type */ readonly Type: string; /** Get the list of verbs for the object */ Verbs(): FolderItemVerbs; } /** Shell Folder View Object */ class ShellFolderView { private 'Shell32.ShellFolderView_typekey': ShellFolderView; private constructor(); /** Get Application object */ readonly Application: any; /** Get Current View Mode */ CurrentViewMode: number; /** Filter View */ FilterView(bstrFilterText: string): void; /** The currently focused item in the folder */ readonly FocusedItem: ShellFolderItem; /** Get the folder being viewed */ readonly Folder: Folder3; /** Get Folder Flags */ FolderFlags: number; /** Get Group By Column */ GroupBy: string; /** Set Icon Size */ IconSize: number; /** Show items menu and return command selected */ PopupItemMenu(pfi: ShellFolderItem, vx?: any, vy?: any): string; /** The collection of Selected Items in folder */ SelectedItems(): FolderItems3; /** Select the item */ SelectItem(pvfi: ShellFolderItem, dwFlags: ShellFolderViewSelectItem): void; /** Select Item relative to the Current Item */ SelectItemRelative(iRelative: number): void; /** Get Sorting Columns */ SortColumns: string; /** Returns the view options for showing a folder. */ readonly ViewOptions: ShellFolderViewOptions; } /** * The **ShellFolderView** object fires two events, **EnumDone** and **SelectionChanged**, that are typically handled by applications. However, some applications * must handle events from a series of **ShellFolderView** objects. For example, an application might host a **WebBrowser** control that allows users to navigate through * a series of folders. Each folder has its own **ShellFolderView** object with its associated events. Handling these events can be difficult. 
* * The **ShellFolderViewOC** object simplifies event handling for such scenarios. It allows applications to handle events for all ShellFolderView objects with a single * pair of **ShellFolderViewOC** event handlers. Each time the user navigates to a new folder, the application passes the associated **ShellFolderView** object to the * **ShellFolderViewOC** object by calling **SetFolderView**. Then, when an **EnumDone** or **SelectionChanged** event is fired, the **ShellFolderViewOC** object * forwards the event to its own handler for processing. */ class ShellFolderViewOC { private 'Shell32.ShellFolderViewOC_typekey': ShellFolderViewOC; private constructor(); /** Set the ShellFolderView object to monitor events of. */ SetFolderView(pdisp: ShellFolderView): void; } /** Shell Link object */ class ShellLinkObject { private 'Shell32.ShellLinkObject_typekey': ShellLinkObject; private constructor(); /** Arguments for the link */ Arguments: string; /** Description of the link */ Description: string; /** Get the IconLocation for the link */ GetIconLocation(pbs: string): number; /** Hotkey for the link */ Hotkey: number; /** Fully qualified path of the link */ Path: string; /** Tell the link to resolve itself */ Resolve(fFlags: ShellLinkResolveFlags): void; /** * Tell the link to save the changes * @param sFile The fully qualified path of the file where the new link information is to be saved. If no file is specified, the current file is used. */ Save(vWhere?: string): void; /** * Set the IconLocation for the link * @param sPath The fully qualified path of the file that contains the icon. * @param iIcon The index of the icon in the file specified by _sPath_. 
*/ SetIconLocation(sPath: string, iIcon: number): void; /** Get the Show Command for the link */ ShowCommand: LinkShowWindowState; /** Get the target of a link object */ readonly Target: ShellFolderItem | null; /** Get the working directory for the link */ WorkingDirectory: string; } class ShellWindows { private 'Shell32.ShellWindows_typekey': ShellWindows; private constructor(); /** * Retrieves an InternetExplorer object that represents the Shell window. * @param index Default is 0 */ Item(index?: number): SHDocVw.InternetExplorer; /** Contains the number of items in the collection. */ readonly Count: number; } } interface ActiveXObject { on( obj: Shell32.ShellFolderView, event: 'BeginDrag' | 'DefaultVerbInvoked' | 'EnumDone' | 'SelectionChanged' | 'VerbInvoked', handler: ( this: Shell32.ShellFolderView, parameter: {}) => void): void; on( obj: Shell32.ShellFolderViewOC, event: 'BeginDrag' | 'DefaultVerbInvoked' | 'EnumDone' | 'SelectionChanged' | 'VerbInvoked', handler: (this: Shell32.ShellFolderViewOC, parameter: {}) => void): void; } interface ActiveXObjectNameMap { 'Shell.Application': Shell32.Shell; 'Shell.FolderView': Shell32.ShellFolderViewOC; } interface EnumeratorConstructor { new(col: Shell32.FolderItems3): Enumerator<Shell32.ShellFolderItem>; new(col: Shell32.FolderItemVerbs): Enumerator<Shell32.FolderItemVerb>; new(col: Shell32.ShellWindows): Enumerator<SHDocVw.InternetExplorer>; }
the_stack
* @fileoverview
 * @suppress {globalThis,undefinedVars}
 */

/**
 * Extend the Error with additional fields for rewritten stack frames
 */
interface Error {
  /**
   * Stack trace where extra frames have been removed and zone names added.
   */
  zoneAwareStack?: string;

  /**
   * Original stack trace with no modifications
   */
  originalStack?: string;
}

Zone.__load_patch('Error', (global: any, Zone: ZoneType, api: _ZonePrivate) => {
  /*
   * This code patches Error so that:
   *   - It ignores un-needed stack frames.
   *   - It shows the associated Zone for each frame.
   */

  // Classification for stack frames that the rewriter recognizes.
  const enum FrameType {
    /// Skip this frame when printing out stack
    blackList,
    /// This frame marks zone transition
    transition
  }

  const blacklistedStackFramesSymbol = api.symbol('blacklistedStackFrames');
  // Keep a reference to the un-patched Error constructor; it is also stashed on
  // `global` under a zone symbol.
  const NativeError = global[api.symbol('Error')] = global['Error'];
  // Store the frames which should be removed from the stack frames
  const blackListedStackFrames: {[frame: string]: FrameType} = {};
  // We must find the frame where Error was created, otherwise we assume we don't understand stack.
  // These variables hold the browser-specific textual variants of the ZoneAwareError frame itself
  // (with/without `new`, with/without an `Error.` qualifier); they are populated by the detect
  // zone at the bottom of this patch.
  let zoneAwareFrame1: string;
  let zoneAwareFrame2: string;
  let zoneAwareFrame1WithoutNew: string;
  let zoneAwareFrame2WithoutNew: string;
  let zoneAwareFrame3WithoutNew: string;

  // Function declarations are hoisted, so ZoneAwareError (defined below) is already usable here.
  global['Error'] = ZoneAwareError;
  const stackRewrite = 'stackRewrite';

  // 'default' - rewrite the stack eagerly when the error is constructed.
  // 'disable' - never rewrite (also skips the detect-zone bootstrap entirely).
  // 'lazy'    - snapshot zone names at construction time; rewrite on first access
  //             of `zoneAwareStack`.
  type BlackListedStackFramesPolicy = 'default'|'disable'|'lazy';
  const blackListedStackFramesPolicy: BlackListedStackFramesPolicy =
      global['__Zone_Error_BlacklistedStackFrames_policy'] || 'default';

  // A plain linked list mirroring the live `_ZoneFrame` chain, keeping only zone names.
  interface ZoneFrameName {
    zoneName: string;
    parent?: ZoneFrameName;
  }

  /**
   * Snapshot the current zone-frame chain as a linked list of zone names.
   * Used by the 'lazy' policy so the stack can be rewritten later, after the
   * live `_ZoneFrame` chain may have changed.
   */
  function buildZoneFrameNames(zoneFrame: _ZoneFrame) {
    let zoneFrameName: ZoneFrameName = {zoneName: zoneFrame.zone.name};
    let result = zoneFrameName;
    while (zoneFrame.parent) {
      zoneFrame = zoneFrame.parent;
      const parentZoneFrameName = {zoneName: zoneFrame.zone.name};
      zoneFrameName.parent = parentZoneFrameName;
      zoneFrameName = parentZoneFrameName;
    }
    return result;
  }

  /**
   * Rewrite `originalStack`: drop blacklisted frames, consume zone-transition
   * frames (walking up `zoneFrame` as it goes), and append ` [zoneName]` to
   * each remaining frame.
   *
   * @param originalStack the unmodified stack trace text
   * @param zoneFrame either a live `_ZoneFrame` chain or a `ZoneFrameName`
   *     snapshot (see `isZoneFrame`); `null` stops annotation
   * @param isZoneFrame true when `zoneFrame` is a live `_ZoneFrame`
   */
  function buildZoneAwareStackFrames(
      originalStack: string, zoneFrame: _ZoneFrame|ZoneFrameName|null, isZoneFrame = true) {
    let frames: string[] = originalStack.split('\n');
    let i = 0;
    // Find the first frame
    while (!(frames[i] === zoneAwareFrame1 || frames[i] === zoneAwareFrame2 ||
             frames[i] === zoneAwareFrame1WithoutNew || frames[i] === zoneAwareFrame2WithoutNew ||
             frames[i] === zoneAwareFrame3WithoutNew) &&
           i < frames.length) {
      i++;
    }
    for (; i < frames.length && zoneFrame; i++) {
      let frame = frames[i];
      if (frame.trim()) {
        switch (blackListedStackFrames[frame]) {
          case FrameType.blackList:
            frames.splice(i, 1);
            i--;
            break;
          case FrameType.transition:
            if (zoneFrame.parent) {
              // This is the special frame where zone changed. Print and process it accordingly
              zoneFrame = zoneFrame.parent;
            } else {
              zoneFrame = null;
            }
            frames.splice(i, 1);
            i--;
            break;
          default:
            frames[i] += isZoneFrame ? ` [${(zoneFrame as _ZoneFrame).zone.name}]` :
                                       ` [${(zoneFrame as ZoneFrameName).zoneName}]`;
        }
      }
    }
    return frames.join('\n');
  }

  /**
   * This is ZoneAwareError which processes the stack frame and cleans up extra frames as well as
   * adds zone information to it.
   */
  function ZoneAwareError(): Error {
    // We always have to return native error otherwise the browser console will not work.
    let error: Error = NativeError.apply(this, arguments);
    // Save original stack trace
    const originalStack = (error as any)['originalStack'] = error.stack;

    // Process the stack trace and rewrite the frames.
    // `stackRewrite` only becomes true once the detect zone (below) has found all frames of
    // interest, so errors thrown before detection keep their native stacks.
    if ((ZoneAwareError as any)[stackRewrite] && originalStack) {
      let zoneFrame = api.currentZoneFrame();
      if (blackListedStackFramesPolicy === 'lazy') {
        // don't handle stack trace now; just snapshot zone names for later rewriting
        (error as any)[api.symbol('zoneFrameNames')] = buildZoneFrameNames(zoneFrame);
      } else if (blackListedStackFramesPolicy === 'default') {
        try {
          error.stack = error.zoneAwareStack = buildZoneAwareStackFrames(originalStack, zoneFrame);
        } catch (e) {
          // ignore as some browsers don't allow overriding of stack
        }
      }
    }

    if (this instanceof NativeError && this.constructor != NativeError) {
      // We got called with a `new` operator AND we are subclass of ZoneAwareError
      // in that case we have to copy all of our properties to `this`.
      Object.keys(error).concat('stack', 'message').forEach((key) => {
        const value = (error as any)[key];
        if (value !== undefined) {
          try {
            this[key] = value;
          } catch (e) {
            // ignore the assignment in case it is a setter and it throws.
          }
        }
      });
      return this;
    }
    return error;
  }

  // Copy the prototype so that instanceof operator works as expected
  ZoneAwareError.prototype = NativeError.prototype;
  (ZoneAwareError as any)[blacklistedStackFramesSymbol] = blackListedStackFrames;
  (ZoneAwareError as any)[stackRewrite] = false;

  const zoneAwareStackSymbol = api.symbol('zoneAwareStack');

  // try to define zoneAwareStack property when blackListed
  // policy is 'lazy': the rewrite is deferred until the property is first read (or set)
  if (blackListedStackFramesPolicy === 'lazy') {
    Object.defineProperty(ZoneAwareError.prototype, 'zoneAwareStack', {
      configurable: true,
      enumerable: true,
      get: function() {
        if (!this[zoneAwareStackSymbol]) {
          this[zoneAwareStackSymbol] = buildZoneAwareStackFrames(
              this.originalStack, this[api.symbol('zoneFrameNames')], false);
        }
        return this[zoneAwareStackSymbol];
      },
      set: function(newStack: string) {
        this.originalStack = newStack;
        this[zoneAwareStackSymbol] = buildZoneAwareStackFrames(
            this.originalStack, this[api.symbol('zoneFrameNames')], false);
      }
    });
  }

  // those properties need special handling (they get dedicated wrappers below)
  const specialPropertyNames = ['stackTraceLimit', 'captureStackTrace', 'prepareStackTrace'];
  // those properties of NativeError should be set to ZoneAwareError
  const nativeErrorProperties = Object.keys(NativeError);
  if (nativeErrorProperties) {
    nativeErrorProperties.forEach(prop => {
      if (specialPropertyNames.filter(sp => sp === prop).length === 0) {
        // Forward every other static property straight through to NativeError.
        Object.defineProperty(ZoneAwareError, prop, {
          get: function() {
            return NativeError[prop];
          },
          set: function(value) {
            NativeError[prop] = value;
          }
        });
      }
    });
  }

  if (NativeError.hasOwnProperty('stackTraceLimit')) {
    // Extend default stack limit as we will be removing few frames.
    NativeError.stackTraceLimit = Math.max(NativeError.stackTraceLimit, 15);

    // make sure that ZoneAwareError has the same property which forwards to NativeError.
    Object.defineProperty(ZoneAwareError, 'stackTraceLimit', {
      get: function() {
        return NativeError.stackTraceLimit;
      },
      set: function(value) {
        return NativeError.stackTraceLimit = value;
      }
    });
  }

  if (NativeError.hasOwnProperty('captureStackTrace')) {
    Object.defineProperty(ZoneAwareError, 'captureStackTrace', {
      // add named function here because we need to remove this
      // stack frame in the prepareStackTrace wrapper below
      value: function zoneCaptureStackTrace(targetObject: Object, constructorOpt?: Function) {
        NativeError.captureStackTrace(targetObject, constructorOpt);
      }
    });
  }

  const ZONE_CAPTURESTACKTRACE = 'zoneCaptureStackTrace';
  Object.defineProperty(ZoneAwareError, 'prepareStackTrace', {
    get: function() {
      return NativeError.prepareStackTrace;
    },
    set: function(value) {
      if (!value || typeof value !== 'function') {
        return NativeError.prepareStackTrace = value;
      }
      return NativeError.prepareStackTrace = function(
                 error: Error, structuredStackTrace: {getFunctionName: Function}[]) {
        // remove additional stack information from ZoneAwareError.captureStackTrace
        if (structuredStackTrace) {
          for (let i = 0; i < structuredStackTrace.length; i++) {
            const st = structuredStackTrace[i];
            // remove the first function whose name is zoneCaptureStackTrace
            if (st.getFunctionName() === ZONE_CAPTURESTACKTRACE) {
              structuredStackTrace.splice(i, 1);
              break;
            }
          }
        }
        return value.call(this, error, structuredStackTrace);
      };
    }
  });

  if (blackListedStackFramesPolicy === 'disable') {
    // don't need to run detectZone to populate
    // blacklisted stack frames
    return;
  }

  // Now we need to populate the `blacklistedStackFrames` as well as find the
  // run/runGuarded/runTask frames. This is done by creating a detect zone and then threading
  // the execution through all of the above methods so that we can look at the stack trace and
  // find the frames of interest.
  let detectZone: Zone = Zone.current.fork({
    name: 'detect',
    onHandleError: function(
        parentZD: ZoneDelegate, current: Zone, target: Zone, error: any): boolean {
      if (error.originalStack && Error === ZoneAwareError) {
        let frames = error.originalStack.split(/\n/);
        let runFrame = false, runGuardedFrame = false, runTaskFrame = false;
        while (frames.length) {
          let frame = frames.shift();
          // On safari it is possible to have stack frame with no line number.
          // This check makes sure that we don't filter frames on name only (must have
          // line number or exact equals to `ZoneAwareError`)
          if (/:\d+:\d+/.test(frame) || frame === 'ZoneAwareError') {
            // Get rid of the path so that we don't accidentally find function name in path.
            // In chrome the separator is `(` and `@` in FF and safari
            // Chrome: at Zone.run (zone.js:100)
            // Chrome: at Zone.run (http://localhost:9876/base/build/lib/zone.js:100:24)
            // FireFox: Zone.prototype.run@http://localhost:9876/base/build/lib/zone.js:101:24
            // Safari: run@http://localhost:9876/base/build/lib/zone.js:101:24
            let fnName: string = frame.split('(')[0].split('@')[0];
            let frameType = FrameType.transition;
            if (fnName.indexOf('ZoneAwareError') !== -1) {
              // Record the browser-specific spellings of our own constructor frame so the
              // rewriter can locate the top of a user stack later.
              if (fnName.indexOf('new ZoneAwareError') !== -1) {
                zoneAwareFrame1 = frame;
                zoneAwareFrame2 = frame.replace('new ZoneAwareError', 'new Error.ZoneAwareError');
              } else {
                zoneAwareFrame1WithoutNew = frame;
                zoneAwareFrame2WithoutNew = frame.replace('Error.', '');
                if (frame.indexOf('Error.ZoneAwareError') === -1) {
                  zoneAwareFrame3WithoutNew =
                      frame.replace('ZoneAwareError', 'Error.ZoneAwareError');
                }
              }
              blackListedStackFrames[zoneAwareFrame2] = FrameType.blackList;
            }
            if (fnName.indexOf('runGuarded') !== -1) {
              runGuardedFrame = true;
            } else if (fnName.indexOf('runTask') !== -1) {
              runTaskFrame = true;
            } else if (fnName.indexOf('run') !== -1) {
              runFrame = true;
            } else {
              frameType = FrameType.blackList;
            }
            blackListedStackFrames[frame] = frameType;
            // Once we find all of the frames we can stop looking.
            if (runFrame && runGuardedFrame && runTaskFrame) {
              (ZoneAwareError as any)[stackRewrite] = true;
              break;
            }
          }
        }
      }
      return false;
    }
  }) as Zone;
  // carefully construct a stack frame which contains all of the frames of interest which
  // need to be detected and blacklisted.

  // The child zone's pass-through intercepts guarantee that the zone-transition frames
  // show up in the captured trace.
  const childDetectZone = detectZone.fork({
    name: 'child',
    onScheduleTask: function(delegate, curr, target, task) {
      return delegate.scheduleTask(target, task);
    },
    onInvokeTask: function(delegate, curr, target, task, applyThis, applyArgs) {
      return delegate.invokeTask(target, task, applyThis, applyArgs);
    },
    onCancelTask: function(delegate, curr, target, task) {
      return delegate.cancelTask(target, task);
    },
    onInvoke: function(delegate, curr, target, callback, applyThis, applyArgs, source) {
      return delegate.invoke(target, callback, applyThis, applyArgs, source);
    }
  });

  // we need to detect all zone related frames, it will
  // exceed default stackTraceLimit, so we set it to
  // larger number here, and restore it after detect finish.
  const originalStackTraceLimit = Error.stackTraceLimit;
  Error.stackTraceLimit = 100;
  // we schedule event/micro/macro task, and invoke them
  // when onSchedule, so we can get all stack traces for
  // all kinds of tasks with one error thrown.
  childDetectZone.run(() => {
    childDetectZone.runGuarded(() => {
      const fakeTransitionTo = () => {};
      childDetectZone.scheduleEventTask(
          blacklistedStackFramesSymbol,
          () => {
            childDetectZone.scheduleMacroTask(
                blacklistedStackFramesSymbol,
                () => {
                  childDetectZone.scheduleMicroTask(
                      blacklistedStackFramesSymbol,
                      () => {
                        throw new Error();
                      },
                      undefined,
                      (t: Task) => {
                        (t as any)._transitionTo = fakeTransitionTo;
                        t.invoke();
                      });
                  childDetectZone.scheduleMicroTask(
                      blacklistedStackFramesSymbol,
                      () => {
                        throw Error();
                      },
                      undefined,
                      (t: Task) => {
                        (t as any)._transitionTo = fakeTransitionTo;
                        t.invoke();
                      });
                },
                undefined,
                (t) => {
                  (t as any)._transitionTo = fakeTransitionTo;
                  t.invoke();
                },
                () => {});
          },
          undefined,
          (t) => {
            (t as any)._transitionTo = fakeTransitionTo;
            t.invoke();
          },
          () => {});
    });
  });
  Error.stackTraceLimit = originalStackTraceLimit;
});
the_stack
import * as fs from "fs"; import * as path from "path"; import * as vscode from "vscode"; import { CancellationToken, Command, CompletionItem, CompletionItemKind, CompletionItemProvider, Disposable, ExtensionContext, FileSystemWatcher, Position, TextDocument, TextEdit, Uri, WorkspaceConfiguration, } from "vscode"; import resolvePackage from "./resolve"; export default class IntellisenseProvider implements CompletionItemProvider { /** * Builtin Node.js modules */ public static readonly builtinModules: string[] = getBuiltinModules(); public static readonly configPath: string = "node-module-intellisense"; public static readonly defaultAutoStripExtensions: string[] = [ ".js", ".jsx", ".ts", ".d.ts", ".tsx" ]; public static readonly languageSelector: string[] = [ "javascript", "javascriptreact", "typescript", "typescriptreact", "html", "coffeescript" ]; public static readonly triggerCharacters: string[] = [ "'", "\"", "/" ]; private context: ExtensionContext; private dependencies: string[] = []; private packageJsonFile: string = this.resolveWorkspacePath("package.json"); private packageJsonWatcher: FileSystemWatcher; private config: WorkspaceConfiguration; private enableDevDependencies: boolean = true; private enableFileModules: boolean = true; private modulePaths: string[] = []; private enableBuiltinModules: boolean = true; private autoStripExtensions: string[] = IntellisenseProvider.defaultAutoStripExtensions; private readonly disposables: Disposable[] = []; public activate(context: ExtensionContext) { this.context = context; context.subscriptions.push(this); // load configuration const loadConfig = () => { this.config = vscode.workspace.getConfiguration(IntellisenseProvider.configPath); this.enableBuiltinModules = this.config.get("scanBuiltinModules", true); this.enableDevDependencies = this.config.get("scanDevDependencies", true); this.enableFileModules = this.config.get("scanFileModules", true); this.modulePaths = this.config.get("modulePaths", []); 
this.autoStripExtensions = this.config.get("autoStripExtensions", IntellisenseProvider.defaultAutoStripExtensions); this.autoStripExtensions.sort((a, b) => b.length - a.length); // this.debug(this.autoStripExtensions); }; vscode.workspace.onDidChangeConfiguration((e) => { loadConfig(); // this.debug("reload config", this.config); }); loadConfig(); // this.debug("load config", this.config); // create completion provider vscode.languages.registerCompletionItemProvider(IntellisenseProvider.languageSelector, this, ...IntellisenseProvider.triggerCharacters); // this.debug("activate"); // this.debug("builtinModules", IntellisenseProvider.builtinModules); // load dependencies from package.json file this.updateDependenciesFromPackageJson(); // watching package.json and auto update dependencies info this.packageJsonWatcher = vscode.workspace.createFileSystemWatcher("**/package.json"); this.disposables.push(this.packageJsonWatcher); const onPackageJsonFileChange = (e: Uri) => { // this.debug("workspace file change:", e); if (e.fsPath === this.packageJsonFile) { this.updateDependenciesFromPackageJson(); } }; this.packageJsonWatcher.onDidChange(onPackageJsonFileChange); this.packageJsonWatcher.onDidCreate(onPackageJsonFileChange); this.packageJsonWatcher.onDidDelete(onPackageJsonFileChange); } public dispose() { // this.debug("dispose"); this.disposables.forEach((item) => { try { item.dispose(); } catch (err) { // this.debug("dispose", err); } }); } /** * Provide completion items for the given position and document. * * @param document The document in which the command was invoked. * @param position The position at which the command was invoked. * @param token A cancellation token. * @return An array of completions, a [completion list](#CompletionList), or a thenable that resolves to either. * The lack of a result can be signaled by returning `undefined`, `null`, or an empty array. 
*/ public async provideCompletionItems(document: TextDocument, position: Position, token: CancellationToken): Promise<CompletionItem[]> { const info = parseLine(document, position); if (!info) { return []; } // this.debug("provideCompletionItems: parseLine", position, info); let list: CompletionItem[] = []; const isShowPackageSubPath = info.isPackagePath && info.search.indexOf("/") > 0; const isShowPackage = info.isPackagePath || info.search === ""; const isShowFile = info.isAbsoultePath || info.isRelativePath || info.search === ""; const isIncludeExtname = info.type === "reference"; if (isShowPackageSubPath) { // package sub path let pkgDir; try { pkgDir = await resolvePackageDirectory(info.packageName, document.uri.fsPath); const currentDir = path.resolve(pkgDir, info.packageSubPath); const files = await this.readCurrentDirectory(currentDir, info.search, false); // fix insertText files.forEach((item) => { item.insertText = item.label.slice(info.search.length); }); list = list.concat(files); } catch (err) { this.debug("resolvePackageDirectory", err); } } else { // builtin modules if (isShowPackage && this.enableBuiltinModules) { list = IntellisenseProvider.builtinModules.map((name) => { return createCompletionItem(name, CompletionItemKind.Module, { detail: "builtin module" }); }); } // packages npm dependencies if (isShowPackage) { list = list.concat(this.dependencies.map((name) => { return createCompletionItem(name, CompletionItemKind.Module, { detail: "npm dependencies" }); })); } } // packages from relative path if (isShowFile && this.enableFileModules) { const currentDir = path.resolve(path.dirname(document.uri.fsPath), info.search); const files = await this.readCurrentDirectory(currentDir, info.search || "./", isIncludeExtname); // fix insertText files.forEach((item) => { item.insertText = item.label.slice(info.search.length); }); list = list.concat(files); } // packages from relative path if (this.modulePaths.length > 0) { for (const modulePath of 
this.modulePaths) { const currentDir = this.resolveWorkspacePath(modulePath.replace("${workspaceRoot}", ""), info.search || ""); const files = await this.readCurrentDirectory(currentDir, info.search || "", isIncludeExtname); // fix insertText files.forEach((item) => { item.insertText = item.label.slice(info.search.length); }); list = list.concat(files); } } // this.debug("provideCompletionItems", list); return list; } /** * Given a completion item fill in more data, like [doc-comment](#CompletionItem.documentation) * or [details](#CompletionItem.detail). * * The editor will only resolve a completion item once. * * @param item A completion item currently active in the UI. * @param token A cancellation token. * @return The resolved completion item or a thenable that resolves to of such. It is OK to return the given * `item`. When no result is returned, the given `item` will be used. */ public resolveCompletionItem?(item: CompletionItem, token: CancellationToken): CompletionItem | Thenable<CompletionItem> { // this.debug("resolveCompletionItem", item); return item; } private debug(...data: any[]): void { // tslint:disable-next-line:no-console console.log("IntellisenseProvider debug:", ...data); } private showWarning(msg: string): void { vscode.window.showWarningMessage(`node-module-intellisense: ${ msg }`); } private resolveWorkspacePath(...paths: string[]): string { if (vscode.workspace.rootPath) { return path.resolve(vscode.workspace.rootPath, ...paths); } return path.resolve(...paths); } private async updateDependenciesFromPackageJson(): Promise<void> { // check if file exists const exists = await isFileExists(this.packageJsonFile); if (!exists) { // this.debug("package.json file not exists"); return; } // get file content let data: string; try { data = (await readFileContent(this.packageJsonFile)).toString(); } catch (err) { return this.showWarning(err.message); } // parse JSON file let json; try { json = JSON.parse(data.toString()); } catch (err) { return 
this.showWarning(`parsing package.json file error: ${ err.message }`); } // get dependencies const list = new Set<string>(); if (json.dependencies) { Object.keys(json.dependencies).forEach((name) => list.add(name)); } if (this.enableDevDependencies && json.devDependencies) { Object.keys(json.devDependencies).forEach((name) => list.add(name)); } this.dependencies = Array.from(list.values()); // this.debug("load dependencies from package.json:", this.dependencies); } private async readCurrentDirectory(dir: string, prefix: string, isIncludeExtname: boolean): Promise<CompletionItem[]> { const names = await readdir(dir); const list: CompletionItem[] = []; const fileMap = new Map<string, boolean>(); const relativePathInfo = (p) => { if (vscode.workspace.rootPath) { return `relative to workspace: ${ path.relative(vscode.workspace.rootPath, p) }`; } return `absolute path: ${ p }`; }; list.push(createCompletionItem("..", CompletionItemKind.Module, { detail: "directory", documentation: relativePathInfo(path.dirname(dir)), })); for (const name of names) { const realPath = path.join(dir, name); const stats = await readFileStats(realPath); if (stats.isDirectory()) { // directory list.push(createCompletionItem(`${ prefix }${ name }`, CompletionItemKind.Module, { detail: "directory", documentation: relativePathInfo(realPath), })); } else if (stats.isFile()) { // file const [ strip, ext ] = parseFileExtensionName(name, this.autoStripExtensions); this.debug("FILE", name, strip, ext); let n = name; if (!isIncludeExtname && strip) { n = name.slice(0, name.length - ext.length); } if (!fileMap.has(n)) { fileMap.set(n, true); list.push(createCompletionItem(`${ prefix }${ n }`, CompletionItemKind.File, { detail: "file module", documentation: relativePathInfo(realPath), })); } } } return list; } } /** * returns builtin modules */ function getBuiltinModules(): string[] { return Object.keys((process as any).binding("natives")).filter((n) => { if (n.indexOf("_") !== -1) { return false; } if 
(n.indexOf("/") !== -1) { return false; } if (n.indexOf("-") !== -1) { return false; } return true; }); } interface ExtraCompletionInfo { label?: string; kind?: CompletionItemKind; detail?: string; documentation?: string; sortText?: string; filterText?: string; insertText?: string; command?: Command; textEdit?: TextEdit; additionalTextEdits?: TextEdit; } /** * create CompletionItem */ function createCompletionItem(name: string, kind: CompletionItemKind, info: ExtraCompletionInfo): CompletionItem { const item = new CompletionItem(name, kind); Object.assign(item, info); return item; } /** * returns true if file is exists */ function isFileExists(filename: string): Promise<boolean> { return new Promise((resolve, reject) => { fs.exists(filename, resolve); }); } /** * returns file content */ function readFileContent(filename: string): Promise<Buffer> { return new Promise((resolve, reject) => { fs.readFile(filename, (err, data) => { if (err) { return reject(err); } resolve(data); }); }); } /** * returns file stats */ function readFileStats(filename: string): Promise<fs.Stats> { return new Promise((resolve, reject) => { fs.stat(filename, (err, stats) => { if (err) { return reject(err); } resolve(stats); }); }); } /** * returns directory files */ function readdir(dir: string): Promise<string[]> { return new Promise((resolve, reject) => { fs.readdir(dir, (err, list) => { if (err) { return reject(err); } resolve(list); }); }); } interface IntellisenseLineInfo { line?: string; quotation?: string; quotationStart?: number; search?: string; isAbsoultePath?: boolean; isRelativePath?: boolean; isPackagePath?: boolean; packageName?: string; packageSubPath?: string; position?: Position; type?: StatementType; } type StatementType = "require" | "import" | "export" | "reference" | false; /** * Parse current line */ function parseLine(document: TextDocument, position: Position): IntellisenseLineInfo { const info: IntellisenseLineInfo = { position, }; const line = 
document.getText(document.lineAt(position).range); info.type = getStatementType(line); if (!info.type) { return; } const [ i, quotation ] = getForwardQuotation(line, position.character); info.quotation = quotation; info.quotationStart = i; info.search = line.slice(i + 1, position.character); if (info.search[0] === ".") { info.isRelativePath = true; } else if (info.search[0] === "/") { info.isAbsoultePath = true; } else { info.isPackagePath = true; let j = info.search.indexOf(path.sep); if (j !== -1 && info.search[0] === "@") { j = info.search.indexOf(path.sep, j + 1); } if (j === -1) { info.packageName = info.search; info.packageSubPath = ""; } else { info.packageName = info.search.slice(0, j); info.packageSubPath = info.search.slice(j + 1); } } return info; } /** * Returns statement type */ function getStatementType(line: string): StatementType { line = line.trim(); if (line.indexOf("import ") === 0) { return "import"; } if (line.indexOf("require(") !== -1) { return "require"; } if (line.indexOf("export ") === 0 && line.indexOf(" from ") !== -1) { return "export"; } if (line.trim().indexOf("/// <reference ") === 0) { return "reference"; } return false; } /** * Returns forward quotation position and character */ function getForwardQuotation(line: string, index: number): [ number, string ] { const i = line.lastIndexOf("\"", index - 1); const j = line.lastIndexOf("'", index - 1); if (i > j) { return [ i, "\"" ]; } return [ j, "'" ]; } /** * Parse File extension name */ function parseFileExtensionName(filename: string, autoStripExtensions: string[]): [ boolean, string ] { const len = filename.length; for (const ext of autoStripExtensions) { if (filename.slice(len - ext.length) === ext) { return [ true, ext ]; } } return [ false, "" ]; } /** * Returns require package directory from current path */ function resolvePackageDirectory(pkgName: string, filename: string): Promise<string> { return resolvePackage(pkgName, path.dirname(filename)); }
the_stack
import ava, {TestInterface} from 'ava'; import {DialogflowConversation} from '../conv'; import * as Api from '../api/v2'; import * as ActionsApi from '../../actionssdk/api/v2'; import {ContextValues} from '../context'; import {Incoming} from '../incoming'; import {clone} from '../../../common'; import {Permission, SimpleResponse, Image, List} from '../../actionssdk'; interface AvaContext { conv: DialogflowConversation; body: Api.GoogleCloudDialogflowV2WebhookRequest; request: ActionsApi.GoogleActionsV2AppRequest; } const test = ava as TestInterface<AvaContext>; test.beforeEach(t => { t.context.request = { isInSandbox: true, }; t.context.body = { originalDetectIntentRequest: { payload: t.context.request, }, }; t.context.conv = new DialogflowConversation({ body: t.context.body, headers: {}, }); }); test('conv can be instantiated', t => { t.true(t.context.conv instanceof DialogflowConversation); }); test('conv.request is set correctly', t => { t.is(t.context.conv.request, t.context.request); }); test('conv.body is set correctly', t => { t.is(t.context.conv.body, t.context.body); }); test('conv.action is parsed correctly', t => { const action = 'abc123'; const conv = new DialogflowConversation({ body: { queryResult: { action, }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); t.is(conv.action, action); }); test('conv.intent is parsed correctly', t => { const intent = 'abc123'; const conv = new DialogflowConversation({ body: { queryResult: { intent: { displayName: intent, }, }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); t.is(conv.intent, intent); }); test('conv.parameters is parsed correctly', t => { const parameters = { a: '1', b: '2', }; const conv = new DialogflowConversation({ body: { queryResult: { parameters, }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); 
t.is(conv.parameters, parameters); }); test('conv.contexts is an instance of ContextValues', t => { t.true(t.context.conv.contexts instanceof ContextValues); }); test('conv.incoming is an instance of Incoming', t => { t.true(t.context.conv.incoming instanceof Incoming); }); test('conv.query is parsed correctly', t => { const query = 'abc123'; const conv = new DialogflowConversation({ body: { queryResult: { queryText: query, }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); t.is(conv.query, query); }); test('conv.version is detected correctly to be 2', t => { t.is(t.context.conv.version, 2); }); test('conv.followup sets the raw json correctly with no parameters', t => { const lang = 'ab-CD'; const event = 'abc_123'; const conv = new DialogflowConversation({ body: { queryResult: { languageCode: lang, }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); conv.followup(event); t.deepEqual(clone(conv.serialize()), { followupEventInput: { name: event, languageCode: lang, }, }); }); test('conv.followup sets the raw json correctly with parameters', t => { const lang = 'ab-CD'; const event = 'abc_123'; const parameters = { a: '1', b: '2', }; const conv = new DialogflowConversation({ body: { queryResult: { languageCode: lang, }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); conv.followup(event, parameters); t.deepEqual(clone(conv.serialize()), { followupEventInput: { name: event, languageCode: lang, parameters, }, }); }); test('conv.followup sets the raw json correctly with parameters and lang', t => { const lang = 'ef-GH'; const event = 'abc_123'; const parameters = { a: '1', b: '2', }; const conv = new DialogflowConversation({ body: { queryResult: { languageCode: 'ab-CD', }, originalDetectIntentRequest: { payload: {}, }, } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); 
conv.followup(event, parameters, lang); t.deepEqual(clone(conv.serialize()), { followupEventInput: { name: event, languageCode: lang, parameters, }, }); }); test('conv.serialize returns the raw json when set with conv.json', t => { const json = { a: '1', b: '2', c: { d: '3', e: '4', }, }; t.context.conv.json(json); t.deepEqual(t.context.conv.serialize() as typeof json, json); }); test('conv.serialize returns the correct response with simple response string', t => { const response = 'abc123'; const conv = new DialogflowConversation({ body: {}, headers: {}, }); conv.add(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: 'abc123', }, }, ], }, }, }, }); }); test('conv.serialize returns the correct response with permission response', t => { const conv = new DialogflowConversation({ body: {}, headers: {}, }); conv.ask( new Permission({ permissions: 'NAME', context: 'To read your mind', }) ); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, systemIntent: { intent: 'actions.intent.PERMISSION', data: { '@type': 'type.googleapis.com/google.actions.v2.PermissionValueSpec', optContext: 'To read your mind', permissions: ['NAME'], }, }, }, }, }); }); test('conv.serialize returns the correct response with simple response string and reprompts', t => { const response = 'abc123'; const reprompt1 = 'reprompt123'; const reprompt2 = 'reprompt456'; const conv = new DialogflowConversation({ body: {} as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); conv.add(response); conv.noInputs = [reprompt1, new SimpleResponse(reprompt2)]; t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: 'abc123', }, }, ], }, noInputPrompts: [ { textToSpeech: reprompt1, }, { textToSpeech: reprompt2, }, ], }, }, }); }); const simulatorConv = () => new 
DialogflowConversation({ body: { responseId: 'responseIdRandom123', queryResult: { queryText: 'test', action: 'input.unknown', parameters: {}, allRequiredParamsPresent: true, fulfillmentText: 'Sorry, what was that?', fulfillmentMessages: [ { text: { text: ['One more time?'], }, }, ], intent: { name: 'projects/projectRandom/agent/intents/randomId', displayName: 'Default Fallback Intent', isFallback: true, }, intentDetectionConfidence: 1, languageCode: 'en', }, originalDetectIntentRequest: { payload: {}, }, session: 'sessionRandom', } as Api.GoogleCloudDialogflowV2WebhookRequest, headers: {}, }); test('conv.serialize w/ simple response has fulfillmentText when from simulator', t => { const response = 'abc123'; const conv = simulatorConv(); conv.add(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, fulfillmentText: response, }); }); test('conv.serialize w/ simple response text has fulfillmentText when from simulator', t => { const speech = 'abc123'; const text = 'abcd1234'; const conv = simulatorConv(); conv.add( new SimpleResponse({ speech, text, }) ); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: speech, displayText: text, }, }, ], }, }, }, fulfillmentText: text, }); }); test('conv.serialize w/ two simple responses has fulfillmentText warning for simulator', t => { const response = 'abc123'; const response2 = 'abcd1234'; const conv = simulatorConv(); conv.add(response); conv.add(response2); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, { simpleResponse: { textToSpeech: response2, }, }, ], }, }, }, fulfillmentText: 'Cannot display response in Dialogflow simulator. 
' + 'Please test on the Google Assistant simulator instead.', }); }); test('conv.serialize w/ solo helper has fulfillmentText warning for simulator', t => { const permission: 'NAME' = 'NAME'; const context = 'To read your mind'; const conv = simulatorConv(); conv.ask( new Permission({ permissions: permission, context, }) ); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, systemIntent: { data: { '@type': 'type.googleapis.com/google.actions.v2.PermissionValueSpec', optContext: context, permissions: [permission], }, intent: 'actions.intent.PERMISSION', }, }, }, fulfillmentText: 'Cannot display response in Dialogflow simulator. ' + 'Please test on the Google Assistant simulator instead.', }); }); test('conv.serialize w/ non solo helper has fulfillmentText warning for simulator', t => { const response = 'abc123'; const conv = simulatorConv(); conv.ask(response); conv.ask( new List({ items: { one: { title: 'one1', synonyms: ['one11', 'one12'], }, two: { title: 'two1', synonyms: ['two11', 'two12'], }, }, }) ); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, systemIntent: { data: { '@type': 'type.googleapis.com/google.actions.v2.OptionValueSpec', listSelect: { items: [ { optionInfo: { key: 'one', synonyms: ['one11', 'one12'], }, title: 'one1', }, { optionInfo: { key: 'two', synonyms: ['two11', 'two12'], }, title: 'two1', }, ], }, }, intent: 'actions.intent.OPTION', }, }, }, fulfillmentText: 'Cannot display response in Dialogflow simulator. 
' + 'Please test on the Google Assistant simulator instead.', }); }); test('conv.serialize w/ image has fulfillmentText warning for simulator', t => { const response = 'abc123'; const image = 'abcd1234'; const alt = 'abcde12345'; const conv = simulatorConv(); conv.add(response); conv.add( new Image({ url: image, alt, }) ); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, { basicCard: { image: { accessibilityText: alt, url: image, }, }, }, ], }, }, }, fulfillmentText: 'Cannot display response in Dialogflow simulator. ' + 'Please test on the Google Assistant simulator instead.', }); }); test('conv.serialize defaults to v2 for empty request', t => { const response = 'abc123'; const conv = new DialogflowConversation({ body: {}, headers: {}, }); conv.add(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, }); }); test('conv.data is parsed correctly', t => { const session = 'sessionId123'; const data = { a: '1', b: '2', c: { d: '3', e: '4', }, }; const conv = new DialogflowConversation({ body: { queryResult: { outputContexts: [ { name: `${session}/contexts/_actions_on_google`, parameters: { data: JSON.stringify(data), }, }, ], }, } as Api.GoogleCloudDialogflowV2WebhookRequest, }); t.deepEqual(conv.data, data); }); test('conv generates no contexts from empty conv.data', t => { const response = "What's up?"; const conv = new DialogflowConversation(); t.deepEqual(conv.data, {}); conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, }); }); test('conv generates first conv.data replaced correctly', t => { const session = 'sessionId123'; const response = "What's up?"; const data = { a: '1', b: '2', c: 
{ d: '3', e: '4', }, }; const conv = new DialogflowConversation({ body: { session, } as Api.GoogleCloudDialogflowV2WebhookRequest, }); t.deepEqual(conv.data, {}); conv.ask(response); conv.data = data; t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, outputContexts: [ { name: `${session}/contexts/_actions_on_google`, lifespanCount: 99, parameters: { data: JSON.stringify(data), }, }, ], }); }); test('conv generates first conv.data mutated correctly', t => { const session = 'sessionId123'; const response = "What's up?"; const a = '7'; const conv = new DialogflowConversation<{a?: string}>({ body: { session, } as Api.GoogleCloudDialogflowV2WebhookRequest, }); t.deepEqual(conv.data, {}); conv.ask(response); conv.data.a = a; t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, outputContexts: [ { name: `${session}/contexts/_actions_on_google`, lifespanCount: 99, parameters: { data: JSON.stringify({a}), }, }, ], }); }); test('conv generates different conv.data correctly', t => { const session = 'sessionId123'; const response = "What's up?"; const data = { a: '1', b: '2', c: { d: '3', e: '4', }, }; const e = '6'; const conv = new DialogflowConversation<typeof data>({ body: { session, queryResult: { outputContexts: [ { name: `${session}/contexts/_actions_on_google`, parameters: { data: JSON.stringify(data), }, }, ], }, } as Api.GoogleCloudDialogflowV2WebhookRequest, }); t.deepEqual(conv.data, data); conv.ask(response); conv.data.c.e = e; t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, outputContexts: [ { name: `${session}/contexts/_actions_on_google`, lifespanCount: 99, parameters: { data: JSON.stringify({ a: 
'1', b: '2', c: { d: '3', e, }, }), }, }, ], }); }); test('conv generates different conv.data correctly when only with init data', t => { const session = 'sessionId123'; const response = "What's up?"; const data = { a: '1', b: '2', c: { d: '3', e: '4', }, }; const a = '7'; const conv = new DialogflowConversation<typeof data>({ body: { session, } as Api.GoogleCloudDialogflowV2WebhookRequest, init: { data, }, }); t.deepEqual(conv.data, data); conv.ask(response); conv.data.a = a; t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, outputContexts: [ { name: `${session}/contexts/_actions_on_google`, lifespanCount: 99, parameters: { data: JSON.stringify({ a, b: '2', c: { d: '3', e: '4', }, }), }, }, ], }); }); test('conv generates same conv.data as no output contexts', t => { const session = 'sessionId123'; const response = "What's up?"; const data = { a: '1', b: '2', c: { d: '3', e: '4', }, }; const conv = new DialogflowConversation<typeof data>({ body: { session, queryResult: { outputContexts: [ { name: `${session}/contexts/_actions_on_google`, parameters: { data: JSON.stringify(data), }, }, ], }, } as Api.GoogleCloudDialogflowV2WebhookRequest, }); t.deepEqual(conv.data, data); conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, }); }); test('conv sends userStorage when it is not empty', t => { const response = "What's up?"; const data = { a: '1', b: '2', c: { d: '3', e: '4', }, }; const conv = new DialogflowConversation(); t.deepEqual(conv.data, {}); conv.user.storage = data; conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, userStorage: JSON.stringify({data}), }, }, }); }); 
test('conv does not send userStorage when it is empty', t => { const response = "What's up?"; const conv = new DialogflowConversation(); t.deepEqual(conv.user.storage, {}); conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, }); }); test('conv does not detect coming from simulator given no responseId', t => { const response = "What's up?"; const conv = new DialogflowConversation({ body: { originalDetectIntentRequest: { payload: {}, }, }, }); conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, }); }); test('conv sends speechBiasingHints when set', t => { const response = 'What is your favorite color out of red, blue, and green?'; const biasing = ['red', 'blue', 'green']; const conv = new DialogflowConversation(); conv.speechBiasing = biasing; conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { simpleResponse: { textToSpeech: response, }, }, ], }, speechBiasingHints: biasing, }, }, }); }); test('conv does not error out when simple response is after image', t => { const response = 'How are you?'; const conv = new DialogflowConversation(); conv.ask(new Image({url: '', alt: ''})); conv.ask(response); t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { basicCard: { image: { url: '', accessibilityText: '', }, }, }, { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, }); }); test('conv w/ simple response after image has fulfillmentText warning for simulator', t => { const response = 'abc123'; const image = 'abcd1234'; const alt = 'abcde12345'; const conv = simulatorConv(); conv.add( new Image({ url: image, alt, }) ); conv.add(response); 
t.deepEqual(clone(conv.serialize()), { payload: { google: { expectUserResponse: true, richResponse: { items: [ { basicCard: { image: { accessibilityText: alt, url: image, }, }, }, { simpleResponse: { textToSpeech: response, }, }, ], }, }, }, fulfillmentText: 'Cannot display response in Dialogflow simulator. ' + 'Please test on the Google Assistant simulator instead.', }); });
the_stack
import * as ts from 'typescript';

/**
 * The options for rewriting the file.
 */
export interface RewriteOptions {
  /**
   * Optional module names that should result in replacing to something different than just 'aws-cdk-lib'.
   */
  readonly customModules?: { [moduleName: string]: string };

  /**
   * When true, this will rewrite imports of generated L1s to reference aws-cdk-lib.
   *
   * For example:
   * `import * as codestar from './codestar.generated';`
   * becomes:
   * `import * as codestar from 'aws-cdk-lib/aws-codestar';`
   */
  readonly rewriteCfnImports?: boolean;

  /**
   * The unscoped name of the package, e.g. 'aws-kinesisfirehose'.
   */
  readonly packageUnscopedName?: string;

  /**
   * When true, imports to known types from the 'constructs' library will be rewritten
   * to explicitly import from 'constructs', rather than '@aws-cdk/core'.
   * @default false
   */
  readonly rewriteConstructsImports?: boolean;
}

/**
 * Re-writes "hyper-modular" CDK imports (most packages in `@aws-cdk/*`) to the
 * relevant "mono" CDK import path. The re-writing will only modify the imported
 * library path, preserving the existing quote style, etc...
 *
 * Syntax errors in the source file being processed may cause some import
 * statements to not be re-written.
 *
 * Supported import statement forms are:
 * - `import * as lib from '@aws-cdk/lib';`
 * - `import { Type } from '@aws-cdk/lib';`
 * - `import '@aws-cdk/lib';`
 * - `import lib = require('@aws-cdk/lib');`
 * - `import { Type } = require('@aws-cdk/lib');`
 * - `require('@aws-cdk/lib');`
 *
 * @param sourceText the source code where imports should be re-written.
 * @param libName the mono CDK library name.
 * @param fileName a customized file name to provide the TypeScript processor.
 *
 * @returns the updated source code.
 */
export function rewriteMonoPackageImports(sourceText: string, libName: string, fileName: string = 'index.ts', options: RewriteOptions = {}): string {
  return rewriteImports(
    sourceText,
    (modPath, importedElements) => updatedExternalLocation(modPath, libName, options, importedElements),
    fileName,
    options.rewriteConstructsImports,
  );
}

/**
 * Re-writes READMEs of "hyper-modular" CDK imports (most packages in `@aws-cdk/*`)
 * to the relevant "mono" CDK import path. The re-writing will only modify the imported
 * library path, preserving the existing quote style, etc...
 *
 * Syntax errors in the README snippets being processed may cause some import
 * statements to not be re-written.
 *
 * Supported import statement forms are:
 * - `import * as lib from '@aws-cdk/lib';`
 * - `import { Type } from '@aws-cdk/lib';`
 * - `import '@aws-cdk/lib';`
 * - `import lib = require('@aws-cdk/lib');`
 * - `import { Type } = require('@aws-cdk/lib');`
 * - `require('@aws-cdk/lib');`
 *
 * @param sourceText the README where snippet imports should be re-written.
 * @param libName the mono CDK library name.
 * @param fileName a customized file name to provide the TypeScript processor.
 *
 * @returns the updated source code.
 */
export function rewriteReadmeImports(sourceText: string, libName: string, fileName: string = 'index.ts', options: RewriteOptions = {}): string {
  // Only fenced code blocks (```ts / ```typescript / ```text) are rewritten; the
  // surrounding prose is passed through untouched.
  return sourceText.replace(/(```(?:ts|typescript|text)[^\n]*\n)(.*?)(\n\s*```)/gs, (_m, prefix, body, suffix) => {
    return prefix + rewriteImports(
      body,
      (modPath, importedElements) => updatedExternalLocation(modPath, libName, options, importedElements),
      fileName,
      options.rewriteConstructsImports,
    ) + suffix;
  });
}

/**
 * Re-writes "hyper-modular" CDK imports (most packages in `@aws-cdk/*`) to the
 * relevant "mono" CDK import path. The re-writing will only modify the imported
 * library path, preserving the existing quote style, etc...
 *
 * Syntax errors in the source file being processed may cause some import
 * statements to not be re-written.
 *
 * Supported import statement forms are:
 * - `import * as lib from '@aws-cdk/lib';`
 * - `import { Type } from '@aws-cdk/lib';`
 * - `import '@aws-cdk/lib';`
 * - `import lib = require('@aws-cdk/lib');`
 * - `import { Type } = require('@aws-cdk/lib');`
 * - `require('@aws-cdk/lib');`
 *
 * @param sourceText the source code where imports should be re-written.
 * @param updatedLocation a function that returns the updated location of the import.
 * @param fileName a customized file name to provide the TypeScript processor.
 *
 * @returns the updated source code.
 */
export function rewriteImports(
  sourceText: string,
  updatedLocation: (modulePath: string, importedElements?: ts.NodeArray<ts.ImportSpecifier>) => string | undefined,
  fileName: string = 'index.ts',
  rewriteConstructsImports: boolean = false,
): string {
  const sourceFile = ts.createSourceFile(fileName, sourceText, ts.ScriptTarget.ES2018, true);

  // The transform pass only *collects* replacements (with their original node
  // positions); the actual text splicing happens in rewriter.rewriteImports().
  const rewriter = new ImportRewriter(sourceFile, updatedLocation, rewriteConstructsImports);
  ts.transform(sourceFile, [rewriter.rewriteTransformer()]);

  return rewriter.rewriteImports();
}

class ImportRewriter {
  // Types that should be imported from 'constructs' instead of '@aws-cdk/core'.
  private static CONSTRUCTS_TYPES = ['Construct', 'IConstruct'];

  // Pending text replacements, keyed by the original AST node whose source span
  // will be overwritten. `quoted` means the node's surrounding quote characters
  // (or braces) must be kept in place.
  private readonly replacements = new Array<{ original: ts.Node, updatedLocation: string, quoted: boolean }>();

  // Constructs rewrites
  private readonly constructsNamedImports: Set<ts.ImportSpecifier> = new Set();
  private readonly constructsId = 'constructs';
  private firstImportNode?: ts.Node;
  private constructsNamespaceImportRequired: boolean = false;

  public constructor(
    private readonly sourceFile: ts.SourceFile,
    private readonly updatedLocation: (modulePath: string, importedElements?: ts.NodeArray<ts.ImportSpecifier>) => string | undefined,
    private readonly rewriteConstructsImports: boolean,
  ) { }

  public rewriteTransformer(): ts.TransformerFactory<ts.SourceFile> {
    // Namespace aliases of '@aws-cdk/core' (e.g. `core` in `import * as core from
    // '@aws-cdk/core'`), used to find qualified references like `core.Construct`.
    const coreNamespaceImports: Set<string> = new Set();

    return (context) => {
      return (sourceFile) => {
        const visitor = <T extends ts.Node>(node: T): ts.VisitResult<T> => {
          const moduleSpecifier = getModuleSpecifier(node);
          if (moduleSpecifier) {
            // Import-like node: record replacements and skip its children.
            return this.visitImportNode<T>(node, coreNamespaceImports, moduleSpecifier);
          }

          // Rewrite any access or type references with a format `foo.Construct`,
          // where `foo` matches the name of a namespace import for '@aws-cdk/core'
          // Simple identifiers (e.g., readonly foo: Construct) do not need to be rewritten,
          // only qualified identifiers (e.g., cdk.Construct).
          if (ts.isIdentifier(node) && ImportRewriter.CONSTRUCTS_TYPES.includes(node.text)) {
            if (ts.isPropertyAccessExpression(node.parent)
              && ts.isIdentifier(node.parent.expression)
              && coreNamespaceImports.has(node.parent.expression.text)) {
              // Value position, e.g. `core.Construct.isConstruct(...)`
              this.replacements.push({ original: node.parent, updatedLocation: `${this.constructsId}.${node.text}`, quoted: false });
              this.constructsNamespaceImportRequired = true;
            } else if (ts.isQualifiedName(node.parent)
              && ts.isIdentifier(node.parent.left)
              && coreNamespaceImports.has(node.parent.left.text)) {
              // Type position, e.g. `scope: core.Construct`
              this.replacements.push({ original: node.parent, updatedLocation: `${this.constructsId}.${node.text}`, quoted: false });
              this.constructsNamespaceImportRequired = true;
            }
          }
          return ts.visitEachChild(node, visitor, context);
        };
        return ts.visitNode(sourceFile, visitor);
      };
    };
  }

  /**
   * Visit import nodes where a module specifier of some kind has been found.
   *
   * For most nodes, this simply involves rewriting the location of the module via `this.updatedLocation`.
   *
   * Assumes the current node is an import (of some type) that imports '@aws-cdk/core'.
   *
   * The following import types are supported:
   * - import * as core1 from '@aws-cdk/core';
   * - import core2 = require('@aws-cdk/core');
   * - import { Type1, Type2 as CoreType2 } from '@aws-cdk/core';
   * - import { Type1, Type2 as CoreType2 } = require('@aws-cdk/core');
   *
   * For all namespace imports, capture the namespace used so any references later can be updated.
   * For example, 'core1.Construct' needs to be renamed to 'constructs.Construct'.
   * For all named imports:
   * - If all named imports are constructs types, simply rename the import from core to constructs.
   * - If there's a split, the constructs types are removed and captured for later to go into a new import.
   *
   * @returns the node itself; returning it without visiting children tells the
   * caller's visitor to skip all other transforms for this subtree.
   */
  private visitImportNode<T extends ts.Node>(node: T, coreNamespaceImports: Set<string>, moduleSpecifier: ts.StringLiteral) {
    // Used later for constructs imports generation, to mark location and get indentation
    if (!this.firstImportNode) { this.firstImportNode = node; }

    // Special-case @aws-cdk/core for the case of constructs imports.
    if (this.rewriteConstructsImports && moduleSpecifier.text === '@aws-cdk/core') {
      if (ts.isImportEqualsDeclaration(node)) {
        // import core = require('@aws-cdk/core');
        coreNamespaceImports.add(node.name.text);
      } else if (ts.isImportDeclaration(node) && node.importClause?.namedBindings) {
        const bindings = node.importClause?.namedBindings;
        if (ts.isNamespaceImport(bindings)) {
          // import * as core from '@aws-cdk/core';
          coreNamespaceImports.add(bindings.name.text);
        } else if (ts.isNamedImports(bindings)) {
          // import { Type1, Type2 as CoreType2 } from '@aws-cdk/core';
          // import { Type1, Type2 as CoreType2 } = require('@aws-cdk/core');
          // Segment the types into core vs construct types
          const constructsImports: ts.ImportSpecifier[] = [];
          const coreImports: ts.ImportSpecifier[] = [];
          bindings.elements.forEach((e) => {
            // Check both the local name and (for aliased imports) the original name.
            if (ImportRewriter.CONSTRUCTS_TYPES.includes(e.name.text)
              || (e.propertyName && ImportRewriter.CONSTRUCTS_TYPES.includes(e.propertyName.text))) {
              constructsImports.push(e);
            } else {
              coreImports.push(e);
            }
          });

          // Three cases:
          // 1. There are no constructs imports. No special-casing to do.
          // 2. There are ONLY constructs imports. The whole import can be replaced.
          // 3. There is a mix. We must remove the constructs imports, and add them to a dedicated line.
          if (constructsImports.length > 0) {
            if (coreImports.length === 0) {
              // Rewrite the module to constructs, skipping the normal updateLocation replacement.
              this.replacements.push({ original: moduleSpecifier, updatedLocation: this.constructsId, quoted: true });
              return node;
            } else {
              // Track these named imports to add to a dedicated import statement later.
              constructsImports.forEach((i) => this.constructsNamedImports.add(i));
              // This replaces the interior of the import statement, between the braces:
              // import { Stack as CdkStack, StackProps } ...
              const coreBindings = ' ' + coreImports.map((e) => e.getText()).join(', ') + ' ';
              this.replacements.push({ original: bindings, updatedLocation: coreBindings, quoted: true });
            }
          }
        }
      }
    }

    // Normal path: ask the callback for the module's new location (if any).
    const newTarget = this.updatedLocation(moduleSpecifier.text, getImportedElements(node));
    if (newTarget != null) {
      this.replacements.push({ original: moduleSpecifier, updatedLocation: newTarget, quoted: true });
    }
    return node;
  }

  /**
   * Rewrites the imports -- and possibly some qualified identifiers -- in the source file,
   * based on the replacement information gathered via transforming the source through `rewriteTransformer()`.
   */
  public rewriteImports(): string {
    let updatedSourceText = this.sourceFile.text;
    // Applying replacements in reverse order, so node positions remain valid.
    const sortedReplacements = this.replacements.sort(
      ({ original: l }, { original: r }) => r.getStart(this.sourceFile) - l.getStart(this.sourceFile));
    for (const replacement of sortedReplacements) {
      // For quoted spans, shrink the replaced range by one char on each side so
      // the original quote characters are preserved.
      const offset = replacement.quoted ? 1 : 0;
      const prefix = updatedSourceText.substring(0, replacement.original.getStart(this.sourceFile) + offset);
      const suffix = updatedSourceText.substring(replacement.original.getEnd() - offset);
      updatedSourceText = prefix + replacement.updatedLocation + suffix;
    }

    // Lastly, prepend the source with any new constructs imports, as needed.
    const constructsImports = this.getConstructsImportsPrefix();
    if (constructsImports) {
      const insertionPoint = this.firstImportNode
        // Start of the line, past any leading comments or shebang lines
        ? (this.firstImportNode.getStart() - this.getNodeIndentation(this.firstImportNode))
        : 0;
      updatedSourceText = updatedSourceText.substring(0, insertionPoint) + constructsImports + updatedSourceText.substring(insertionPoint);
    }
    return updatedSourceText;
  }

  /**
   * If constructs imports are needed (either namespaced or named types),
   * this returns a string with one (or both) imports that can be prepended to the source.
   */
  private getConstructsImportsPrefix(): string | undefined {
    if (!this.constructsNamespaceImportRequired && this.constructsNamedImports.size === 0) { return undefined; }

    const indentation = ' '.repeat(this.getNodeIndentation(this.firstImportNode));
    let constructsImportPrefix = '';
    if (this.constructsNamespaceImportRequired) {
      constructsImportPrefix += `${indentation}import * as ${this.constructsId} from 'constructs';\n`;
    }
    if (this.constructsNamedImports.size > 0) {
      const namedImports = [...this.constructsNamedImports].map(i => i.getText()).join(', ');
      constructsImportPrefix += `${indentation}import { ${namedImports} } from 'constructs';\n`;
    }
    return constructsImportPrefix;
  }

  /**
   * For a given node, attempts to determine and return how many spaces of indentation are used.
   */
  private getNodeIndentation(node?: ts.Node): number {
    if (!node) { return 0; }
    // Get leading spaces for the final line in the node's trivia
    const fullText = node.getFullText();
    const trivia = fullText.substring(0, fullText.length - node.getWidth());
    const m = /( *)$/.exec(trivia);
    return m ? m[1].length : 0;
  }
}

/**
 * Returns the module specifier (location) of an import statement in one of the following forms:
 *   import from 'location';
 *   import * as name from 'location';
 *   import { Type } from 'location';
 *   import { Type } = require('location');
 *   import name = require('location');
 *   require('location');
 */
function getModuleSpecifier(node: ts.Node): ts.StringLiteral | undefined {
  if (ts.isImportDeclaration(node)) {
    // import style
    const moduleSpecifier = node.moduleSpecifier;
    if (ts.isStringLiteral(moduleSpecifier)) {
      // import from 'location';
      // import * as name from 'location';
      // import { Foo } from 'location';
      return moduleSpecifier;
    } else if (ts.isBinaryExpression(moduleSpecifier) && ts.isCallExpression(moduleSpecifier.right)) {
      // import { Type } = require('location');
      return getModuleSpecifier(moduleSpecifier.right);
    }
  } else if (
    ts.isImportEqualsDeclaration(node)
    && ts.isExternalModuleReference(node.moduleReference)
    && ts.isStringLiteral(node.moduleReference.expression)
  ) {
    // import name = require('location');
    return node.moduleReference.expression;
  } else if (
    (ts.isCallExpression(node))
    && ts.isIdentifier(node.expression)
    && node.expression.escapedText === 'require'
    && node.arguments.length === 1
  ) {
    // require('location');
    const argument = node.arguments[0];
    if (ts.isStringLiteral(argument)) {
      return argument;
    }
  } else if (ts.isExpressionStatement(node) && ts.isCallExpression(node.expression)) {
    // require('location');
    // This is an alternate AST version of it
    return getModuleSpecifier(node.expression);
  }
  return undefined;
}

// Packages that keep their '@aws-cdk/*' location and are never rewritten.
const EXEMPTIONS = new Set([
  '@aws-cdk/cloudformation-diff',

  // The dev-tools
  '@aws-cdk/cdk-build-tools',
  '@aws-cdk/cdk-integ-tools',
  '@aws-cdk/cfn2ts',
  '@aws-cdk/eslint-plugin',
  '@aws-cdk/pkglint',
]);

/**
 * Computes the updated location for an external module path, honoring (in order)
 * custom module overrides, Cfn (L1) import rewriting, exemptions, and the
 * standard '@aws-cdk/*' → mono-package mapping. Returns undefined when the
 * import should be left as-is.
 */
function updatedExternalLocation(
  modulePath: string,
  libName: string,
  options: RewriteOptions,
  importedElements?: ts.NodeArray<ts.ImportSpecifier>,
): string | undefined {
  const customModulePath = options.customModules?.[modulePath];
  if (customModulePath) {
    let awsCdkLibLocation = undefined;
    importedElements?.forEach(e => {
      if (e.name.text.startsWith('Cfn') || e.propertyName?.text.startsWith('Cfn')) {
        // This is an L1 import, so don't return the customModulePath (which is the alpha module).
        // Return the relevant aws-cdk-lib location.
        awsCdkLibLocation = `${libName}/${modulePath.substring('@aws-cdk/'.length)}`;
      }
    });
    if (awsCdkLibLocation) {
      return awsCdkLibLocation;
    }
    return customModulePath;
  }

  if (options.rewriteCfnImports && modulePath.endsWith(`${options.packageUnscopedName?.slice('aws-'.length)}.generated`)) {
    return `${libName}/${options.packageUnscopedName}`;
  }

  if (
    !modulePath.startsWith('@aws-cdk/')
    || EXEMPTIONS.has(modulePath)
    || Array.from(EXEMPTIONS).some((ex) => modulePath.startsWith(`${ex}/`))
  ) {
    return undefined;
  }

  if (modulePath.startsWith('@aws-cdk/core/lib')) {
    return `${libName}/core/lib/${modulePath.substring('@aws-cdk/core/lib/'.length)}`;
  }

  if (modulePath === '@aws-cdk/core') {
    return libName;
  }

  // These 2 are unchanged
  if (modulePath === '@aws-cdk/assert') {
    return '@aws-cdk/assert';
  }
  // can't use simple equality here,
  // because we have imports like "import '@aws-cdk/assert-internal/jest'"
  if (modulePath.startsWith('@aws-cdk/assert-internal')) {
    return modulePath.replace(/^@aws-cdk\/assert-internal/, '@aws-cdk/assert');
  }
  if (modulePath === '@aws-cdk/assert/jest') {
    return '@aws-cdk/assert/jest';
  }

  return `${libName}/${modulePath.substring('@aws-cdk/'.length)}`;
}

/**
 * Returns the names of all types imported via named imports of the form:
 *   import { Type } from 'location'
 */
function getImportedElements(node: ts.Node): ts.NodeArray<ts.ImportSpecifier> | undefined {
  if (
    ts.isImportDeclaration(node)
    && ts.isStringLiteral(node.moduleSpecifier)
    && node.importClause
    && node.importClause.namedBindings
    && ts.isNamedImports(node.importClause.namedBindings)
  ) {
    return node.importClause.namedBindings.elements;
  }
  return undefined;
}
the_stack
import Node from './node';
import { Key, Value } from './types';

export type Comparator<Key> = (a:Key, b:Key) => number;
export type Visitor<Key, Value> = (node:Node<Key, Value>) => void;
export type NodePrinter<Key, Value> = (node:Node<Key, Value>) => string;

/* follows "An implementation of top-down splaying"
 * by D. Sleator <sleator@cs.cmu.edu> March 1992
 */

function DEFAULT_COMPARE (a:Key, b:Key) : number {
  return a > b ? 1 : a < b ? -1 : 0;
}

// A mutable cursor over a singly-linked node list, used by sortedListToBST.
type TreeNodeList<Key, Value> = { head:Node<Key, Value>|null };

/**
 * Simple top down splay, not requiring i to be in the tree t.
 *
 * Brings the node closest to `i` to the root using the dummy node `N` to
 * collect the left ("l") and right ("r") assembled subtrees.
 * NOTE(review): `t` is typed `|null` but dereferenced immediately — callers
 * only invoke this on non-empty trees; confirm before changing call sites.
 */
function splay (i:Key, t:Node<Key, Value>|null, comparator:Comparator<Key>) : Node<Key, Value> {
  const N = new Node(null, null);
  let l = N;
  let r = N;

  while (true) {
    const cmp = comparator(i, t.key);
    //if (i < t.key) {
    if (cmp < 0) {
      if (t.left === null) break;
      //if (i < t.left.key) {
      if (comparator(i, t.left.key) < 0) {
        const y = t.left;                           /* rotate right */
        t.left = y.right;
        y.right = t;
        t = y;
        if (t.left === null) break;
      }
      r.left = t;                                   /* link right */
      r = t;
      t = t.left;
    //} else if (i > t.key) {
    } else if (cmp > 0) {
      if (t.right === null) break;
      //if (i > t.right.key) {
      if (comparator(i, t.right.key) > 0) {
        const y = t.right;                          /* rotate left */
        t.right = y.left;
        y.left = t;
        t = y;
        if (t.right === null) break;
      }
      l.right = t;                                  /* link left */
      l = t;
      t = t.right;
    } else break;
  }
  /* assemble */
  l.right = t.left;
  r.left = t.right;
  t.left = N.right;
  t.right = N.left;
  return t;
}

/**
 * Inserts `i` with payload `data` into tree `t`; duplicates are allowed
 * (a duplicate key goes to the left branch, via the `cmp >= 0` arm).
 * Returns the new root (the inserted node).
 */
function insert (
  i:Key, data:Value,
  t:Node<Key, Value>,
  comparator:Comparator<Key>,
) : Node<Key, Value> {
  const node = new Node(i, data);

  if (t === null) {
    node.left = node.right = null;
    return node;
  }

  t = splay(i, t, comparator);
  const cmp = comparator(i, t.key);
  if (cmp < 0) {
    node.left = t.left;
    node.right = t;
    t.left = null;
  } else if (cmp >= 0) {
    node.right = t.right;
    node.left = t;
    t.right = null;
  }
  return node;
}

/**
 * Splits tree `v` around `key` into subtrees strictly below and above it.
 * A node equal to `key` (if present) is dropped from both halves.
 */
function split (key:Key, v:Node<Key, Value>, comparator:Comparator<Key>) : {
  left:Node<Key, Value>|null,
  right:Node<Key, Value>|null,
} {
  let left = null;
  let right = null;
  if (v) {
    v = splay(key, v, comparator);

    const cmp = comparator(v.key, key);
    if (cmp === 0) {
      left = v.left;
      right = v.right;
    } else if (cmp < 0) {
      right = v.right;
      v.right = null;
      left = v;
    } else {
      left = v.left;
      v.left = null;
      right = v;
    }
  }
  return { left, right };
}

/**
 * Joins two trees; assumes every key in `left` is smaller than every key
 * in `right` (splaying left's key in `right` brings its minimum side up).
 */
function merge (
  left:Node<Key, Value>|null,
  right:Node<Key, Value>|null,
  comparator:Comparator<Key>,
) {
  if (right === null) return left;
  if (left === null) return right;

  right = splay(left.key, right, comparator);
  right.left = left;
  return right;
}

type StringCollector = (s:string) => void;

/**
 * Prints level of the tree
 */
function printRow (
  root:Node<Key, Value>,
  prefix:string,
  isTail:boolean,
  out:StringCollector,
  printNode:NodePrinter<Key, Value>,
) {
  if (root) {
    out(`${ prefix }${ isTail ? '└── ' : '├── ' }${ printNode(root) }\n`);
    const indent = prefix + (isTail ? ' ' : '│ ');
    if (root.left) printRow(root.left, indent, false, out, printNode);
    if (root.right) printRow(root.right, indent, true, out, printNode);
  }
}

export default class Tree<Key=number, Value=any> {
  private _comparator:Comparator<Key>;
  private _root:Node<Key, Value>|null = null;  // root of the splay tree
  private _size:number = 0;                    // number of stored nodes

  constructor (comparator = DEFAULT_COMPARE) {
    this._comparator = comparator;
  }

  /**
   * Inserts a key, allows duplicates
   */
  public insert (key:Key, data?:Value) : Node<Key, Value> {
    this._size++;
    return this._root = insert(key, data, this._root, this._comparator);
  }

  /**
   * Adds a key, if it is not present in the tree
   * (an existing equal key is splayed to the root, nothing is inserted).
   */
  public add (key:Key, data?:Value) : Node<Key, Value> {
    const node = new Node(key, data);

    if (this._root === null) {
      node.left = node.right = null;
      this._size++;
      this._root = node;
    }

    const comparator = this._comparator;
    const t = splay(key, this._root, comparator);
    const cmp = comparator(key, t.key);
    if (cmp === 0) this._root = t;
    else {
      if (cmp < 0) {
        node.left = t.left;
        node.right = t;
        t.left = null;
      } else if (cmp > 0) {
        node.right = t.right;
        node.left = t;
        t.right = null;
      }
      this._size++;
      this._root = node;
    }

    return this._root;
  }

  /**
   * Removes the node with the given key, if present.
   * @param {Key} key
   */
  public remove (key:Key) : void {
    this._root = this._remove(key, this._root, this._comparator);
  }

  /**
   * Deletes i from the tree if it's there
   */
  private _remove ( i:Key, t:Node<Key, Value>, comparator:Comparator<Key>) : Node<Key, Value> {
    let x;
    if (t === null) return null;
    t = splay(i, t, comparator);
    const cmp = comparator(i, t.key);
    if (cmp === 0) {               /* found it */
      if (t.left === null) {
        x = t.right;
      } else {
        // Splay the predecessor of i to the top of the left subtree, then
        // hang the right subtree off it.
        x = splay(i, t.left, comparator);
        x.right = t.right;
      }
      this._size--;
      return x;
    }
    return t;                         /* It wasn't there */
  }

  /**
   * Removes and returns the node with smallest key
   */
  public pop () : { key:Key, data:Value }|null {
    let node = this._root;
    if (node) {
      while (node.left) node = node.left;
      this._root = splay(node.key, this._root, this._comparator);
      this._root = this._remove(node.key, this._root, this._comparator);
      return { key: node.key, data: node.data };
    }
    return null;
  }

  /**
   * Find without splaying
   */
  public findStatic (key:Key) : Node<Key, Value>|null {
    let current = this._root;
    const compare = this._comparator;
    while (current) {
      const cmp = compare(key, current.key);
      if (cmp === 0) return current;
      else if (cmp < 0) current = current.left;
      else current = current.right;
    }
    return null;
  }

  // Find with splaying: the found key (or its closest neighbour) moves to the root.
  public find (key:Key) : Node<Key, Value>|null {
    if (this._root) {
      this._root = splay(key, this._root, this._comparator);
      if (this._comparator(key, this._root.key) !== 0) return null;
    }
    return this._root;
  }

  // Membership test; plain BST descent, does not restructure the tree.
  public contains (key:Key) : boolean {
    let current = this._root;
    const compare = this._comparator;
    while (current) {
      const cmp = compare(key, current.key);
      if (cmp === 0) return true;
      else if (cmp < 0) current = current.left;
      else current = current.right;
    }
    return false;
  }

  // In-order traversal (iterative, explicit stack), visiting keys in ascending order.
  public forEach (visitor:Visitor<Key, Value>, ctx?:any) : Tree<Key, Value> {
    let current = this._root;
    const Q = [];  /* Initialize stack s */
    let done = false;

    while (!done) {
      if (current !== null) {
        Q.push(current);
        current = current.left;
      } else {
        if (Q.length !== 0) {
          current = Q.pop();
          visitor.call(ctx, current);

          current = current.right;
        } else done = true;
      }
    }
    return this;
  }

  /**
   * Walk key range from `low` to `high`. Stops if `fn` returns a value.
   */
  public range (low:Key, high:Key, fn:Visitor<Key, Value>, ctx?:any) : Tree<Key, Value> {
    const Q = [];
    const compare = this._comparator;
    let node = this._root;
    let cmp;

    while (Q.length !== 0 || node) {
      if (node) {
        Q.push(node);
        node = node.left;
      } else {
        node = Q.pop();
        cmp = compare(node.key, high);
        if (cmp > 0) {
          break;
        } else if (compare(node.key, low) >= 0) {
          if (fn.call(ctx, node)) return this; // stop if smth is returned
        }
        node = node.right;
      }
    }
    return this;
  }

  /**
   * Returns array of keys
   */
  public keys () : Key[] {
    const keys:Key[] = [];
    this.forEach(({ key }) => keys.push(key));
    return keys;
  }

  /**
   * Returns array of all the data in the nodes
   */
  public values () : Value[] {
    const values:Value[] = [];
    this.forEach(({ data }) => values.push(data));
    return values;
  }

  // Smallest key in the tree, or null for an empty tree.
  public min() : Key|null {
    if (this._root) return this.minNode(this._root).key;
    return null;
  }

  // Largest key in the tree, or null for an empty tree.
  public max() : Key|null {
    if (this._root) return this.maxNode(this._root).key;
    return null;
  }

  // Left-most node of subtree `t` (defaults to the whole tree).
  public minNode(t = this._root) : Node<Key, Value> {
    if (t) while (t.left) t = t.left;
    return t;
  }

  // Right-most node of subtree `t` (defaults to the whole tree).
  public maxNode(t = this._root) : Node<Key, Value> {
    if (t) while (t.right) t = t.right;
    return t;
  }

  /**
   * Returns node at given index
   * (index counts in-order, i.e. by ascending key; O(index) walk).
   */
  public at (index:number) : Node<Key, Value>|null {
    let current = this._root;
    let done = false;
    let i = 0;
    const Q = [];

    while (!done) {
      if (current) {
        Q.push(current);
        current = current.left;
      } else {
        if (Q.length > 0) {
          current = Q.pop();
          if (i === index) return current;
          i++;
          current = current.right;
        } else done = true;
      }
    }
    return null;
  }

  // In-order successor of node `d`, or null if `d` holds the largest key.
  public next (d:Node<Key, Value>) : Node<Key, Value>|null {
    let root = this._root;
    let successor = null;

    if (d.right) {
      successor = d.right;
      while (successor.left) successor = successor.left;
      return successor;
    }

    const comparator = this._comparator;
    while (root) {
      const cmp = comparator(d.key, root.key);
      if (cmp === 0) break;
      else if (cmp < 0) {
        successor = root;
        root = root.left;
      } else root = root.right;
    }

    return successor;
  }

  // In-order predecessor of node `d`, or null if `d` holds the smallest key.
  public prev (d:Node<Key, Value>) : Node<Key, Value>|null {
    let root = this._root;
    let predecessor = null;

    if (d.left !== null) {
      predecessor = d.left;
      while (predecessor.right) predecessor = predecessor.right;
      return predecessor;
    }

    const comparator = this._comparator;
    while (root) {
      const cmp = comparator(d.key, root.key);
      if (cmp === 0) break;
      else if (cmp < 0) root = root.left;
      else {
        predecessor = root;
        root = root.right;
      }
    }
    return predecessor;
  }

  // Discards all nodes; O(1), relies on GC.
  public clear () : Tree<Key, Value> {
    this._root = null;
    this._size = 0;
    return this;
  }

  // Flattens the tree into a sorted singly-linked list of its nodes.
  public toList() {
    return toList(this._root);
  }

  /**
   * Bulk-load items. Both array have to be same size
   */
  public load (keys:Key[], values:Value[] = [], presort:boolean = false) {
    let size = keys.length;
    const comparator = this._comparator;

    // sort if needed
    if (presort) sort(keys, values, 0, size - 1, comparator);

    if (this._root === null) { // empty tree
      this._root = loadRecursive(keys, values, 0, size);
      this._size = size;
    } else { // that re-builds the whole tree from two in-order traversals
      const mergedList = mergeLists(this.toList(), createList(keys, values), comparator);
      size = this._size + size;
      this._root = sortedListToBST({ head: mergedList }, 0, size);
    }
    return this;
  }

  public isEmpty() : boolean { return this._root === null; }

  get size () : number { return this._size; }
  get root () : Node<Key, Value>|null { return this._root; }

  // ASCII-art dump of the tree, one node per line.
  public toString (printNode:NodePrinter<Key, Value> = (n) => String(n.key)) : string {
    const out:string[] = [];
    printRow(this._root, '', true, (v) => out.push(v), printNode);
    return out.join('');
  }

  // Re-keys an entry: splits the tree around `key` (removing it) and inserts
  // `newKey` into whichever half keeps the ordering invariant.
  public update (key:Key, newKey:Key, newData?:Value) : void {
    const comparator = this._comparator;
    let { left, right } = split(key, this._root, comparator);
    if (comparator(key, newKey) < 0) {
      right = insert(newKey, newData, right, comparator);
    } else {
      left = insert(newKey, newData, left, comparator);
    }
    this._root = merge(left, right, comparator);
  }

  // Splits this tree's nodes around `key`; NOTE(review): the instance's _root
  // and _size are not updated by this call — confirm intended at call sites.
  public split(key:Key) {
    return split(key, this._root, this._comparator);
  }
}

// Builds a balanced BST from sorted key/value arrays over [start, end).
function loadRecursive (keys:Key[], values:Value[], start:number, end:number) : Node<Key, Value>|null {
  const size = end - start;
  if (size > 0) {
    const middle = start + Math.floor(size / 2);
    const key = keys[middle];
    const data = values[middle];
    const node = new Node(key, data);
    node.left = loadRecursive(keys, values, start, middle);
    node.right = loadRecursive(keys, values, middle + 1, end);
    return node;
  }
  return null;
}

// Turns parallel key/value arrays into a singly-linked node list (via .next).
function createList(keys:Key[], values:Value[]) : Node<Key, Value> {
  const head = new Node<Key, Value>(null, null);
  let p:Node<Key, Value> = head;
  for (let i = 0; i < keys.length; i++) {
    p = p.next = new Node(keys[i], values[i]);
  }
  p.next = null;
  return head.next;
}

// In-order traversal that relinks the visited nodes into a sorted list.
function toList (root:Node<Key, Value>) : Node<Key, Value> {
  let current = root;
  const Q = [];
  let done = false;

  const head = new Node<Key, Value>(null, null);
  let p = head;

  while (!done) {
    if (current) {
      Q.push(current);
      current = current.left;
    } else {
      if (Q.length > 0) {
        current = p = p.next = Q.pop();
        current = current.right;
      } else done = true;
    }
  }
  p.next = null; // that'll work even if the tree was empty
  return head.next;
}

// Consumes `size` nodes from the sorted list cursor and builds a balanced BST;
// advances list.head as it goes (in-order construction).
function sortedListToBST(list:TreeNodeList<Key, Value>, start:number, end:number) : Node<Key, Value> {
  const size = end - start;
  if (size > 0) {
    const middle = start + Math.floor(size / 2);
    const left = sortedListToBST(list, start, middle);
    const root = list.head;
    root.left = left;
    list.head = list.head.next;
    root.right = sortedListToBST(list, middle + 1, end);
    return root;
  }
  return null;
}

// Merges two sorted node lists into one, stable with respect to l2 on ties.
function mergeLists<Key, Value> (
  l1:Node<Key, Value>,
  l2:Node<Key, Value>,
  compare:Comparator<Key>) : Node<Key, Value> {
  const head:Node<Key, Value> = new Node<Key, Value>(null, null); // dummy
  let p = head;

  let p1:Node<Key, Value> = l1;
  let p2:Node<Key, Value> = l2;

  while (p1 !== null && p2 !== null) {
    if (compare(p1.key, p2.key) < 0) {
      p.next = p1;
      p1 = p1.next;
    } else {
      p.next = p2;
      p2 = p2.next;
    }
    p = p.next;
  }

  if (p1 !== null) {
    p.next = p1;
  } else if (p2 !== null) {
    p.next = p2;
  }

  return head.next;
}

// In-place Hoare-partition quicksort over parallel key/value arrays.
function sort(
  keys:Key[], values:Value[],
  left:number, right:number,
  compare:Comparator<Key>,
) {
  if (left >= right) return;

  const pivot = keys[(left + right) >> 1];
  let i = left - 1;
  let j = right + 1;

  while (true) {
    do i++; while (compare(keys[i], pivot) < 0);
    do j--; while (compare(keys[j], pivot) > 0);
    if (i >= j) break;

    let tmp = keys[i];
    keys[i] = keys[j];
    keys[j] = tmp;

    tmp = values[i];
    values[i] = values[j];
    values[j] = tmp;
  }

  sort(keys, values, left, j, compare);
  sort(keys, values, j + 1, right, compare);
}
the_stack
import '@aws-cdk/assert-internal/jest';
import { SynthUtils } from '@aws-cdk/assert-internal';
import { CfnResource, Stack } from '@aws-cdk/core';
import { Cluster, KubernetesManifest, KubernetesVersion, HelmChart } from '../lib';
import { testFixtureNoVpc, testFixtureCluster } from './util';

/* eslint-disable max-len */

const CLUSTER_VERSION = KubernetesVersion.V1_16;

// Unit tests for KubernetesManifest: verifies manifest serialization into the
// custom resource, usage against imported clusters, and prune-label injection.
// NOTE(review): expectations pin exact JSON.stringify output and synthesized
// logical IDs / prune-label hashes — do not reorder object-literal properties.
describe('k8s manifest', () => {
  test('basic usage', () => {
    // GIVEN
    const { stack } = testFixtureNoVpc();
    const cluster = new Cluster(stack, 'cluster', { version: CLUSTER_VERSION });

    const manifest = [
      {
        apiVersion: 'v1',
        kind: 'Service',
        metadata: {
          name: 'hello-kubernetes',
        },
        spec: {
          type: 'LoadBalancer',
          ports: [
            { port: 80, targetPort: 8080 },
          ],
          selector: {
            app: 'hello-kubernetes',
          },
        },
      },
      {
        apiVersion: 'apps/v1',
        kind: 'Deployment',
        metadata: {
          name: 'hello-kubernetes',
        },
        spec: {
          replicas: 2,
          selector: {
            matchLabels: {
              app: 'hello-kubernetes',
            },
          },
          template: {
            metadata: {
              labels: {
                app: 'hello-kubernetes',
              },
            },
            spec: {
              containers: [
                {
                  name: 'hello-kubernetes',
                  image: 'paulbouwer/hello-kubernetes:1.5',
                  ports: [
                    { containerPort: 8080 },
                  ],
                },
              ],
            },
          },
        },
      },
    ];

    // WHEN
    new KubernetesManifest(stack, 'manifest', {
      cluster,
      manifest,
    });

    // THEN: the manifest array is serialized verbatim into the resource.
    expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
      Manifest: JSON.stringify(manifest),
    });
  });

  test('can be added to an imported cluster with minimal config', () => {
    // GIVEN: a cluster imported purely from attributes (name + kubectl role).
    const stack = new Stack();
    const cluster = Cluster.fromClusterAttributes(stack, 'MyCluster', {
      clusterName: 'my-cluster-name',
      kubectlRoleArn: 'arn:aws:iam::1111111:role/iam-role-that-has-masters-access',
    });

    // WHEN
    cluster.addManifest('foo', { bar: 2334 });
    cluster.addHelmChart('helm', { chart: 'hello-world' });

    // THEN: both resources carry the imported cluster name and role ARN.
    expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
      Manifest: '[{"bar":2334}]',
      ClusterName: 'my-cluster-name',
      RoleArn: 'arn:aws:iam::1111111:role/iam-role-that-has-masters-access',
    });

    expect(stack).toHaveResource(HelmChart.RESOURCE_TYPE, {
      ClusterName: 'my-cluster-name',
      RoleArn: 'arn:aws:iam::1111111:role/iam-role-that-has-masters-access',
      Release: 'myclustercharthelm78d2c26a',
      Chart: 'hello-world',
      Namespace: 'default',
      CreateNamespace: true,
    });
  });

  test('default child is a CfnResource', () => {
    const stack = new Stack();
    const cluster = Cluster.fromClusterAttributes(stack, 'MyCluster', {
      clusterName: 'my-cluster-name',
      kubectlRoleArn: 'arn:aws:iam::1111111:role/iam-role-that-has-masters-access',
    });

    const manifest = cluster.addManifest('foo', { bar: 2334 });
    expect(manifest.node.defaultChild).toBeInstanceOf(CfnResource);
  });

  describe('prune labels', () => {
    test('base case', () => {
      // GIVEN
      const { stack } = testFixtureNoVpc();

      // prune is enabled by default
      const cluster = new Cluster(stack, 'Cluster', {
        version: KubernetesVersion.V1_16,
      });

      expect(cluster.prune).toEqual(true);

      // WHEN
      cluster.addManifest('m1', {
        apiVersion: 'v1beta1',
        kind: 'Foo',
      });

      // THEN: a prune label is injected into metadata.labels and echoed in PruneLabel.
      expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
        Manifest: JSON.stringify([{
          apiVersion: 'v1beta1',
          kind: 'Foo',
          metadata: {
            labels: {
              'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac': '',
            },
          },
        }]),
        PruneLabel: 'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac',
      });
    });

    test('multiple resources in the same manifest', () => {
      // GIVEN
      const { stack, cluster } = testFixtureCluster({ prune: true });

      // WHEN: two resources added in one addManifest call.
      cluster.addManifest('m1',
        {
          apiVersion: 'v1beta',
          kind: 'Foo',
        },
        {
          apiVersion: 'v1',
          kind: 'Pod',
          metadata: {
            name: 'foo',
            labels: {
              bar: 1234,
            },
          },
          spec: {
            containers: [{ name: 'main', image: 'main' }],
          },
        },
      );

      // THEN: the SAME prune label is injected into every resource of this
      // manifest, and pre-existing labels are preserved alongside it.
      expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
        Manifest: JSON.stringify([
          {
            apiVersion: 'v1beta',
            kind: 'Foo',
            metadata: {
              labels: {
                'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac': '',
              },
            },
          },
          {
            apiVersion: 'v1',
            kind: 'Pod',
            metadata: {
              name: 'foo',
              labels: {
                'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac': '',
                'bar': 1234,
              },
            },
            spec: {
              containers: [
                {
                  name: 'main',
                  image: 'main',
                },
              ],
            },
          },
        ]),
        PruneLabel: 'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac',
      });
    });

    test('different KubernetesManifest resource use different prune labels', () => {
      // GIVEN
      const { stack, cluster } = testFixtureCluster({ prune: true });

      // WHEN: two separate addManifest calls.
      cluster.addManifest('m1', {
        apiVersion: 'v1beta',
        kind: 'Foo',
      });

      cluster.addManifest('m2', {
        apiVersion: 'v1',
        kind: 'Pod',
        metadata: {
          name: 'foo',
          labels: {
            bar: 1234,
          },
        },
        spec: {
          containers: [{ name: 'main', image: 'main' }],
        },
      });

      // THEN: each KubernetesManifest gets its own distinct prune-label hash.
      expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
        Manifest: JSON.stringify([
          {
            apiVersion: 'v1beta',
            kind: 'Foo',
            metadata: {
              labels: {
                'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac': '',
              },
            },
          },
        ]),
        PruneLabel: 'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac',
      });

      expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
        Manifest: JSON.stringify([
          {
            apiVersion: 'v1',
            kind: 'Pod',
            metadata: {
              name: 'foo',
              labels: {
                'aws.cdk.eks/prune-c8aff6ac817006dd4d644e9d99b2cdbb8c8cd036d9': '',
                'bar': 1234,
              },
            },
            spec: {
              containers: [
                {
                  name: 'main',
                  image: 'main',
                },
              ],
            },
          },
        ]),
        PruneLabel: 'aws.cdk.eks/prune-c8aff6ac817006dd4d644e9d99b2cdbb8c8cd036d9',
      });
    });

    test('ignores resources without "kind"', () => {
      // GIVEN
      const { stack, cluster } = testFixtureCluster({ prune: true });

      // WHEN: the resource has no "kind" field.
      cluster.addManifest('m1', {
        malformed: { resource: 'yes' },
      });

      // THEN: no prune label is injected into the resource itself.
      expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
        Manifest: JSON.stringify([{ malformed: { resource: 'yes' } }]),
        PruneLabel: 'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac',
      });
    });

    test('ignores entries that are not objects (invalid type)', () => {
      // GIVEN
      const { stack, cluster } = testFixtureCluster({ prune: true });
      expect(cluster.prune).toEqual(true);

      // WHEN: the manifest entry is an array, not an object.
      cluster.addManifest('m1', ['foo']);

      // THEN: the entry is passed through unmodified.
      expect(stack).toHaveResource(KubernetesManifest.RESOURCE_TYPE, {
        Manifest: JSON.stringify([['foo']]),
        PruneLabel: 'aws.cdk.eks/prune-c89a5983505f58231ac2a9a86fd82735ccf2308eac',
      });
    });

    test('no prune labels when "prune" is disabled', () => {
      // GIVEN: a cluster with prune explicitly disabled.
      const { stack } = testFixtureNoVpc();
      const cluster = new Cluster(stack, 'Cluster', {
        version: KubernetesVersion.V1_16,
        prune: false,
      });

      // WHEN
      cluster.addManifest('m1', { apiVersion: 'v1beta', kind: 'Foo' });

      // if "prune" is not specified at the manifest level, it is derived from the cluster settings.
      new KubernetesManifest(stack, 'm2', {
        cluster,
        manifest: [{ apiVersion: 'v1', kind: 'Pod' }],
      });

      // can be overridden at the manifest level
      new KubernetesManifest(stack, 'm3', {
        cluster,
        manifest: [{ apiVersion: 'v1', kind: 'Deployment' }],
        prune: true,
      });

      // THEN: only m3 (prune overridden to true) carries a prune label.
      const template = SynthUtils.synthesize(stack).template;

      const m1 = template.Resources.Clustermanifestm1E5FBE3C1.Properties;
      const m2 = template.Resources.m201F909C5.Properties;
      const m3 = template.Resources.m3B0AF9264.Properties;

      expect(m1.Manifest).toEqual(JSON.stringify([{ apiVersion: 'v1beta', kind: 'Foo' }]));
      expect(m2.Manifest).toEqual(JSON.stringify([{ apiVersion: 'v1', kind: 'Pod' }]));
      expect(m3.Manifest).toEqual(JSON.stringify([
        {
          apiVersion: 'v1',
          kind: 'Deployment',
          metadata: {
            labels: {
              'aws.cdk.eks/prune-c8971972440c5bb3661e468e4cb8069f7ee549414c': '',
            },
          },
        },
      ]));

      expect(m1.PruneLabel).toBeFalsy();
      expect(m2.PruneLabel).toBeFalsy();
      expect(m3.PruneLabel).toEqual('aws.cdk.eks/prune-c8971972440c5bb3661e468e4cb8069f7ee549414c');
    });
  });
});
the_stack
'use strict' // **Github:** https://github.com/fidm/quic // // **License:** MIT import { suite, it } from 'tman' import { ok, strictEqual, deepEqual, throws } from 'assert' import { bufferFromBytes } from '../common' import { BufferVisitor, toBuffer } from '../../src/internal/common' import { PacketNumber } from '../../src/internal/protocol' import { parseFrame, AckFrame, AckRange, } from '../../src/internal/frame' suite('ACK Frame', function () { suite('parsing', function () { it('a sample ACK frame', function () { const buf = bufferFromBytes([0x40, 0x1c, // largest acked 0x0, 0x0, // delay time 0x1c, // block length 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x1c) ok(ackFrame.lowestAcked === 0x1) ok(ackFrame.delayTime === 0) ok(ackFrame.hasMissingRanges() === false) // ignore Timestamps ok(toBuffer(ackFrame).equals(bufferFromBytes([0x40, 0x1c, 0x0, 0x0, 0x1c, 0x0]))) }) it('parse with parseFrame', function () { const buf = bufferFromBytes([0x40, 0x1c, // largest acked 0x0, 0x0, // delay time 0x1c, // block length 0, ]) const ackFrame = parseFrame(new BufferVisitor(buf), new PacketNumber(1)) as AckFrame ok(ackFrame.largestAcked === 0x1c) ok(ackFrame.lowestAcked === 0x1) ok(ackFrame.delayTime === 0) ok(ackFrame.hasMissingRanges() === false) // ignore Timestamps ok(toBuffer(ackFrame).equals(bufferFromBytes([0x40, 0x1c, 0x0, 0x0, 0x1c, 0x0]))) }) it('a frame without a timestamp', function () { const buf = bufferFromBytes([0x40, 0x3, 0x15, 0x50, 0x3, 0x0]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) as AckFrame ok(ackFrame.largestAcked === 0x3) ok(ackFrame.lowestAcked === 0x1) ok(ackFrame.delayTime === 6816) ok(ackFrame.hasMissingRanges() === false) }) it('a frame with a 48 bit packet number', function () { const buf = bufferFromBytes([0x4c, 0x37, 0x13, 0xad, 0xfb, 0xca, 0xde, 0x0, 0x0, 0x5, 0x1, 0, 0, 0, 0, 0]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked 
=== 0x3713adfbcade) ok(ackFrame.lowestAcked === 0x3713adfbcade - 5 + 1) ok(ackFrame.hasMissingRanges() === false) }) it('a frame with 1 ACKed packet', function () { const buf = bufferFromBytes([0x40, 0x10, 0x8e, 0x0, 0x1, 0x0]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x10) ok(ackFrame.lowestAcked === 0x10) ok(ackFrame.hasMissingRanges() === false) }) it('a frame, when packet 1 was lost', function () { const buf = bufferFromBytes([0x40, 0x9, 0x92, 0x7, 0x8, 0x3, 0x2, 0x69, 0xa3, 0x0, 0x0, 0x1, 0xc9, 0x2, 0x0, 0x46, 0x10]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 9) ok(ackFrame.lowestAcked === 2) ok(ackFrame.hasMissingRanges() === false) }) it('a frame with multiple timestamps', function () { const buf = bufferFromBytes([0x40, 0x10, 0x0, 0x0, 0x10, 0x4, 0x1, 0x6b, 0x26, 0x4, 0x0, 0x3, 0, 0, 0x2, 0, 0, 0x1, 0, 0]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x10) ok(ackFrame.lowestAcked === 1) ok(ackFrame.hasMissingRanges() === false) }) it('errors when the ACK range is too large', function () { // LargestAcked: 0x1c // Length: 0x1d => LowestAcked would be -1 throws(() => { const buf = bufferFromBytes([0x40, 0x1c, 0x8e, 0x0, 0x1d, 0x1, 0x1, 0x6b, 0x26, 0x3, 0x0]) AckFrame.fromBuffer(new BufferVisitor(buf)) }) }) it('errors when the first ACK range is empty', function () { throws(() => { const buf = bufferFromBytes([0x40, 0x9, 0x8e, 0x0, 0x0, 0x1, 0]) AckFrame.fromBuffer(new BufferVisitor(buf)) }) }) }) suite('ACK blocks', function () { it('a frame with one ACK block', function () { const buf = bufferFromBytes([0x60, 0x18, 0x94, 0x1, 0x1, 0x3, 0x2, 0x10, 0x2, 0x1, 0x5c, 0xd5, 0x0, 0x0, 0x0, 0x95, 0x0]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x18) ok(ackFrame.lowestAcked === 0x4) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) 
deepEqual(ackFrame.ackRanges[0], new AckRange(22, 24)) deepEqual(ackFrame.ackRanges[1], new AckRange(4, 19)) }) it('rejects a frame that says it has ACK blocks in the typeByte, but doesn\'t have any', function () { const buf = bufferFromBytes([0x63, 0x4, 0xff, 0xff, 0, 2, 0, 0, 0, 0, 0, 0]) throws(() => AckFrame.fromBuffer(new BufferVisitor(buf))) }) it('rejects a frame with invalid ACK ranges', function () { // like the test before, but increased the last ACK range, such that the FirstPacketNumber would be negative const buf = bufferFromBytes([0x60, 0x18, 0x94, 0x1, 0x1, 0x3, 0x2, 0x15, 0x2, 0x1, 0x5c, 0xd5, 0x0, 0x0, 0x0, 0x95, 0x0]) throws(() => AckFrame.fromBuffer(new BufferVisitor(buf))) }) it('a frame with multiple single packets missing', function () { const buf = bufferFromBytes([0x60, 0x27, 0xda, 0x0, 0x6, 0x9, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x13, 0x2, 0x1, 0x71, 0x12, 0x3, 0x0, 0x0, 0x47, 0x2]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x27) ok(ackFrame.lowestAcked === 0x1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 7) deepEqual(ackFrame.ackRanges[0], new AckRange(31, 39)) deepEqual(ackFrame.ackRanges[1], new AckRange(29, 29)) deepEqual(ackFrame.ackRanges[2], new AckRange(27, 27)) deepEqual(ackFrame.ackRanges[3], new AckRange(25, 25)) deepEqual(ackFrame.ackRanges[4], new AckRange(23, 23)) deepEqual(ackFrame.ackRanges[5], new AckRange(21, 21)) deepEqual(ackFrame.ackRanges[6], new AckRange(1, 19)) }) it('a frame with packet 1 and one more packet lost', function () { const buf = bufferFromBytes([0x60, 0xc, 0x92, 0x0, 0x1, 0x1, 0x1, 0x9, 0x2, 0x2, 0x53, 0x43, 0x1, 0x0, 0x0, 0xa7, 0x0]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 12) ok(ackFrame.lowestAcked === 2) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(12, 
12)) deepEqual(ackFrame.ackRanges[1], new AckRange(2, 10)) }) it('a frame with multiple longer ACK blocks', function () { const buf = bufferFromBytes([0x60, 0x52, 0xd1, 0x0, 0x3, 0x17, 0xa, 0x10, 0x4, 0x8, 0x2, 0x12, 0x2, 0x1, 0x6c, 0xc8, 0x2, 0x0, 0x0, 0x7e, 0x1]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x52) ok(ackFrame.lowestAcked === 2) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 4) deepEqual(ackFrame.ackRanges[0], new AckRange(60, 0x52)) deepEqual(ackFrame.ackRanges[1], new AckRange(34, 49)) deepEqual(ackFrame.ackRanges[2], new AckRange(22, 29)) deepEqual(ackFrame.ackRanges[3], new AckRange(2, 19)) }) suite('more than 256 lost packets in a row', function () { // 255 missing packets fit into a single ACK block it('a frame with a range of 255 missing packets', function () { const buf = bufferFromBytes([0x60 ^ 0x4, 0x1, 0x15, // largest acked 0x0, 0x0, // delay time 0x1, // num ACK blocks 0x3, // 1st block 0xff, 0x13, // 2nd block 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x115) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 255, 0x115)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) // 256 missing packets fit into two ACK blocks it('a frame with a range of 256 missing packets', function () { const buf = bufferFromBytes([0x60 ^ 0x4, 0x1, 0x14, // largest acked 0x0, 0x0, // delay time 0x2, // num ACK blocks 0x1, // 1st block 0xff, 0x0, // 2nd block 0x1, 0x13, // 3rd block 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x114) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 256, 0x114)) deepEqual(ackFrame.ackRanges[1], new 
AckRange(1, 19)) }) it('a frame with an incomplete range at the end', function () { // this is a modified ACK frame that has 5 instead of originally 6 written ranges // each gap is 300 packets and thus takes 2 ranges // the last range is incomplete, and should be completely ignored const buf = bufferFromBytes([0x60 ^ 0x4, 0x3, 0x9b, // largest acked 0x0, 0x0, // delay time 0x5, // num ACK blocks, instead of 0x6 0x1, // 1st block 0xff, 0x0, // 2nd block 0x2d, 0x1, // 3rd block 0xff, 0x0, // 4th block 0x2d, 0x1, // 5th block 0xff, 0x0, /*0x2d, 0x14,*/ // 6th block 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x39b) ok(ackFrame.lowestAcked === 0x141) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 3) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 3 * 301, 20 + 3 * 301)) deepEqual(ackFrame.ackRanges[1], new AckRange(20 + 2 * 301, 20 + 2 * 301)) deepEqual(ackFrame.ackRanges[2], new AckRange(20 + 1 * 301, 20 + 1 * 301)) }) it('a frame with one long range, spanning 2 blocks, of missing packets', function () { // 280 missing packets const buf = bufferFromBytes([0x60 ^ 0x4, 0x1, 0x44, // largest acked 0x0, 0x0, // delay time 0x2, // num ACK blocks 0x19, // 1st block 0xff, 0x0, // 2nd block 0x19, 0x13, // 3rd block 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x144) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(300, 0x144)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('a frame with one long range, spanning multiple blocks, of missing packets', function () { // 2345 missing packets const buf = bufferFromBytes([0x60 ^ 0x4, 0x9, 0x5b, // largest acked 0x0, 0x0, // delay time 0xa, // num ACK blocks 0x1f, // 1st block 0xff, 0x0, // 2nd block 0xff, 0x0, // 3rd block 0xff, 0x0, // 4th block 0xff, 0x0, // 5th 
block 0xff, 0x0, // 6th block 0xff, 0x0, // 7th block 0xff, 0x0, // 8th block 0xff, 0x0, // 9th block 0xff, 0x0, // 10th block 0x32, 0x13, // 11th block 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x95b) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(2365, 0x95b)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it.skip('a frame with multiple long ranges of missing packets', function () { const buf = bufferFromBytes([0x60 ^ 0x4 ^ 0x1, 0x9, 0x66, // largest acked 0x0, 0x0, // delay time 0x7, // num ACK blocks 0x0, 0x7, // 1st block 0xff, 0x0, 0x0, // 2nd block 0xf5, 0x2, 0x8a, // 3rd block 0xc8, 0x0, 0xe6, // 4th block 0xff, 0x0, 0x0, // 5th block 0xff, 0x0, 0x0, // 6th block 0xff, 0x0, 0x0, // 7th block 0x23, 0x0, 0x13, // 8th block 0, ]) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x966) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 4) deepEqual(ackFrame.ackRanges[0], new AckRange(2400, 0x966)) deepEqual(ackFrame.ackRanges[1], new AckRange(1250, 1899)) deepEqual(ackFrame.ackRanges[2], new AckRange(820, 1049)) deepEqual(ackFrame.ackRanges[3], new AckRange(1, 19)) }) }) }) it('errors on EOFs', function () { const buf = bufferFromBytes([0x60 ^ 0x4 ^ 0x1, 0x9, 0x66, // largest acked 0x23, 0x1, // delay time 0x7, // num ACk blocks 0x0, 0x7, // 1st block 0xff, 0x0, 0x0, // 2nd block 0xf5, 0x2, 0x8a, // 3rd block 0xc8, 0x0, 0xe6, // 4th block 0xff, 0x0, 0x0, // 5th block 0xff, 0x0, 0x0, // 6th block 0xff, 0x0, 0x0, // 7th block 0x23, 0x0, 0x13, // 8th blocks 0x2, // num timestamps 0x1, 0x13, 0xae, 0xb, 0x0, // 1st timestamp 0x0, 0x80, 0x5, // 2nd timestamp ]) AckFrame.fromBuffer(new BufferVisitor(buf)) for (let i = 0; i < buf.length; i++) { throws(() => AckFrame.fromBuffer(new 
BufferVisitor(buf.slice(0, i)))) } }) suite('when toBuffer', function () { suite('self-consistency', function () { it('a simple ACK frame', function () { const frame = new AckFrame() frame.largestAcked = 1 frame.lowestAcked = 1 const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 1) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), false) }) it('the correct block length in a simple ACK frame', function () { const frame = new AckFrame() frame.largestAcked = 20 frame.lowestAcked = 10 const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 20) ok(ackFrame.lowestAcked === 10) strictEqual(ackFrame.hasMissingRanges(), false) }) it('a simple ACK frame with a high packet number', function () { const frame = new AckFrame() frame.largestAcked = 0xDEADBEEFCAFE frame.lowestAcked = 0xDEADBEEFCAFE const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0xDEADBEEFCAFE) ok(ackFrame.lowestAcked === 0xDEADBEEFCAFE) strictEqual(ackFrame.hasMissingRanges(), false) }) it('an ACK frame with one packet missing', function () { const frame = new AckFrame() frame.largestAcked = 40 frame.lowestAcked = 1 frame.ackRanges.push(new AckRange(25, 40), new AckRange(1, 23)) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 40) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(25, 40)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 23)) }) it('an ACK frame with multiple missing packets', function () { const frame = new AckFrame() frame.largestAcked = 25 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(22, 25), new AckRange(15, 18), new AckRange(13, 13), new AckRange(1, 10), ) const buf = toBuffer(frame) 
const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 25) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 4) deepEqual(ackFrame.ackRanges[0], new AckRange(22, 25)) deepEqual(ackFrame.ackRanges[1], new AckRange(15, 18)) deepEqual(ackFrame.ackRanges[2], new AckRange(13, 13)) deepEqual(ackFrame.ackRanges[3], new AckRange(1, 10)) }) it('rejects a frame with incorrect LargestObserved value', function () { const frame = new AckFrame() frame.largestAcked = 26 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(12, 25), new AckRange(1, 10), ) throws(() => toBuffer(frame)) }) it('rejects a frame with incorrect LargestObserved value 2', function () { const frame = new AckFrame() frame.largestAcked = 25 frame.lowestAcked = 2 frame.ackRanges.push( new AckRange(12, 25), new AckRange(1, 10), ) throws(() => toBuffer(frame)) }) suite('longer gaps between ACK blocks', function () { it('only one block for 254 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 300 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(20 + 254, 300), new AckRange(1, 19), ) strictEqual(frame.numWritableNackRanges(), 2) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 300) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 254, 300)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('only one block for 255 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 300 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(20 + 255, 300), new AckRange(1, 19), ) strictEqual(frame.numWritableNackRanges(), 2) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 300) 
ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 255, 300)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('two block for 256 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 300 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(20 + 256, 300), new AckRange(1, 19), ) strictEqual(frame.numWritableNackRanges(), 3) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 300) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 256, 300)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('two block for 510 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 600 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(20 + 510, 600), new AckRange(1, 19), ) strictEqual(frame.numWritableNackRanges(), 3) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 600) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 510, 600)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('two block for 511 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 600 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(20 + 511, 600), new AckRange(1, 19), ) strictEqual(frame.numWritableNackRanges(), 4) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 600) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 
511, 600)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('two block for 512 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 600 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(20 + 512, 600), new AckRange(1, 19), ) strictEqual(frame.numWritableNackRanges(), 4) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 600) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(20 + 512, 600)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('multiple blocks for a lot of lost packets', function () { const frame = new AckFrame() frame.largestAcked = 3000 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(2900, 3000), new AckRange(1, 19), ) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 3000) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(2900, 3000)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 19)) }) it('multiple longer blocks for 256 lost packets', function () { const frame = new AckFrame() frame.largestAcked = 3600 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(2900, 3600), new AckRange(1000, 2500), new AckRange(1, 19), ) const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 3600) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 3) deepEqual(ackFrame.ackRanges[0], new AckRange(2900, 3600)) deepEqual(ackFrame.ackRanges[1], new AckRange(1000, 2500)) deepEqual(ackFrame.ackRanges[2], new AckRange(1, 19)) }) }) suite('longer ACK blocks', function () { it('a 1 byte Missing Sequence Number Delta', 
function () { const frame = new AckFrame() frame.largestAcked = 200 frame.lowestAcked = 1 const buf = toBuffer(frame) strictEqual(buf[0] & 0x3, 0x0) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 200) ok(ackFrame.lowestAcked === 1) }) it('a 2 byte Missing Sequence Number Delta', function () { const frame = new AckFrame() frame.largestAcked = 0x100 frame.lowestAcked = 1 const buf = toBuffer(frame) strictEqual(buf[0] & 0x3, 0x1) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x100) ok(ackFrame.lowestAcked === 1) }) it('a 4 byte Missing Sequence Number Delta', function () { const frame = new AckFrame() frame.largestAcked = 0x10000 frame.lowestAcked = 1 const buf = toBuffer(frame) strictEqual(buf[0] & 0x3, 0x2) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x10000) ok(ackFrame.lowestAcked === 1) }) it('a 6 byte Missing Sequence Number Delta', function () { const frame = new AckFrame() frame.largestAcked = 0x100000000 frame.lowestAcked = 1 const buf = toBuffer(frame) strictEqual(buf[0] & 0x3, 0x3) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 0x100000000) ok(ackFrame.lowestAcked === 1) }) it('a 1 byte Missing Sequence Number Delta, if all ACK blocks are short', function () { const frame = new AckFrame() frame.largestAcked = 5001 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(5000, 5001), new AckRange(250, 300), new AckRange(1, 200), ) const buf = toBuffer(frame) strictEqual(buf[0] & 0x3, 0x0) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 5001) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 3) deepEqual(ackFrame.ackRanges[0], new AckRange(5000, 5001)) deepEqual(ackFrame.ackRanges[1], new AckRange(250, 300)) deepEqual(ackFrame.ackRanges[2], new AckRange(1, 200)) }) it('a 2 byte 
Missing Sequence Number Delta, for a frame with 2 ACK ranges', function () { const frame = new AckFrame() frame.largestAcked = 10000 frame.lowestAcked = 1 frame.ackRanges.push( new AckRange(9990, 10000), new AckRange(1, 256), ) const buf = toBuffer(frame) strictEqual(buf[0] & 0x3, 0x1) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === 10000) ok(ackFrame.lowestAcked === 1) strictEqual(ackFrame.hasMissingRanges(), true) strictEqual(ackFrame.ackRanges.length, 2) deepEqual(ackFrame.ackRanges[0], new AckRange(9990, 10000)) deepEqual(ackFrame.ackRanges[1], new AckRange(1, 256)) }) }) suite('too many ACK blocks', function () { it('skips the lowest ACK ranges, if there are more than 255 AckRanges', function () { const ackRanges = new Array(300) for (let i = 1; i <= 300; i++) { ackRanges[300 - i] = new AckRange(3 * i, 3 * i + 1) } const frame = new AckFrame() frame.largestAcked = ackRanges[0].last frame.lowestAcked = ackRanges[ackRanges.length - 1].first frame.ackRanges = ackRanges const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === frame.largestAcked) ok(ackFrame.lowestAcked === ackRanges[254].first) strictEqual(ackFrame.ackRanges.length, 0xff) strictEqual(ackFrame.validateAckRanges(), true) }) it('skips the lowest ACK ranges, if the gaps are large', function () { const ackRanges = new Array(100) // every AckRange will take 4 written ACK ranges for (let i = 1; i <= 100; i++) { ackRanges[100 - i] = new AckRange(1000 * i, 1000 * i + 1) } const frame = new AckFrame() frame.largestAcked = ackRanges[0].last frame.lowestAcked = ackRanges[ackRanges.length - 1].first frame.ackRanges = ackRanges const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === frame.largestAcked) ok(ackFrame.lowestAcked === ackRanges[ackFrame.ackRanges.length - 1].first) strictEqual(ackFrame.ackRanges.length, 256 / 4) 
strictEqual(ackFrame.validateAckRanges(), true) }) it('works with huge gaps', function () { const ackRanges = [ new AckRange(2 * 255 * 200, 2 * 255 * 200 + 1), new AckRange(1 * 255 * 200, 1 * 255 * 200 + 1), new AckRange(1, 2), ] const frame = new AckFrame() frame.largestAcked = ackRanges[0].last frame.lowestAcked = ackRanges[ackRanges.length - 1].first frame.ackRanges = ackRanges const buf = toBuffer(frame) const ackFrame = AckFrame.fromBuffer(new BufferVisitor(buf)) ok(ackFrame.largestAcked === frame.largestAcked) ok(ackFrame.lowestAcked === ackRanges[ackFrame.ackRanges.length - 1].first) strictEqual(ackFrame.ackRanges.length, 2) strictEqual(ackFrame.validateAckRanges(), true) }) }) }) }) suite('ACK range validator', function () { it('accepts an ACK without NACK Ranges', function () { const frame = new AckFrame() frame.largestAcked = 7 strictEqual(frame.validateAckRanges(), true) }) it('rejects ACK ranges with a single range', function () { const frame = new AckFrame() frame.largestAcked = 10 frame.ackRanges.push(new AckRange(1, 10)) strictEqual(frame.validateAckRanges(), false) }) it('rejects ACK ranges with LastPacketNumber of the first range unequal to LargestObserved', function () { const frame = new AckFrame() frame.largestAcked = 10 frame.ackRanges.push( new AckRange(8, 9), new AckRange(2, 3), ) strictEqual(frame.validateAckRanges(), false) }) it('rejects ACK ranges with FirstPacketNumber greater than LastPacketNumber', function () { const frame = new AckFrame() frame.largestAcked = 10 frame.ackRanges.push( new AckRange(8, 10), new AckRange(4, 3), ) strictEqual(frame.validateAckRanges(), false) }) it('rejects ACK ranges with FirstPacketNumber greater than LargestObserved', function () { const frame = new AckFrame() frame.largestAcked = 5 frame.ackRanges.push( new AckRange(4, 10), new AckRange(1, 2), ) strictEqual(frame.validateAckRanges(), false) }) it('rejects ACK ranges in the wrong order', function () { const frame = new AckFrame() frame.largestAcked = 7 
frame.ackRanges.push( new AckRange(2, 2), new AckRange(6, 7), ) strictEqual(frame.validateAckRanges(), false) }) it('rejects with overlapping ACK ranges', function () { const frame = new AckFrame() frame.largestAcked = 7 frame.ackRanges.push( new AckRange(5, 7), new AckRange(2, 5), ) strictEqual(frame.validateAckRanges(), false) }) it('rejects ACK ranges that are part of a larger ACK range', function () { const frame = new AckFrame() frame.largestAcked = 7 frame.ackRanges.push( new AckRange(4, 7), new AckRange(5, 6), ) strictEqual(frame.validateAckRanges(), false) }) it('rejects with directly adjacent ACK ranges', function () { const frame = new AckFrame() frame.largestAcked = 7 frame.ackRanges.push( new AckRange(5, 7), new AckRange(2, 4), ) strictEqual(frame.validateAckRanges(), false) }) it('accepts an ACK with one lost packet', function () { const frame = new AckFrame() frame.largestAcked = 10 frame.ackRanges.push( new AckRange(5, 10), new AckRange(1, 3), ) strictEqual(frame.validateAckRanges(), true) }) it('accepts an ACK with multiple lost packets', function () { const frame = new AckFrame() frame.largestAcked = 20 frame.ackRanges.push( new AckRange(15, 20), new AckRange(10, 12), new AckRange(1, 3), ) strictEqual(frame.validateAckRanges(), true) }) }) suite('check if ACK frame acks a certain packet', function () { it('works with an ACK without any ranges', function () { const frame = new AckFrame() frame.largestAcked = 10 frame.lowestAcked = 5 strictEqual(frame.acksPacket(1), false) strictEqual(frame.acksPacket(4), false) strictEqual(frame.acksPacket(5), true) strictEqual(frame.acksPacket(8), true) strictEqual(frame.acksPacket(10), true) strictEqual(frame.acksPacket(11), false) strictEqual(frame.acksPacket(20), false) }) it('works with an ACK with multiple ACK ranges', function () { const frame = new AckFrame() frame.largestAcked = 20 frame.lowestAcked = 5 frame.ackRanges.push( new AckRange(15, 20), new AckRange(5, 8), ) strictEqual(frame.acksPacket(4), false) 
strictEqual(frame.acksPacket(5), true) strictEqual(frame.acksPacket(7), true) strictEqual(frame.acksPacket(8), true) strictEqual(frame.acksPacket(9), false) strictEqual(frame.acksPacket(14), false) strictEqual(frame.acksPacket(15), true) strictEqual(frame.acksPacket(18), true) strictEqual(frame.acksPacket(20), true) strictEqual(frame.acksPacket(21), false) }) }) })
// the_stack — stray dataset-concatenation artifact (bare undeclared identifier; commented out so the file compiles)
function chart_examples() { const chart = c3.generate({ data: {}, bindto: '#myContainer', size: { width: 640, height: 480, }, padding: { top: 20, right: 20, bottom: 20, left: 20, }, color: { pattern: ['#1f77b4', '#aec7e8'], }, interaction: { enabled: false, }, transition: { duration: 500, }, oninit: () => { /* code*/ }, onrendered: () => { /* code*/ }, onmouseover: () => { /* code*/ }, onmouseout: () => { /* code*/ }, onresize: () => { /* code*/ }, onresized: () => { /* code*/ }, }); const chart2 = c3.generate({ bindto: document.getElementById('myContainer'), data: {}, }); const chart3 = c3.generate({ bindto: d3.select('#myContainer'), data: {}, }); } function data_examples() { const chart = c3.generate({ data: { url: '/data/c3_test.csv', json: [ { name: 'www.site1.com', upload: 200, download: 200, total: 400, }, { name: 'www.site2.com', upload: 100, download: 300, total: 400, }, { name: 'www.site3.com', upload: 300, download: 200, total: 500, }, { name: 'www.site4.com', upload: 400, download: 100, total: 500, }, ], mimeType: 'json', rows: [ ['data1', 'data2', 'data3'], [90, 120, 300], [40, 160, 240], [50, 200, 290], [120, 160, 230], [80, 130, 300], [90, 220, 320], ], columns: [ ['data1', 30, 20, 50, 40, 60, 50], ['data2', 200, 130, 90, 240, 130, 220], ['data3', 300, 200, 160, 400, 250, 250], ], keys: { x: 'name', // it's possible to specify 'x' when category axis value: ['upload', 'download'], }, x: 'date', xs: { data1: 'x1', data2: 'x2', }, xFormat: '%Y-%m-%d %H:%M:%S', names: { data1: 'Data Name 1', data2: 'Data Name 2', }, classes: { data1: 'additional-data1-class', data2: 'additional-data2-class', }, groups: [ ['data1', 'data2'], ['data3'] ], axes: { data1: 'y', data2: 'y2', }, type: 'bar', types: { data1: 'bar', data2: 'spline', }, labels: true, order: 'asc', regions: { data1: [ { start: 1, end: 2, style: 'dashed' }, { start: 3, label: 'Region 2', paddingX: 2, paddingY: 2, vertical: true } ], }, color: (color, d) => '#ff0000', colors: { data1: '#ff0000', /* 
... */ }, hide: true, empty: { label: { text: 'No Data', }, }, selection: { enabled: true, grouped: true, multiple: true, draggable: true, isselectable: (d) => true, }, stack: { normalize: true }, onclick: (d, element) => { /* code */ }, onmouseover: (d) => { /* code */ }, onmouseout: (d) => { /* code */ }, }, }); const chart2 = c3.generate({ data: { labels: { format: (v, id, i, j) => { return 'string'; } }, hide: ['data1'], order: (data1, data2) => 0 }, }); const chart3 = c3.generate({ data: { labels: { format: { data1: (v, id, i, j) => { return 'string'; } } }, }, }); } function axis_examples() { const chart = c3.generate({ data: {}, axis: { rotated: true, x: { show: true, type: 'timeseries', localtime: true, categories: ['Category 1', 'Category 2'], tick: { centered: true, format: (x: Date) => x.getFullYear(), culling: false, count: 5, fit: true, values: [1, 2, 4, 8, 16, 32], rotate: 60, outer: false, multiline: true, multilineMax: 2, width: 100, }, max: 100, min: -100, padding: { left: 0, right: 0, }, height: 20, extent: [5, 10], label: 'Your X Axis', }, y: { show: false, inner: true, type: 'linear', max: 1000, min: 1000, inverted: true, center: 0, label: 'Your Y Axis', tick: { format: d3.format('$,'), outer: false, values: [100, 1000, 10000], count: 5, }, padding: { top: 100, bottom: 100, }, default: [0, 1000], }, y2: { show: true, inner: true, type: 'linear', max: 1000, min: 1000, inverted: true, center: 0, label: 'Your Y2 Axis', tick: { format: d3.format('$,'), outer: false, values: [100, 1000, 10000], count: 5, }, padding: { top: 100, bottom: 100, }, default: [0, 1000], }, }, }); const chart2 = c3.generate({ data: {}, axis: { x: { tick: { culling: { max: 5, }, }, label: { text: 'Your X Axis', position: 'outer-center', }, }, y: { label: { text: 'Your X Axis', position: 'outer-middle', }, tick: { format: (d) => '$' + d, }, }, y2: { label: { text: 'Your X Axis', position: 'outer-middle', }, tick: { format: (d) => '$' + d, }, }, }, }); } function 
grid_examples() { const chart = c3.generate({ data: {}, grid: { x: { show: true, lines: [ { value: 2, text: 'Label on 2' }, { value: 5, text: 'Label on 5', class: 'label-5' }, { value: 6, text: 'Label on 6', position: 'start' }, ], }, y: { show: true, lines: [ { value: 100, text: 'Label on 100' }, { value: 200, text: 'Label on 200', class: 'label-200' }, { value: 300, text: 'Label on 300', position: 'middle' }, ], }, }, }); } function region_examples() { const chart = c3.generate({ data: {}, regions: [{ axis: 'x', start: 1, end: 4, class: 'region-1-4' }], }); } function legend_examples() { const chart = c3.generate({ data: {}, legend: { show: true, hide: true, position: 'bottom', inset: { anchor: 'top-left', x: 10, y: 0, step: undefined, }, item: { onclick: (id) => { /* code */ }, onmouseover: (id) => { /* code */ }, onmouseout: (id) => { /* code */ }, }, }, }); const chart2 = c3.generate({ data: {}, legend: { hide: 'data1', inset: { anchor: 'top-right', x: 20, y: 10, step: 2, }, }, }); const chart3 = c3.generate({ data: {}, legend: { hide: ['data1', 'data2'], }, }); } function tooltip_examples() { const chart = c3.generate({ data: {}, tooltip: { show: true, grouped: true, format: { title: (x, index) => 'Data ' + x, name: (name, ratio, id, index) => name, value: (value, ratio, id, index) => ratio?.toString(), }, position: (data, width, height, element) => { return {top: 0, left: 0}; }, contents: (d, defaultTitleFormat, defaultValueFormat, color) => '<p>title</p>', // formatted html as you want horizontal: true } }); } function subchart_examples() { const chart = c3.generate({ data: {}, subchart: { show: true, size: { height: 20, }, onbrush: (domain) => { /* code */ }, axis: { x: { show: true } } }, }); } function zoom_examples() { const chart = c3.generate({ data: {}, zoom: { enabled: false, type: 'scroll', rescale: true, extent: [1, 100], // enable more zooming onzoom: (domain) => { /* code */ }, onzoomstart: (event) => { /* code */ }, onzoomend: (domain) => { /* 
code */ }, disableDefaultBehavior: true, }, }); } function point_examples() { const chart = c3.generate({ data: {}, point: { show: false, r: 5, focus: { expand: { enabled: true, r: 1, }, }, sensitivity: 35, select: { r: 3, }, }, }); } function line_examples() { const chart = c3.generate({ data: {}, line: { connectNull: true, step: { type: 'step-after', }, }, }); } function area_examples() { const chart = c3.generate({ data: {}, area: { zerobased: false, }, }); } function bar_examples() { const chart = c3.generate({ data: {}, bar: { width: 10, zerobased: false, }, }); const chart2 = c3.generate({ data: {}, bar: { width: { ratio: 0.2, }, zerobased: false, }, }); } function pie_examples() { const chart = c3.generate({ data: {}, pie: { label: { show: false, format: (value, ratio, id) => { return d3.format('$')(value); }, threshold: 0.1, }, expand: false, padAngle: .1, }, }); } function donut_examples() { const chart = c3.generate({ data: {}, donut: { label: { show: false, format: (value, ratio, id) => { return d3.format('$')(value); }, threshold: 0.05, }, expand: false, padAngle: .1, width: 10, title: 'Title', }, }); } function gauge_examples() { const chart = c3.generate({ data: {}, gauge: { label: { show: false, format: (value, ratio) => { return d3.format('$')(value); }, }, expand: false, min: -100, max: 100, units: ' %', width: 10, fullCircle: true, }, }); } function spline_examples() { const chart = c3.generate({ data: {}, spline: { interpolation: { type: 'monotone' } } }); } function stanford_examples() { const chart = c3.generate({ data: {}, stanford: { scaleMin: 1, scaleMax: 10000, scaleWidth: 20, scaleFormat: 'pow10', scaleValues: (minValue, maxValue) => { const step = (maxValue - minValue) / 10; return d3.range(minValue, maxValue + step, step); }, colors: d3.interpolatePlasma, padding: { top: 15, right: 0, bottom: 0, left: 0, }, texts: [ { x: 1, y: 4, content: 'my custom text here', class: 'text-1-4' } ], lines: [ { value_x1: 0, value_y1: 0, value_x2: 65, 
value_y2: 65, class: "line-0-65" }, ], regions: [ { points: [ // add points counter-clockwise {x: 0, y: 0}, {x: 40, y: 40}, {x: 0, y: 40}, ], text: (value, percentage) => `Normal Operations: ${value} (${percentage}%)`, opacity: 0.2, // 0 to 1 class: "region-triangle-1" } ] }, }); const chart1 = c3.generate({ data: {}, stanford: { scaleFormat: d3.format("d"), }, }); const chart2 = c3.generate({ data: {}, stanford: { scaleFormat: (d) => 'format', }, }); } ////////////////// // API tests ///////////////// function api() { const chart = c3.generate({ data: {} }); chart.focus('data1'); chart.focus(['data1', 'data2']); chart.focus(); chart.defocus('data1'); chart.defocus(['data1', 'data2']); chart.defocus(); chart.revert('data1'); chart.revert(['data1', 'data2']); chart.revert(); chart.show('data1'); chart.show(['data1', 'data2']); chart.show(); chart.show('data1', { withLegend: true }); chart.hide('data1'); chart.hide(['data1', 'data2']); chart.hide(); chart.hide('data1', {withLegend: true}); chart.toggle('data1'); chart.toggle(['data1', 'data2']); chart.toggle(); chart.toggle('data1', {withLegend: true}); chart.load({ data: {}, url: 'http://sample.url', json: { x: [0, 1, 2], y: [1, 2, 3] }, rows: [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]], columns: [['data1', 100, 200, 150], ['b', 2, 3], ['c', 5, 6]], classes: { id1: 'class', id2: 'class' }, categories: [['cat1', 'cat2']], axes: { id1: 'x', id2: 'y' }, colors: { x1: d3.rgb(0, 0, 0), x2: '#000000' }, type: 'bar', types: { c1: 'area', c2: 'spline' }, unload: ['data2', 'data3'], done: () => { /* on done */ }, }); chart.load({ unload: true }); chart.unload({ ids: ['data2', 'data3'], done: () => { /* on done */ }, }); chart.flow({ rows: [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]], columns: [ ['x', '2013-01-11', '2013-01-21'], ['data1', 500, 200], ['data2', 100, 300], ['data3', 200, 120] ], done: () => { chart.flow({ columns: [ ['x', '2013-02-11', '2013-02-12', '2013-02-13', '2013-02-14'], ['data1', 200, 300, 100, 250], ['data2', 
100, 90, 40, 120], ['data3', 100, 100, 300, 500] ], length: 0 }); } }); chart.select(['data1']); chart.select(['data1'], [1, 3, 5]); chart.select(['data1'], [1, 3, 5], true); chart.unselect(['data1']); chart.unselect(['data1'], [1, 3, 5]); chart.selected(); chart.selected('data1'); chart.transform('bar'); chart.transform('bar', 'data1'); chart.transform('bar', ['data1', 'data2']); chart.groups([['data1', 'data2']]); chart.xgrids([ {value: 1, text: 'Label 1'}, {value: 4, text: 'Label 4'} ]); chart.xgrids.add( {value: 4, text: 'Label 4'} ); chart.xgrids.add([ {value: 2, text: 'Label 2'}, {value: 4, text: 'Label 4'} ]); chart.xgrids.remove({value: 2}); chart.xgrids.remove({class: 'grid-A'}); chart.xgrids.remove(); chart.ygrids([ {value: 100, text: 'Label 1'}, {value: 400, text: 'Label 4'} ]); chart.ygrids.add( {value: 400, text: 'Label 4'} ); // Add new y grid lines chart.ygrids.add([ {value: 200, text: 'Label 2'}, {value: 400, text: 'Label 4'} ]); chart.ygrids.remove({value: 200}); chart.ygrids.remove({class: 'grid-A'}); chart.ygrids.remove(); chart.regions([ {axis: 'x', start: 5, class: 'regionX'}, {axis: 'y', end: 50, class: 'regionY'} ]); chart.regions.add( {axis: 'x', start: 5, class: 'regionX'} ); chart.regions.add([ {axis: 'x', start: 5, class: 'regionX'}, {axis: 'y', end: 50, class: 'regionY'} ]); chart.regions.remove({classes: ['region-A', 'region-B']}); chart.regions.remove(); chart.data('data1'); chart.data(['data1', 'data2']); chart.data(); chart.data.shown('data1'); chart.data.shown(['data1', 'data2']); chart.data.shown(); chart.data.values('data1'); chart.data.names(); chart.data.names({ data1: 'New Name 1', data2: 'New Name 2' }); chart.data.colors(); chart.data.colors({ data1: '#FFFFFF', data2: '#000000' }); chart.data.axes(); chart.data.axes({ data1: 'y', data2: 'y2' }); chart.x(); chart.x([100, 200, 300, 400]); chart.x({ data1: [10, 20, 30, 40], data2: [100, 200, 300, 400] }); chart.xs(); chart.xs({ data1: [10, 20, 30, 40], data2: [100, 200, 300, 
400] }); chart.axis.labels({ x: 'New X Axis Label', y: 'New Y Axis Label' }); chart.axis.min({ x: -10, y: 1000, y2: 100 }); chart.axis.max({ x: 100, y: 1000, y2: 10000 }); chart.axis.range({ min: { x: -10, y: -1000, y2: -10000 }, max: { x: 100, y: 1000, y2: 10000 } }); chart.axis.types({ x: 'linear', y2: 'log', }); chart.legend.show('data1'); chart.legend.show(['data1', 'data2']); chart.legend.show(); chart.legend.hide('data1'); chart.legend.hide(['data1', 'data2']); chart.legend.hide(); chart.subchart.isShown(); chart.subchart.show(); chart.subchart.hide(); chart.zoom([10, 20]); chart.zoom(); chart.unzoom(); chart.zoom.enable(true); chart.resize({ height: 640, width: 480 }); chart.flush(); chart.destroy(); } ////////////////// // Chart tests ///////////////// function simple_multiple() { const chart = c3.generate({ data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ] }, }); chart.load({ columns: [ ['data1', 230, 190, 300, 500, 300, 400] ] }); chart.load({ columns: [ ['data3', 130, 150, 200, 300, 200, 100] ] }); chart.unload({ ids: 'data1' }); } function timeseries() { const chart = c3.generate({ data: { x: 'x', xFormat: '%Y%m%d', // 'xFormat' can be used as custom format of 'x' columns: [ ['x', '2013-01-01', '2013-01-02', '2013-01-03', '2013-01-04', '2013-01-05', '2013-01-06'], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 340, 200, 500, 250, 350] ] }, axis: { x: { type: 'timeseries', tick: { format: '%Y-%m-%d' } } } }); chart.load({ columns: [ ['data3', 400, 500, 450, 700, 600, 500] ] }); } function chart_spline() { const chart = c3.generate({ data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50] ], type: 'spline' }, }); } function simple_xy() { const chart = c3.generate({ data: { x: 'x', columns: [ ['x', 30, 50, 100, 230, 300, 310], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 300, 200, 300, 250, 450] ] } }); chart.load({ columns: [ ['data1', 100, 250, 150, 
200, 100, 350] ] }); chart.load({ columns: [ ['data3', 80, 150, 100, 180, 80, 150] ] }); chart.unload({ ids: 'data2' }); } function simple_xy_multiple() { const chart = c3.generate({ data: { xs: { data1: 'x1', data2: 'x2', }, columns: [ ['x1', 10, 30, 45, 50, 70, 100], ['x2', 30, 50, 75, 100, 120], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 20, 180, 240, 100, 190] ] }, }); } function simple_regions() { const chart = c3.generate({ data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ], regions: { data1: [{ start: 1, end: 2, style: 'dashed' }, { start: 3 }], // currently 'dashed' style only data2: [{ end: 3 }] } }, }); } function chart_step() { const chart = c3.generate({ data: { columns: [ ['data1', 300, 350, 300, 0, 0, 100], ['data2', 130, 100, 140, 200, 150, 50] ], types: { data1: 'step', data2: 'area-step' } }, }); } function chart_area() { const chart = c3.generate({ data: { columns: [ ['data1', 300, 350, 300, 0, 0, 0], ['data2', 130, 100, 140, 200, 150, 50] ], types: { data1: 'area', data2: 'area-spline' } } }); } function chart_area_stacked() { const chart = c3.generate({ data: { columns: [ ['data1', 300, 350, 300, 0, 0, 120], ['data2', 130, 100, 140, 200, 150, 50] ], types: { data1: 'area-spline', data2: 'area-spline' // 'line', 'spline', 'step', 'area', 'area-step' are also available to stack }, groups: [['data1', 'data2']] }, }); } function chart_bar() { const chart = c3.generate({ data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50] ], type: 'bar' }, bar: { width: { ratio: 0.5 // this makes bar width 50% of length between ticks } // or // width: 100 // this makes bar width 100px } }); chart.load({ columns: [ ['data3', 130, -150, 200, 300, -200, 100] ] }); } function chart_bar_stacked() { const chart = c3.generate({ data: { columns: [ ['data1', -30, 200, 200, 400, -150, 250], ['data2', 130, 100, -100, 200, -150, 50], ['data3', -230, 200, 200, -300, 250, 250] ], type: 
'bar', groups: [ ['data1', 'data2'] ] }, grid: { y: { lines: [{ value: 0 }] } } }); chart.groups([['data1', 'data2', 'data3']]); chart.load({ columns: [['data4', 100, -50, 150, 200, -300, -100]] }); chart.groups([['data1', 'data2', 'data3', 'data4']]); } function chart_scatter() { const chart = c3.generate({ data: { xs: { setosa: 'setosa_x', versicolor: 'versicolor_x', }, // iris data from R columns: [ ["setosa_x", 3.5, 3.0, 3.2, 3.1, 3.6, 3.9, 3.4, 3.4, 2.9, 3.1, 3.7, 3.4, 3.0, 3.0, 4.0, 4.4, 3.9, 3.5, 3.8, 3.8, 3.4, 3.7, 3.6, 3.3, 3.4, 3.0, 3.4, 3.5, 3.4, 3.2, 3.1, 3.4, 4.1, 4.2, 3.1, 3.2, 3.5, 3.6, 3.0, 3.4, 3.5, 2.3, 3.2, 3.5, 3.8, 3.0, 3.8, 3.2, 3.7, 3.3], ["versicolor_x", 3.2, 3.2, 3.1, 2.3, 2.8, 2.8, 3.3, 2.4, 2.9, 2.7, 2.0, 3.0, 2.2, 2.9, 2.9, 3.1, 3.0, 2.7, 2.2, 2.5, 3.2, 2.8, 2.5, 2.8, 2.9, 3.0, 2.8, 3.0, 2.9, 2.6, 2.4, 2.4, 2.7, 2.7, 3.0, 3.4, 3.1, 2.3, 3.0, 2.5, 2.6, 3.0, 2.6, 2.3, 2.7, 3.0, 2.9, 2.9, 2.5, 2.8], ["setosa", 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.3, 0.2, 0.2, 0.1, 0.2, 0.2, 0.1, 0.1, 0.2, 0.4, 0.4, 0.3, 0.3, 0.3, 0.2, 0.4, 0.2, 0.5, 0.2, 0.2, 0.4, 0.2, 0.2, 0.2, 0.2, 0.4, 0.1, 0.2, 0.2, 0.2, 0.2, 0.1, 0.2, 0.2, 0.3, 0.3, 0.2, 0.6, 0.4, 0.3, 0.2, 0.2, 0.2, 0.2], ["versicolor", 1.4, 1.5, 1.5, 1.3, 1.5, 1.3, 1.6, 1.0, 1.3, 1.4, 1.0, 1.5, 1.0, 1.4, 1.3, 1.4, 1.5, 1.0, 1.5, 1.1, 1.8, 1.3, 1.5, 1.2, 1.3, 1.4, 1.4, 1.7, 1.5, 1.0, 1.1, 1.0, 1.2, 1.6, 1.5, 1.6, 1.5, 1.3, 1.3, 1.3, 1.2, 1.4, 1.2, 1.0, 1.3, 1.2, 1.3, 1.3, 1.1, 1.3], ], type: 'scatter' }, axis: { x: { label: 'Sepal.Width', tick: { fit: false } }, y: { label: 'Petal.Width' } }, }); chart.load({ xs: { virginica: 'virginica_x' }, columns: [ ["virginica_x", 3.3, 2.7, 3.0, 2.9, 3.0, 3.0, 2.5, 2.9, 2.5, 3.6, 3.2, 2.7, 3.0, 2.5, 2.8, 3.2, 3.0, 3.8, 2.6, 2.2, 3.2, 2.8, 2.8, 2.7, 3.3, 3.2, 2.8, 3.0, 2.8, 3.0, 2.8, 3.8, 2.8, 2.8, 2.6, 3.0, 3.4, 3.1, 3.0, 3.1, 3.1, 3.1, 2.7, 3.2, 3.3, 3.0, 2.5, 3.0, 3.4, 3.0], ["virginica", 2.5, 1.9, 2.1, 1.8, 2.2, 2.1, 1.7, 1.8, 1.8, 2.5, 2.0, 1.9, 2.1, 2.0, 2.4, 
2.3, 1.8, 2.2, 2.3, 1.5, 2.3, 2.0, 2.0, 1.8, 2.1, 1.8, 1.8, 1.8, 2.1, 1.6, 1.9, 2.0, 2.2, 1.5, 1.4, 2.3, 2.4, 1.8, 1.8, 2.1, 2.4, 2.3, 1.9, 2.3, 2.5, 2.3, 1.9, 2.0, 2.3, 1.8], ] }); chart.unload({ ids: 'setosa' }); chart.load({ columns: [ ["virginica", 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.3, 0.2, 0.2, 0.1, 0.2, 0.2, 0.1, 0.1, 0.2, 0.4, 0.4, 0.3, 0.3, 0.3, 0.2, 0.4, 0.2, 0.5, 0.2, 0.2, 0.4, 0.2, 0.2, 0.2, 0.2, 0.4, 0.1, 0.2, 0.2, 0.2, 0.2, 0.1, 0.2, 0.2, 0.3, 0.3, 0.2, 0.6, 0.4, 0.3, 0.2, 0.2, 0.2, 0.2], ] }); } function chart_pie() { const chart = c3.generate({ data: { // iris data from R columns: [ ['data1', 30], ['data2', 120], ], type : 'pie', onclick: (d, i) => { console.log("onclick", d, i); }, onmouseover: (d, i) => { console.log("onmouseover", d, i); }, onmouseout: (d, i) => { console.log("onmouseout", d, i); } }, }); chart.load({ columns: [ ["setosa", 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.3, 0.2, 0.2, 0.1, 0.2, 0.2, 0.1, 0.1, 0.2, 0.4, 0.4, 0.3, 0.3, 0.3, 0.2, 0.4, 0.2, 0.5, 0.2, 0.2, 0.4, 0.2, 0.2, 0.2, 0.2, 0.4, 0.1, 0.2, 0.2, 0.2, 0.2, 0.1, 0.2, 0.2, 0.3, 0.3, 0.2, 0.6, 0.4, 0.3, 0.2, 0.2, 0.2, 0.2], ["versicolor", 1.4, 1.5, 1.5, 1.3, 1.5, 1.3, 1.6, 1.0, 1.3, 1.4, 1.0, 1.5, 1.0, 1.4, 1.3, 1.4, 1.5, 1.0, 1.5, 1.1, 1.8, 1.3, 1.5, 1.2, 1.3, 1.4, 1.4, 1.7, 1.5, 1.0, 1.1, 1.0, 1.2, 1.6, 1.5, 1.6, 1.5, 1.3, 1.3, 1.3, 1.2, 1.4, 1.2, 1.0, 1.3, 1.2, 1.3, 1.3, 1.1, 1.3], ["virginica", 2.5, 1.9, 2.1, 1.8, 2.2, 2.1, 1.7, 1.8, 1.8, 2.5, 2.0, 1.9, 2.1, 2.0, 2.4, 2.3, 1.8, 2.2, 2.3, 1.5, 2.3, 2.0, 2.0, 1.8, 2.1, 1.8, 1.8, 1.8, 2.1, 1.6, 1.9, 2.0, 2.2, 1.5, 1.4, 2.3, 2.4, 1.8, 1.8, 2.1, 2.4, 2.3, 1.9, 2.3, 2.5, 2.3, 1.9, 2.0, 2.3, 1.8], ] }); chart.unload({ ids: 'data1' }); chart.unload({ ids: 'data2' }); } function chart_donut() { const chart = c3.generate({ data: { columns: [ ['data1', 30], ['data2', 120], ], type : 'donut', onclick: (d, i) => { console.log("onclick", d, i); }, onmouseover: (d, i) => { console.log("onmouseover", d, i); }, onmouseout: (d, i) => { 
console.log("onmouseout", d, i); } }, donut: { title: "Iris Petal Width" }, }); chart.load({ columns: [ ["setosa", 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.3, 0.2, 0.2, 0.1, 0.2, 0.2, 0.1, 0.1, 0.2, 0.4, 0.4, 0.3, 0.3, 0.3, 0.2, 0.4, 0.2, 0.5, 0.2, 0.2, 0.4, 0.2, 0.2, 0.2, 0.2, 0.4, 0.1, 0.2, 0.2, 0.2, 0.2, 0.1, 0.2, 0.2, 0.3, 0.3, 0.2, 0.6, 0.4, 0.3, 0.2, 0.2, 0.2, 0.2], ["versicolor", 1.4, 1.5, 1.5, 1.3, 1.5, 1.3, 1.6, 1.0, 1.3, 1.4, 1.0, 1.5, 1.0, 1.4, 1.3, 1.4, 1.5, 1.0, 1.5, 1.1, 1.8, 1.3, 1.5, 1.2, 1.3, 1.4, 1.4, 1.7, 1.5, 1.0, 1.1, 1.0, 1.2, 1.6, 1.5, 1.6, 1.5, 1.3, 1.3, 1.3, 1.2, 1.4, 1.2, 1.0, 1.3, 1.2, 1.3, 1.3, 1.1, 1.3], ["virginica", 2.5, 1.9, 2.1, 1.8, 2.2, 2.1, 1.7, 1.8, 1.8, 2.5, 2.0, 1.9, 2.1, 2.0, 2.4, 2.3, 1.8, 2.2, 2.3, 1.5, 2.3, 2.0, 2.0, 1.8, 2.1, 1.8, 1.8, 1.8, 2.1, 1.6, 1.9, 2.0, 2.2, 1.5, 1.4, 2.3, 2.4, 1.8, 1.8, 2.1, 2.4, 2.3, 1.9, 2.3, 2.5, 2.3, 1.9, 2.0, 2.3, 1.8], ] }); chart.unload({ ids: 'data1' }); chart.unload({ ids: 'data2' }); } function chart_gauge() { const chart = c3.generate({ data: { columns: [ ['data', 91.4] ], type: 'gauge', onclick: (d, i) => { console.log("onclick", d, i); }, onmouseover: (d, i) => { console.log("onmouseover", d, i); }, onmouseout: (d, i) => { console.log("onmouseout", d, i); } }, gauge: { label: { format: (value, ratio) => value, show: false // to turn off the min/max labels. }, min: 0, // 0 is default, //can handle negative min e.g. vacuum / voltage / current flow / rate of change max: 100, // 100 is default units: ' %', width: 39 // for adjusting arc thickness }, color: { pattern: ['#FF0000', '#F97600', '#F6C600', '#60B044'], // the three color levels for the percentage values. 
// Tail of a gauge-style chart test begun above this chunk: value-based thresholds,
// a fixed height, then a series of loads that walk the gauge through its ranges.
threshold: {
    unit: 'value', // percentage is default
    max: 200, // 100 is default
    values: [30, 60, 90, 100]
} }, size: { height: 180 }, });
chart.load({ columns: [['data', 10]] });
chart.load({ columns: [['data', 50]] });
chart.load({ columns: [['data', 70]] });
chart.load({ columns: [['data', 0]] });
chart.load({ columns: [['data', 100]] });
}

// Stanford (density heat-map) chart: HPE vs HPL sample pairs, colour-scaled by the
// 'Epochs' column on a pow10 scale from 1 to 10000.
function chart_stanford() {
    const chart = c3.generate({
        data: {
            x: 'HPE',
            epochs: 'Epochs',
            columns: [
                ['HPE', 2.5, 2.5, 2.5, 2.5, 2.5, 3, 3, 3, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4.5, 4.5, 4.5, 4.5, 4.5, 4.5, 4.5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5.5, 5.5, 5.5, 2, 2.5, 2.5, 3, 3, 3.5, 3.5, 3.5, 3.5, 3.5, 3.5, 4, 4, 4, 4, 4, 4.5, 4.5, 4.5, 4.5, 4.5, 5, 5, 5, 5, 5, 5, 5.5, 5.5, 2.5, 3, 3, 3.5, 3.5, 3.5, 3.5, 4, 4, 4, 4, 4, 4.5, 4.5, 4.5, 5, 5, 5, 5, 5, 5, 5.5, 5.5, 5.5, 5.5, 2.5, 2.5, 2.5, 3, 3.5, 3.5, 3.5, 3.5, 3.5, 4, 4.5, 4.5, 4.5, 4.5, 4.5, 5, 5, 5, 5.5, 2, 3.5, 3.5, 3.5, 3.5, 3.5, 4, 4.5, 4.5, 4.5, 4.5, 5, 2, 2, 3, 3, 3.5, 3.5, 3.5, 3.5, 4, 4, 4, 4, 5, 2, 3, 3, 3.5, 3.5, 3.5, 3.5, 4, 4.5, 5, 5, 5, 5.5, 5.5, 2.5, 3, 3, 3, 3.5, 4, 2.5, 3, 3.5, 4, 4, 4.5, 5, 3.5, 4, 4, 4, 4, 4.5, 3.5, 4, 4.5, 5, 5, 2.5, 3, 3.5, 3.5, 4, 4.5, 4.5, 4, 5, 3, 4, 4, 2, 4.5, 3.5, 2.5, 3.5, 4, 4, 2.5, 2.5, 3, 3, 4, 4.5, 5, 5, 4.5, 2.5, 3, 4, 3, 3.5, 3.5, 4, 2.5, 3.5, 2.5, 3.5, 2.5, 2.5, 3.5, 2.5, 4.5, 3, 4, 2.5, 4.5, 2.5, 4, 4, 2.5, 3, 3.5, 2.5, 3.5, 3.5, 3.5, 2.5, 3.5, 3.5, 4, 4, 3.5, 4, 4, 4],
                ['HPL', 24.5, 24, 27.5, 56.5, 26.5, 26, 51.5, 50, 39, 39.5, 54, 48.5, 54.5, 53, 52, 13.5, 16.5, 15.5, 14.5, 19, 19.5, 41, 40, 42.5, 40.5, 41.5, 30, 56, 47, 11.5, 11, 12, 14.5, 55, 56.5, 54, 55.5, 56, 48.5, 19, 56, 56.5, 53.5, 51.5, 52, 31.5, 36.5, 38.5, 22, 21, 22.5, 37, 38, 38.5, 11, 55, 14.5, 12.5, 56, 22, 11, 48, 12.5, 14, 17, 13.5, 43, 55.5, 53.5, 10.5, 49.5, 54.5, 51.5, 19.5, 24, 52.5, 49.5, 47, 45.5, 46, 20, 34.5, 37.5, 28, 10, 26.5, 22.5, 13, 18.5, 20, 29, 39.5, 48.5, 50.5, 19.5, 29, 27.5, 52.5, 50.5, 53, 37, 36, 34.5, 20.5, 31.5, 33, 32, 36, 29, 28.5, 31.5, 29, 30, 11.5, 49, 52.5, 20.5, 49.5, 28, 24.5, 53, 50, 23.5, 47.5, 38, 35, 34, 12, 21, 36.5, 51, 12, 58.5, 36.5, 28.5, 51, 50.5, 20, 50, 56, 55, 29.5, 28.5, 23, 17.5, 38.5, 57.5, 29.5, 38.5, 49, 52.5, 34, 11.5, 27, 30, 10, 51.5, 50.5, 18, 20.5, 23, 49, 51, 48, 33.5, 32.5, 27, 28, 25.5, 57.5, 10.5, 52, 29.5, 27.5, 50, 28.5, 51.5, 21.5, 35.5, 49.5, 37.5, 39, 50, 51, 22.5, 58, 20, 25.5, 48.5, 32, 30, 24.5, 23.5, 29.5, 23, 25, 21, 38, 32.5, 12, 22, 37, 55.5, 22, 38, 55.5, 29, 23.5, 21, 12.5, 14, 11.5, 56.5, 21.5, 20.5, 33, 33.5, 27, 13, 10.5, 22.5, 57, 24, 28.5, 28, 10, 37, 56, 37.5, 11, 10.5, 28, 13.5, 26, 11, 27.5, 12, 26.5, 26, 24.5, 24, 25, 25, 25, 11.5, 25.5, 26.5, 26, 25.5, 27.5, 27, 25, 27, 24.5, 26, 26.5, 25.5],
                ['Epochs', 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 11, 11, 11, 12, 12, 13, 13, 13, 13, 13, 13, 13, 14, 14, 15, 15, 15, 16, 16, 17, 18, 18, 18, 18, 19, 19, 19, 19, 19, 20, 20, 20, 22, 23, 23, 23, 24, 24, 24, 24, 25, 28, 29, 29, 36, 38, 39, 43, 44, 47, 50, 54, 54, 59, 62, 62, 70, 70, 81, 84, 85, 86, 88, 89, 93, 94, 95, 106, 110, 111, 115, 170]
            ],
            type: 'stanford',
        },
        legend: { hide: true },
        point: { focus: { expand: { r: 5 } }, r: 2 },
        axis: {
            x: { show: true, label: { text: 'HPE (m)', position: 'outer-center' }, min: 0, max: 61, tick: { values: d3.range(0, 65, 10) }, padding: { top: 0, bottom: 0, left: 0, right: 0 }, },
            y: { show: true, label: { text: 'HPL (m)', position: 'outer-middle' }, min: 0, max: 60, tick: { values: d3.range(0, 65, 10) }, padding: { top: 5, bottom: 0, left: 0, right: 0 }, }
        },
        stanford: { scaleMin: 1, scaleMax: 10000, scaleFormat: 'pow10', padding: { top: 15, right: 0, bottom: 0, left: 0 } }
    });
}

// Mixed-type chart: bars by default with per-series type overrides and one stacked group.
function chart_combination() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, 20, 50, 40, 60, 50],
                ['data2', 200, 130, 90, 240, 130, 220],
                ['data3', 300, 200, 160, 400, 250, 250],
                ['data4', 200, 130, 90, 240, 130, 220],
                ['data5', 130, 120, 150, 140, 160, 150],
                ['data6', 90, 70, 20, 50, 60, 120],
            ],
            type: 'bar',
            types: { data3: 'spline', data4: 'line', data6: 'area', },
            groups: [['data1', 'data2']],
        },
    });
}

////////////////////
// Axis tests
////////////////////

// Category x-axis with explicit category names.
function categorized() {
    const chart = c3.generate({
        data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250, 50, 100, 250] ] },
        axis: { x: { type: 'category', categories: ['cat1', 'cat2', 'cat3', 'cat4', 'cat5', 'cat6', 'cat7', 'cat8', 'cat9'] } }
    });
}

// Horizontal layout: axis.rotated swaps x and y.
function axes_rotated() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            types: { data1: 'bar', },
        },
        axis: { rotated: true, },
    });
}

// Second y-axis: data2 is bound to y2, which must be explicitly shown.
function axes_y2() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            axes: { data1: 'y', data2: 'y2', },
        },
        axis: { y2: { show: true, }, },
    });
}

// Timeseries x-axis with a tick formatter callback (receives a Date).
function axes_x_tick_format() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [
                [ 'x', '2010-01-01', '2011-01-01', '2012-01-01', '2013-01-01', '2014-01-01', '2015-01-01' ],
                [ 'sample', 30, 200, 100, 400, 150, 250 ],
            ],
        },
        axis: {
            x: {
                type: 'timeseries',
                tick: {
                    format: (x: Date) => x.getFullYear(),
                    // format: '%Y' // format string is also available for timeseries data
                },
            },
        },
    });
}

// Timeseries x-axis limited to a fixed number of ticks via tick.count.
function axes_x_tick_count() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [
                [ 'x', '2013-01-01', '2013-01-02', '2013-01-03', '2013-01-04', '2013-01-05',
'2013-01-06', '2013-01-07', '2013-01-08', '2013-01-09', '2013-01-10', '2013-01-11', '2013-01-12' ],
                [ 'sample', 30, 200, 100, 400, 150, 250, 30, 200, 100, 400, 150, 250 ],
            ],
        },
        axis: {
            x: {
                type: 'timeseries',
                tick: { count: 4, format: '%Y-%m-%d', },
            },
        },
    });
}

// Explicit tick positions via tick.values.
function axes_x_tick_values() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [
                [ 'x', '2013-01-01', '2013-01-02', '2013-01-03', '2013-01-04', '2013-01-05', '2013-01-06', '2013-01-07', '2013-01-08', '2013-01-09', '2013-01-10', '2013-01-11', '2013-01-12' ],
                [ 'sample', 30, 200, 100, 400, 150, 250, 30, 200, 100, 400, 150, 250 ],
            ],
        },
        axis: {
            x: {
                type: 'timeseries',
                tick: {
                    // this also works for non timeseries data
                    values: ['2013-01-05', '2013-01-10'],
                },
            },
        },
    });
}

// Tick culling on a category axis.
function axes_x_tick_culling() {
    const chart = c3.generate({
        data: {
            columns: [
                [ 'sample', 30, 200, 100, 400, 150, 250, 30, 200, 100, 400, 150, 250, 30, 200, 100, 400, 150, 250, 200, 100, 400, 150, 250 ],
            ],
        },
        axis: {
            x: {
                type: 'category',
                tick: {
                    culling: {
                        max: 4, // the number of tick texts will be adjusted to less than this value
                    },
                    // for normal axis, default on
                    // for category axis, default off
                },
            },
        },
    });
}

// tick.fit places ticks on the data points rather than evenly.
function axes_x_tick_fit() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [ ['x', '2013-10-31', '2013-12-31', '2014-01-31', '2014-02-28'], ['sample', 30, 100, 400, 150], ],
        },
        axis: {
            x: {
                type: 'timeseries',
                tick: { fit: true, format: '%e %b %y', },
            },
        },
    });
}

// Fixed tick-label width.
function axes_x_tick_width() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [ ['x', '2013-10-31', '2013-12-31', '2014-01-31', '2014-02-28'], ['sample', 30, 100, 400, 150], ],
        },
        axis: {
            x: {
                tick: { width: 100, },
            },
        },
    });
}

// Timeseries parsed via xFormat, rendered in UTC (localtime: false).
function axes_x_localtime() {
    const chart = c3.generate({
        data: {
            x: 'x',
            xFormat: '%Y',
            columns: [
                // ["x", "2012-12-31", "2013-01-01", "2013-01-02", "2013-01-03", "2013-01-04", "2013-01-05"],
                ['x', '2010', '2011', '2012', '2013', '2014', '2015'],
                ['data1', 30, 200, 100, 400, 150, 250],
                ['data2', 130, 340, 200, 500, 250, 350],
            ],
        },
        axis: {
            x: {
                type: 'timeseries',
                // if true, treat x value as localtime (Default)
                // if false, convert to UTC internally
                localtime: false,
                tick: { format: '%Y-%m-%d %H:%M:%S', },
            },
        },
    });
}

// Rotated, single-line category tick labels with extra axis height.
function axes_x_tick_rotate() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [
                [ 'x', 'www.somesitename1.com', 'www.somesitename2.com', 'www.somesitename3.com', 'www.somesitename4.com', 'www.somesitename5.com', 'www.somesitename6.com', 'www.somesitename7.com', 'www.somesitename8.com', 'www.somesitename9.com', 'www.somesitename10.com', 'www.somesitename11.com', 'www.somesitename12.com' ],
                [ 'pv', 90, 100, 140, 200, 100, 400, 90, 100, 140, 200, 100, 400 ],
            ],
            type: 'bar',
        },
        axis: {
            x: {
                type: 'category',
                tick: { rotate: 75, multiline: false, },
                height: 130,
            },
        },
    });
}

// y-axis tick formatting with a d3 format object (or a plain callback).
function axes_y_tick_format() {
    const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 2500]], },
        axis: {
            y: {
                tick: {
                    format: d3.format('$,'),
                    // format: (d) => { return "$" + d; }
                },
            },
        },
    });
}

// Per-axis padding (in pixels) on y and y2.
function axes_y_padding() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            axes: { data1: 'y', data2: 'y2', },
        },
        axis: {
            y: { padding: { top: 200, bottom: 0 }, },
            y2: { padding: { top: 100, bottom: 100 }, show: true, },
        },
    });
}

// Explicit y-axis range.
function axes_y_range() {
    const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
        axis: {
            y: {
                max: 400,
                min: -400,
                // Range includes padding, set 0 if no padding needed
                // padding: {top:0, bottom:0}
            },
        },
    });
}

// Axis labels given as plain strings.
function axes_label() {
    const chart = c3.generate({
        data: {
            columns: [ ['sample', 30, 200, 100, 400, 150, 250], ['sample2', 130, 300, 200, 500, 250, 350], ],
            axes: { sample2: 'y2', },
        },
        axis: {
            x: { label: 'X Label', },
            y: { label: 'Y Label', },
            y2: { show: true, label: 'Y2 Label', },
        },
    });
}

// Axis labels given as objects with explicit positions.
function axes_label_position() {
    const chart = c3.generate({
        data: {
            columns: [
                ['sample1', 30, 200, 100, 400, 150, 250],
                ['sample2', 430, 300, 500, 400, 650, 250],
            ],
            axes: { sample1: 'y', sample2: 'y2', },
        },
        axis: {
            x: {
                label: {
                    text: 'X Label',
                    position: 'outer-center',
                    // inner-right : default
                    // inner-center
                    // inner-left
                    // outer-right
                    // outer-center
                    // outer-left
                },
            },
            y: {
                label: {
                    text: 'Y Label',
                    position: 'outer-middle',
                    // inner-top : default
                    // inner-middle
                    // inner-bottom
                    // outer-top
                    // outer-middle
                    // outer-bottom
                },
            },
            y2: {
                show: true,
                label: {
                    text: 'Y2 Label',
                    position: 'outer-middle',
                    // inner-top : default
                    // inner-middle
                    // inner-bottom
                    // outer-top
                    // outer-middle
                    // outer-bottom
                },
            },
        },
    });
}

///////////////////
// Data Tests
///////////////////

// Column-oriented data input.
function data_columned() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, 20, 50, 40, 60, 50],
                ['data2', 200, 130, 90, 240, 130, 220],
                ['data3', 300, 200, 160, 400, 250, 250],
            ],
        },
    });
}

// Row-oriented data input (header row of series names, then value rows).
function data_rowed() {
    const chart = c3.generate({
        data: {
            rows: [
                ['data1', 'data2', 'data3'],
                [90, 120, 300],
                [40, 160, 240],
                [50, 200, 290],
                [120, 160, 230],
                [80, 130, 300],
                [90, 220, 320],
            ],
        },
    });
}

// JSON data input: object-of-arrays form first, then array-of-objects with keys.
function data_json() {
    let chart = c3.generate({
        data: {
            json: {
                data1: [30, 20, 50, 40, 60, 50],
                data2: [200, 130, 90, 240, 130, 220],
                data3: [300, 200, 160, 400, 250, 250],
            },
        },
    });
    setTimeout(() => {
        chart = c3.generate({
            data: {
                json: [
                    { name: 'www.site1.com', upload: 200, download: 200, total: 400, },
                    { name: 'www.site2.com', upload: 100, download: 300, total: 400, },
                    { name: 'www.site3.com', upload: 300, download: 200, total: 500, },
                    { name: 'www.site4.com', upload: 400, download: 100, total: 500, },
                ],
                keys: {
                    x: "name", // it's possible to specify 'x' when category axis
                    value: ['upload', 'download'],
                },
            },
            axis: { x: { type: "category" }, },
        });
    }, 1000);
    setTimeout(() => {
        chart.load({
            json: [
                { name: 'www.site1.com', upload: 800, download: 500, total: 400, },
                { name: 'www.site2.com', upload: 600, download: 600, total: 400, },
                { name: 'www.site3.com', upload: 400, download: 800, total: 500, },
                { name: 'www.site4.com', upload:
400, download: 700, total: 500, },
            ],
            keys: { value: ['upload', 'download'], },
        });
    }, 2000);
}

// Loading data from URLs (CSV by default, JSON via mimeType).
function data_url() {
    const chart = c3.generate({
        data: { url: '/data/c3_test.csv', },
    });
    setTimeout(() => {
        c3.generate({
            data: { url: '/data/c3_test.json', mimeType: 'json', },
        });
    }, 1000);
}

// String x values on a category axis, then staged reloads with new categories.
function data_stringx() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [
                [ 'x', 'www.site1.com', 'www.site2.com', 'www.site3.com', 'www.site4.com', ],
                ['download', 30, 200, 100, 400],
                ['loading', 90, 100, 140, 200],
            ],
            groups: [['download', 'loading']],
            type: 'bar',
        },
        axis: {
            x: {
                type: 'category', // this needed to load string x value
            },
        },
    });
    setTimeout(() => {
        chart.load({
            columns: [
                [ 'x', 'www.siteA.com', 'www.siteB.com', 'www.siteC.com', 'www.siteD.com', ],
                ['download', 130, 200, 150, 350],
                ['loading', 190, 180, 190, 140],
            ],
        });
    }, 1000);
    setTimeout(() => {
        chart.load({
            columns: [
                ['x', 'www.siteE.com', 'www.siteF.com', 'www.siteG.com'],
                ['download', 30, 300, 200],
                ['loading', 90, 130, 240],
            ],
        });
    }, 2000);
    setTimeout(() => {
        chart.load({
            columns: [
                [ 'x', 'www.site1.com', 'www.site2.com', 'www.site3.com', 'www.site4.com', ],
                ['download', 130, 300, 200, 470],
                ['loading', 190, 130, 240, 340],
            ],
        });
    }, 3000);
    setTimeout(() => {
        chart.load({
            columns: [ ['download', 30, 30, 20, 170], ['loading', 90, 30, 40, 40], ],
        });
    }, 4000);
    setTimeout(() => {
        chart.load({ url: '/data/c3_string_x.csv', });
    }, 5000);
}

// Staged load/unload cycles mixing columns, rows, URLs and type changes.
function data_load() {
    const chart = c3.generate({
        data: { url: '/data/c3_test.csv', type: 'line', },
    });
    setTimeout(() => {
        chart.load({ url: '/data/c3_test2.csv', });
    }, 1000);
    setTimeout(() => {
        chart.load({
            columns: [ ['data1', 130, 120, 150, 140, 160, 150], ['data4', 30, 20, 50, 40, 60, 50], ],
            unload: ['data2', 'data3'],
        });
    }, 2000);
    setTimeout(() => {
        chart.load({
            rows: [ ['data2', 'data3'], [120, 300], [160, 240], [200, 290], [160, 230], [130, 300], [220, 320], ],
            unload: 'data4',
        });
    }, 3000);
    setTimeout(() => {
        chart.load({ columns: [['data4', 30, 20, 50, 40, 60, 50, 100, 200]], type: 'bar', });
    }, 4000);
    setTimeout(() => {
        chart.unload({ ids: 'data4', });
    }, 5000);
    setTimeout(() => {
        chart.load({ columns: [['data2', null, 30, 20, 50, 40, 60, 50]], });
    }, 6000);
    setTimeout(() => {
        chart.unload();
    }, 7000);
    setTimeout(() => {
        chart.load({
            rows: [ ['data4', 'data2', 'data3'], [90, 120, 300], [40, 160, 240], [50, 200, 290], [120, 160, 230], [80, 130, 300], [90, 220, 320], ],
            type: 'bar',
        });
    }, 8000);
    setTimeout(() => {
        chart.load({
            rows: [ ['data5', 'data6'], [190, 420], [140, 460], [150, 500], [220, 460], [180, 430], [190, 520], ],
            type: 'line',
        });
    }, 9000);
    setTimeout(() => {
        chart.unload({ ids: ['data2', 'data3'], });
    }, 10000);
}

// Display names for series in legend/tooltip.
function data_name() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            names: { data1: 'Name 1', data2: 'Name 2', },
        },
    });
}

// Type guard: a data.color callback receives either a DataPoint or a series descriptor.
function isDataPoint(d: string | c3.DataPoint | c3.DataSeries): d is c3.DataPoint {
    return typeof d === "object" && "value" in d;
}

// Per-series colors plus a color callback that darkens data3 by value.
function data_color() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, 20, 50, 40, 60, 50],
                ['data2', 200, 130, 90, 240, 130, 220],
                ['data3', 300, 200, 160, 400, 250, 250],
            ],
            type: 'bar',
            colors: { data1: '#ff0000', data2: '#00ff00', data3: '#0000ff', },
            color: (color, d) => {
                // d will be 'id' when called for legends
                if (isDataPoint(d)) {
                    return d.id && d.id === 'data3' ? d3.rgb(color).darker(d.value / 150) : color;
                }
                return color;
            },
        },
    });
}

// Stack ordering for grouped bars, with later loads joining the group.
function data_order() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 130, 200, 320, 400, 530, 750],
                ['data2', -130, 10, 130, 200, 150, 250],
                ['data3', -130, -50, -10, -200, -250, -150],
            ],
            type: 'bar',
            groups: [['data1', 'data2', 'data3']],
            order: 'desc', // stack order by sum of values descendantly. this is default.
            // order: "asc" // stack order by sum of values ascendantly.
            // order: null // stack order by data definition.
        },
        grid: {
            y: { lines: [{ value: 0 }], },
        },
    });
    setTimeout(() => {
        chart.load({ columns: [['data4', 1200, 1300, 1450, 1600, 1520, 1820]], });
    }, 1000);
    setTimeout(() => {
        chart.load({ columns: [['data5', 200, 300, 450, 600, 520, 820]], });
    }, 2000);
    setTimeout(() => {
        chart.groups([['data1', 'data2', 'data3', 'data4', 'data5']]);
    }, 3000);
}

// Value labels on stacked bars (boolean form).
function data_label() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, -200, -100, 400, 150, 250],
                ['data2', -50, 150, -150, 150, -50, -150],
                ['data3', -100, 100, -40, 100, -150, -50],
            ],
            groups: [['data1', 'data2']],
            type: 'bar',
            labels: true,
        },
        grid: {
            y: { lines: [{ value: 0 }], },
        },
    });
}

// Per-series label format callbacks/objects.
function data_label_format() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, -200, -100, 400, 150, 250],
                ['data2', -50, 150, -150, 150, -50, -150],
                ['data3', -100, 100, -40, 100, -150, -50],
            ],
            groups: [['data1', 'data2']],
            type: 'bar',
            labels: {
                // format: function (v, id, i, j) { return "Default Format"; },
                format: {
                    data1: d3.format('$'),
                    // data1: function (v, id, i, j) { return "Format for data1"; },
                },
            },
        },
        grid: {
            y: { lines: [{ value: 0 }], },
        },
    });
}

// Localized number formatting via a d3 locale definition.
function data_number_format_l10n() {
    // Locale for Russian (ru_RU)
    const d3locale = d3.formatDefaultLocale({
        decimal: ",",
        thousands: "\u00A0",
        grouping: [3],
        currency: ["", " руб."],
    });
    // More about locale settings: https://github.com/mbostock/d3/wiki/Localization
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30000, 20000, 10000, 40000, 15000, 250000],
                ['data2', 100.5, 1200.46, 100.1, 40.12, 150.1, 250]
            ],
            axes: { data2: 'y2' }
        },
        axis : {
            y : { tick: { format: d3locale.format(",") } },
            y2: { show: true, tick: { format: d3locale.format(",") } }
        }
    });
}

///////////////////
// Grid Tests
///////////////////

// Toggle the background grid lines on both axes.
function options_gridline() {
    const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250, 120, 200]], },
        grid: {
            x: { show: true, },
            y: { show: true, },
        },
    });
}

// Labelled vertical grid lines at fixed x positions.
function grid_x_lines() {
    const chart =
c3.generate({ data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], }, grid: { x: { lines: [ { value: 1, text: 'Lable 1' }, { value: 3, text: 'Lable 3', position: 'middle' }, { value: 4.5, text: 'Lable 4.5', position: 'start' }, ], }, }, }); } function grid_y_lines() { const chart = c3.generate({ data: { columns: [ ['sample', 30, 200, 100, 400, 150, 250], ['sample2', 1300, 1200, 1100, 1400, 1500, 1250], ], axes: { sample2: 'y2', }, }, axis: { y2: { show: true, }, }, grid: { y: { lines: [ { value: 50, text: 'Label 50 for y' }, { value: 1300, text: 'Label 1300 for y2', position: 'start', }, { value: 350, text: 'Label 350 for y', position: 'middle' }, ], }, }, }); } /////////////////// // Region Tests /////////////////// function region() { const chart = c3.generate({ data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250, 400], ['data2', 830, 1200, 1100, 1400, 1150, 1250, 1500], ], axes: { data2: 'y2', }, }, axis: { y2: { show: true, }, }, regions: [ { axis: 'x', end: 1, class: 'regionX' }, { axis: 'x', start: 2, end: 4, class: 'regionX' }, { axis: 'x', start: 5, class: 'regionX' }, { axis: 'y', end: 50, class: 'regionY' }, { axis: 'y', start: 80, end: 140, class: 'regionY' }, { axis: 'y', start: 400, class: 'regionY' }, { axis: 'y2', end: 900, class: 'regionY2' }, { axis: 'y2', start: 1150, end: 1250, class: 'regionY2' }, { axis: 'y2', start: 1300, class: 'regionY2' }, ], }); } function region_timeseries() { const chart = c3.generate({ data: { x: 'date', columns: [ [ 'date', '2014-01-01', '2014-01-10', '2014-01-20', '2014-01-30', '2014-02-01' ], ['sample', 30, 200, 100, 400, 150, 250], ], }, axis: { x: { type: 'timeseries', }, }, regions: [ { start: '2014-01-05', end: '2014-01-10' }, { start: new Date('2014/01/15'), end: new Date('20 Jan 2014') }, { start: 1390575600000, end: 1391007600000 }, // start => 2014-01-25 00:00:00, end => 2014-01-30 00:00:00 ], }); } ///////////////////// // Interaction Tests ///////////////////// function options_subchart() { 
const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
        subchart: { show: true, },
    });
}

// Mouse-wheel / drag zooming on a long series.
function interaction_zoom() {
    const chart = c3.generate({
        data: {
            columns: [
                [ 'sample', 30, 200, 100, 400, 150, 250, 150, 200, 170, 240, 350, 150, 100, 400, 150, 250, 150, 200, 170, 240, 100, 150, 250, 150, 200, 170, 240, 30, 200, 100, 400, 150, 250, 150, 200, 170, 240, 350, 150, 100, 400, 350, 220, 250, 300, 270, 140, 150, 90, 150, 50, 120, 70, 40 ],
            ],
        },
        zoom: { enabled: true, },
    });
}

/////////////////////
// Legend Tests
/////////////////////

// Hide the legend entirely.
function options_legend() {
    const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
        legend: { show: false, },
    });
}

// Legend on the right, then staged load/unload and pie/line transforms.
function legend_position() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
        },
        legend: { position: 'right', },
    });
    setTimeout(() => {
        chart.load({ columns: [['data3', 130, 150, 200, 300, 200, 100]], });
    }, 1000);
    setTimeout(() => {
        chart.unload({ ids: 'data1', });
    }, 2000);
    setTimeout(() => {
        chart.transform('pie');
    }, 3000);
    setTimeout(() => {
        chart.transform('line');
    }, 4000);
}

// Hand-built DOM legend driving chart.focus/revert/toggle via the chart API.
function legend_custom() {
    const chart = c3.generate({
        data: {
            columns: [['data1', 100], ['data2', 300], ['data3', 200]],
            type: 'pie',
        },
        legend: { show: false, },
    });
    function toggle(id: any) {
        chart.toggle(id);
    }
    d3.select('.container')
        .insert('div', '.chart')
        .attr('class', 'legend')
        .selectAll('span')
        .data(['data1', 'data2', 'data3'])
        .enter()
        .append('span')
        .attr('data-id', (id) => id)
        .html((id) => id)
        .each(function(id) {
            d3.select(this).style('background-color', chart.color(id));
        })
        .on('mouseover', (id) => { chart.focus(id); })
        .on('mouseout', (id) => { chart.revert(); })
        .on('click', (id) => { chart.toggle(id); });
}

// Legend padding and custom tile dimensions.
function legend_tiles() {
    const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
        legend: {
            // amount of padding to put between each legend element
            padding: 5,
            // define custom height and width for the legend item tile
            item: {
                tile: { width: 15, height: 2, },
            },
        },
    });
}

/////////////////////
// Tooltip Tests
/////////////////////

// Disable the tooltip.
function tooltip_show() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
        },
        tooltip: { show: false, },
    });
}

// Per-series (ungrouped) tooltips.
function tooltip_grouped() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, 200, 100, 400, 150, 250],
                ['data2', 50, 20, 10, 40, 15, 25],
                ['data3', 500, 320, 210, 340, 215, 125],
            ],
        },
        tooltip: {
            grouped: false, // Default true
        },
    });
}

// Custom tooltip title and per-series value formatting.
function tooltip_format() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30000, 20000, 10000, 40000, 15000, 250000],
                ['data2', 100, 200, 100, 40, 150, 250],
            ],
            axes: { data2: 'y2', },
        },
        axis: {
            y: { tick: { format: d3.format('s'), }, },
            y2: { show: true, tick: { format: d3.format('$'), }, },
        },
        tooltip: {
            format: {
                title: (d: any) => 'Data ' + d,
                value: (value: any, ratio: any, id: any) => {
                    const format = id === 'data1' ? d3.format(',') : d3.format('$');
                    return format(value);
                },
                // value: d3.format(",") // apply this format to both y and y2
            },
        },
    });
}

// Sort tooltip entries ascending.
function tooltip_order() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
        },
        tooltip: { order: 'asc', },
    });
}

////////////////////////
// Chart options Tests
////////////////////////

// Fixed chart size.
function options_size() {
    const chart = c3.generate({
        size: { height: 240, width: 480, },
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
    });
}

// Explicit padding around the chart area.
function options_padding() {
    const chart = c3.generate({
        padding: { top: 40, right: 100, bottom: 40, left: 100, },
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250000000000]], },
    });
}

// Custom color pattern cycled over the series.
function options_color() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, 200, 100, 400, 150, 250],
                ['data2', 50, 20, 10, 40, 15, 25],
                ['data3', 130, 220, 140, 200, 250, 450],
                ['data4', 250, 320, 210, 240, 215, 225],
                ['data5', 430, 500, 400, 280, 290, 350],
                ['data6', 100, 120, 310, 340, 415, 225],
            ],
        },
        color: {
            pattern: [ '#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c', '#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5', '#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f', '#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5' ],
        },
    });
}

// Shortened transition duration exercised with several load variants.
function transition_duration() {
    const chart = c3.generate({
        data: { url: '/data/c3_test.csv', },
        transition: { duration: 100, },
    });
    setTimeout(() => {
        chart.load({ url: '/data/c3_test2.csv', });
    }, 500);
    setTimeout(() => {
        chart.load({
            columns: [
                ['data1', 30, 20, 50, 40, 60, 50],
                ['data2', 200, 130, 90, 240, 130, 220],
                ['data3', 300, 200, 160, 400, 250, 250],
            ],
        });
    }, 1000);
    setTimeout(() => {
        chart.load({
            rows: [ ['data1', 'data2', 'data3'], [90, 120, 300], [40, 160, 240], [50, 200, 290], [120, 160, 230], [80, 130, 300], [90, 220, 320], ],
        });
    }, 1500);
    setTimeout(() => {
        chart.load({ columns: [['data1', null, 30, 20, 50, 40, 60, 50, 100, 200]], });
    }, 2000);
}
/////////////////////////////
// Line Chart options Tests
/////////////////////////////

// Hide data points on line charts.
function point_show() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
        },
        point: { show: false, },
    });
}

////////////////////////////
// Pie Chart options Tests
////////////////////////////

// Custom pie slice label formatting.
function pie_label_format() {
    const chart = c3.generate({
        data: {
            columns: [['data1', 30], ['data2', 50]],
            type: 'pie',
        },
        pie: {
            label: {
                format: (value: any, ratio: any, id: any) => {
                    return d3.format('$')(value);
                },
            },
        },
    });
}

/////////////////////
// API Tests
/////////////////////

// chart.flow: nested staged flows exercising length/to/duration/done options.
function api_flow() {
    const chart = c3.generate({
        data: {
            x: 'x',
            columns: [
                ['x', '2012-12-29', '2012-12-30', '2012-12-31'],
                ['data1', 230, 300, 330],
                ['data2', 190, 230, 200],
                ['data3', 90, 130, 180],
            ],
        },
        axis: {
            x: {
                type: 'timeseries',
                tick: { format: '%m/%d', },
            },
        },
    });
    setTimeout(() => {
        chart.flow({
            columns: [
                ['x', '2013-01-11', '2013-01-21'],
                ['data1', 500, 200],
                ['data2', 100, 300],
                ['data3', 200, 120],
            ],
            duration: 1500,
            done: () => {
                chart.flow({
                    columns: [
                        [ 'x', '2013-02-11', '2013-02-12', '2013-02-13', '2013-02-14', ],
                        ['data1', 200, 300, 100, 250],
                        ['data2', 100, 90, 40, 120],
                        ['data3', 100, 100, 300, 500],
                    ],
                    length: 0,
                    duration: 1500,
                    done: () => {
                        chart.flow({
                            columns: [
                                ['x', '2013-03-01', '2013-03-02'],
                                ['data1', 200, 300],
                                ['data2', 150, 250],
                                ['data3', 100, 100],
                            ],
                            length: 2,
                            duration: 1500,
                            done: () => {
                                chart.flow({
                                    columns: [
                                        ['x', '2013-03-21', '2013-04-01'],
                                        ['data1', 500, 200],
                                        ['data2', 100, 150],
                                        ['data3', 200, 400],
                                    ],
                                    to: '2013-03-01',
                                    duration: 1500,
                                });
                            },
                        });
                    },
                });
            },
        });
    }, 1000);
}

// chart.data.names: rename series after render.
function api_data_name() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            names: { data1: 'Name 1', data2: 'Name 2', },
        },
    });
    setTimeout(() => {
        chart.data.names({ data1: 'New name for data1', data2: 'New name for data2', });
    }, 1000);
    setTimeout(() => {
        chart.data.names({ data1: 'New name for data1 again' });
    }, 2000);
}

// chart.data.colors: re-color series after render with progressively darker rgb values.
function api_data_color() {
    const chart = c3.generate({
        data: {
            columns: [
                ['data1', 30, 20, 50, 40, 60, 50],
                ['data2', 200, 130, 90, 240, 130, 220],
                ['data3', 300, 200, 160, 400, 250, 250],
            ],
            type: 'bar',
            colors: { data1: '#ff0000', data2: '#00ff00', data3: '#0000ff', },
            labels: true,
        },
    });
    setTimeout(() => {
        chart.data.colors({
            data1: d3.rgb('#ff0000').darker(1),
            data2: d3.rgb('#00ff00').darker(1),
            data3: d3.rgb('#0000ff').darker(1),
        });
    }, 1000);
    setTimeout(() => {
        chart.data.colors({
            data1: d3.rgb('#ff0000').darker(2),
            data2: d3.rgb('#00ff00').darker(2),
            data3: d3.rgb('#0000ff').darker(2),
        });
    }, 2000);
}

// chart.axis.labels: replace axis labels after render.
function api_axis_label() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            axes: { data1: 'y', data2: 'y2', },
        },
        axis: {
            y: { label: 'Y Axis Label', },
            y2: { show: true, label: 'Y2 Axis Label', },
        },
    });
    setTimeout(() => {
        chart.axis.labels({ y2: 'New Y2 Axis Label' });
    }, 1000);
    setTimeout(() => {
        chart.axis.labels({ y: 'New Y Axis Label', y2: 'New Y2 Axis Label Again', });
    }, 2000);
}

// chart.axis.max/min/range: scalar and per-axis object forms, including x.
function api_axis_range() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
            axes: { data1: 'y', data2: 'y2', },
        },
        axis: { y2: { show: true, }, },
    });
    setTimeout(() => { chart.axis.max(500); }, 1000);
    setTimeout(() => { chart.axis.min(-500); }, 2000);
    setTimeout(() => { chart.axis.max({ y: 600, y2: 100 }); }, 3000);
    setTimeout(() => { chart.axis.min({ y: -600, y2: -100 }); }, 4000);
    setTimeout(() => { chart.axis.range({ max: 1000, min: -1000 }); }, 5000);
    setTimeout(() => { chart.axis.range({ max: { y: 600, y2: 100 }, min: { y: -100, y2: 0 } }); }, 6000);
    setTimeout(() => { chart.axis.max({ x: 10 }); }, 7000);
    setTimeout(() => { chart.axis.min({ x: -10 }); }, 8000);
    setTimeout(() => { chart.axis.range({ max: { x: 5 }, min: { x: 0 } });
    }, 9000);
}

// chart.resize: shrink, partially resize, then reset to auto.
function api_resize() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ],
        },
    });
    setTimeout(() => { chart.resize({ height: 100, width: 300 }); }, 1000);
    setTimeout(() => { chart.resize({ height: 200 }); }, 2000);
    setTimeout(() => { chart.resize(); }, 3000);
}

// chart.xgrids: replace, add and remove labelled x grid lines after render.
function api_grid_x() {
    const chart = c3.generate({
        bindto: '#chart',
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
    });
    setTimeout(() => {
        chart.xgrids([ { value: 1, text: 'Label 1' }, { value: 4, text: 'Label 4' }, ]);
    }, 1000);
    setTimeout(() => {
        chart.xgrids([{ value: 2, text: 'Label 2' }]);
    }, 2000);
    setTimeout(() => {
        chart.xgrids.add([{ value: 3, text: 'Label 3', class: 'hoge' }]);
    }, 3000);
    setTimeout(() => {
        chart.xgrids.remove({ value: 2 });
    }, 4000);
    setTimeout(() => {
        chart.xgrids.remove({ class: 'hoge' });
    }, 5000);
    setTimeout(() => {
        chart.xgrids([ { value: 1, text: 'Label 1' }, { value: 4, text: 'Label 4' }, ]);
    }, 6000);
    setTimeout(() => {
        chart.xgrids.remove();
    }, 7000);
}

/////////////////////
// Transform Tests
/////////////////////

// chart.transform to 'line', per-series and chart-wide.
function transform_line() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
            type: 'bar',
        },
    });
    setTimeout(() => { chart.transform('line', 'data1'); }, 1000);
    setTimeout(() => { chart.transform('line', 'data2'); }, 2000);
    setTimeout(() => { chart.transform('bar'); }, 3000);
    setTimeout(() => { chart.transform('line'); }, 4000);
}

// chart.transform to 'spline'.
function transform_spline() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
            type: 'bar',
        },
    });
    setTimeout(() => { chart.transform('spline', 'data1'); }, 1000);
    setTimeout(() => { chart.transform('spline', 'data2'); }, 2000);
    setTimeout(() => { chart.transform('bar'); }, 3000);
    setTimeout(() => { chart.transform('spline'); }, 4000);
}

// chart.transform to 'bar'.
function transform_bar() {
    const chart =
c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
            type: 'line',
        },
    });
    setTimeout(() => { chart.transform('bar', 'data1'); }, 1000);
    setTimeout(() => { chart.transform('bar', 'data2'); }, 2000);
    setTimeout(() => { chart.transform('line'); }, 3000);
    setTimeout(() => { chart.transform('bar'); }, 4000);
}

// chart.transform to 'area'.
function transform_area() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
            type: 'bar',
        },
    });
    setTimeout(() => { chart.transform('area', 'data1'); }, 1000);
    setTimeout(() => { chart.transform('area', 'data2'); }, 2000);
    setTimeout(() => { chart.transform('bar'); }, 3000);
    setTimeout(() => { chart.transform('area'); }, 4000);
}

// chart.transform to 'area-spline'.
function transform_areaspline() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
            type: 'bar',
        },
    });
    setTimeout(() => { chart.transform('area-spline', 'data1'); }, 1000);
    setTimeout(() => { chart.transform('area-spline', 'data2'); }, 2000);
    setTimeout(() => { chart.transform('bar'); }, 3000);
    setTimeout(() => { chart.transform('area-spline'); }, 4000);
}

// pie <-> scatter transform with per-series x columns (xs mapping).
function transform_scatter() {
    const chart = c3.generate({
        data: {
            xs: { setosa: 'setosa_x', versicolor: 'versicolor_x', },
            // iris data from R
            columns: [
                [ 'setosa_x', 3.5, 3.0, 3.2, 3.1, 3.6, 3.9, 3.4, 3.4, 2.9, 3.1, 3.7, 3.4, 3.0, 3.0, 4.0, 4.4, 3.9, 3.5, 3.8, 3.8, 3.4, 3.7, 3.6, 3.3, 3.4, 3.0, 3.4, 3.5, 3.4, 3.2, 3.1, 3.4, 4.1, 4.2, 3.1, 3.2, 3.5, 3.6, 3.0, 3.4, 3.5, 2.3, 3.2, 3.5, 3.8, 3.0, 3.8, 3.2, 3.7, 3.3 ],
                [ 'versicolor_x', 3.2, 3.2, 3.1, 2.3, 2.8, 2.8, 3.3, 2.4, 2.9, 2.7, 2.0, 3.0, 2.2, 2.9, 2.9, 3.1, 3.0, 2.7, 2.2, 2.5, 3.2, 2.8, 2.5, 2.8, 2.9, 3.0, 2.8, 3.0, 2.9, 2.6, 2.4, 2.4, 2.7, 2.7, 3.0, 3.4, 3.1, 2.3, 3.0, 2.5, 2.6, 3.0, 2.6, 2.3, 2.7, 3.0, 2.9, 2.9, 2.5, 2.8 ],
                [ 'setosa', 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.3, 0.2, 0.2, 0.1, 0.2, 0.2, 0.1, 0.1, 0.2, 0.4, 0.4, 0.3, 0.3, 0.3, 0.2, 0.4, 0.2, 0.5, 0.2, 0.2, 0.4, 0.2, 0.2, 0.2, 0.2, 0.4, 0.1, 0.2, 0.2, 0.2, 0.2, 0.1, 0.2, 0.2, 0.3, 0.3, 0.2, 0.6, 0.4, 0.3, 0.2, 0.2, 0.2, 0.2 ],
                [ 'versicolor', 1.4, 1.5, 1.5, 1.3, 1.5, 1.3, 1.6, 1.0, 1.3, 1.4, 1.0, 1.5, 1.0, 1.4, 1.3, 1.4, 1.5, 1.0, 1.5, 1.1, 1.8, 1.3, 1.5, 1.2, 1.3, 1.4, 1.4, 1.7, 1.5, 1.0, 1.1, 1.0, 1.2, 1.6, 1.5, 1.6, 1.5, 1.3, 1.3, 1.3, 1.2, 1.4, 1.2, 1.0, 1.3, 1.2, 1.3, 1.3, 1.1, 1.3 ],
            ],
            type: 'pie',
        },
        axis: {
            x: {
                label: 'Sepal.Width',
                tick: { fit: false, },
            },
            y: { label: 'Petal.Width', },
        },
    });
    setTimeout(() => { chart.transform('scatter'); }, 1000);
    setTimeout(() => { chart.transform('pie'); }, 2000);
    setTimeout(() => { chart.transform('scatter'); }, 3000);
}

// line <-> pie transform.
function transform_pie() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
        },
    });
    setTimeout(() => { chart.transform('pie'); }, 1000);
    setTimeout(() => { chart.transform('line'); }, 2000);
    setTimeout(() => { chart.transform('pie'); }, 3000);
}

// donut <-> line <-> pie transform chain.
function transform_donut() {
    const chart = c3.generate({
        data: {
            columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 130, 100, 140, 200, 150, 50], ],
        },
    });
    setTimeout(() => { chart.transform('donut'); }, 1000);
    setTimeout(() => { chart.transform('line'); }, 2000);
    setTimeout(() => { chart.transform('pie'); }, 3000);
    setTimeout(() => { chart.transform('donut'); }, 4000);
}

/////////////////////
// Style Tests
/////////////////////

// Regions with a custom CSS class for styling.
function style_region() {
    const chart = c3.generate({
        data: { columns: [['sample', 30, 200, 100, 400, 150, 250]], },
        regions: [{ start: 0, end: 1 }, { start: 2, end: 4, class: 'foo' }],
    });
}

// Grid lines with custom classes and label text for styling.
function style_grid() {
    const chart = c3.generate({
        data: { columns: [['data1', 100, 200, 1000, 900, 500]], },
        grid: {
            x: {
                lines: [ { value: 2 }, { value: 4, class: 'grid4', text: 'LABEL 4' }, ],
            },
            y: {
                lines: [ { value: 500 }, { value: 800, class: 'grid800', text: 'LABEL 800' }, ],
            },
        },
    });
}

// set colors via function
c3.generate({
    bindto: '#chart',
    data: {
        columns: [['data1', ...[]]],
        type: 'bar',
        colors: {
            data1: (d: any) => { return d.value > 90 ? 'green' : 'orange'; },
        },
    },
});
the_stack
import { remote, webContents } from 'electron'; import { initialAppState } from './initialAppState'; import { TYPES } from '$Actions/tabs_actions'; import { makeValidAddressBarUrl } from '$Utils/urlHelpers'; import { isRunningUnpacked } from '$Constants'; import { logger } from '$Logger'; const initialState = initialAppState.tabs; const handleTabPayload = ( state, payload ) => { if ( payload ) { if ( payload.constructor !== Object ) { throw new Error( 'Payload must be an Object.' ); } const { tabId } = payload; if ( state[tabId] !== undefined ) { const tabToMerge = { ...state[tabId] }; return { tabId, tabToMerge }; } } return state; }; const addTab = ( state, tab ) => { const tabUrl = makeValidAddressBarUrl( tab.url || '' ); const { tabId } = tab; if ( !tabId ) { logger.error( new Error( 'No tab ID passed into addTab' ) ); return state; } const faviconPath = isRunningUnpacked ? '../resources/favicon.ico' : '../../favicon.ico'; const newTab = { url: tabUrl, tabId, historyIndex: 0, ui: { addressBarIsSelected: false, pageIsLoading: false, shouldFocusWebview: false }, shouldToggleDevTools: false, webId: undefined, webContentsId: undefined, history: [tabUrl], favicon: faviconPath }; const newState = { ...state, [tabId]: newTab }; return newState; }; const moveTabFowards = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); const updatedTab = tabToMerge; const { history } = updatedTab; const nextHistoryIndex = updatedTab.historyIndex + 1 || 1; if ( !history || history.length < 2 || !history[nextHistoryIndex] ) { return state; } const newUrl = history[nextHistoryIndex]; updatedTab.historyIndex = nextHistoryIndex; updatedTab.url = newUrl; const updatedState = { ...state, [tabId]: updatedTab }; return updatedState; }; const moveTabBackwards = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); const updatedTab = tabToMerge; const { history } = updatedTab; const nextHistoryIndex = 
updatedTab.historyIndex - 1; if ( !history || history.length < 2 || !history[nextHistoryIndex] || nextHistoryIndex < 0 ) { return state; } const newUrl = history[nextHistoryIndex]; updatedTab.historyIndex = nextHistoryIndex; updatedTab.url = newUrl; const updatedState = { ...state, [tabId]: updatedTab }; return updatedState; }; const updateTabHistory = ( tabToMerge, payload ) => { const url = makeValidAddressBarUrl( payload.url ); let updatedTab = { ...tabToMerge, ...payload }; const ancientHistory = tabToMerge.history; let newHistory = [...ancientHistory]; const currentIndex = tabToMerge.historyIndex; if ( url && url !== tabToMerge.url ) { if ( ancientHistory && ancientHistory[currentIndex] !== url ) { updatedTab.historyIndex += 1; // if we're not at last index split array there. if ( ancientHistory.length - 1 !== currentIndex ) { newHistory = newHistory.slice( 0, currentIndex + 1 ); } // else, a simple addition to array updatedTab.history = newHistory; updatedTab.history.push( url ); } } updatedTab = { ...updatedTab, url }; return updatedTab; }; const updateTab = ( tabId, tabToMerge, payload, state ) => { const updatedTab = { ...tabToMerge, ...payload }; const updatedState = { ...state, [tabId]: updatedTab }; return updatedState; }; const updateTabUrl = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { return state; } if ( payload.url ) { let updatedTab = { ...tabToMerge, ...payload }; if ( payload.url ) { updatedTab = updateTabHistory( tabToMerge, payload ); } const updatedState = { ...state, [tabId]: updatedTab }; return updatedState; } logger.error( 'No URL passed' ); return state; }; const updateTabWebId = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'Tab does not exist' ); return state; } if ( payload.webId ) { const newState = updateTab( tabId, 
tabToMerge, payload, state ); return newState; } logger.error( 'No WebId to Update' ); return state; }; const updateTabWebContentsId = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'Tab does not exist' ); return state; } if ( payload.webContentsId ) { const newState = updateTab( tabId, tabToMerge, payload, state ); return newState; } logger.error( 'No WebContentsId provided to update' ); return state; }; const toggleDevelopmentTools = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'tab Does not exist' ); return state; } if ( typeof payload.shouldToggleDevTools === 'boolean' ) { const newState = updateTab( tabId, tabToMerge, payload, state ); return newState; } logger.error( 'Invalid Tab Toggle DevTools Parameter passed' ); return state; }; const tabShouldReload = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'tab Does not exist' ); return state; } if ( typeof payload.shouldReload === 'boolean' ) { const newState = updateTab( tabId, tabToMerge, payload, state ); return newState; } logger.error( 'Invalid Reload Parameter passed' ); return state; }; const updateTabTitle = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'tab Does not exist' ); return state; } if ( payload.title ) { const newState = updateTab( tabId, tabToMerge, payload, state ); return newState; } logger.error( 'No Title Parameter passed' ); return state; }; const updateTabFavicon = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'tab Does not 
exist' ); return state; } if ( payload.favicon ) { const newState = updateTab( tabId, tabToMerge, payload, state ); return newState; } logger.error( 'No Favicon Parameter passed' ); return state; }; const tabLoad = ( state, payload ) => { const { tabId, tabToMerge } = handleTabPayload( state, payload ); if ( tabId === undefined && tabToMerge === undefined ) { logger.error( 'tab Does not exist' ); return state; } if ( typeof payload.isLoading === 'boolean' ) { const newState = updateTab( tabId, tabToMerge, payload, state ); return newState; } logger.error( 'Invalid tabLoad Parameter passed' ); return state; }; // TODO: sort this out const focusWebview = ( state, tab ) => { const { tabId, shouldFocus } = tab; if ( !tabId ) { logger.error( 'No tabId provided to focusWebview' ); return state; } const tabtoMerge = { ...state[tabId] }; const newTab = { ...tabtoMerge, ui: { ...tabtoMerge.ui, shouldFocusWebview: shouldFocus } }; const newState = { ...state, [tabId]: newTab }; return newState; }; const blurAddressBar = ( state, tab ) => { const { tabId } = tab; const newTab = { ...state[tabId], ui: { ...state[tabId].ui, addressBarIsSelected: false } }; const newState = { ...state, [tabId]: newTab }; return newState; }; const selectAddressBar = ( state, tab ) => { const { tabId } = tab; const newTab = { ...state[tabId], ui: { ...state[tabId].ui, addressBarIsSelected: true } }; const newState = { ...state, [tabId]: newTab }; return newState; }; const deselectAddressBar = ( state, tab ) => { const { tabId } = tab; const newTab = { ...state[tabId], ui: { ...state[tabId].ui, addressBarIsSelected: false } }; const newState = { ...state, [tabId]: newTab }; return newState; }; const resetStore = ( payload ) => { const { tabId } = payload; const newTabStartLocation = makeValidAddressBarUrl( 'safe://hello' ); const faviconPath = isRunningUnpacked ? 
'../resources/favicon.ico' : '../favicon.ico'; const newTab = { url: newTabStartLocation, tabId, historyIndex: 0, ui: { addressBarIsSelected: false, pageIsLoading: false, shouldFocusWebview: false }, shouldToggleDevTools: false, webId: undefined, history: [newTabStartLocation], favicon: faviconPath }; const newState = { ...initialState, [tabId]: newTab }; return newState; }; /** * Tabs reducer. Should handle all tab states, including window/tab id and the individual tab history * @param { array } state array of tabs * @param { object } action action Object * @return { array } updatd state object */ export function tabs( state: Record<string, unknown> = initialState, action ) { const { payload } = action; if ( action.error ) { logger.error( 'ERROR IN ACTION', action.error ); return state; } switch ( action.type ) { case TYPES.ADD_TAB: { return addTab( state, payload ); } case TYPES.UPDATE_TAB_URL: { return updateTabUrl( state, payload ); } case TYPES.UPDATE_TAB_WEB_ID: { return updateTabWebId( state, payload ); } case TYPES.UPDATE_TAB_WEB_CONTENTS_ID: { return updateTabWebContentsId( state, payload ); } case TYPES.TOGGLE_DEV_TOOLS: { return toggleDevelopmentTools( state, payload ); } case TYPES.TAB_SHOULD_RELOAD: { return tabShouldReload( state, payload ); } case TYPES.UPDATE_TAB_TITLE: { return updateTabTitle( state, payload ); } case TYPES.UPDATE_TAB_FAVICON: { return updateTabFavicon( state, payload ); } case TYPES.TAB_LOAD: { return tabLoad( state, payload ); } case TYPES.TAB_FORWARDS: { return moveTabFowards( state, payload ); } case TYPES.TAB_BACKWARDS: { return moveTabBackwards( state, payload ); } case TYPES.FOCUS_WEBVIEW: { return focusWebview( state, payload ); } case TYPES.BLUR_ADDRESS_BAR: { return blurAddressBar( state, payload ); } case TYPES.SELECT_ADDRESS_BAR: { return selectAddressBar( state, payload ); } case TYPES.DESELECT_ADDRESS_BAR: { return deselectAddressBar( state, payload ); } case TYPES.TABS_RESET_STORE: { return resetStore( payload ); } 
default: return state; } }
the_stack
import { Injectable } from '@angular/core';
import { SafeResourceUrl } from '@angular/platform-browser';
import { downgradeInjectable } from '@angular/upgrade/static';

import { AppConstants } from 'app.constants';
import { Exploration } from 'domain/exploration/ExplorationObjectFactory';
import { ExtractImageFilenamesFromModelService } from 'pages/exploration-player-page/services/extract-image-filenames-from-model.service';
import { AssetsBackendApiService } from 'services/assets-backend-api.service';
import { ComputeGraphService } from 'services/compute-graph.service';
import { ContextService } from 'services/context.service';
import { SvgSanitizerService } from 'services/svg-sanitizer.service';

interface ImageCallback {
  resolveMethod: (_: string) => void;
  rejectMethod: () => void;
}

export interface ImageDimensions {
  width: number;
  height: number;
  verticalPadding?: number;
}

@Injectable({
  providedIn: 'root'
})
export class ImagePreloaderService {
  constructor(
      private assetsBackendApiService: AssetsBackendApiService,
      private computeGraphService: ComputeGraphService,
      private contextService: ContextService,
      private ExtractImageFilenamesFromModelService:
        ExtractImageFilenamesFromModelService,
      private svgSanitizerService: SvgSanitizerService) {}

  private filenamesOfImageCurrentlyDownloading: string[] = [];
  private filenamesOfImageToBeDownloaded: string[] = [];
  private filenamesOfImageFailedToDownload: string[] = [];
  private exploration: Exploration = null;
  private imagePreloaderServiceHasStarted: boolean = false;
  // Variable imageLoadedCallback is an object of objects (identified by the
  // filenames which are being downloaded at the time they are required by the
  // directive).The object contains the resolve method of the promise
  // attached with getInImageUrl method.
  private imageLoadedCallback: {[filename: string]: ImageCallback} = {};

  init(exploration: Exploration): void {
    this.exploration = exploration;
    this.imagePreloaderServiceHasStarted = true;
  }

  /**
   * Checks if the given filename is in this.filenamesOfImageFailedToDownload
   * or not.
   * @param {string} filename - The filename of the image which is to be
   *                            checked.
   */
  isInFailedDownload(filename: string): boolean {
    return this.filenamesOfImageFailedToDownload.includes(filename);
  }

  /**
   * Initiates the image preloader beginning from the sourceStateName.
   * Fills the download pipeline up to the configured concurrency limit.
   * @param {string} sourceStateName - The name of the state from which
   *                                   preloader should start.
   */
  kickOffImagePreloader(sourceStateName: string): void {
    this.filenamesOfImageToBeDownloaded = (
      this.getImageFilenamesInBfsOrder(sourceStateName));
    // Images in the current state get a fresh chance even if they failed
    // before, so drop them from the failed list.
    const imageFilesInGivenState = (
      this.ExtractImageFilenamesFromModelService.getImageFilenamesInState(
        this.exploration.states.getState(sourceStateName)));
    this.filenamesOfImageFailedToDownload = (
      this.filenamesOfImageFailedToDownload.filter(
        filename => !imageFilesInGivenState.includes(filename)));
    while (this.filenamesOfImageCurrentlyDownloading.length <
        AppConstants.MAX_NUM_IMAGE_FILES_TO_DOWNLOAD_SIMULTANEOUSLY &&
        this.filenamesOfImageToBeDownloaded.length > 0) {
      const imageFilename = this.filenamesOfImageToBeDownloaded.shift();
      this.filenamesOfImageCurrentlyDownloading.push(imageFilename);
      this.loadImage(imageFilename);
    }
  }

  /**
   * Cancels the preloading of the images that are being downloaded.
   */
  cancelPreloading(): void {
    this.assetsBackendApiService.abortAllCurrentImageDownloads();
    this.filenamesOfImageCurrentlyDownloading.length = 0;
  }

  /**
   * When the state changes, it decides whether to restart the preloader
   * starting from the 'stateName' state or not.
   * @param {string} stateName - The name of the state the user shifts to.
   */
  onStateChange(stateName: string): void {
    if (stateName !== this.exploration.getInitialState().name) {
      this.imageLoadedCallback = {};
      const state = this.exploration.states.getState(stateName);
      let numImageFilesCurrentlyDownloading = 0;
      let numImagesNeitherInCacheNorDownloading = 0;
      this.ExtractImageFilenamesFromModelService.getImageFilenamesInState(
        state
      ).forEach(filename => {
        const isFileCurrentlyDownloading = (
          this.filenamesOfImageCurrentlyDownloading.includes(filename));
        if (!this.assetsBackendApiService.isCached(filename) &&
            !isFileCurrentlyDownloading) {
          numImagesNeitherInCacheNorDownloading += 1;
        }
        if (isFileCurrentlyDownloading) {
          numImageFilesCurrentlyDownloading += 1;
        }
      });
      // Restart only when the new state actually needs images that are not
      // already covered by cache or in-flight downloads.
      if (numImagesNeitherInCacheNorDownloading > 0 &&
          numImageFilesCurrentlyDownloading <= 1) {
        this.cancelPreloading();
        this.kickOffImagePreloader(stateName);
      }
    }
  }

  /**
   * Gets the dimensions of the images from the filename provided.
   * @param {string} filename - The string from which the dimensions of the
   *                            images should be extracted.
   * @throws {Error} If the filename does not encode dimensions.
   */
  getDimensionsOfImage(filename: string): ImageDimensions {
    // '\\.' escapes the dot so only a literal extension separator matches.
    const dimensionsRegex = RegExp(
      '[^/]+_height_([0-9]+)_width_([0-9]+)\\.(png|jpeg|jpg|gif|svg)$', 'g');
    const imageDimensions = dimensionsRegex.exec(filename);
    if (imageDimensions) {
      return {
        height: Number(imageDimensions[1]),
        width: Number(imageDimensions[2])
      };
    }
    throw new Error(
      `Input path ${filename} is invalid, it does not contain dimensions.`);
  }

  /**
   * Gets the dimensions of the math SVGs from the SVG filename provided.
   * Dimension components use 'd' as a decimal-point stand-in (e.g. '1d5').
   * @param {string} filename - The string from which the dimensions of the
   *                            math SVGs should be extracted.
   * @throws {Error} If the filename does not encode dimensions.
   */
  getDimensionsOfMathSvg(filename: string): ImageDimensions {
    const dimensionsRegex = RegExp(
      '[^/]+_height_([0-9d]+)_width_([0-9d]+)_vertical_([0-9d]+)\\.svg', 'g');
    const imageDimensions = dimensionsRegex.exec(filename);
    if (imageDimensions) {
      return {
        height: Number(imageDimensions[1].replace('d', '.')),
        width: Number(imageDimensions[2].replace('d', '.')),
        verticalPadding: Number(imageDimensions[3].replace('d', '.'))
      };
    }
    throw new Error(
      `Input path ${filename} is invalid, it does not contain dimensions.`);
  }

  isLoadingImageFile(filename: string): boolean {
    return this.filenamesOfImageCurrentlyDownloading.includes(filename);
  }

  restartImagePreloader(sourceStateName: string): void {
    this.cancelPreloading();
    this.kickOffImagePreloader(sourceStateName);
  }

  getFilenamesOfImageCurrentlyDownloading(): string[] {
    return this.filenamesOfImageCurrentlyDownloading;
  }

  /**
   * Converts an image blob into a data URL (sanitized for SVG) and hands it
   * to the supplied callback.
   */
  private convertImageFileToSafeBase64Url(
      imageFile: Blob,
      callback: (src: string | ArrayBuffer | SafeResourceUrl) => void): void {
    const reader = new FileReader();
    reader.onloadend = () => {
      if (imageFile.type === 'image/svg+xml') {
        callback(
          this.svgSanitizerService.getTrustedSvgResourceUrl(
            reader.result as string));
      } else {
        callback(reader.result);
      }
    };
    reader.readAsDataURL(imageFile);
  }

  /**
   * Gets the Url for the image file. Resolves immediately when the image is
   * cached (or previously failed and is retried); otherwise parks the
   * promise's resolve/reject so loadImage() can settle it when the download
   * completes.
   * @param {string} filename - Filename of the image whose Url is to be
   *                            created.
   */
  async getImageUrlAsync(filename: string): Promise<string|ArrayBuffer> {
    return new Promise((resolve, reject) => {
      if (this.assetsBackendApiService.isCached(filename) ||
          this.isInFailedDownload(filename)) {
        this.assetsBackendApiService.loadImage(
          this.contextService.getEntityType(),
          this.contextService.getEntityId(),
          filename
        ).then(
          loadedImageFile => {
            if (this.isInFailedDownload(loadedImageFile.filename)) {
              this.removeFromFailedDownload(loadedImageFile.filename);
            }
            this.convertImageFileToSafeBase64Url(
              loadedImageFile.data, resolve);
          }, reject);
      } else {
        this.imageLoadedCallback[filename] = {
          resolveMethod: resolve,
          rejectMethod: reject
        };
      }
    });
  }

  inExplorationPlayer(): boolean {
    return this.imagePreloaderServiceHasStarted;
  }

  /**
   * Removes the given filename from the
   * this.filenamesOfImageFailedToDownload.
   * @param {string} filename - The filename of the file which is to be
   *                            removed from the
   *                            this.filenamesOfImageFailedToDownload array.
   */
  private removeFromFailedDownload(filename: string): void {
    const index = this.filenamesOfImageFailedToDownload.indexOf(filename);
    this.filenamesOfImageFailedToDownload.splice(index, 1);
  }

  /**
   * Gets image files names in Bfs order from the state.
   * @param {string} sourceStateName - The name of the starting state
   *                                   from which the filenames should
   *                                   be obtained.
   */
  private getImageFilenamesInBfsOrder(sourceStateName: string): string[] {
    const stateNamesInBfsOrder = (
      this.computeGraphService.computeBfsTraversalOfStates(
        this.exploration.getInitialState().name,
        this.exploration.getStates(),
        sourceStateName));
    const imageFilenames = [];
    stateNamesInBfsOrder.forEach(stateName => {
      const state = this.exploration.states.getState(stateName);
      this.ExtractImageFilenamesFromModelService.getImageFilenamesInState(
        state
      ).forEach(filename => imageFilenames.push(filename));
    });
    return imageFilenames;
  }

  /**
   * Removes the filename from the filenamesOfImageCurrentlyDownloading and
   * initiates the loading of the next image file.
   * @param {string} filename - The filename which is to be removed from the
   *                            filenamesOfImageCurrentlyDownloading array.
   */
  private removeCurrentAndLoadNextImage(filename: string): void {
    this.filenamesOfImageCurrentlyDownloading.splice(
      this.filenamesOfImageCurrentlyDownloading.findIndex(
        imageFilename => filename === imageFilename), 1);
    if (this.filenamesOfImageToBeDownloaded.length > 0) {
      const nextImageFilename = this.filenamesOfImageToBeDownloaded.shift();
      this.filenamesOfImageCurrentlyDownloading.push(nextImageFilename);
      this.loadImage(nextImageFilename);
    }
  }

  /**
   * Handles the loading of the image file. On success, settles any promise
   * parked by getImageUrlAsync(); on failure, records the filename in the
   * failed-download list. Either way the next queued image is started.
   * @param {string} imageFilename - The filename of the image to be loaded.
   */
  private loadImage(imageFilename: string): void {
    this.assetsBackendApiService.loadImage(
      AppConstants.ENTITY_TYPE.EXPLORATION,
      this.contextService.getExplorationId(), imageFilename
    ).then(
      loadedImage => {
        this.removeCurrentAndLoadNextImage(loadedImage.filename);
        if (this.imageLoadedCallback[loadedImage.filename]) {
          const onLoadImageResolve = (
            this.imageLoadedCallback[loadedImage.filename].resolveMethod);
          this.convertImageFileToSafeBase64Url(
            loadedImage.data, onLoadImageResolve);
          this.imageLoadedCallback[loadedImage.filename] = null;
        }
      },
      filename => {
        if (this.imageLoadedCallback[filename]) {
          this.imageLoadedCallback[filename].rejectMethod();
          this.imageLoadedCallback[filename] = null;
        }
        this.filenamesOfImageFailedToDownload.push(filename);
        this.removeCurrentAndLoadNextImage(filename);
      });
  }
}

angular.module('oppia').factory(
  'ImagePreloaderService', downgradeInjectable(ImagePreloaderService));
the_stack
import * as iam from '@aws-cdk/aws-iam';
import * as kms from '@aws-cdk/aws-kms';
import * as s3 from '@aws-cdk/aws-s3';
import { ArnFormat, Fn, IResource, Names, Resource, Stack } from '@aws-cdk/core';
import * as cr from '@aws-cdk/custom-resources';
import { AwsCustomResource } from '@aws-cdk/custom-resources';
import { Construct } from 'constructs';
import { DataFormat } from './data-format';
import { IDatabase } from './database';
import { CfnTable } from './glue.generated';
import { Column } from './schema';

/**
 * Properties of a Partition Index.
 */
export interface PartitionIndex {
  /**
   * The name of the partition index.
   *
   * @default - a name will be generated for you.
   */
  readonly indexName?: string;

  /**
   * The partition key names that comprise the partition
   * index. The names must correspond to a name in the
   * table's partition keys.
   */
  readonly keyNames: string[];
}

export interface ITable extends IResource {
  /**
   * @attribute
   */
  readonly tableArn: string;

  /**
   * @attribute
   */
  readonly tableName: string;
}

/**
 * Encryption options for a Table.
 *
 * @see https://docs.aws.amazon.com/athena/latest/ug/encryption.html
 */
export enum TableEncryption {
  UNENCRYPTED = 'Unencrypted',

  /**
   * Server side encryption (SSE) with an Amazon S3-managed key.
   *
   * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingServerSideEncryption.html
   */
  S3_MANAGED = 'SSE-S3',

  /**
   * Server-side encryption (SSE) with an AWS KMS key managed by the account owner.
   *
   * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingKMSEncryption.html
   */
  KMS = 'SSE-KMS',

  /**
   * Server-side encryption (SSE) with an AWS KMS key managed by the KMS service.
   */
  KMS_MANAGED = 'SSE-KMS-MANAGED',

  /**
   * Client-side encryption (CSE) with an AWS KMS key managed by the account owner.
   *
   * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html
   */
  CLIENT_SIDE_KMS = 'CSE-KMS'
}

// Attributes needed to import an existing table (see Table.fromTableAttributes).
export interface TableAttributes {
  readonly tableArn: string;
  readonly tableName: string;
}

export interface TableProps {
  /**
   * Name of the table.
   */
  readonly tableName: string;

  /**
   * Description of the table.
   *
   * @default generated
   */
  readonly description?: string;

  /**
   * Database in which to store the table.
   */
  readonly database: IDatabase;

  /**
   * S3 bucket in which to store data.
   *
   * @default one is created for you
   */
  readonly bucket?: s3.IBucket;

  /**
   * S3 prefix under which table objects are stored.
   *
   * @default - No prefix. The data will be stored under the root of the bucket.
   */
  readonly s3Prefix?: string;

  /**
   * Columns of the table.
   */
  readonly columns: Column[];

  /**
   * Partition columns of the table.
   *
   * @default table is not partitioned
   */
  readonly partitionKeys?: Column[];

  /**
   * Partition indexes on the table. A maximum of 3 indexes
   * are allowed on a table. Keys in the index must be part
   * of the table's partition keys.
   *
   * @default table has no partition indexes
   */
  readonly partitionIndexes?: PartitionIndex[];

  /**
   * Storage type of the table's data.
   */
  readonly dataFormat: DataFormat;

  /**
   * Indicates whether the table's data is compressed or not.
   *
   * @default false
   */
  readonly compressed?: boolean;

  /**
   * The kind of encryption to secure the data with.
   *
   * You can only provide this option if you are not explicitly passing in a bucket.
   *
   * If you choose `SSE-KMS`, you *can* provide an un-managed KMS key with `encryptionKey`.
   * If you choose `CSE-KMS`, you *must* provide an un-managed KMS key with `encryptionKey`.
   *
   * @default Unencrypted
   */
  readonly encryption?: TableEncryption;

  /**
   * External KMS key to use for bucket encryption.
   *
   * The `encryption` property must be `SSE-KMS` or `CSE-KMS`.
   *
   * @default key is managed by KMS.
   */
  readonly encryptionKey?: kms.IKey;

  /**
   * Indicates whether the table data is stored in subdirectories.
   *
   * @default false
   */
  readonly storedAsSubDirectories?: boolean;
}

/**
 * A Glue table.
 */
export class Table extends Resource implements ITable {

  // Imports a table by ARN; the table name is recovered from the ARN's
  // resource-name segment ('<database>/<table>').
  public static fromTableArn(scope: Construct, id: string, tableArn: string): ITable {
    const tableName = Fn.select(1, Fn.split('/', Stack.of(scope).splitArn(tableArn, ArnFormat.SLASH_RESOURCE_NAME).resourceName!));

    return Table.fromTableAttributes(scope, id, {
      tableArn,
      tableName,
    });
  }

  /**
   * Creates a Table construct that represents an external table.
   *
   * @param scope The scope creating construct (usually `this`).
   * @param id The construct's id.
   * @param attrs Import attributes
   */
  public static fromTableAttributes(scope: Construct, id: string, attrs: TableAttributes): ITable {
    class Import extends Resource implements ITable {
      public readonly tableArn = attrs.tableArn;
      public readonly tableName = attrs.tableName;
    }

    return new Import(scope, id);
  }

  /**
   * Database this table belongs to.
   */
  public readonly database: IDatabase;

  /**
   * Indicates whether the table's data is compressed or not.
   */
  public readonly compressed: boolean;

  /**
   * The type of encryption enabled for the table.
   */
  public readonly encryption: TableEncryption;

  /**
   * The KMS key used to secure the data if `encryption` is set to `CSE-KMS` or `SSE-KMS`. Otherwise, `undefined`.
   */
  public readonly encryptionKey?: kms.IKey;

  /**
   * S3 bucket in which the table's data resides.
   */
  public readonly bucket: s3.IBucket;

  /**
   * S3 Key Prefix under which this table's files are stored in S3.
   */
  public readonly s3Prefix: string;

  /**
   * Name of this table.
   */
  public readonly tableName: string;

  /**
   * ARN of this table.
   */
  public readonly tableArn: string;

  /**
   * Format of this table's data files.
   */
  public readonly dataFormat: DataFormat;

  /**
   * This table's columns.
   */
  public readonly columns: Column[];

  /**
   * This table's partition keys if the table is partitioned.
   */
  public readonly partitionKeys?: Column[];

  /**
   * This table's partition indexes.
   */
  public readonly partitionIndexes?: PartitionIndex[];

  /**
   * Partition indexes must be created one at a time. To avoid
   * race conditions, we store the resource and add dependencies
   * each time a new partition index is created.
   */
  private partitionIndexCustomResources: AwsCustomResource[] = [];

  constructor(scope: Construct, id: string, props: TableProps) {
    super(scope, id, {
      physicalName: props.tableName,
    });

    this.database = props.database;
    this.dataFormat = props.dataFormat;
    this.s3Prefix = props.s3Prefix ?? '';

    validateSchema(props.columns, props.partitionKeys);
    this.columns = props.columns;
    this.partitionKeys = props.partitionKeys;

    this.compressed = props.compressed ?? false;
    // Bucket/key resolution depends on the encryption mode; see createBucket.
    const { bucket, encryption, encryptionKey } = createBucket(this, props);
    this.bucket = bucket;
    this.encryption = encryption;
    this.encryptionKey = encryptionKey;

    const tableResource = new CfnTable(this, 'Table', {
      catalogId: props.database.catalogId,

      databaseName: props.database.databaseName,

      tableInput: {
        name: this.physicalName,
        description: props.description || `${props.tableName} generated by CDK`,

        partitionKeys: renderColumns(props.partitionKeys),

        parameters: {
          classification: props.dataFormat.classificationString?.value,
          has_encrypted_data: this.encryption !== TableEncryption.UNENCRYPTED,
        },
        storageDescriptor: {
          location: `s3://${this.bucket.bucketName}/${this.s3Prefix}`,
          compressed: this.compressed,
          storedAsSubDirectories: props.storedAsSubDirectories ?? false,
          columns: renderColumns(props.columns),
          inputFormat: props.dataFormat.inputFormat.className,
          outputFormat: props.dataFormat.outputFormat.className,
          serdeInfo: {
            serializationLibrary: props.dataFormat.serializationLibrary.className,
          },
        },

        tableType: 'EXTERNAL_TABLE',
      },
    });

    this.tableName = this.getResourceNameAttribute(tableResource.ref);
    this.tableArn = this.stack.formatArn({
      service: 'glue',
      resource: 'table',
      resourceName: `${this.database.databaseName}/${this.tableName}`,
    });
    this.node.defaultChild = tableResource;

    // Partition index creation relies on created table.
    if (props.partitionIndexes) {
      this.partitionIndexes = props.partitionIndexes;
      this.partitionIndexes.forEach((index) => this.addPartitionIndex(index));
    }
  }

  /**
   * Add a partition index to the table. You can have a maximum of 3 partition
   * indexes to a table. Partition index keys must be a subset of the table's
   * partition keys.
   *
   * @see https://docs.aws.amazon.com/glue/latest/dg/partition-indexes.html
   */
  public addPartitionIndex(index: PartitionIndex) {
    const numPartitions = this.partitionIndexCustomResources.length;
    if (numPartitions >= 3) {
      throw new Error('Maximum number of partition indexes allowed is 3');
    }
    this.validatePartitionIndex(index);

    const indexName = index.indexName ?? this.generateIndexName(index.keyNames);
    const partitionIndexCustomResource = new cr.AwsCustomResource(this, `partition-index-${indexName}`, {
      onCreate: {
        service: 'Glue',
        action: 'createPartitionIndex',
        parameters: {
          DatabaseName: this.database.databaseName,
          TableName: this.tableName,
          PartitionIndex: {
            IndexName: indexName,
            Keys: index.keyNames,
          },
        },
        physicalResourceId: cr.PhysicalResourceId.of(
          indexName,
        ),
      },
      policy: cr.AwsCustomResourcePolicy.fromSdkCalls({
        resources: cr.AwsCustomResourcePolicy.ANY_RESOURCE,
      }),
    });
    this.grantToUnderlyingResources(partitionIndexCustomResource, ['glue:UpdateTable']);

    // Depend on previous partition index if possible, to avoid race condition
    // NOTE(review): this makes the *previous* resource depend on the *new*
    // one. That still serializes creation (which Glue requires), but the
    // direction looks inverted relative to the comment above — confirm the
    // intended creation order.
    if (numPartitions > 0) {
      this.partitionIndexCustomResources[numPartitions-1].node.addDependency(partitionIndexCustomResource);
    }
    this.partitionIndexCustomResources.push(partitionIndexCustomResource);
  }

  // Builds a deterministic index name from the key names plus the construct's
  // unique id, truncated from the front to stay within maxIndexLength.
  private generateIndexName(keys: string[]): string {
    const prefix = keys.join('-') + '-';
    const uniqueId = Names.uniqueId(this);
    const maxIndexLength = 80; // arbitrarily specified
    const startIndex = Math.max(0, uniqueId.length - (maxIndexLength - prefix.length));
    return prefix + uniqueId.substring(startIndex);
  }

  // Validates name length, that the table is partitioned, and that every
  // index key is one of the table's partition keys.
  private validatePartitionIndex(index: PartitionIndex) {
    if (index.indexName !== undefined && (index.indexName.length < 1 || index.indexName.length > 255)) {
      throw new Error(`Index name must be between 1 and 255 characters, but got ${index.indexName.length}`);
    }
    if (!this.partitionKeys || this.partitionKeys.length === 0) {
      throw new Error('The table must have partition keys to create a partition index');
    }
    const keyNames = this.partitionKeys.map(pk => pk.name);
    if (!index.keyNames.every(k => keyNames.includes(k))) {
      throw new Error(`All index keys must also be partition keys. Got ${index.keyNames} but partition key names are ${keyNames}`);
    }
  }

  /**
   * Grant read permissions to the table and the underlying data stored in S3 to an IAM principal.
   *
   * @param grantee the principal
   */
  public grantRead(grantee: iam.IGrantable): iam.Grant {
    const ret = this.grant(grantee, readPermissions);
    if (this.encryptionKey && this.encryption === TableEncryption.CLIENT_SIDE_KMS) { this.encryptionKey.grantDecrypt(grantee); }
    this.bucket.grantRead(grantee, this.getS3PrefixForGrant());
    return ret;
  }

  /**
   * Grant write permissions to the table and the underlying data stored in S3 to an IAM principal.
   *
   * @param grantee the principal
   */
  public grantWrite(grantee: iam.IGrantable): iam.Grant {
    const ret = this.grant(grantee, writePermissions);
    if (this.encryptionKey && this.encryption === TableEncryption.CLIENT_SIDE_KMS) { this.encryptionKey.grantEncrypt(grantee); }
    this.bucket.grantWrite(grantee, this.getS3PrefixForGrant());
    return ret;
  }

  /**
   * Grant read and write permissions to the table and the underlying data stored in S3 to an IAM principal.
   *
   * @param grantee the principal
   */
  public grantReadWrite(grantee: iam.IGrantable): iam.Grant {
    const ret = this.grant(grantee, [...readPermissions, ...writePermissions]);
    if (this.encryptionKey && this.encryption === TableEncryption.CLIENT_SIDE_KMS) { this.encryptionKey.grantEncryptDecrypt(grantee); }
    this.bucket.grantReadWrite(grantee, this.getS3PrefixForGrant());
    return ret;
  }

  /**
   * Grant the given identity custom permissions.
   */
  public grant(grantee: iam.IGrantable, actions: string[]) {
    return iam.Grant.addToPrincipal({
      grantee,
      resourceArns: [this.tableArn],
      actions,
    });
  }

  /**
   * Grant the given identity custom permissions to ALL underlying resources of the table.
   * Permissions will be granted to the catalog, the database, and the table.
   */
  public grantToUnderlyingResources(grantee: iam.IGrantable, actions: string[]) {
    return iam.Grant.addToPrincipal({
      grantee,
      resourceArns: [
        this.tableArn,
        this.database.catalogArn,
        this.database.databaseArn,
      ],
      actions,
    });
  }

  // Scopes S3 grants to this table's prefix only.
  private getS3PrefixForGrant() {
    return this.s3Prefix + '*';
  }
}

// Throws when there are no columns or when any column/partition-key name is
// duplicated across the combined set.
function validateSchema(columns: Column[], partitionKeys?: Column[]): void {
  if (columns.length === 0) {
    throw new Error('you must specify at least one column for the table');
  }
  // Check there is at least one column and no duplicated column names or partition keys.
  const names = new Set<string>();
  (columns.concat(partitionKeys || [])).forEach(column => {
    if (names.has(column.name)) {
      throw new Error(`column names and partition keys must be unique, but \'${column.name}\' is duplicated`);
    }
    names.add(column.name);
  });
}

// map TableEncryption to bucket's SSE configuration (s3.BucketEncryption)
const encryptionMappings = {
  [TableEncryption.S3_MANAGED]: s3.BucketEncryption.S3_MANAGED,
  [TableEncryption.KMS_MANAGED]: s3.BucketEncryption.KMS_MANAGED,
  [TableEncryption.KMS]: s3.BucketEncryption.KMS,
  [TableEncryption.CLIENT_SIDE_KMS]: s3.BucketEncryption.UNENCRYPTED,
  [TableEncryption.UNENCRYPTED]: s3.BucketEncryption.UNENCRYPTED,
};

// create the bucket to store a table's data depending on the `encryption` and `encryptionKey` properties.
function createBucket(table: Table, props: TableProps) { const encryption = props.encryption || TableEncryption.UNENCRYPTED; let bucket = props.bucket; if (bucket && (encryption !== TableEncryption.UNENCRYPTED && encryption !== TableEncryption.CLIENT_SIDE_KMS)) { throw new Error('you can not specify encryption settings if you also provide a bucket'); } let encryptionKey: kms.IKey | undefined; if (encryption === TableEncryption.CLIENT_SIDE_KMS && props.encryptionKey === undefined) { // CSE-KMS should behave the same as SSE-KMS - use the provided key or create one automatically // Since Bucket only knows about SSE, we repeat the logic for CSE-KMS at the Table level. encryptionKey = new kms.Key(table, 'Key'); } else { encryptionKey = props.encryptionKey; } // create the bucket if none was provided if (!bucket) { if (encryption === TableEncryption.CLIENT_SIDE_KMS) { bucket = new s3.Bucket(table, 'Bucket'); } else { bucket = new s3.Bucket(table, 'Bucket', { encryption: encryptionMappings[encryption], encryptionKey, }); encryptionKey = bucket.encryptionKey; } } return { bucket, encryption, encryptionKey, }; } const readPermissions = [ 'glue:BatchGetPartition', 'glue:GetPartition', 'glue:GetPartitions', 'glue:GetTable', 'glue:GetTables', 'glue:GetTableVersion', 'glue:GetTableVersions', ]; const writePermissions = [ 'glue:BatchCreatePartition', 'glue:BatchDeletePartition', 'glue:CreatePartition', 'glue:DeletePartition', 'glue:UpdatePartition', ]; function renderColumns(columns?: Array<Column | Column>) { if (columns === undefined) { return undefined; } return columns.map(column => { return { name: column.name, type: column.type.inputString, comment: column.comment, }; }); }
the_stack
import * as express from "express";
import * as _ from "lodash";
import { Document, model, Schema, Types } from "mongoose";
import * as mongoose from "mongoose";
import * as PodcastRSS from "podcast";
import * as remote from "remote-file-size";
import * as slug from "slug";
import * as urlencode from "urlencode";
import * as convert from "xml-js";
import config from "../config";
import * as auth from "../lib/auth";
import * as itunes from "../lib/itunes";
import { logger } from "../lib/logger";
import { sendColabInvite, sendContactMessage, sendPodcastSubmission} from "../lib/mail";
import Analytic from "../models/analytic";
import Company, { ICompany } from "../models/company";
import Email from "../models/email";
import Episode, { IEpisode } from "../models/episode";
import Podcast, { IPodcast } from "../models/podcast";
import User, { IUser } from "../models/user";
import { buildSocialNet } from "../lib/heroku-tmp/heroku-client";

// Resolves the podcast id a user is currently working on: the user's
// `selectedPodcast` if it still exists among their podcasts, otherwise the
// first owned podcast; `undefined` when the user owns no podcasts.
async function getSelectedPodcast(userId) {
  const podcasts = await Podcast.find({ owner: userId });
  const user = await User.findOne({ _id: userId });
  const selectedPodcast = user.selectedPodcast;
  if (podcasts.length > 0) {
    if (selectedPodcast) {
      // The user has a selected podcast
      const podcast = _.find(podcasts, { _id: selectedPodcast });
      if (podcast) {
        return podcast._id;
      } else if (podcasts.length > 0) {
        // selectedPodcast no longer exists; fall back to the first podcast
        return podcasts[0]._id;
      } else {
        return undefined;
      }
    } else {
      // The user doesn't have a selected podcast falling back to first in array
      return podcasts[0]._id;
    }
  } else {
    return undefined;
  }
}

// Registers all public podcast pages, RSS, download-tracking and
// authenticated podcast-management routes on the given router.
export default (router: express.Router) => {

  // Public RSS feed for a podcast, built from its published episodes.
  router.get("/p/:podcast_slug/rss.xml", async (req, res) => {
    try {
      const podcast = await Podcast.findOne({ slug: req.params.podcast_slug }).exec();
      if (!podcast) {
        return res.sendStatus(404);
      }
      const podcastOwner = await User.findById(podcast.owner).exec();
      const episodes = await Episode
        .find({ podcast: podcast._id, published: true })
        .sort({ createdAt: -1 })
        .exec();
      const fullOwnerName = `${podcast.author}`;
      // Escape bare '&' (not already part of an entity) for XML output.
      const categories = podcast.categories.split(",").map((value) => {
        return value.replace(/&(?!#?[a-z0-9]+;)/, "&amp;");
      });
      const podcastUrl = `${config.hostname}/p/${podcast.slug}/`;
      const firstCategory = categories[0].split("-");
      const mainCategory = firstCategory[0].replace(" ", "");
      // tslint:disable-next-line:max-line-length
      const subCategory = firstCategory.length > 1 ? [firstCategory[1].replace(/&(?!#?[a-z0-9]+;)/, "&amp;").replace(" ", "")] : [];
      // NOTE(review): podcastUrl already ends with '/', so feed_url below
      // renders a double slash ("...//rss.xml") — verify intended.
      const feed = new PodcastRSS({
        title: podcast.title,
        description: podcast.subtitle,
        feed_url: `${podcastUrl}/rss.xml`,
        site_url: podcastUrl,
        image_url: podcast.imageUrl,
        author: fullOwnerName,
        language: "en",
        ttl: "10",
        itunesAuthor: fullOwnerName,
        itunesSubtitle: podcast.subtitle,
        itunesSummary: podcast.subtitle,
        itunesOwner: { name: fullOwnerName, email: podcastOwner.email },
        itunesExplicit: false,
        itunesImage: podcast.imageUrl,
      });
      episodes.forEach(e => {
        // Enclosure/guid point at the tracking download route rather than the
        // raw audio URL so downloads are counted by Analytic below.
        feed.item({
          title: e.title,
          description: e.fullContent,
          url: `${podcastUrl}episode/${e._id}/download/${urlencode(e.audioUrl)}`,
          enclosure: { url: `${podcastUrl}episode/${e._id}/download/${urlencode(e.audioUrl)}` },
          guid: e.guid || `${podcastUrl}episode/${e._id}/download/${urlencode(e.audioUrl)}`,
          categories,
          author: fullOwnerName,
          date: e.createdAt.toISOString(),
          // lat: 33.417974, //optional latitude field for GeoRSS
          // long: -111.933231, //optional longitude field for GeoRSS
          itunesAuthor: fullOwnerName,
          itunesExplicit: false,
          itunesSubtitle: e.summary,
          itunesSummary: e.summary,
          itunesDuration: Math.trunc(e.audioDuration),
          itunesKeywords: ["javascript", "podcast"],
        });
      });
      // We manually add iTunes category because in the podcasts module it doesn't work
      const jsonFeed = JSON.parse(convert.xml2json(feed.xml(), { compact: false, spaces: 4 }));
      jsonFeed.elements[0].elements[0].elements.push({
        type: "element",
        name: "itunes:category",
        attributes: {
          text: mainCategory,
        },
        elements: subCategory.length > 0 ? [{
          type: "element",
          name: "itunes:category",
          attributes: {
            text: subCategory,
          },
        }] : [],
      });
      const rssFeed = convert.json2xml(JSON.stringify(jsonFeed), {});
      res.set("Content-Type", "text/xml");
      res.send(rssFeed);
    } catch (e) {
      logger.error(e);
      return res.sendStatus(404);
    }
  });

  // Public podcast landing page listing episodes that are published and whose
  // publish date has passed.
  router.get("/p/:podcast_slug", async (req, res) => {
    try {
      const currentPodcast = await Podcast.findOne({ slug: req.params.podcast_slug }).exec();
      if (!currentPodcast) {
        return res.sendStatus(404);
      }
      const podcastOwner = await User.findById(currentPodcast.owner).exec();
      const podcastUrl = `${config.hostname}/p/${currentPodcast.slug}/`;
      const now = new Date();
      const episodesDB = await Episode
        .find({
          podcast: currentPodcast._id,
          $and: [{ published: true }, { publishedAt: { $lt: now } }],
        })
        .sort({ updatedAt: -1 })
        .exec();
      const episodes = episodesDB.map(({ _id, title, podcast, published, summary, fullContent, createdAt, updatedAt, }: IEpisode) => ({
        _id, title, podcast, published, summary, fullContent, createdAt, updatedAt,
        // Note: we overwrite the audio url, to get some analytics
        audioUrl: `${podcastUrl}episode/${_id}/download`,
      }));
      const fullOwnerName = `${podcastOwner.firstName} ${podcastOwner.lastName}`;
      const categories = currentPodcast.categories.split(",");
      res.render("podcast", {
        locals: {
          title: currentPodcast.title,
          author: fullOwnerName,
          description: currentPodcast.subtitle,
          keywords: currentPodcast.categories,
          theme: currentPodcast.theme,
          about: currentPodcast.about,
          podcast: JSON.stringify(currentPodcast),
          episodes: JSON.stringify(episodes),
        },
      });
    } catch (e) {
      logger.error(e);
      return res.sendStatus(404);
    }
  });

  // Preview of the podcast page: shows preview-flagged episodes and overlays
  // unsaved preview contact/theme data onto the podcast before rendering.
  router.get("/p/:podcast_slug/preview", async (req, res) => {
    try {
      const currentPodcast = await Podcast.findOne({ slug: req.params.podcast_slug }).exec();
      if (!currentPodcast) {
        return res.sendStatus(404);
      }
      const podcastOwner = await User.findById(currentPodcast.owner).exec();
      const podcastUrl = `${config.hostname}/p/${currentPodcast.slug}/`;
      const episodesDB = await Episode
        .find({
          podcast: currentPodcast._id,
          preview: true,
        })
        .sort({ updatedAt: -1 })
        .exec();
      const episodes = episodesDB.map(({ _id, title, podcast, published, summary, fullContent, createdAt, updatedAt, }: IEpisode) => ({
        _id, title, podcast, published, summary, fullContent, createdAt, updatedAt,
        // Note: we overwrite the audio url, to get some analytics
        audioUrl: `${podcastUrl}episode/${_id}/download`,
      }));
      const fullOwnerName = `${podcastOwner.firstName} ${podcastOwner.lastName}`;
      const categories = currentPodcast.categories.split(",");
      // We inject the preview data into currentPodcast
      currentPodcast.about = currentPodcast.aboutPreview;
      if (currentPodcast.preview) {
        currentPodcast.contactMessage = (currentPodcast.preview as IPodcast).contactMessage || "";
        currentPodcast.contactFacebook = (currentPodcast.preview as IPodcast).contactFacebook || "";
        currentPodcast.contactTwitter = (currentPodcast.preview as IPodcast).contactTwitter || "";
        currentPodcast.contactEmail = (currentPodcast.preview as IPodcast).contactEmail || "";
        currentPodcast.theme = (currentPodcast.preview as IPodcast).theme || "light";
      }
      res.render("podcast", {
        locals: {
          title: currentPodcast.title,
          author: fullOwnerName,
          description: currentPodcast.subtitle,
          keywords: currentPodcast.categories,
          theme: currentPodcast.theme,
          podcast: JSON.stringify(currentPodcast),
          episodes: JSON.stringify(episodes),
        },
      });
    } catch (e) {
      logger.error(e);
      return res.sendStatus(404);
    }
  });

  // Design preview: real (non-preview) episodes, but preview theme/layout.
  router.get("/p/:podcast_slug/design-preview", async (req, res) => {
    try {
      const currentPodcast = await Podcast.findOne({ slug: req.params.podcast_slug }).exec();
      if (!currentPodcast) {
        return res.sendStatus(404);
      }
      const podcastOwner = await User.findById(currentPodcast.owner).exec();
      const podcastUrl = `${config.hostname}/p/${currentPodcast.slug}/`;
      const episodesDB = await Episode
        .find({
          podcast: currentPodcast._id,
          preview: false,
        })
        .sort({ updatedAt: -1 })
        .exec();
      const episodes = episodesDB.map(({ _id, title, podcast, published, summary, fullContent, createdAt, updatedAt, }: IEpisode) => ({
        _id, title, podcast, published, summary, fullContent, createdAt, updatedAt,
        // Note: we overwrite the audio url, to get some analytics
        audioUrl: `${podcastUrl}episode/${_id}/download`,
      }));
      const fullOwnerName = `${podcastOwner.firstName} ${podcastOwner.lastName}`;
      const categories = currentPodcast.categories.split(",");
      // We inject the preview data into currentPodcast
      if (currentPodcast.preview) {
        currentPodcast.theme = (currentPodcast.preview as IPodcast).theme || "light";
        currentPodcast.layout = (currentPodcast.preview as IPodcast).layout || "classic";
      }
      res.render("podcast", {
        locals: {
          title: currentPodcast.title,
          author: fullOwnerName,
          description: currentPodcast.subtitle,
          keywords: currentPodcast.categories,
          theme: currentPodcast.theme,
          podcast: JSON.stringify(currentPodcast),
          episodes: JSON.stringify(episodes),
        },
      });
    } catch (e) {
      logger.error(e);
      return res.sendStatus(404);
    }
  });

  // Download-tracking redirect: records the hit, then redirects to the
  // episode's stored audio URL.
  router.get("/p/:podcast_slug/episode/:episode_id/download", async (req, res) => {
    try {
      const podcast = await Podcast.findOne({ slug: req.params.podcast_slug }).exec();
      if (!podcast) {
        return res.sendStatus(404);
      }
      const findQuery = { _id: Types.ObjectId(req.params.episode_id), podcast: podcast._id };
      const episode = await Episode.findOne(findQuery).exec();
      if (!episode || !episode.audioUrl) {
        return res.sendStatus(404);
      }
      const ip = req.headers["x-forwarded-for"] || req.connection.remoteAddress;
      await Analytic.downloadEpisode(podcast._id, episode._id, req.headers, ip);
      res.redirect(episode.audioUrl);
    } catch (e) {
      logger.error(e);
      return res.sendStatus(404);
    }
  });

  // Download-tracking redirect variant used by the RSS feed, where the target
  // URL is carried in the path.
  router.get("/p/:podcast_slug/episode/:episode_id/download/:publicFileUrl", async (req, res) => {
    try {
      const podcast = await Podcast.findOne({ slug: req.params.podcast_slug }).exec();
      if (!podcast) {
        return res.sendStatus(404);
      }
      const findQuery = { _id: Types.ObjectId(req.params.episode_id), podcast: podcast._id };
      const episode = await Episode.findOne(findQuery).exec();
      if (!episode || !episode.audioUrl) {
        return res.sendStatus(404);
      }
      const ip = req.headers["x-forwarded-for"] || req.connection.remoteAddress;
      await Analytic.downloadEpisode(podcast._id, episode._id, req.headers, ip);
      // NOTE(review): this redirects to a caller-supplied URL instead of the
      // stored episode.audioUrl — open-redirect risk; consider redirecting to
      // episode.audioUrl as the sibling route above does.
      res.redirect(req.params.publicFileUrl);
    } catch (e) {
      logger.error(e);
      return res.sendStatus(404);
    }
  });

  // Dashboard data for the logged-in user's selected podcast.
  router.get("/podcast", auth.mustBeLoggedIn, async (req, res) => {
    const selectedPodcast = await getSelectedPodcast(req.user._id);
    const podcast = await Podcast.findOne({ _id: selectedPodcast }).exec();
    const podcasts = await Podcast.find({ owner: req.user._id });
    const storageReset = _.get(podcast, "storageReset", new Date());
    const now = new Date();
    const subscription = _.get(podcast, "subscription.storageLimit", 100);
    // If the user has a free plan and its a new month we reset the used storage
    if (storageReset.getMonth() !== now.getMonth() && subscription !== 200 && subscription !== 500) {
      await Podcast.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, {
        storageReset: new Date(),
        usedStorage: 0,
      });
    }
    if (!podcast) {
      return res.sendStatus(404);
    }
    const episodes = await Episode
      .find({ podcast: podcast._id, preview: { $ne: true } })
      .sort({ updatedAt: -1 })
      .exec();
    // tslint:disable-next-line:one-variable-per-declaration
    let usr, user;
    if (req.user._id) {
      usr = await User.findOne({ _id: req.user._id });
      user = {
        firstName: usr.firstName,
        lastName: usr.lastName,
        email: usr.email,
        verified: usr.verified,
      };
    }
    res.json({ podcast, episodes, podcasts, user });
  });

  /*
    To test from command line, get the cookie value from chrome and then run:
    curl -X POST \
    -d 'title=title&subtitle=subtitle&author=yo&keywords=keys&categories=cats' \
    --cookie 'connect.sid=s%3AtiQC1ntIAWu62-5Oiu8uZbzwTbM-Ae1i.M%2Bg8CkYFa82B6VsrTefiu8%2Bc%2BPRDlDxbIH9mRE7cDEE' \
    http://lvh.me:3000/podcast
  */
  // Creates a new podcast (_id === null) or updates the selected one, keeping
  // the user's selectedPodcast in sync and copying subscription info from an
  // already-owned podcast.
  router.post("/podcast", auth.mustBeLoggedIn, async (req, res) => {
    // tslint:disable-next-line:max-line-length
    const { _id, title, subtitle, author, keywords, categories, imageUrl, theme, email, about, aboutPreview, preview, layout, advertisingEnabled, socialNetEnabled, socialNetStatusChange } = req.body;
    const owner = req.user._id;
    const selected = await getSelectedPodcast(req.user._id);
    const search: any = selected ? { _id: selected } : { owner };
    const currentPodcast = await Podcast.find(search);
    slug.defaults.mode = "rfc3986";
    const fields = {
      title,
      subtitle,
      author,
      keywords,
      categories,
      imageUrl,
      theme,
      layout,
      email,
      about,
      aboutPreview,
      slug: slug(title),
      preview,
      advertisingEnabled,
      socialNetEnabled,
      subscription: null,
    };
    let podcastFields;
    if (currentPodcast.length > 0 && _id !== null) {
      podcastFields = {
        $addToSet: { owner },
        ...fields,
      };
    } else {
      podcastFields = {
        owner: [owner],
        ...fields,
      };
    }
    // Enable Social Network
    if (socialNetEnabled && socialNetStatusChange) {
      await buildSocialNet();
    }
    // Disable Social Network
    if (!socialNetEnabled && socialNetStatusChange) {
      // Delete Social Network
    }
    // tslint:disable-next-line:max-line-length
    // We find atleast one podcast that has been created by the user and copy the subscription information to the new one
    const ownedPodcast = await Podcast.findOne({ "owner.0": owner });
    if (ownedPodcast) {
      podcastFields.subscription = ownedPodcast.subscription;
    }
    if (_id === null) {
      const podcast = await Podcast.insertMany([podcastFields]);
      const podcastId = podcast[0]._id;
      const userFields = {
        selectedPodcast: podcastId,
      };
      await User.findOneAndUpdate({ _id: req.user._id }, userFields, (error, user) => {
        if (error) {
          return res.status(403).send(error.message);
        }
        res.json(podcast[0]);
      });
    } else {
      // tslint:disable-next-line:max-line-length
      Podcast.findOneAndUpdate(search, podcastFields, { upsert: true, new: true }, async (err, podcast) => {
        // tslint:disable-next-line:no-console
        if (err) {
          console.warn(err);
          return res.status(403).send(err.message);
        }
        const podcastId = podcast._id;
        const userFields = {
          selectedPodcast: podcastId,
        };
        await User.findOneAndUpdate({ _id: req.user._id }, userFields, (error, user) => {
          if (error) {
            // NOTE(review): reports `err.message` (outer callback's error,
            // which is falsy here) — most likely should be `error.message`.
            return res.status(403).send(err.message);
          }
          res.json(podcast);
        });
      });
    }
  });

  // Saves the podcast's contact info (only the fields that were provided).
  router.post("/contact", auth.mustBeLoggedIn, async (req, res) => {
    const { contactEmail, contactFacebook, contactTwitter, contactMessage, preview } = req.body;
    const owner = req.user._id;
    const podcastFields = {} as any;
    if (contactEmail) {
      podcastFields.contactEmail = contactEmail;
    }
    if (contactFacebook) {
      podcastFields.contactFacebook = contactFacebook;
    }
    if (contactTwitter) {
      podcastFields.contactTwitter = contactTwitter;
    }
    if (contactMessage) {
      podcastFields.contactMessage = contactMessage;
    }
    if (preview) {
      podcastFields.preview = preview;
    }
    // tslint:disable-next-line:max-line-length
    Podcast.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, podcastFields, { upsert: true, new: true }, (err, podcast) => {
      if (err) {
        return res.status(403).send(err.message);
      }
      res.json(podcast);
    });
  });

  // Upserts the company profile keyed by the user's selected podcast id.
  router.post("/company", auth.mustBeLoggedIn, async (req, res) => {
    const owner = req.user._id;
    const { companyName, logo, podcastCategories, podcasts } = req.body;
    const companyFields = {
      companyName,
      logo,
      podcastCategories,
      podcasts,
    };
    // tslint:disable-next-line:max-line-length
    Company.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, companyFields, { upsert: true, new: true }, (err, podcast) => {
      if (err) {
        return res.status(403).send(err.message);
      }
      res.json(podcast);
    });
  });

  // Emails a collaboration invite for the selected podcast.
  router.post("/invite-collaborator", auth.mustBeLoggedIn, async (req, res) => {
    const email = req.body.email;
    const owner = req.user._id;
    const podcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) });
    const user = await User.findOne({ email });
    sendColabInvite(email, user._id, podcast._id);
    res.status(200).send();
  });

  // Forwards a podcast directory submission by email.
  router.post("/submit-podcast", auth.mustBeLoggedIn, async(req, res) => {
    sendPodcastSubmission(req.body.podcastId, req.body.podcastTitle, req.body.podcastEmail, req.body.rssFeed);
    res.status(200).send();
  });

  // Forwards a listener's contact-form message to the podcast's email.
  router.post("/submit-contact-message", auth.mustBeLoggedIn, async(req, res) => {
    sendContactMessage(req.body.name, req.body.email, req.body.message, req.body.podcastEmail);
    res.status(200).send();
  });

  // Lists the selected podcast's owners, excluding the requesting user.
  router.post("/get-collaborators", auth.mustBeLoggedIn, async (req, res) => {
    const owner = req.user._id;
    const podcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) });
    const podcastOwners = podcast.owner;
    User.find({ _id: { $in: podcastOwners } }, (err, users) => {
      if (err) {
        return res.status(403).send(err.message);
      }
      const collaborators = users.map((user) => {
        return {
          email: user.email,
          _id: user._id,
        };
      }).filter(user => String(user._id) !== String(owner));
      res.json({ collaborators });
    });
  });

  // Pulls the given collaborator ids out of the podcast's owner array; the
  // returned document (new: false) is filtered locally to reflect the removal.
  router.post("/remove-collaborators", auth.mustBeLoggedIn, async (req, res) => {
    const collaborators = req.body.collaborators.map((val) => {
      return new Types.ObjectId(val);
    });
    const owner = req.user._id;
    const query = {
      $pull: { owner: { $in: collaborators } },
    };
    // tslint:disable-next-line:max-line-length
    Podcast.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, query, { upsert: true, new: false }, (err, podcast) => {
      if (err) {
        return res.status(403).send(err.message);
      }
      const currentPodcast = JSON.parse(JSON.stringify(podcast));
      currentPodcast.owner = currentPodcast.owner.filter((value) => {
        return !req.body.collaborators.includes(String(value));
      });
      res.json({ podcast: currentPodcast });
    });
  });

  // Email-link callback: completes a collaboration invite or an account
  // verification, then logs the user in and redirects home.
  router.get("/email/callback/:callbackID", async (req, res) => {
    const email = await Email.findOne({ _id: (req.params as any).callbackID });
    switch (email.transaction) {
      case "invite":
        const podcastFields = {
          $addToSet: { owner: email.user },
        };
        // tslint:disable-next-line:max-line-length
        Podcast.findOneAndUpdate({ _id: email.podcast }, podcastFields, { upsert: true, new: true }, async (err, podcast) => {
          // tslint:disable-next-line:no-console
          if (err) {
            console.warn(err);
            return res.status(403).send(err.message);
          }
          const usr = await User.findOne({ _id: email.user });
          req.login(usr, function (error) {
            res.redirect("/");
          });
        });
        break;
      case "verification":
        const userFields = {
          verified: true,
        };
        const user = await User.findOneAndUpdate({ _id: email.user }, userFields);
        req.login(user, function (err) {
          res.redirect("/");
        });
        break;
    }
  });

  // Changes which of the user's podcasts is the "selected" one.
  router.post("/switch-podcast", auth.mustBeLoggedIn, async (req, res) => {
    const podcastId = req.body.podcastId;
    const fields = {
      selectedPodcast: podcastId,
    };
    await User.findOneAndUpdate({ _id: req.user._id }, fields, (err, user) => {
      if (err) {
        return res.status(403).send(err.message);
      }
      res.status(200).send();
    });
  });

  // Measures the remote file's size and adds it (in MB) to the selected
  // podcast's used storage, initializing storageReset on first use.
  router.post("/add_storage_usage", auth.mustBeLoggedIn, async (req, res) => {
    try {
      if (req.body && req.body.url) {
        const url = req.body.url;
        remote(url, async (err, size) => {
          const mb = 1048576; // bytes per megabyte
          // tslint:disable-next-line:max-line-length
          const currentPodcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) }).exec();
          if (!currentPodcast.storageReset) {
            Podcast.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, {
              $inc: { usedStorage: size / mb },
              storageReset: new Date(),
            }, (error, podcast) => {
              res.status(200).send();
            });
          } else {
            Podcast.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, {
              $inc: { usedStorage: size / mb },
            }, (error, podcast) => {
              const pod = JSON.parse(JSON.stringify(podcast));
              pod.usedStorage += size / mb;
              res.json({ podcast: pod });
            });
          }
        });
      } else {
        throw new Error("No url provided.");
      }
    } catch (err) {
      logger.error("Storage usage update failed: ", err);
      res.sendStatus(403);
    }
  });
};
the_stack
import { Prim, Expr, IntLiteral } from "./micheline"; import { Tuple, NoArgs, ReqArgs, MichelsonError } from "./utils"; import { MichelsonCode, MichelsonType, MichelsonData, MichelsonContract, MichelsonNoArgInstruction, MichelsonInstruction, InstructionList, MichelsonTypeID, MichelsonSimpleComparableTypeID } from "./michelson-types"; // Michelson validator const noArgInstructionIDs: Record<MichelsonNoArgInstruction["prim"], true> = { "ABS": true, "ADD": true, "ADDRESS": true, "AMOUNT": true, "AND": true, "APPLY": true, "BALANCE": true, "BLAKE2B": true, "CAR": true, "CDR": true, "CHAIN_ID": true, "CHECK_SIGNATURE": true, "COMPARE": true, "CONCAT": true, "CONS": true, "EDIV": true, "EQ": true, "EXEC": true, "FAILWITH": true, "GE": true, "GET_AND_UPDATE": true, "GT": true, "HASH_KEY": true, "IMPLICIT_ACCOUNT": true, "INT": true, "ISNAT": true, "JOIN_TICKETS": true, "KECCAK": true, "LE": true, "LEVEL": true, "LSL": true, "LSR": true, "LT": true, "MEM": true, "MUL": true, "NEG": true, "NEQ": true, "NEVER": true, "NOT": true, "NOW": true, "OR": true, "PACK": true, "PAIRING_CHECK": true, "READ_TICKET": true, "SAPLING_VERIFY_UPDATE": true, "SELF": true, "SELF_ADDRESS": true, "SENDER": true, "SET_DELEGATE": true, "SHA256": true, "SHA3": true, "SHA512": true, "SIZE": true, "SLICE": true, "SOME": true, "SOURCE": true, "SPLIT_TICKET": true, "SUB": true, "SWAP": true, "TICKET": true, "TOTAL_VOTING_POWER": true, "TRANSFER_TOKENS": true, "UNIT": true, "VOTING_POWER": true, "XOR": true, "RENAME": true, }; export const instructionIDs: Record<MichelsonInstruction["prim"], true> = Object.assign({}, noArgInstructionIDs, { "CONTRACT": true, "CREATE_CONTRACT": true, "DIG": true, "DIP": true, "DROP": true, "DUG": true, "DUP": true, "EMPTY_BIG_MAP": true, "EMPTY_MAP": true, "EMPTY_SET": true, "GET": true, "IF": true, "IF_CONS": true, "IF_LEFT": true, "IF_NONE": true, "ITER": true, "LAMBDA": true, "LEFT": true, "LOOP": true, "LOOP_LEFT": true, "MAP": true, "NIL": true, "NONE": true, 
"PAIR": true, "PUSH": true, "RIGHT": true, "SAPLING_EMPTY_STATE": true, "UNPACK": true, "UNPAIR": true, "UPDATE": true, "CAST": true, } as const); const simpleComparableTypeIDs: Record<MichelsonSimpleComparableTypeID, true> = { "unit": true, "never": true, "bool": true, "int": true, "nat": true, "string": true, "chain_id": true, "bytes": true, "mutez": true, "key_hash": true, "key": true, "signature": true, "timestamp": true, "address": true, }; const typeIDs: Record<MichelsonTypeID, true> = Object.assign({}, simpleComparableTypeIDs, { "or": true, "pair": true, "set": true, "big_map": true, "contract": true, "lambda": true, "list": true, "map": true, "operation": true, "option": true, "bls12_381_g1": true, "bls12_381_g2": true, "bls12_381_fr": true, "sapling_transaction": true, "sapling_state": true, "ticket": true, } as const); export class MichelsonValidationError extends MichelsonError { /** * @param val Value of a node caused the error * @param message An error message */ constructor(public val: Expr, message?: string) { super(val, message); Object.setPrototypeOf(this, MichelsonValidationError.prototype); } } function isPrim(ex: Expr): ex is Prim { return "prim" in ex; } function isPrimOrSeq(ex: Expr): ex is Prim | Expr[] { return Array.isArray(ex) || "prim" in ex; } function assertPrim(ex: Expr): ex is Prim { if (isPrim(ex)) { return true; } throw new MichelsonValidationError(ex, "prim expression expected"); } function assertSeq(ex: Expr): ex is Expr[] { if (Array.isArray(ex)) { return true; } throw new MichelsonValidationError(ex, "sequence expression expected"); } function assertPrimOrSeq(ex: Expr): ex is Prim | Expr[] { if (isPrimOrSeq(ex)) { return true; } throw new MichelsonValidationError(ex, "prim or sequence expression expected"); } function assertNatural(i: IntLiteral) { if (i.int[0] === "-") { throw new MichelsonValidationError(i, "natural number expected"); } } function assertIntLiteral(ex: Expr): ex is IntLiteral { if ("int" in ex) { return true; } 
throw new MichelsonValidationError(ex, "int literal expected"); } function assertArgs<N extends number>(ex: Prim, n: N): ex is N extends 0 ? NoArgs<Prim<string>> : ReqArgs<Prim<string, Tuple<N, Expr>>> { if ((n === 0 && ex.args === undefined) || ex.args?.length === n) { return true; } throw new MichelsonValidationError(ex, `${n} arguments expected`); } /** * Checks if the node is a valid Michelson code (sequence of instructions). * This is a type guard function which either returns true of throws an exception. * @param ex An AST node */ export function assertMichelsonInstruction(ex: Expr): ex is MichelsonCode { if (Array.isArray(ex)) { for (const n of ex) { if (!Array.isArray(n) && !isPrim(n)) { throw new MichelsonValidationError(ex, "sequence or prim expected"); } assertMichelsonInstruction(n); } return true; } if (assertPrim(ex)) { if (Object.prototype.hasOwnProperty.call(noArgInstructionIDs, ex.prim)) { assertArgs(ex, 0); return true; } switch (ex.prim) { case "DROP": case "PAIR": case "UNPAIR": case "DUP": case "UPDATE": case "GET": if (ex.args !== undefined && assertArgs(ex, 1)) { /* istanbul ignore else */ if (assertIntLiteral(ex.args[0])) { assertNatural(ex.args[0]); } } break; case "DIG": case "DUG": case "SAPLING_EMPTY_STATE": /* istanbul ignore else */ if (assertArgs(ex, 1)) { /* istanbul ignore else */ if (assertIntLiteral(ex.args[0])) { assertNatural(ex.args[0]); } } break; case "NONE": case "LEFT": case "RIGHT": case "NIL": case "CAST": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonType(ex.args[0]); } break; case "UNPACK": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonPackableType(ex.args[0]); } break; case "CONTRACT": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonPassableType(ex.args[0]); } break; case "IF_NONE": case "IF_LEFT": case "IF_CONS": case "IF": /* istanbul ignore else */ if (assertArgs(ex, 2)) { /* istanbul ignore else */ if (assertSeq(ex.args[0])) { 
assertMichelsonInstruction(ex.args[0]); } /* istanbul ignore else */ if (assertSeq(ex.args[1])) { assertMichelsonInstruction(ex.args[1]); } } break; case "MAP": case "ITER": case "LOOP": case "LOOP_LEFT": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonInstruction(ex.args[0]); } break; case "CREATE_CONTRACT": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonContract(ex.args[0]); } break; case "DIP": if (ex.args?.length === 2) { /* istanbul ignore else */ if (assertIntLiteral(ex.args[0])) { assertNatural(ex.args[0]); } /* istanbul ignore else */ if (assertSeq(ex.args[1])) { assertMichelsonInstruction(ex.args[1]); } } else if (ex.args?.length === 1) { /* istanbul ignore else */ if (assertSeq(ex.args[0])) { assertMichelsonInstruction(ex.args[0]); } } else { throw new MichelsonValidationError(ex, "1 or 2 arguments expected"); } break; case "PUSH": /* istanbul ignore else */ if (assertArgs(ex, 2)) { assertMichelsonPushableType(ex.args[0]); assertMichelsonData(ex.args[1]); } break; case "EMPTY_SET": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonComparableType(ex.args[0]); } break; case "EMPTY_MAP": /* istanbul ignore else */ if (assertArgs(ex, 2)) { assertMichelsonComparableType(ex.args[0]); assertMichelsonType(ex.args[1]); } break; case "EMPTY_BIG_MAP": /* istanbul ignore else */ if (assertArgs(ex, 2)) { assertMichelsonComparableType(ex.args[0]); assertMichelsonBigMapStorableType(ex.args[1]); } break; case "LAMBDA": /* istanbul ignore else */ if (assertArgs(ex, 3)) { assertMichelsonType(ex.args[0]); assertMichelsonType(ex.args[1]); /* istanbul ignore else */ if (assertSeq(ex.args[2])) { assertMichelsonInstruction(ex.args[2]); } } break; default: throw new MichelsonValidationError(ex, "instruction expected"); } } return true; } export function assertMichelsonComparableType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (Array.isArray(ex) || ex.prim === "pair" || ex.prim 
=== "or" || ex.prim === "option") { traverseType(ex, (ex) => assertMichelsonComparableType(ex)); } else if (!Object.prototype.hasOwnProperty.call(simpleComparableTypeIDs, ex.prim)) { throw new MichelsonValidationError(ex, `${ex.prim}: type is not comparable`); } } return true; } export function assertMichelsonPackableType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (isPrim(ex)) { if (!Object.prototype.hasOwnProperty.call(typeIDs, ex.prim) || ex.prim === "big_map" || ex.prim === "operation" || ex.prim === "sapling_state" || ex.prim === "ticket") { throw new MichelsonValidationError(ex, `${ex.prim}: type can't be used inside PACK/UNPACK instructions`); } traverseType(ex, (ex) => assertMichelsonPackableType(ex)); } } return true; } export function assertMichelsonPushableType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (isPrim(ex)) { if (!Object.prototype.hasOwnProperty.call(typeIDs, ex.prim) || ex.prim === "big_map" || ex.prim === "operation" || ex.prim === "sapling_state" || ex.prim === "ticket" || ex.prim === "contract") { throw new MichelsonValidationError(ex, `${ex.prim}: type can't be pushed`); } traverseType(ex, (ex) => assertMichelsonPushableType(ex)); } } return true; } export function assertMichelsonStorableType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (isPrim(ex)) { if (!Object.prototype.hasOwnProperty.call(typeIDs, ex.prim) || ex.prim === "operation" || ex.prim === "contract") { throw new MichelsonValidationError(ex, `${ex.prim}: type can't be used as part of a storage`); } traverseType(ex, (ex) => assertMichelsonStorableType(ex)); } } return true; } export function assertMichelsonPassableType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (isPrim(ex)) { if (!Object.prototype.hasOwnProperty.call(typeIDs, ex.prim) || ex.prim === "operation") { throw new 
MichelsonValidationError(ex, `${ex.prim}: type can't be used as part of a parameter`); } traverseType(ex, (ex) => assertMichelsonPassableType(ex)); } } return true; } export function assertMichelsonBigMapStorableType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (isPrim(ex)) { if (!Object.prototype.hasOwnProperty.call(typeIDs, ex.prim) || ex.prim === "big_map" || ex.prim === "operation" || ex.prim === "sapling_state") { throw new MichelsonValidationError(ex, `${ex.prim}: type can't be used inside a big_map`); } traverseType(ex, (ex) => assertMichelsonBigMapStorableType(ex)); } } return true; } /** * Checks if the node is a valid Michelson type expression. * This is a type guard function which either returns true of throws an exception. * @param ex An AST node */ export function assertMichelsonType(ex: Expr): ex is MichelsonType { /* istanbul ignore else */ if (assertPrimOrSeq(ex)) { if (isPrim(ex)) { if (!Object.prototype.hasOwnProperty.call(typeIDs, ex.prim)) { throw new MichelsonValidationError(ex, "type expected"); } traverseType(ex, (ex) => assertMichelsonType(ex)); } } return true; } function traverseType(ex: Prim | Expr[], cb: (ex: Prim | Expr[]) => void): ex is MichelsonType { if (Array.isArray(ex) || ex.prim === "pair") { const args = Array.isArray(ex) ? 
ex : ex.args; if (args === undefined || args.length < 2) { throw new MichelsonValidationError(ex, "at least 2 arguments expected"); } args.forEach(a => { if (assertPrimOrSeq(a)) { cb(a); } }); return true; }
// Non-pair prim: validate arguments according to the head symbol.
switch (ex.prim) {
    // Single type argument; recurse through the caller's callback.
    case "option": case "list": /* istanbul ignore else */ if (assertArgs(ex, 1) && assertPrimOrSeq(ex.args[0])) { cb(ex.args[0]); } break;
    // contract's argument must itself be a passable (parameter) type.
    case "contract": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonPassableType(ex.args[0]); } break;
    case "or": /* istanbul ignore else */ if (assertArgs(ex, 2) && assertPrimOrSeq(ex.args[0]) && assertPrimOrSeq(ex.args[1])) { cb(ex.args[0]); cb(ex.args[1]); } break;
    // lambda's argument/return are full types, not subject to the caller's property.
    case "lambda": /* istanbul ignore else */ if (assertArgs(ex, 2)) { assertMichelsonType(ex.args[0]); assertMichelsonType(ex.args[1]); } break;
    // set elements must be comparable.
    case "set": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonComparableType(ex.args[0]); } break;
    // map keys must be comparable; values inherit the caller's property.
    case "map": /* istanbul ignore else */ if (assertArgs(ex, 2) && assertPrimOrSeq(ex.args[0]) && assertPrimOrSeq(ex.args[1])) { assertMichelsonComparableType(ex.args[0]); cb(ex.args[1]); } break;
    // big_map keys must be comparable; values must additionally be big_map-storable.
    case "big_map": /* istanbul ignore else */ if (assertArgs(ex, 2) && assertPrimOrSeq(ex.args[0]) && assertPrimOrSeq(ex.args[1])) { assertMichelsonComparableType(ex.args[0]); assertMichelsonBigMapStorableType(ex.args[1]); cb(ex.args[1]); } break;
    case "ticket": /* istanbul ignore else */ if (assertArgs(ex, 1) && assertPrimOrSeq(ex.args[0])) { assertMichelsonComparableType(ex.args[0]); } break;
    // Sapling types take a single int literal (memo size).
    case "sapling_state": case "sapling_transaction": if (assertArgs(ex, 1)) { assertIntLiteral(ex.args[0]); } break;
    // Every remaining simple type takes no arguments.
    default: assertArgs(ex, 0); } return true; }
/**
 * Checks if the node is a valid Michelson data literal such as `(Pair {Elt "0" 0} 0)`.
 * This is a type guard function which either returns true or throws an exception.
* @param ex An AST node */ export function assertMichelsonData(ex: Expr): ex is MichelsonData { if (("int" in ex) || ("string" in ex) || ("bytes" in ex)) { return true; } if (Array.isArray(ex)) { let mapElts = 0; for (const n of ex) { if (isPrim(n) && n.prim === "Elt") { /* istanbul ignore else */ if (assertArgs(n, 2)) { assertMichelsonData(n.args[0]); assertMichelsonData(n.args[1]); } mapElts++; } else { assertMichelsonData(n); } } if (mapElts !== 0 && mapElts !== ex.length) { throw new MichelsonValidationError(ex, "data entries and map elements can't be intermixed"); } return true; } if (isPrim(ex)) { switch (ex.prim) { case "Unit": case "True": case "False": case "None": assertArgs(ex, 0); break; case "Pair": /* istanbul ignore else */ if (ex.args === undefined || ex.args.length < 2) { throw new MichelsonValidationError(ex, "at least 2 arguments expected"); } for (const a of ex.args) { assertMichelsonData(a); } break; case "Left": case "Right": case "Some": /* istanbul ignore else */ if (assertArgs(ex, 1)) { assertMichelsonData(ex.args[0]); } break; default: if (Object.prototype.hasOwnProperty.call(instructionIDs, ex.prim)) { assertMichelsonInstruction(ex); } else { throw new MichelsonValidationError(ex, "data entry or instruction expected"); } } } else { throw new MichelsonValidationError(ex, "data entry expected"); } return true; } /** * Checks if the node is a valid Michelson smart contract source containing all required and valid properties such as `parameter`, `storage` and `code`. * This is a type guard function which either returns true of throws an exception. 
* @param ex An AST node */ export function assertMichelsonContract(ex: Expr): ex is MichelsonContract { /* istanbul ignore else */ if (assertSeq(ex)) { const ent: { [sec: string]: boolean } = {}; for (const sec of ex) { if (assertPrim(sec)) { if (sec.prim !== "code" && sec.prim !== "parameter" && sec.prim !== "storage") { throw new MichelsonValidationError(ex, `unexpected contract section: ${sec.prim}`); } if (sec.prim in ent) { throw new MichelsonValidationError(ex, `duplicate contract section: ${sec.prim}`); } ent[sec.prim] = true; /* istanbul ignore else */ if (assertArgs(sec, 1)) { switch (sec.prim) { case "code": /* istanbul ignore else */ if (assertSeq(sec.args[0])) { assertMichelsonInstruction(sec.args[0]); } break; case "parameter": assertMichelsonPassableType(sec.args[0]); break; case "storage": assertMichelsonStorableType(sec.args[0]); } } } } } return true; } /** * Checks if the node is a valid Michelson smart contract source containing all required and valid properties such as `parameter`, `storage` and `code`. * @param ex An AST node */ export function isMichelsonScript(ex: Expr): ex is MichelsonContract { try { assertMichelsonContract(ex); return true; } catch { return false; } } /** * Checks if the node is a valid Michelson data literal such as `(Pair {Elt "0" 0} 0)`. * @param ex An AST node */ export function isMichelsonData(ex: Expr): ex is MichelsonData { try { assertMichelsonData(ex); return true; } catch { return false; } } /** * Checks if the node is a valid Michelson code (sequence of instructions). * @param ex An AST node */ export function isMichelsonCode(ex: Expr): ex is InstructionList { try { assertMichelsonInstruction(ex); return true; } catch { return false; } } /** * Checks if the node is a valid Michelson type expression. 
* @param ex An AST node */ export function isMichelsonType(ex: Expr): ex is MichelsonType { try { assertMichelsonType(ex); return true; } catch { return false; } } export function isInstruction(p: Prim): p is MichelsonInstruction { return Object.prototype.hasOwnProperty.call(instructionIDs, p.prim); } export function assertDataListIfAny(d: MichelsonData): d is MichelsonData[] { if (!Array.isArray(d)) { return false; } for (const v of d) { if ("prim" in v) { if (isInstruction(v)) { throw new MichelsonError(d, `Instruction outside of a lambda: ${JSON.stringify(d)}`); } else if (v.prim === "Elt") { throw new MichelsonError(d, `Elt item outside of a map literal: ${JSON.stringify(d)}`); } } } return true; }
the_stack
* Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @emails oncall+relay * @flow * @format */
// flowlint ambiguous-object-type:error
'use strict';
// Toggled by individual tests to make the mocked getPromiseForActiveRequest
// resolve immediately instead of delegating to the real implementation.
let mockGetPromise = false;
// Spy used to observe unsubscription of the mocked fetchQuery observable.
const unsubscribe = jest.fn();
// Wrap relay-runtime's __internal so tests can (a) force an active-request
// promise and (b) observe unsubscribe calls made through fetchQuery.
jest.doMock('relay-runtime', () => { const originalRuntime = jest.requireActual('relay-runtime'); const originalInternal = originalRuntime.__internal; return { ...originalRuntime, __internal: { ...originalInternal, getPromiseForActiveRequest: (...args) => { if (mockGetPromise) { return Promise.resolve(); } return originalInternal.getPromiseForActiveRequest(...args); }, fetchQuery: (...args) => { const observable = originalInternal.fetchQuery(...args); return { subscribe: (observer) => { return observable.subscribe({ ...observer, start: (originalSubscription) => { const observerStart = observer?.start; observerStart && observerStart({ ...originalSubscription, unsubscribe: () => { originalSubscription.unsubscribe(); unsubscribe(); }, }); }, }); }, }; }, }, }; });
// Use the mock scheduler so tests can control React's work loop deterministically.
jest.mock('scheduler', () => { return jest.requireActual('scheduler/unstable_mock'); });
// Replace fbjs warning with a spy so tests can assert on emitted warnings.
jest.mock('fbjs/lib/warning', () => { const f: any = jest.fn(); f.default = jest.fn(); return f; });
import * as React from 'react';
const Scheduler = require('scheduler');
import { OperationDescriptor, Variables } from 'relay-runtime';
const { useMemo, useState, useEffect } = React;
import * as TestRenderer from 'react-test-renderer';
import { useRefetchableFragment as useRefetchableFragmentNodeOriginal, RelayEnvironmentProvider, useRelayEnvironment, ReactRelayContext, } from '../src';
const invariant = require('fbjs/lib/invariant');
const warning = require('fbjs/lib/warning');
const { FRAGMENT_OWNER_KEY, FRAGMENTS_KEY, ID_KEY, createOperationDescriptor, } = require('relay-runtime');
// Test suite for the useRefetchableFragment hook (continues past this chunk).
describe('useRefetchableFragmentNode', () => { let environment; let gqlQuery; let gqlQueryNestedFragment;
let gqlRefetchQuery; let gqlQueryWithArgs; let gqlQueryWithLiteralArgs; let gqlRefetchQueryWithArgs; let gqlFragment; let gqlFragmentWithArgs; let query; let queryNestedFragment; let refetchQuery; let queryWithArgs; let queryWithLiteralArgs; let refetchQueryWithArgs; let variables; let variablesNestedFragment; let forceUpdate; let setEnvironment; let setOwner; let fetchPolicy; let renderPolicy; let createMockEnvironment; let generateAndCompile; let renderFragment; let renderSpy; let refetch; let Renderer; class ErrorBoundary extends React.Component<any, any> { state = { error: null }; componentDidCatch(error) { this.setState({ error }); } render() { const { children, fallback } = this.props; const { error } = this.state; if (error) { return React.createElement(fallback, { error }); } return children; } } function useRefetchableFragmentNode(fragmentNode, fragmentRef) { const { data, refetch: refetchFunction } = useRefetchableFragmentNodeOriginal( fragmentNode, fragmentRef, ); refetch = refetchFunction; renderSpy(data, refetch); return data; } function assertCall(expected, idx) { const actualData = renderSpy.mock.calls[idx][0]; expect(actualData).toEqual(expected.data); } function expectFragmentResults(expectedCalls: ReadonlyArray<{ data: any }>) { // This ensures that useEffect runs TestRenderer.act(() => jest.runAllImmediates()); expect(renderSpy).toBeCalledTimes(expectedCalls.length); expectedCalls.forEach((expected, idx) => assertCall(expected, idx)); renderSpy.mockClear(); } function createFragmentRef(id, owner) { return { [ID_KEY]: id, [FRAGMENTS_KEY]: { NestedUserFragment: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, }; } beforeEach(() => { // Set up mocks jest.resetModules(); jest.spyOn(console, 'warn').mockImplementationOnce(() => {}); renderSpy = jest.fn(); fetchPolicy = 'store-or-network'; renderPolicy = 'partial'; ({ generateAndCompile } = require('./TestCompiler')); ({ createMockEnvironment } = require('relay-test-utils-internal')); // Set up environment 
and base data environment = createMockEnvironment(); const generated = generateAndCompile( ` fragment NestedUserFragment on User { username } fragment UserFragmentWithArgs on User @refetchable(queryName: "UserFragmentWithArgsRefetchQuery") @argumentDefinitions(scaleLocal: {type: "Float!"}) { id name profile_picture(scale: $scaleLocal) { uri } ...NestedUserFragment } fragment UserFragment on User @refetchable(queryName: "UserFragmentRefetchQuery") { id name profile_picture(scale: $scale) { uri } ...NestedUserFragment } query UserQuery($id: ID!, $scale: Int!) { node(id: $id) { ...UserFragment } } query UserQueryNestedFragment($id: ID!, $scale: Int!) { node(id: $id) { actor { ...UserFragment } } } query UserQueryWithArgs($id: ID!, $scale: Float!) { node(id: $id) { ...UserFragmentWithArgs @arguments(scaleLocal: $scale) } } query UserQueryWithLiteralArgs($id: ID!) { node(id: $id) { ...UserFragmentWithArgs @arguments(scaleLocal: 16) } } `, ); variables = { id: '1', scale: 16 }; variablesNestedFragment = { id: '<feedbackid>', scale: 16 }; gqlQuery = generated.UserQuery; gqlQueryNestedFragment = generated.UserQueryNestedFragment; gqlRefetchQuery = generated.UserFragmentRefetchQuery; gqlQueryWithArgs = generated.UserQueryWithArgs; gqlQueryWithLiteralArgs = generated.UserQueryWithLiteralArgs; gqlRefetchQueryWithArgs = generated.UserFragmentWithArgsRefetchQuery; gqlFragment = generated.UserFragment; gqlFragmentWithArgs = generated.UserFragmentWithArgs; invariant( gqlFragment.metadata?.refetch?.operation === '@@MODULE_START@@UserFragmentRefetchQuery.graphql@@MODULE_END@@', 'useRefetchableFragment-test: Expected refetchable fragment metadata to contain operation.', ); invariant( gqlFragmentWithArgs.metadata?.refetch?.operation === '@@MODULE_START@@UserFragmentWithArgsRefetchQuery.graphql@@MODULE_END@@', 'useRefetchableFragment-test: Expected refetchable fragment metadata to contain operation.', ); // Manually set the refetchable operation for the test. 
gqlFragment.metadata.refetch.operation = gqlRefetchQuery; gqlFragmentWithArgs.metadata.refetch.operation = gqlRefetchQueryWithArgs; query = createOperationDescriptor(gqlQuery, variables, { force: true }); queryNestedFragment = createOperationDescriptor( gqlQueryNestedFragment, variablesNestedFragment, { force: true }, ); refetchQuery = createOperationDescriptor(gqlRefetchQuery, variables, { force: true, }); queryWithArgs = createOperationDescriptor(gqlQueryWithArgs, variables, { force: true, }); queryWithLiteralArgs = createOperationDescriptor( gqlQueryWithLiteralArgs, { id: variables.id, }, { force: true }, ); refetchQueryWithArgs = createOperationDescriptor(gqlRefetchQueryWithArgs, variables, { force: true, }); environment.commitPayload(query, { node: { __typename: 'User', id: '1', name: 'Alice', username: 'useralice', profile_picture: null, }, }); // Set up renderers Renderer = (props) => null; const Container = (props: { userRef?: any; owner: OperationDescriptor; fragment: any }) => { // We need a render a component to run a Hook const [owner, _setOwner] = useState<OperationDescriptor>(props.owner); const [_, _setCount] = useState(0); const fragment = props.fragment ?? gqlFragment; const artificialUserRef = useMemo( () => ({ [ID_KEY]: owner.request.variables.id ?? owner.request.variables.nodeID, [FRAGMENTS_KEY]: { [fragment.name]: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, }), [owner, fragment.name], ); const userRef = props.hasOwnProperty('userRef') ? 
props.userRef : artificialUserRef; forceUpdate = _setCount; setOwner = _setOwner; const userData = useRefetchableFragmentNode(fragment, userRef); return <Renderer user={userData} />; }; const ContextProvider = ({ children }) => { const [env, _setEnv] = useState(environment); const relayContext = useMemo(() => ({ environment: env }), [env]); setEnvironment = _setEnv; return ( <ReactRelayContext.Provider value={relayContext}> {children} </ReactRelayContext.Provider> ); }; const Fallback = (): any => { useEffect(() => { Scheduler.unstable_yieldValue('Fallback'); }); return 'Fallback'; }; renderFragment = (args?: { isConcurrent?: boolean; owner?: any; userRef?: any; fragment?: any; }): any => { const { isConcurrent = false, ...props } = args ?? ({} as any); return TestRenderer.create( <ErrorBoundary fallback={({ error }): any => `Error: ${error.message}`}> <React.Suspense fallback={<Fallback />}> <ContextProvider> <Container owner={query} {...props} /> </ContextProvider> </React.Suspense> </ErrorBoundary>, // any[prop-missing] - error revealed when flow-typing ReactTestRenderer { unstable_isConcurrent: isConcurrent } as any, ); }; }); afterEach(() => { environment.mockClear(); renderSpy.mockClear(); warning.mockClear(); unsubscribe.mockClear(); mockGetPromise = false; }); describe('initial render', () => { // The bulk of initial render behavior is covered in useFragmentNode-test, // so this suite covers the basic cases as a sanity check. 
it('should throw error if fragment is plural', () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); const generated = generateAndCompile(` fragment UserFragment on User @relay(plural: true) { id } `); const renderer = renderFragment({ fragment: generated.UserFragment }); expect( renderer.toJSON().includes('Remove `@relay(plural: true)` from fragment'), ).toEqual(true); }); it('should throw error if fragment is missing @refetchable directive', () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); const generated = generateAndCompile(` fragment UserFragment on User { id } `); const renderer = renderFragment({ fragment: generated.UserFragment }); expect( renderer .toJSON() .includes('Did you forget to add a @refetchable directive to the fragment?'), ).toEqual(true); }); it('should render fragment without error when data is available', () => { renderFragment(); expectFragmentResults([ { data: { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }, }, ]); }); it('should render fragment without error when ref is null', () => { renderFragment({ userRef: null }); expectFragmentResults([{ data: null }]); }); it('should render fragment without error when ref is undefined', () => { renderFragment({ userRef: undefined }); expectFragmentResults([{ data: null }]); }); it('should update when fragment data changes', () => { renderFragment(); expectFragmentResults([ { data: { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }, }, ]); TestRenderer.act(() => { environment.commitPayload(query, { node: { __typename: 'User', id: '1', // Update name name: 'Alice in Wonderland', }, }); }); expectFragmentResults([ { data: { id: '1', // Assert that name is updated name: 'Alice in Wonderland', profile_picture: null, ...createFragmentRef('1', query), }, }, ]); }); it('should throw a promise if data is missing for fragment and request is in flight', () => { // This prevents console.error output in 
the test, which is expected jest.spyOn(console, 'error').mockImplementationOnce(() => {}); mockGetPromise = true; const missingDataVariables = { ...variables, id: '4' }; const missingDataQuery = createOperationDescriptor(gqlQuery, missingDataVariables, { force: true, }); // Commit a payload with name and profile_picture are missing TestRenderer.act(() => { environment.commitPayload(missingDataQuery, { node: { __typename: 'User', id: '4', }, }); }); const renderer = renderFragment({ owner: missingDataQuery }); expect(renderer.toJSON()).toEqual('Fallback'); }); }); describe('refetch', () => { let release; beforeEach(() => { jest.resetModules(); ({ generateAndCompile } = require('./TestCompiler')); ({ createMockEnvironment } = require('relay-test-utils-internal')); release = jest.fn(); environment.retain.mockImplementation((...args) => { return { dispose: release, }; }); }); function expectRequestIsInFlight(expected, requestEnvironment = environment) { expect(requestEnvironment.execute).toBeCalledTimes(expected.requestCount); expect( requestEnvironment.mock.isLoading( expected.gqlRefetchQuery ?? gqlRefetchQuery, expected.refetchVariables, { force: true }, ), ).toEqual(expected.inFlight); } function expectFragmentIsRefetching( renderer, expected: { refetchVariables: Variables; refetchQuery?: OperationDescriptor; gqlRefetchQuery?: any; }, env = environment, retain = true, ) { expect(renderSpy).toBeCalledTimes(0); renderSpy.mockClear(); // Assert refetch query was fetched expectRequestIsInFlight({ ...expected, inFlight: true, requestCount: 1 }, env); // Assert component suspended expect(renderSpy).toBeCalledTimes(0); expect(renderer.toJSON()).toEqual('Fallback'); // Assert query is tentatively retained while component is suspended if (retain) { expect(env.retain).toBeCalledTimes(1); expect(env.retain.mock.calls[0][0]).toEqual(expected.refetchQuery ?? 
refetchQuery); } } it('does not refetch and warns if component has unmounted', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { renderer.unmount(); }); TestRenderer.act(() => { refetch({ id: '4' }); }); expect(warning).toHaveBeenCalledTimes(1); expect( // any[prop-missing] warning.mock.calls[0][1].includes( 'Relay: Unexpected call to `refetch` on unmounted component', ), ).toEqual(true); expect(environment.execute).toHaveBeenCalledTimes(0); }); it('warns if fragment ref passed to useRefetchableFragmentNode() was null', () => { renderFragment({ userRef: null }); expectFragmentResults([{ data: null }]); TestRenderer.act(() => { refetch({ id: '4' }); }); expect(warning).toHaveBeenCalledTimes(1); expect( // any[prop-missing] warning.mock.calls[0][1].includes( 'Relay: Unexpected call to `refetch` while using a null fragment ref', ), ).toEqual(true); expect(environment.execute).toHaveBeenCalledTimes(1); }); /* it('warns if refetch scheduled at high priority', () => { renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { Scheduler.unstable_runWithPriority(Scheduler.unstable_ImmediatePriority, () => { refetch({ id: '4' }); }); }); expect(warning).toHaveBeenCalledTimes(1); expect( // any[prop-missing] warning.mock.calls[0][1].includes( 'Relay: Unexpected call to `refetch` at a priority higher than expected', ), ).toEqual(true); expect(environment.execute).toHaveBeenCalledTimes(1); }); */ it('throws error when error occurs during refetch', () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); const callback = jest.fn(); const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', 
query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ id: '4' }, { onComplete: callback }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network error TestRenderer.act(() => { environment.mock.reject(gqlRefetchQuery, new Error('Oops')); }); TestRenderer.act(() => { jest.runAllImmediates(); }); // Assert error is caught in Error boundary expect(renderer.toJSON()).toEqual('Error: Oops'); expect(callback).toBeCalledTimes(1); expect(callback.mock.calls[0][0]).toMatchObject({ message: 'Oops' }); // Assert refetch query wasn't retained TestRenderer.act(() => { jest.runAllTimers(); }); expect(release).toBeCalledTimes(1); expect(environment.retain).toBeCalledTimes(1); }); it('refetches new variables correctly when refetching new id', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ id: '4' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', 
refetchQuery), }; //expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); it('refetches new variables correctly when refetching same id', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ scale: 32 }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, username: 'useralice', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, ...createFragmentRef('1', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); it('with correct id from refetchable fragment when using nested fragment', () => { // Populate store with data for query using nested fragment TestRenderer.act(() => { environment.commitPayload(queryNestedFragment, { node: { __typename: 'Feedback', id: '<feedbackid>', actor: { 
__typename: 'User', id: '1', name: 'Alice', username: 'useralice', profile_picture: null, }, }, }); }); // Get fragment ref for user using nested fragment const userRef = (environment.lookup(queryNestedFragment.fragment).data as any)?.node ?.actor; const renderer = renderFragment({ owner: queryNestedFragment, userRef }); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', queryNestedFragment), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ scale: 32 }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { // The id here should correspond to the user id, and not the // feedback id from the query variables (i.e. `<feedbackid>`) id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, username: 'useralice', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, ...createFragmentRef('1', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); it('refetches new variables correctly when using @arguments', () => { const userRef = environment.lookup(queryWithArgs.fragment).data?.node; const renderer = renderFragment({ fragment: gqlFragmentWithArgs, userRef, }); const initialUser = { id: '1', name: 'Alice', profile_picture: 
null, ...createFragmentRef('1', queryWithArgs), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ scaleLocal: 32 }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '1', scaleLocal: 32, }; refetchQueryWithArgs = createOperationDescriptor( gqlRefetchQueryWithArgs, refetchVariables, { force: true }, ); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery: refetchQueryWithArgs, gqlRefetchQuery: gqlRefetchQueryWithArgs, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQueryWithArgs, { data: { node: { __typename: 'User', id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, username: 'useralice', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, ...createFragmentRef('1', refetchQueryWithArgs), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQueryWithArgs); }); it('refetches new variables correctly when using @arguments with literal values', () => { const userRef = environment.lookup(queryWithLiteralArgs.fragment).data?.node; const renderer = renderFragment({ fragment: gqlFragmentWithArgs, userRef, }); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', queryWithLiteralArgs), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ id: '4' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scaleLocal: 16, }; refetchQueryWithArgs = createOperationDescriptor( 
gqlRefetchQueryWithArgs, refetchVariables, { force: true }, ); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery: refetchQueryWithArgs, gqlRefetchQuery: gqlRefetchQueryWithArgs, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQueryWithArgs, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQueryWithArgs), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQueryWithArgs); }); it('subscribes to changes in refetched data', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '4' }); }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); // Update refetched 
data environment.commitPayload(refetchQuery, { node: { __typename: 'User', id: '4', name: 'Mark Updated', }, }); // Assert that refetched data is updated expectFragmentResults([ { data: { id: '4', // Name is updated name: 'Mark Updated', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }, }, ]); }); /* it('resets to parent data when environment changes', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '4' }); }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); // Set new environment const newEnvironment = createMockEnvironment(); newEnvironment.commitPayload(query, { node: { __typename: 'User', id: '1', name: 'Alice in a different env', username: 'useralice', profile_picture: null, }, }); TestRenderer.act(() => { setEnvironment(newEnvironment); }); // Assert that parent data is rendered const expectedUser = { id: '1', name: 'Alice in a different env', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([ { data: expectedUser }, { data: expectedUser }, { data: expectedUser }, ]); // Assert refetch query was released expect(release).toBeCalledTimes(1); 
expect(environment.retain).toBeCalledTimes(1); // Update data in new environment TestRenderer.act(() => { newEnvironment.commitPayload(query, { node: { __typename: 'User', id: '1', name: 'Alice Updated', }, }); }); // Assert that data in new environment is updated expectFragmentResults([ { data: { id: '1', name: 'Alice Updated', profile_picture: null, ...createFragmentRef('1', query), }, }, ]); }); */ it('resets to parent data when parent fragment ref changes', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '4' }); }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); // Pass new parent fragment ref with different variables const newVariables = { ...variables, scale: 32 }; const newQuery = createOperationDescriptor(gqlQuery, newVariables, { force: true, }); environment.commitPayload(newQuery, { node: { __typename: 'User', id: '1', name: 'Alice', username: 'useralice', profile_picture: { uri: 'uri32', }, }, }); TestRenderer.act(() => { setOwner(newQuery); }); // Assert that parent data is rendered const expectedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'uri32', }, ...createFragmentRef('1', newQuery), }; 
/*expectFragmentResults([ original relay { data: expectedUser }, //{ data: expectedUser }, { data: expectedUser }, ]);*/ expectFragmentResults([{ data: expectedUser }]); // Assert refetch query was released expect(release).toBeCalledTimes(1); expect(environment.retain).toBeCalledTimes(1); // Update new parent data TestRenderer.act(() => { environment.commitPayload(newQuery, { node: { __typename: 'User', id: '1', name: 'Alice Updated', }, }); }); // Assert that new data from parent is updated expectFragmentResults([ { data: { id: '1', name: 'Alice Updated', profile_picture: { uri: 'uri32', }, ...createFragmentRef('1', newQuery), }, }, ]); }); /* it('warns if data retured has different __typename', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); const refetchVariables = { id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); renderSpy.mockClear(); environment.execute.mockClear(); environment.retain.mockClear(); release.mockClear(); TestRenderer.act(() => { refetch({ scale: 32 }, { fetchPolicy: 'network-only' }); }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'MessagingParticipant', id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, username: 'useralice', }, }, }); }); TestRenderer.act(() => { jest.runAllImmediates(); }); // any[prop-missing] const warningCalls = warning.mock.calls.filter((call) => call[0] === false); expect(warningCalls.length).toEqual(2); // the other warnings are from FragmentResource.js expect( warningCalls[1][1].includes( 'Relay: Call to `refetch` returned data with a different __typename:', ), ).toEqual(true); }); it('warns if a different id is returned', () => { const 
renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); const refetchVariables = { id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); renderSpy.mockClear(); environment.execute.mockClear(); environment.retain.mockClear(); release.mockClear(); TestRenderer.act(() => { refetch({ scale: 32 }, { fetchPolicy: 'network-only' }); }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '2', name: 'Mark', profile_picture: { uri: 'scale32', }, username: 'usermark', }, }, }); }); TestRenderer.act(() => { jest.runAllImmediates(); }); // any[prop-missing] const warningCalls = warning.mock.calls.filter((call) => call[0] === false); expect(warningCalls.length).toEqual(2); expect( warningCalls[0][1].includes( 'Relay: Call to `refetch` returned a different id, expected', ), ).toEqual(true); }); */ it("doesn't warn if refetching on a different id than the current one in display", () => { renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); const refetchVariables = { id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); renderSpy.mockClear(); environment.execute.mockClear(); environment.retain.mockClear(); release.mockClear(); TestRenderer.act(() => { refetch({ id: '2', scale: 32 }, { fetchPolicy: 'network-only' }); jest.runAllImmediates(); }); TestRenderer.act(() => { refetch({ id: '3', scale: 32 }, { fetchPolicy: 'network-only' }); }); TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '3', name: 'Mark', profile_picture: { 
uri: 'scale32', }, username: 'usermark', }, }, }); }); TestRenderer.act(() => { jest.runAllTimers(); }); expect( // any[prop-missing] warning.mock.calls.filter((call) => call[0] === false).length, ).toEqual(0); }); describe('multiple refetches', () => { let fetchSpy; beforeEach(() => { fetchSpy = jest.fn(); const internalRuntime = require('relay-runtime').__internal; const originalFetchQuery = internalRuntime.fetchQuery; jest.spyOn(internalRuntime, 'fetchQuery').mockImplementation((...args) => { const originalObservable = originalFetchQuery(...args); return { ...originalObservable, subscribe: (...subscribeArgs) => { fetchSpy(...args); return originalObservable.subscribe(...subscribeArgs); }, }; }); }); it('refetches correctly when refetching multiple times in a row', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); const refetchVariables = { id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); const refetchedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, ...createFragmentRef('1', refetchQuery), }; const doAndAssertRefetch = (fragmentResults, retain = true) => { renderSpy.mockClear(); environment.execute.mockClear(); environment.retain.mockClear(); TestRenderer.act(() => { // We use fetchPolicy network-only to ensure the call to refetch // always suspends refetch({ scale: 32 }, { fetchPolicy: 'network-only' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch expectFragmentIsRefetching( renderer, { refetchVariables, refetchQuery, }, undefined, retain, ); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, username: 'useralice', }, }, }); }); // 
Assert fragment is rendered with new data expectFragmentResults(fragmentResults); // Assert refetch query was retained if (retain) { expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); } }; // Refetch once // doAndAssertRefetch([{ data: refetchedUser }, { data: refetchedUser }]); relay original doAndAssertRefetch([{ data: refetchedUser }]); // Refetch twice doAndAssertRefetch([{ data: refetchedUser }], false); //expect(release).toBeCalledTimes(1); }); /* it('refetches correctly when a second refetch starts while the first is one suspended', () => { const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is started const refetchVariables1 = { id: '1', scale: 16 }; const refetchQuery1 = createOperationDescriptor( gqlRefetchQuery, refetchVariables1, { force: true }, ); // Assert we suspend on intial refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery1, refetchVariables: refetchVariables1, }); // Call refetch a second time environment.execute.mockClear(); environment.retain.mockClear(); const refetchVariables2 = { id: '4', scale: 16 }; const refetchQuery2 = createOperationDescriptor( gqlRefetchQuery, refetchVariables2, { force: true }, ); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert we suspend on the second refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery2, refetchVariables: refetchVariables2, }); // Mock response for initial refetch request TestRenderer.act(() => { environment.mock.resolve(refetchQuery1, { data: { node: { __typename: 'User', id: '1', name: 'User 1', profile_picture: { uri: 'scale16', }, username: 'user1', }, }, }); }); // Assert that we are still suspended the second refetch request // since that one 
hasn't resolved and that's the latest one we want // to render expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery2, refetchVariables: refetchVariables2, }); // Mock response for second refetch request TestRenderer.act(() => { environment.mock.resolve(refetchQuery2, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert component is rendered with data from second request const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16' }, ...createFragmentRef('4', refetchQuery2), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); expect(fetchSpy).toBeCalledTimes(2); }); it('does not re-issue initial refetch request if second refetch is interrupted by high-pri update', () => { const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is started const refetchVariables1 = { id: '1', scale: 16 }; const refetchQuery1 = createOperationDescriptor( gqlRefetchQuery, refetchVariables1, { force: true }, ); // Assert we suspend on intial refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery1, refetchVariables: refetchVariables1, }); // Call refetch a second time environment.execute.mockClear(); environment.retain.mockClear(); const refetchVariables2 = { id: '4', scale: 16 }; const refetchQuery2 = createOperationDescriptor( gqlRefetchQuery, refetchVariables2, { force: true }, ); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert we suspend on the second refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery2, refetchVariables: refetchVariables2, }); // Schedule a high-pri update while the 
component is // suspended on pagination TestRenderer.act(() => { Scheduler.unstable_runWithPriority( Scheduler.unstable_UserBlockingPriority, () => { forceUpdate((prev) => prev + 1); }, ); }); // Assert that we are still suspended the second refetch request // since that one hasn't resolved and that's the latest one we want // to render expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery2, refetchVariables: refetchVariables2, }); // Mock response for initial refetch request TestRenderer.act(() => { environment.mock.resolve(refetchQuery1, { data: { node: { __typename: 'User', id: '1', name: 'User 1', profile_picture: { uri: 'scale16', }, username: 'user1', }, }, }); }); // Assert that we are still suspended the second refetch request // since that one hasn't resolved and that's the latest one we want // to render expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery2, refetchVariables: refetchVariables2, }); // Mock response for second refetch request TestRenderer.act(() => { environment.mock.resolve(refetchQuery2, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert component is rendered with data from second request const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16' }, ...createFragmentRef('4', refetchQuery2), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); expect(fetchSpy).toBeCalledTimes(2); }); it('refetches correctly when switching between multiple refetches', () => { const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is started const refetchVariables1 = { id: '1', scale: 16 }; const refetchQuery1 = createOperationDescriptor( gqlRefetchQuery, refetchVariables1, { force: 
true }, ); // Assert we suspend on intial refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery1, refetchVariables: refetchVariables1, }); // Call refetch a second time environment.execute.mockClear(); environment.retain.mockClear(); const refetchVariables2 = { id: '4', scale: 16 }; const refetchQuery2 = createOperationDescriptor( gqlRefetchQuery, refetchVariables2, { force: true }, ); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert we suspend on the second refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery2, refetchVariables: refetchVariables2, }); // Switch back to initial refetch environment.retain.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy: 'network-only', UNSTABLE_renderPolicy: renderPolicy }, ); }); // Mock response for second refetch request TestRenderer.act(() => { environment.mock.resolve(refetchQuery2, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert that we are still suspended the initial refetch request // since that one hasn't resolved and that's the latest one we want // to render expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery1, refetchVariables: refetchVariables1, }); // Mock response for initial refetch request TestRenderer.act(() => { environment.mock.resolve(refetchQuery1, { data: { node: { __typename: 'User', id: '1', name: 'User 1', profile_picture: { uri: 'scale16', }, username: 'user1', }, }, }); }); // Assert component is rendered with data from second request const refetchedUser = { id: '1', name: 'User 1', profile_picture: { uri: 'scale16' }, ...createFragmentRef('1', refetchQuery1), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); 
expect(fetchSpy).toBeCalledTimes(3); }); it('does not dispose ongoing request if refetch is called again', () => { const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy: 'store-and-network', UNSTABLE_renderPolicy: renderPolicy, }, ); }); // Assert request is started const refetchVariables1 = { id: '1', scale: 16 }; const refetchQuery1 = createOperationDescriptor( gqlRefetchQuery, refetchVariables1, { force: true }, ); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables: refetchVariables1, }); // Component renders immediately even though request is in flight // since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', refetchQuery1), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); // Call refetch a second time environment.execute.mockClear(); const refetchVariables2 = { id: '4', scale: 16 }; TestRenderer.act(() => { refetch({ id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert first request is not canceled expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables: refetchVariables1, }); // Assert second request is started expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables: refetchVariables2, }); // Assert component suspended expect(renderSpy).toBeCalledTimes(0); expect(renderer.toJSON()).toEqual('Fallback'); expect(fetchSpy).toBeCalledTimes(2); }); */ }); describe('fetchPolicy', () => { describe('store-or-network', () => { beforeEach(() => { fetchPolicy = 'store-or-network'; }); describe('renderPolicy: partial', () => { beforeEach(() => { renderPolicy = 'partial'; }); it("doesn't start network request if refetch query is fully cached", () => { renderFragment(); renderSpy.mockClear(); 
TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is not started const refetchVariables = { ...variables }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectRequestIsInFlight({ inFlight: false, requestCount: 0, gqlRefetchQuery, refetchVariables, }); // Assert component renders immediately since data is cached const refetchedUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), //...createFragmentRef('1', refetchQuery), //original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay }); it('starts network request if refetch query is not fully cached and suspends if fragment has missing data', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); it("starts network 
request if refetch query is not fully cached and doesn't suspend if fragment doesn't have missing data", () => { // Cache user with missing username const refetchVariables = { id: '4', scale: 16 }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); environment.commitPayload(refetchQuery, { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: null, }, }); renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is started expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Assert component renders immediately since data is cached const refetchedUser = { id: '4', name: 'Mark', profile_picture: null, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); }); describe('renderPolicy: full', () => { beforeEach(() => { renderPolicy = 'full'; }); it("doesn't start network request if refetch query is fully cached", () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is not started const refetchVariables = { ...variables }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectRequestIsInFlight({ inFlight: false, requestCount: 0, gqlRefetchQuery, refetchVariables, }); // Assert component renders immediately since data is cached const refetchedUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), //...createFragmentRef('1', refetchQuery), //original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay }); it('starts network request if refetch query is not fully cached and 
suspends if fragment has missing data', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); it("starts network request if refetch query is not fully cached and suspends even if fragment doesn't have missing data", () => { // Cache user with missing username const refetchVariables = { id: '4', scale: 16 }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); environment.commitPayload(refetchQuery, { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: null, }, }); const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); expectFragmentIsRefetching(renderer, { refetchVariables, 
refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); }); }); describe('store-and-network', () => { beforeEach(() => { fetchPolicy = 'store-and-network'; }); describe('renderPolicy: partial', () => { beforeEach(() => { renderPolicy = 'partial'; }); it('starts network request if refetch query is fully cached', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is not started const refetchVariables = { ...variables }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Assert component renders immediately since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), //...createFragmentRef('1', refetchQuery), //original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); }); it('starts network request if refetch query is not fully cached and suspends if fragment has missing data', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, 
UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); it("starts network request if refetch query is not fully cached and doesn't suspend if fragment doesn't have missing data", () => { // Cache user with missing username const refetchVariables = { id: '4', scale: 16 }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); environment.commitPayload(refetchQuery, { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: null, }, }); renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is started expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Assert component renders immediately since data is cached const refetchingUser = { id: '4', name: 'Mark', profile_picture: null, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); }); }); describe('renderPolicy: full', () => { 
beforeEach(() => { renderPolicy = 'full'; }); it('starts network request if refetch query is fully cached', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is not started const refetchVariables = { ...variables }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Assert component renders immediately since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), //...createFragmentRef('1', refetchQuery), //original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); }); it('starts network request if refetch query is not fully cached and suspends if fragment has missing data', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, 
...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); it("starts network request if refetch query is not fully cached and doesn't suspend if fragment doesn't have missing data", () => { // Cache user with missing username const refetchVariables = { id: '4', scale: 16 }; refetchQuery = createOperationDescriptor( gqlRefetchQuery, refetchVariables, { force: true }, ); environment.commitPayload(refetchQuery, { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: null, }, }); const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch( { id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert component suspended expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); }); }); describe('network-only', () => { beforeEach(() => { fetchPolicy = 'network-only'; }); it('starts network request and suspends if refetch query is fully cached', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert that fragment is refetching with the right variables and // suspends 
upon refetch const refetchVariables = { ...variables, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Alice Update', profile_picture: null, username: 'useralice', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { ...initialUser, ...createFragmentRef('1', query), // original relay refetchQuery name: 'Alice Update', }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); }); it('starts network request and suspends if refetch query is not fully cached', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch({ id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: 
refetchedUser }]);
});
});

describe('store-only', () => {
  beforeEach(() => {
    fetchPolicy = 'store-only';
  });

  it("doesn't start network request if refetch query is fully cached", () => {
    renderFragment();
    renderSpy.mockClear();
    TestRenderer.act(() => {
      refetch({ id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy });
    });

    // Assert request is not started
    const refetchVariables = { ...variables };
    refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, {
      force: true,
    });
    expectRequestIsInFlight({
      inFlight: false,
      requestCount: 0,
      gqlRefetchQuery,
      refetchVariables,
    });

    // Assert component renders immediately since data is cached
    // NOTE(review): the render assertion below was disabled during the port, so
    // `refetchingUser` is currently unused and this test only verifies that no
    // request is in flight — confirm whether the render assertion should be restored.
    const refetchingUser = {
      id: '1',
      name: 'Alice',
      profile_picture: null,
      ...createFragmentRef('1', query),
      //...createFragmentRef('1', refetchQuery), //original relay
    };
    // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay
  });

  it("doesn't start network request if refetch query is not fully cached", () => {
    renderFragment();
    renderSpy.mockClear();
    TestRenderer.act(() => {
      refetch({ id: '4' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy });
    });

    // Assert request is not started
    const refetchVariables = { id: '4', scale: 32 };
    refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, {
      force: true,
    });
    expectRequestIsInFlight({
      inFlight: false,
      requestCount: 0,
      gqlRefetchQuery,
      refetchVariables,
    });

    // Assert component renders immediately with empty data
    //expectFragmentResults([{ data: null }, { data: null }]); original relay
    expectFragmentResults([{ data: null }]);
  });
});
});

describe('disposing', () => {
  const fetchPolicy = 'store-and-network';
  afterEach(() => {
    jest.dontMock('relay-runtime');
  });
  /* it('disposes ongoing request if environment changes', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert request is started const
refetchVariables = { id: '1', scale: 16 }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Component renders immediately even though request is in flight // since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); // Set new environment const newEnvironment = createMockEnvironment(); newEnvironment.commitPayload(query, { node: { __typename: 'User', id: '1', name: 'Alice in a different env', username: 'useralice', profile_picture: null, }, }); TestRenderer.act(() => { setEnvironment(newEnvironment); }); // Assert request was canceled expect(unsubscribe).toBeCalledTimes(1); expectRequestIsInFlight({ inFlight: false, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Assert newly rendered data const expectedUser = { id: '1', name: 'Alice in a different env', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([ { data: expectedUser }, { data: expectedUser }, { data: expectedUser }, ]); }); */ it('disposes ongoing request if fragment ref changes', () => { renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert request is started const refetchVariables = { id: '1', scale: 16 }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Component renders immediately even though request is in flight // since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), 
//...createFragmentRef('1', newQuery), original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); // Pass new parent fragment ref with different variables const newVariables = { ...variables, scale: 32 }; const newQuery = createOperationDescriptor(gqlQuery, newVariables, { force: true, }); environment.commitPayload(newQuery, { node: { __typename: 'User', id: '1', name: 'Alice', username: 'useralice', profile_picture: { uri: 'uri32', }, }, }); TestRenderer.act(() => { setOwner(newQuery); }); // Assert request was canceled expect(unsubscribe).toBeCalledTimes(1); expectRequestIsInFlight({ inFlight: false, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Assert newly rendered data const expectedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'uri32', }, ...createFragmentRef('1', newQuery), }; /*expectFragmentResults([ original relay { data: expectedUser }, //{ data: expectedUser }, { data: expectedUser }, ]);*/ expectFragmentResults([{ data: expectedUser }]); }); it('disposes of ongoing request on unmount when refetch suspends', () => { const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '2' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert request is started const refetchVariables = { id: '2', scale: 16 }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); TestRenderer.act(() => { renderer.unmount(); }); // Assert request was canceled expect(unsubscribe).toBeCalledTimes(1); expectRequestIsInFlight({ inFlight: false, requestCount: 1, gqlRefetchQuery, refetchVariables, }); }); it('disposes of ongoing request on unmount when refetch does not suspend', () => { const renderer = renderFragment(); renderSpy.mockClear(); TestRenderer.act(() => { refetch({ id: '1' }, { 
fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }); }); // Assert request is started const refetchVariables = { id: '1', scale: 16 }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Component renders immediately even though request is in flight // since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), //...createFragmentRef('1', refetchQuery), original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); TestRenderer.act(() => { renderer.unmount(); }); // Assert request was canceled expect(unsubscribe).toBeCalledTimes(1); // original relay 2 expectRequestIsInFlight({ inFlight: false, requestCount: 1, gqlRefetchQuery, refetchVariables, }); }); it('disposes ongoing request if it is manually disposed', () => { renderFragment(); renderSpy.mockClear(); let disposable; TestRenderer.act(() => { disposable = refetch( { id: '1' }, { fetchPolicy, UNSTABLE_renderPolicy: renderPolicy }, ); }); // Assert request is started const refetchVariables = { id: '1', scale: 16 }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectRequestIsInFlight({ inFlight: true, requestCount: 1, gqlRefetchQuery, refetchVariables, }); // Component renders immediately even though request is in flight // since data is cached const refetchingUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), //...createFragmentRef('1', refetchQuery), original relay }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchingUser }]); disposable && disposable.dispose(); // Assert request was canceled expect(unsubscribe).toBeCalledTimes(1); 
expectRequestIsInFlight({ inFlight: false, requestCount: 1, gqlRefetchQuery, refetchVariables, }); }); }); describe('refetching @fetchable types', () => { beforeEach(() => { const generated = generateAndCompile( ` fragment UserFragment on NonNodeStory @refetchable(queryName: "UserFragmentRefetchQuery") { actor { name } } query UserQuery($id: ID!) { nonNodeStory(id: $id) { ...UserFragment } } `, ); variables = { id: 'a' }; gqlQuery = generated.UserQuery; gqlRefetchQuery = generated.UserFragmentRefetchQuery; gqlFragment = generated.UserFragment; invariant( gqlFragment.metadata?.refetch?.operation === '@@MODULE_START@@UserFragmentRefetchQuery.graphql@@MODULE_END@@', 'useRefetchableFragment-test: Expected refetchable fragment metadata to contain operation.', ); // Manually set the refetchable operation for the test. gqlFragment.metadata.refetch.operation = gqlRefetchQuery; refetchQuery = createOperationDescriptor(gqlRefetchQuery, variables, { force: true, }); query = createOperationDescriptor(gqlQuery, variables, { force: true }); environment.commitPayload(query, { nonNodeStory: { __typename: 'NonNodeStory', id: 'a', actor: { name: 'Alice', __typename: 'User', id: '1' }, fetch_id: 'fetch:a', }, }); }); it('refetches new variables correctly when refetching new id', () => { const renderer = renderFragment(); const initialUser = { actor: { name: 'Alice' }, fetch_id: 'fetch:a', }; expectFragmentResults([ { data: initialUser, }, ]); TestRenderer.act(() => { refetch({ id: 'fetch:b' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: 'fetch:b', }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { fetch__NonNodeStory: { __typename: 'NonNodeStory', id: 'b', actor: { name: 'Mark', 
__typename: 'User', id: '4' }, fetch_id: 'fetch:b', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { actor: { name: 'Mark' }, fetch_id: 'fetch:b', }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); it('refetches new variables correctly when refetching same id', () => { const renderer = renderFragment(); const initialUser = { actor: { name: 'Alice' }, fetch_id: 'fetch:a', }; expectFragmentResults([ { data: initialUser, }, ]); TestRenderer.act(() => { refetch({}, { fetchPolicy: 'network-only' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: 'fetch:a', }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { fetch__NonNodeStory: { __typename: 'NonNodeStory', id: 'a', actor: { name: 'Alice (updated)', __typename: 'User', id: '1' }, fetch_id: 'fetch:a', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { actor: { name: 'Alice (updated)' }, fetch_id: 'fetch:a', }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); it('refetches new variables correctly when refetching after the id from the parent has changed', () => { // add data for second query const query2 
= createOperationDescriptor( gqlQuery, { id: 'b', }, { force: true }, ); environment.commitPayload(query2, { nonNodeStory: { __typename: 'NonNodeStory', id: 'b', actor: { name: 'Zuck', __typename: 'User', id: '4' }, fetch_id: 'fetch:b', }, }); const renderer = renderFragment(); const initialUser = { actor: { name: 'Alice' }, fetch_id: 'fetch:a', }; expectFragmentResults([ { data: initialUser, }, ]); TestRenderer.act(() => { setOwner(query2); }); const nextUser = { actor: { name: 'Zuck' }, fetch_id: 'fetch:b', }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: nextUser }]); TestRenderer.act(() => { refetch({}, { fetchPolicy: 'network-only' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: 'fetch:b', }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { fetch__NonNodeStory: { __typename: 'NonNodeStory', id: 'b', actor: { name: 'Zuck (updated)', __typename: 'User', id: '4' }, fetch_id: 'fetch:b', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { actor: { name: 'Zuck (updated)' }, fetch_id: 'fetch:b', }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); }); describe('when id variable has a different variable name in original query', () => { beforeEach(() => { const generated = generateAndCompile( ` fragment NestedUserFragment on User { username } fragment UserFragment on User 
@refetchable(queryName: "UserFragmentRefetchQuery") { id name profile_picture(scale: $scale) { uri } ...NestedUserFragment } query UserQuery($nodeID: ID!, $scale: Int!) { node(id: $nodeID) { ...UserFragment } } `, ); variables = { nodeID: '1', scale: 16 }; gqlQuery = generated.UserQuery; gqlRefetchQuery = generated.UserFragmentRefetchQuery; gqlFragment = generated.UserFragment; invariant( gqlFragment.metadata?.refetch?.operation === '@@MODULE_START@@UserFragmentRefetchQuery.graphql@@MODULE_END@@', 'useRefetchableFragment-test: Expected refetchable fragment metadata to contain operation.', ); // Manually set the refetchable operation for the test. gqlFragment.metadata.refetch.operation = gqlRefetchQuery; query = createOperationDescriptor(gqlQuery, variables, { force: true }); refetchQuery = createOperationDescriptor(gqlRefetchQuery, variables, { force: true, }); environment.commitPayload(query, { node: { __typename: 'User', id: '1', name: 'Alice', username: 'useralice', profile_picture: null, }, }); }); it('refetches new variables correctly when refetching new id', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([ { data: initialUser, }, ]); TestRenderer.act(() => { refetch({ id: '4' }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '4', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '4', name: 'Mark', profile_picture: { uri: 'scale16', }, username: 'usermark', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '4', name: 'Mark', profile_picture: { uri: 'scale16', 
}, ...createFragmentRef('4', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); it('refetches new variables correctly when refetching same id', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; expectFragmentResults([ { data: initialUser, }, ]); TestRenderer.act(() => { refetch({ scale: 32 }); }); // Assert that fragment is refetching with the right variables and // suspends upon refetch const refetchVariables = { id: '1', scale: 32, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); expectFragmentIsRefetching(renderer, { refetchVariables, refetchQuery, }); // Mock network response TestRenderer.act(() => { environment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, username: 'useralice', }, }, }); }); // Assert fragment is rendered with new data const refetchedUser = { id: '1', name: 'Alice', profile_picture: { uri: 'scale32', }, ...createFragmentRef('1', refetchQuery), }; // expectFragmentResults([{ data: refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Assert refetch query was retained expect(release).not.toBeCalled(); expect(environment.retain).toBeCalledTimes(1); expect(environment.retain.mock.calls[0][0]).toEqual(refetchQuery); }); }); describe('internal environment option', () => { let newRelease; let newEnvironment; beforeEach(() => { ({ createMockEnvironment } = require('relay-test-utils-internal')); newEnvironment = createMockEnvironment(); newRelease = jest.fn(); 
newEnvironment.retain.mockImplementation((...args) => { return { dispose: newRelease, }; }); }); /* it('reloads new data into new environment, and renders successfully', () => { const renderer = renderFragment(); const initialUser = { id: '1', name: 'Alice', profile_picture: null, ...createFragmentRef('1', query), }; // initial data on default environment expectFragmentResults([{ data: initialUser }]); TestRenderer.act(() => { refetch( { id: '1' }, { __environment: newEnvironment, }, ); }); const refetchVariables = { id: '1', scale: 16, }; refetchQuery = createOperationDescriptor(gqlRefetchQuery, refetchVariables, { force: true, }); // Fetch on newEnvironment expectFragmentIsRefetching( renderer, { refetchVariables, refetchQuery, }, newEnvironment, ); TestRenderer.act(() => { newEnvironment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Mark', username: 'usermark', profile_picture: { uri: 'scale16', }, }, }, }); }); TestRenderer.act(() => jest.runAllImmediates()); // Data should be loaded on the newEnvironment const dataInSource = { __id: '1', __typename: 'User', 'profile_picture(scale:16)': { __ref: 'client:1:profile_picture(scale:16)', }, id: '1', name: 'Mark', username: 'usermark', }; const source = newEnvironment.getStore().getSource(); expect(source.get('1')).toEqual(dataInSource); // Assert refetch query was retained expect(newRelease).not.toBeCalled(); expect(newEnvironment.retain).toBeCalledTimes(1); expect(newEnvironment.retain.mock.calls[0][0]).toEqual(refetchQuery); // Should be able to use the new data if switched to new environment renderSpy.mockClear(); newRelease.mockClear(); TestRenderer.act(() => { setEnvironment(newEnvironment); }); // refetch on the same newEnvironment after switching should not be reset expect(release).not.toBeCalled(); const refetchedUser = { id: '1', name: 'Mark', profile_picture: { uri: 'scale16', }, ...createFragmentRef('1', refetchQuery), }; // expectFragmentResults([{ data: 
refetchedUser }, { data: refetchedUser }]); original relay expectFragmentResults([{ data: refetchedUser }]); // Refetch on another enironment afterwards should work renderSpy.mockClear(); environment.execute.mockClear(); const anotherNewEnvironment = createMockEnvironment(); TestRenderer.act(() => jest.runAllImmediates()); TestRenderer.act(() => { refetch( { id: '1' }, { __environment: anotherNewEnvironment, }, ); }); expectFragmentIsRefetching( renderer, { refetchVariables, refetchQuery, }, anotherNewEnvironment, ); TestRenderer.act(() => { anotherNewEnvironment.mock.resolve(gqlRefetchQuery, { data: { node: { __typename: 'User', id: '1', name: 'Mark', username: 'usermark', profile_picture: { uri: 'scale16', }, }, }, }); }); expect( anotherNewEnvironment .getStore() .getSource() .get('1'), ).toEqual(dataInSource); });*/ }); }); });
// NOTE(review): extraction artifact — boundary between two concatenated source files
// (Relay useRefetchableFragment test above, botbuilder-dialogs OAuthPrompt below).
* Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. */ import { ActionTypes, Activity, ActivityTypes, CardFactory, Channels, CoreAppCredentials, InputHints, MessageFactory, OAuthCard, OAuthLoginTimeoutKey, StatusCodes, TokenExchangeInvokeRequest, TokenResponse, TurnContext, tokenExchangeOperationName, tokenResponseEventName, verifyStateOperationName, } from 'botbuilder-core'; import * as UserTokenAccess from './userTokenAccess'; import { ClaimsIdentity, JwtTokenValidation, SkillValidation } from 'botframework-connector'; import { Dialog, DialogTurnResult } from '../dialog'; import { DialogContext } from '../dialogContext'; import { PromptOptions, PromptRecognizerResult, PromptValidator } from './prompt'; /** * Response body returned for a token exchange invoke activity. */ class TokenExchangeInvokeResponse { id: string; connectionName: string; failureDetail: string; constructor(id: string, connectionName: string, failureDetail: string) { this.id = id; this.connectionName = connectionName; this.failureDetail = failureDetail; } } /** * Settings used to configure an `OAuthPrompt` instance. */ export interface OAuthPromptSettings { /** * AppCredentials for OAuth. */ oAuthAppCredentials?: CoreAppCredentials; /** * Name of the OAuth connection being used. */ connectionName: string; /** * Title of the cards signin button. */ title: string; /** * (Optional) additional text to include on the signin card. */ text?: string; /** * (Optional) number of milliseconds the prompt will wait for the user to authenticate. * Defaults to a value `900,000` (15 minutes.) */ timeout?: number; /** * (Optional) value indicating whether the OAuthPrompt should end upon * receiving an invalid message. Generally the OAuthPrompt will ignore * incoming messages from the user during the auth flow, if they are not related to the * auth flow. This flag enables ending the OAuthPrompt rather than * ignoring the user's message. 
Typically, this flag will be set to 'true', but is 'false' * by default for backwards compatibility. */ endOnInvalidMessage?: boolean; /** * (Optional) value to force the display of a Sign In link overriding the default behavior. * True to display the SignInLink. */ showSignInLink?: boolean; } /** * Creates a new prompt that asks the user to sign in using the Bot Frameworks Single Sign On (SSO) * service. * * @remarks * The prompt will attempt to retrieve the users current token and if the user isn't signed in, it * will send them an `OAuthCard` containing a button they can press to signin. Depending on the * channel, the user will be sent through one of two possible signin flows: * * - The automatic signin flow where once the user signs in and the SSO service will forward the bot * the users access token using either an `event` or `invoke` activity. * - The "magic code" flow where where once the user signs in they will be prompted by the SSO * service to send the bot a six digit code confirming their identity. This code will be sent as a * standard `message` activity. * * Both flows are automatically supported by the `OAuthPrompt` and the only thing you need to be * careful of is that you don't block the `event` and `invoke` activities that the prompt might * be waiting on. * * > [!NOTE] * > You should avoid persisting the access token with your bots other state. The Bot Frameworks * > SSO service will securely store the token on your behalf. If you store it in your bots state * > it could expire or be revoked in between turns. * > * > When calling the prompt from within a waterfall step you should use the token within the step * > following the prompt and then let the token go out of scope at the end of your function. * * #### Prompt Usage * * When used with your bots `DialogSet` you can simply add a new instance of the prompt as a named * dialog using `DialogSet.add()`. 
You can then start the prompt from a waterfall step using either
 * `DialogContext.beginDialog()` or `DialogContext.prompt()`. The user will be prompted to signin as
 * needed and their access token will be passed as an argument to the callers next waterfall step:
 *
 * ```JavaScript
 * const { ConversationState, MemoryStorage, OAuthLoginTimeoutMsValue } = require('botbuilder');
 * const { DialogSet, OAuthPrompt, WaterfallDialog } = require('botbuilder-dialogs');
 *
 * const convoState = new ConversationState(new MemoryStorage());
 * const dialogState = convoState.createProperty('dialogState');
 * const dialogs = new DialogSet(dialogState);
 *
 * dialogs.add(new OAuthPrompt('loginPrompt', {
 *    connectionName: 'GitConnection',
 *    title: 'Login To GitHub',
 *    timeout: OAuthLoginTimeoutMsValue // User has 15 minutes to login
 * }));
 *
 * dialogs.add(new WaterfallDialog('taskNeedingLogin', [
 *      async (step) => {
 *          return await step.beginDialog('loginPrompt');
 *      },
 *      async (step) => {
 *          const token = step.result;
 *          if (token) {
 *
 *              // ... continue with task needing access token ...
 *
 *          } else {
 *              await step.context.sendActivity(`Sorry... We couldn't log you in. Try again later.`);
 *              return await step.endDialog();
 *          }
 *      }
 * ]));
 * ```
 */
export class OAuthPrompt extends Dialog {
    // State-bag key under which the skill-host caller info is persisted across turns
    // (read back by recognizeToken when a token response arrives from a skill host).
    private readonly PersistedCaller: string = 'botbuilder-dialogs.caller';

    /**
     * Creates a new OAuthPrompt instance.
     * @param dialogId Unique ID of the dialog within its parent `DialogSet` or `ComponentDialog`.
     * @param settings Settings used to configure the prompt.
     * @param validator (Optional) validator that will be called each time the user responds to the prompt.
     */
    constructor(
        dialogId: string,
        private settings: OAuthPromptSettings,
        private validator?: PromptValidator<TokenResponse>
    ) {
        super(dialogId);
    }

    /**
     * Called when a prompt dialog is pushed onto the dialog stack and is being activated.
     *
     * @param dc The [DialogContext](xref:botbuilder-dialogs.DialogContext) for the current
     * turn of the conversation.
     * @param options Optional. [PromptOptions](xref:botbuilder-dialogs.PromptOptions),
     * additional information to pass to the prompt being started.
     * @returns A `Promise` representing the asynchronous operation.
     * @remarks
     * If the task is successful, the result indicates whether the prompt is still
     * active after the turn has been processed by the prompt.
     */
    async beginDialog(dc: DialogContext, options?: PromptOptions): Promise<DialogTurnResult> {
        // Ensure prompts have input hint set
        const o: Partial<PromptOptions> = { ...options };
        if (o.prompt && typeof o.prompt === 'object' && typeof o.prompt.inputHint !== 'string') {
            o.prompt.inputHint = InputHints.AcceptingInput;
        }
        if (o.retryPrompt && typeof o.retryPrompt === 'object' && typeof o.retryPrompt.inputHint !== 'string') {
            o.retryPrompt.inputHint = InputHints.AcceptingInput;
        }

        // Initialize prompt state; timeout defaults to 900,000 ms (15 minutes) when not set
        const timeout = typeof this.settings.timeout === 'number' ? this.settings.timeout : 900000;
        const state = dc.activeDialog.state as OAuthPromptState;
        state.state = {};
        state.options = o;
        state.expires = new Date().getTime() + timeout;
        // Persist caller info so recognizeToken can restore the skill host's service URL later.
        state[this.PersistedCaller] = OAuthPrompt.createCallerInfo(dc.context);

        // Attempt to get the users token
        const output = await UserTokenAccess.getUserToken(dc.context, this.settings, undefined);
        if (output) {
            // Return token — user is already signed in, no card needed.
            return await dc.endDialog(output);
        }

        // Prompt user to login
        await OAuthPrompt.sendOAuthCard(this.settings, dc.context, state.options.prompt);
        return Dialog.EndOfTurn;
    }

    /**
     * Called when a prompt dialog is the active dialog and the user replied with a new activity.
     * @param dc The [DialogContext](xref:botbuilder-dialogs.DialogContext) for the current turn
     * of the conversation.
     * @returns A `Promise` representing the asynchronous operation.
     * @remarks
     * If the task is successful, the result indicates whether the dialog is still
     * active after the turn has been processed by the dialog.
* The prompt generally continues to receive the user's replies until it accepts the
 * user's reply as valid input for the prompt.
     */
    async continueDialog(dc: DialogContext): Promise<DialogTurnResult> {
        // Check for timeout
        const state: OAuthPromptState = dc.activeDialog.state as OAuthPromptState;
        const isMessage: boolean = dc.context.activity.type === ActivityTypes.Message;
        const isTimeoutActivityType: boolean =
            isMessage ||
            OAuthPrompt.isTokenResponseEvent(dc.context) ||
            OAuthPrompt.isTeamsVerificationInvoke(dc.context) ||
            OAuthPrompt.isTokenExchangeRequestInvoke(dc.context);

        // If the incoming Activity is a message, or an Activity Type normally handled by OAuthPrompt,
        // check to see if this OAuthPrompt Expiration has elapsed, and end the dialog if so.
        const hasTimedOut: boolean = isTimeoutActivityType && new Date().getTime() > state.expires;
        if (hasTimedOut) {
            return await dc.endDialog(undefined);
        } else {
            // Recognize token
            const recognized: PromptRecognizerResult<TokenResponse> = await this.recognizeToken(dc);
            if (state.state['attemptCount'] === undefined) {
                state.state['attemptCount'] = 0;
            }

            // Validate the return value
            let isValid = false;
            if (this.validator) {
                isValid = await this.validator({
                    context: dc.context,
                    recognized: recognized,
                    state: state.state,
                    options: state.options,
                    // Pre-increment: the validator sees the count *including* the current attempt.
                    attemptCount: ++state.state['attemptCount'],
                });
            } else if (recognized.succeeded) {
                isValid = true;
            }

            // Return recognized value or re-prompt
            if (isValid) {
                return await dc.endDialog(recognized.value);
            }
            if (isMessage && this.settings.endOnInvalidMessage) {
                // Settings ask us to end on an invalid message rather than ignore it.
                return await dc.endDialog(undefined);
            }

            // Send retry prompt (only if nothing has been sent this turn yet)
            if (!dc.context.responded && isMessage && state.options.retryPrompt) {
                await dc.context.sendActivity(state.options.retryPrompt);
            }
            return Dialog.EndOfTurn;
        }
    }

    /**
     * Attempts to retrieve the stored token for the current user.
     * @param context Context reference the user that's being looked up.
     * @param code (Optional) login code received from the user.
*/
    async getUserToken(context: TurnContext, code?: string): Promise<TokenResponse | undefined> {
        return UserTokenAccess.getUserToken(context, this.settings, code);
    }

    /**
     * Signs the user out of the service.
     *
     * @remarks
     * This example shows creating an instance of the prompt and then signing out the user.
     *
     * ```JavaScript
     * const prompt = new OAuthPrompt({
     *    connectionName: 'GitConnection',
     *    title: 'Login To GitHub'
     * });
     * await prompt.signOutUser(context);
     * ```
     * @param context Context referencing the user that's being signed out.
     */
    async signOutUser(context: TurnContext): Promise<void> {
        return UserTokenAccess.signOutUser(context, this.settings);
    }

    /**
     * Sends an OAuth card.
     *
     * @param {OAuthPromptSettings} settings OAuth settings.
     * @param {TurnContext} turnContext Turn context.
     * @param {string | Partial<Activity>} prompt Message activity.
     */
    static async sendOAuthCard(
        settings: OAuthPromptSettings,
        turnContext: TurnContext,
        prompt?: string | Partial<Activity>
    ): Promise<void> {
        // Initialize outgoing message
        const msg: Partial<Activity> =
            typeof prompt === 'object' ? { ...prompt } : MessageFactory.text(prompt, undefined, InputHints.AcceptingInput);

        if (!Array.isArray(msg.attachments)) {
            msg.attachments = [];
        }

        // Append appropriate card if missing
        if (!this.isOAuthCardSupported(turnContext)) {
            // Channel cannot render OAuthCards — fall back to a plain signin card.
            if (!msg.attachments.some((a) => a.contentType === CardFactory.contentTypes.signinCard)) {
                const signInResource = await UserTokenAccess.getSignInResource(turnContext, settings);
                msg.attachments.push(CardFactory.signinCard(settings.title, signInResource.signInLink, settings.text));
            }
        } else if (!msg.attachments.some((a) => a.contentType === CardFactory.contentTypes.oauthCard)) {
            let cardActionType = ActionTypes.Signin;
            const signInResource = await UserTokenAccess.getSignInResource(turnContext, settings);
            let link = signInResource.signInLink;
            const identity = turnContext.turnState.get<ClaimsIdentity>(turnContext.adapter.BotIdentityKey);

            // use the SignInLink when
            //   in speech channel or
            //   bot is a skill or
            //   an extra OAuthAppCredentials is being passed in
            if (
                OAuthPrompt.isFromStreamingConnection(turnContext.activity) ||
                (identity && SkillValidation.isSkillClaim(identity.claims)) ||
                settings.oAuthAppCredentials
            ) {
                if (turnContext.activity.channelId === Channels.Emulator) {
                    cardActionType = ActionTypes.OpenUrl;
                }
            } else if (
                settings.showSignInLink === false ||
                (!settings.showSignInLink && !this.channelRequiresSignInLink(turnContext.activity.channelId))
            ) {
                // Explicitly disabled, or not required by this channel — omit the link.
                link = undefined;
            }

            // Append oauth card
            const card = CardFactory.oauthCard(
                settings.connectionName,
                settings.title,
                settings.text,
                link,
                signInResource.tokenExchangeResource
            );

            // Set the appropriate ActionType for the button.
            (card.content as OAuthCard).buttons[0].type = cardActionType;
            msg.attachments.push(card);
        }

        // Add the login timeout specified in OAuthPromptSettings to TurnState so it can be referenced if polling is needed
        if (!turnContext.turnState.get(OAuthLoginTimeoutKey) && settings.timeout) {
            turnContext.turnState.set(OAuthLoginTimeoutKey, settings.timeout);
        }

        // Set input hint
        if (!msg.inputHint) {
            msg.inputHint = InputHints.AcceptingInput;
        }

        // Send prompt
        await turnContext.sendActivity(msg);
    }

    /**
     * Shared implementation of the RecognizeTokenAsync function. This is intended for internal use, to consolidate
     * the implementation of the OAuthPrompt and OAuthInput. Application logic should use those dialog classes.
     *
     * @param dc The [DialogContext](xref:botbuilder-dialogs.DialogContext) for the current turn of the conversation.
     * @returns A Promise that resolves to the result
     */
    async recognizeToken(dc: DialogContext): Promise<PromptRecognizerResult<TokenResponse>> {
        const context = dc.context;
        let token: TokenResponse | undefined;
        if (OAuthPrompt.isTokenResponseEvent(context)) {
            token = context.activity.value as TokenResponse;

            // Fix-up the DialogContext's state context if this was received from a skill host caller.
            // Caller info was persisted in dialog state; restore the skill host's
            // service URL and connector client so replies route back to the caller.
            const state: CallerInfo = dc.activeDialog.state[this.PersistedCaller];
            if (state) {
                // Set the ServiceUrl to the skill host's Url
                context.activity.serviceUrl = state.callerServiceUrl;

                const claimsIdentity = context.turnState.get<ClaimsIdentity>(context.adapter.BotIdentityKey);
                const connectorClient = await UserTokenAccess.createConnectorClient(
                    context,
                    context.activity.serviceUrl,
                    claimsIdentity,
                    state.scope
                );

                context.turnState.set(context.adapter.ConnectorClientKey, connectorClient);
            }
        } else if (OAuthPrompt.isTeamsVerificationInvoke(context)) {
            // Teams sends the magic code in the invoke payload; redeem it for a token
            // and answer the invoke with an HTTP-style status.
            const magicCode = context.activity.value.state;

            try {
                token = await UserTokenAccess.getUserToken(context, this.settings, magicCode);
                if (token) {
                    await context.sendActivity({ type: 'invokeResponse', value: { status: StatusCodes.OK } });
                } else {
                    await context.sendActivity({ type: 'invokeResponse', value: { status: 404 } });
                }
            } catch (_err) {
                await context.sendActivity({ type: 'invokeResponse', value: { status: 500 } });
            }
        } else if (OAuthPrompt.isTokenExchangeRequestInvoke(context)) {
            // Received activity is not a token exchange request
            if (!(context.activity.value && OAuthPrompt.isTokenExchangeRequest(context.activity.value))) {
                await context.sendActivity(
                    this.getTokenExchangeInvokeResponse(
                        StatusCodes.BAD_REQUEST,
                        'The bot received an InvokeActivity that is missing a TokenExchangeInvokeRequest value. This is required to be sent with the InvokeActivity.'
                    )
                );
            // NOTE(review): loose != (not !==) retained here — both sides are strings in practice; confirm before tightening.
            } else if (context.activity.value.connectionName != this.settings.connectionName) {
                // Connection name on activity does not match that of setting
                await context.sendActivity(
                    this.getTokenExchangeInvokeResponse(
                        StatusCodes.BAD_REQUEST,
                        'The bot received an InvokeActivity with a TokenExchangeInvokeRequest containing a ConnectionName that does not match the ConnectionName' +
                            'expected by the bots active OAuthPrompt. Ensure these names match when sending the InvokeActivityInvalid ConnectionName in the TokenExchangeInvokeRequest'
                    )
                );
            } else {
                let tokenExchangeResponse: TokenResponse;
                try {
                    tokenExchangeResponse = await UserTokenAccess.exchangeToken(context, this.settings, {
                        token: context.activity.value.token,
                    });
                } catch (_err) {
                    // Ignore errors.
                    // If the token exchange failed for any reason, the tokenExchangeResponse stays undefined
                    // and we send back a failure invoke response to the caller.
                }

                if (!tokenExchangeResponse || !tokenExchangeResponse.token) {
                    await context.sendActivity(
                        this.getTokenExchangeInvokeResponse(
                            StatusCodes.PRECONDITION_FAILED,
                            'The bot is unable to exchange token. Proceed with regular login.'
                        )
                    );
                } else {
                    // Exchange succeeded: acknowledge the invoke and surface the token.
                    await context.sendActivity(
                        this.getTokenExchangeInvokeResponse(StatusCodes.OK, null, context.activity.value.id)
                    );
                    token = {
                        channelId: tokenExchangeResponse.channelId,
                        connectionName: tokenExchangeResponse.connectionName,
                        token: tokenExchangeResponse.token,
                        expiration: null,
                    };
                }
            }
        } else if (context.activity.type === ActivityTypes.Message) {
            // A 6-digit magic code typed by the user redeems a token directly.
            const [, magicCode] = /(\d{6})/.exec(context.activity.text) ?? [];
            if (magicCode) {
                token = await UserTokenAccess.getUserToken(context, this.settings, magicCode);
            }
        }

        return token !== undefined ?
            { succeeded: true, value: token }
            : { succeeded: false };
    }

    /**
     * Builds CallerInfo (caller service URL + OAuth scope) when the current turn was
     * invoked by a skill host; returns null otherwise.
     *
     * @private
     */
    private static createCallerInfo(context: TurnContext) {
        const botIdentity = context.turnState.get<ClaimsIdentity>(context.adapter.BotIdentityKey);
        if (botIdentity && SkillValidation.isSkillClaim(botIdentity.claims)) {
            return {
                callerServiceUrl: context.activity.serviceUrl,
                scope: JwtTokenValidation.getAppIdFromClaims(botIdentity.claims),
            };
        }

        return null;
    }

    /**
     * Wraps a TokenExchangeInvokeResponse in an `invokeResponse` activity with the given status.
     *
     * @private
     */
    private getTokenExchangeInvokeResponse(status: number, failureDetail: string, id?: string): Activity {
        const invokeResponse: Partial<Activity> = {
            type: 'invokeResponse',
            value: { status, body: new TokenExchangeInvokeResponse(id, this.settings.connectionName, failureDetail) },
        };
        return invokeResponse as Activity;
    }

    /**
     * True when the activity arrived over a streaming connection, i.e. its serviceUrl
     * does not start with "http".
     *
     * @private
     */
    private static isFromStreamingConnection(activity: Activity): boolean {
        return activity && activity.serviceUrl && !activity.serviceUrl.toLowerCase().startsWith('http');
    }

    /**
     * True when the incoming activity is the token-response event.
     *
     * @private
     */
    private static isTokenResponseEvent(context: TurnContext): boolean {
        const activity: Activity = context.activity;
        return activity.type === ActivityTypes.Event && activity.name === tokenResponseEventName;
    }

    /**
     * True when the incoming activity is a Teams signin verify-state invoke.
     *
     * @private
     */
    private static isTeamsVerificationInvoke(context: TurnContext): boolean {
        const activity: Activity = context.activity;
        return activity.type === ActivityTypes.Invoke && activity.name === verifyStateOperationName;
    }

    /**
     * True when the current adapter/channel can render an Azure Bot Service OAuth card.
     *
     * @private
     */
    private static isOAuthCardSupported(context: TurnContext): boolean {
        // Azure Bot Service OAuth cards are not supported in the community adapters. Since community adapters
        // have a 'name' in them, we cast the adapter to 'any' to check for the name.
        const adapter: any = context.adapter;
        if (adapter.name) {
            switch (adapter.name) {
                case 'Facebook Adapter':
                case 'Google Hangouts Adapter':
                case 'Slack Adapter':
                case 'Twilio SMS Adapter':
                case 'Web Adapter':
                case 'Webex Adapter':
                case 'Botkit CMS':
                    return false;
                default:
            }
        }
        // Adapter is fine; the channel itself still has to support the card.
        return this.channelSupportsOAuthCard(context.activity.channelId);
    }

    /**
     * True when the incoming activity is a token-exchange invoke.
     *
     * @private
     */
    private static isTokenExchangeRequestInvoke(context: TurnContext): boolean {
        const activity: Activity = context.activity;
        return activity.type === ActivityTypes.Invoke && activity.name === tokenExchangeOperationName;
    }

    /**
     * Type guard: treats any object carrying a 'token' property as a TokenExchangeInvokeRequest.
     * NOTE(review): calling .hasOwnProperty on an `unknown` value does not compile under
     * strict settings — verify the project's tsconfig, or narrow to `object` first.
     *
     * @private
     */
    private static isTokenExchangeRequest(obj: unknown): obj is TokenExchangeInvokeRequest {
        if (obj.hasOwnProperty('token')) {
            return true;
        }
        return false;
    }

    /**
     * Per-channel OAuth card support: Skype and Skype for Business are excluded.
     *
     * @private
     */
    private static channelSupportsOAuthCard(channelId: string): boolean {
        switch (channelId) {
            case Channels.Skype:
            case Channels.Skypeforbusiness:
                return false;
            default:
        }
        return true;
    }

    /**
     * Per-channel sign-in link requirement: only Teams returns true here.
     *
     * @private
     */
    private static channelRequiresSignInLink(channelId: string): boolean {
        switch (channelId) {
            case Channels.Msteams:
                return true;
            default:
        }
        return false;
    }
}

/**
 * Prompt state persisted between turns while the OAuthPrompt is active.
 *
 * @private
 */
interface OAuthPromptState {
    state: any;
    options: PromptOptions;
    expires: number; // Timestamp of when the prompt will timeout.
}

/**
 * Identifies the skill host that started this prompt (see createCallerInfo).
 *
 * @private
 */
interface CallerInfo {
    callerServiceUrl: string;
    scope: string;
}
the_stack
// Type definitions for the Amazon Athena service client (API version 2017-05-18).
// NOTE(review): this follows the AWS SDK for JavaScript v2 generated-typings layout —
// every operation is declared twice: once taking (params, callback?) and once
// taking only (callback?), both returning a Request<Output, AWSError>.
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config';
// Empty placeholder; presumably satisfies Blob references when DOM typings are absent — verify.
interface Blob {}
declare class Athena extends Service {
  /**
   * Constructs a service object. This object has one method for each API operation.
   */
  constructor(options?: Athena.Types.ClientConfiguration)
  config: Config & Athena.Types.ClientConfiguration;
  /**
   * Returns the details of a single named query or a list of up to 50 queries, which you provide as an array of query ID strings. Use ListNamedQueries to get the list of named query IDs. If information could not be retrieved for a submitted query ID, information about the query ID submitted is listed under UnprocessedNamedQueryId. Named queries are different from executed queries. Use BatchGetQueryExecution to get details about each unique query execution, and ListQueryExecutions to get a list of query execution IDs.
   */
  batchGetNamedQuery(params: Athena.Types.BatchGetNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.BatchGetNamedQueryOutput) => void): Request<Athena.Types.BatchGetNamedQueryOutput, AWSError>;
  /**
   * Returns the details of a single named query or a list of up to 50 queries, which you provide as an array of query ID strings. Use ListNamedQueries to get the list of named query IDs. If information could not be retrieved for a submitted query ID, information about the query ID submitted is listed under UnprocessedNamedQueryId. Named queries are different from executed queries. Use BatchGetQueryExecution to get details about each unique query execution, and ListQueryExecutions to get a list of query execution IDs.
   */
  batchGetNamedQuery(callback?: (err: AWSError, data: Athena.Types.BatchGetNamedQueryOutput) => void): Request<Athena.Types.BatchGetNamedQueryOutput, AWSError>;
  /**
   * Returns the details of a single query execution or a list of up to 50 query executions, which you provide as an array of query execution ID strings. To get a list of query execution IDs, use ListQueryExecutions. Query executions are different from named (saved) queries. Use BatchGetNamedQuery to get details about named queries.
   */
  batchGetQueryExecution(params: Athena.Types.BatchGetQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.BatchGetQueryExecutionOutput) => void): Request<Athena.Types.BatchGetQueryExecutionOutput, AWSError>;
  /**
   * Returns the details of a single query execution or a list of up to 50 query executions, which you provide as an array of query execution ID strings. To get a list of query execution IDs, use ListQueryExecutions. Query executions are different from named (saved) queries. Use BatchGetNamedQuery to get details about named queries.
   */
  batchGetQueryExecution(callback?: (err: AWSError, data: Athena.Types.BatchGetQueryExecutionOutput) => void): Request<Athena.Types.BatchGetQueryExecutionOutput, AWSError>;
  /**
   * Creates a named query. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  createNamedQuery(params: Athena.Types.CreateNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.CreateNamedQueryOutput) => void): Request<Athena.Types.CreateNamedQueryOutput, AWSError>;
  /**
   * Creates a named query. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  createNamedQuery(callback?: (err: AWSError, data: Athena.Types.CreateNamedQueryOutput) => void): Request<Athena.Types.CreateNamedQueryOutput, AWSError>;
  /**
   * Deletes a named query. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  deleteNamedQuery(params: Athena.Types.DeleteNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.DeleteNamedQueryOutput) => void): Request<Athena.Types.DeleteNamedQueryOutput, AWSError>;
  /**
   * Deletes a named query. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  deleteNamedQuery(callback?: (err: AWSError, data: Athena.Types.DeleteNamedQueryOutput) => void): Request<Athena.Types.DeleteNamedQueryOutput, AWSError>;
  /**
   * Returns information about a single query.
   */
  getNamedQuery(params: Athena.Types.GetNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.GetNamedQueryOutput) => void): Request<Athena.Types.GetNamedQueryOutput, AWSError>;
  /**
   * Returns information about a single query.
   */
  getNamedQuery(callback?: (err: AWSError, data: Athena.Types.GetNamedQueryOutput) => void): Request<Athena.Types.GetNamedQueryOutput, AWSError>;
  /**
   * Returns information about a single execution of a query. Each time a query executes, information about the query execution is saved with a unique ID.
   */
  getQueryExecution(params: Athena.Types.GetQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.GetQueryExecutionOutput) => void): Request<Athena.Types.GetQueryExecutionOutput, AWSError>;
  /**
   * Returns information about a single execution of a query. Each time a query executes, information about the query execution is saved with a unique ID.
   */
  getQueryExecution(callback?: (err: AWSError, data: Athena.Types.GetQueryExecutionOutput) => void): Request<Athena.Types.GetQueryExecutionOutput, AWSError>;
  /**
   * Returns the results of a single query execution specified by QueryExecutionId. This request does not execute the query but returns results. Use StartQueryExecution to run a query.
   */
  getQueryResults(params: Athena.Types.GetQueryResultsInput, callback?: (err: AWSError, data: Athena.Types.GetQueryResultsOutput) => void): Request<Athena.Types.GetQueryResultsOutput, AWSError>;
  /**
   * Returns the results of a single query execution specified by QueryExecutionId. This request does not execute the query but returns results. Use StartQueryExecution to run a query.
   */
  getQueryResults(callback?: (err: AWSError, data: Athena.Types.GetQueryResultsOutput) => void): Request<Athena.Types.GetQueryResultsOutput, AWSError>;
  /**
   * Provides a list of all available query IDs. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  listNamedQueries(params: Athena.Types.ListNamedQueriesInput, callback?: (err: AWSError, data: Athena.Types.ListNamedQueriesOutput) => void): Request<Athena.Types.ListNamedQueriesOutput, AWSError>;
  /**
   * Provides a list of all available query IDs. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  listNamedQueries(callback?: (err: AWSError, data: Athena.Types.ListNamedQueriesOutput) => void): Request<Athena.Types.ListNamedQueriesOutput, AWSError>;
  /**
   * Provides a list of all available query execution IDs. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  listQueryExecutions(params: Athena.Types.ListQueryExecutionsInput, callback?: (err: AWSError, data: Athena.Types.ListQueryExecutionsOutput) => void): Request<Athena.Types.ListQueryExecutionsOutput, AWSError>;
  /**
   * Provides a list of all available query execution IDs. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  listQueryExecutions(callback?: (err: AWSError, data: Athena.Types.ListQueryExecutionsOutput) => void): Request<Athena.Types.ListQueryExecutionsOutput, AWSError>;
  /**
   * Runs (executes) the SQL query statements contained in the Query string. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  startQueryExecution(params: Athena.Types.StartQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.StartQueryExecutionOutput) => void): Request<Athena.Types.StartQueryExecutionOutput, AWSError>;
  /**
   * Runs (executes) the SQL query statements contained in the Query string. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  startQueryExecution(callback?: (err: AWSError, data: Athena.Types.StartQueryExecutionOutput) => void): Request<Athena.Types.StartQueryExecutionOutput, AWSError>;
  /**
   * Stops a query execution. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  stopQueryExecution(params: Athena.Types.StopQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.StopQueryExecutionOutput) => void): Request<Athena.Types.StopQueryExecutionOutput, AWSError>;
  /**
   * Stops a query execution. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide.
   */
  stopQueryExecution(callback?: (err: AWSError, data: Athena.Types.StopQueryExecutionOutput) => void): Request<Athena.Types.StopQueryExecutionOutput, AWSError>;
}
// Shapes (inputs, outputs, and scalar aliases) for every Athena operation above.
declare namespace Athena {
  export interface BatchGetNamedQueryInput {
    /**
     * An array of query IDs.
     */
    NamedQueryIds: NamedQueryIdList;
  }
  export interface BatchGetNamedQueryOutput {
    /**
     * Information about the named query IDs submitted.
     */
    NamedQueries?: NamedQueryList;
    /**
     * Information about provided query IDs.
     */
    UnprocessedNamedQueryIds?: UnprocessedNamedQueryIdList;
  }
  export interface BatchGetQueryExecutionInput {
    /**
     * An array of query execution IDs.
     */
    QueryExecutionIds: QueryExecutionIdList;
  }
  export interface BatchGetQueryExecutionOutput {
    /**
     * Information about a query execution.
     */
    QueryExecutions?: QueryExecutionList;
    /**
     * Information about the query executions that failed to run.
     */
    UnprocessedQueryExecutionIds?: UnprocessedQueryExecutionIdList;
  }
  export type Boolean = boolean;
  export interface ColumnInfo {
    /**
     * The catalog to which the query results belong.
     */
    CatalogName?: String;
    /**
     * The schema name (database name) to which the query results belong.
     */
    SchemaName?: String;
    /**
     * The table name for the query results.
     */
    TableName?: String;
    /**
     * The name of the column.
     */
    Name: String;
    /**
     * A column label.
     */
    Label?: String;
    /**
     * The data type of the column.
     */
    Type: String;
    /**
     * For DECIMAL data types, specifies the total number of digits, up to 38. For performance reasons, we recommend up to 18 digits.
     */
    Precision?: Integer;
    /**
     * For DECIMAL data types, specifies the total number of digits in the fractional part of the value. Defaults to 0.
     */
    Scale?: Integer;
    /**
     * Indicates the column's nullable status.
     */
    Nullable?: ColumnNullable;
    /**
     * Indicates whether values in the column are case-sensitive.
     */
    CaseSensitive?: Boolean;
  }
  export type ColumnInfoList = ColumnInfo[];
  export type ColumnNullable = "NOT_NULL"|"NULLABLE"|"UNKNOWN"|string;
  export interface CreateNamedQueryInput {
    /**
     * The plain language name for the query.
     */
    Name: NameString;
    /**
     * A brief explanation of the query.
     */
    Description?: DescriptionString;
    /**
     * The database to which the query belongs.
     */
    Database: DatabaseString;
    /**
     * The text of the query itself. In other words, all query statements.
     */
    QueryString: QueryString;
    /**
     * A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another CreateNamedQuery request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString, an error is returned. This token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail.
     */
    ClientRequestToken?: IdempotencyToken;
  }
  export interface CreateNamedQueryOutput {
    /**
     * The unique ID of the query.
     */
    NamedQueryId?: NamedQueryId;
  }
  export type DatabaseString = string;
  export type _Date = Date;
  export interface Datum {
    /**
     * The value of the datum.
     */
    VarCharValue?: datumString;
  }
  export interface DeleteNamedQueryInput {
    /**
     * The unique ID of the query to delete.
     */
    NamedQueryId: NamedQueryId;
  }
  export interface DeleteNamedQueryOutput {
  }
  export type DescriptionString = string;
  export interface EncryptionConfiguration {
    /**
     * Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3), server-side encryption with KMS-managed keys (SSE-KMS), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
     */
    EncryptionOption: EncryptionOption;
    /**
     * For SSE-KMS and CSE-KMS, this is the KMS key ARN or ID.
     */
    KmsKey?: String;
  }
  export type EncryptionOption = "SSE_S3"|"SSE_KMS"|"CSE_KMS"|string;
  export type ErrorCode = string;
  export type ErrorMessage = string;
  export interface GetNamedQueryInput {
    /**
     * The unique ID of the query. Use ListNamedQueries to get query IDs.
     */
    NamedQueryId: NamedQueryId;
  }
  export interface GetNamedQueryOutput {
    /**
     * Information about the query.
     */
    NamedQuery?: NamedQuery;
  }
  export interface GetQueryExecutionInput {
    /**
     * The unique ID of the query execution.
     */
    QueryExecutionId: QueryExecutionId;
  }
  export interface GetQueryExecutionOutput {
    /**
     * Information about the query execution.
     */
    QueryExecution?: QueryExecution;
  }
  export interface GetQueryResultsInput {
    /**
     * The unique ID of the query execution.
     */
    QueryExecutionId: QueryExecutionId;
    /**
     * The token that specifies where to start pagination if a previous request was truncated.
     */
    NextToken?: Token;
    /**
     * The maximum number of results (rows) to return in this request.
     */
    MaxResults?: MaxQueryResults;
  }
  export interface GetQueryResultsOutput {
    /**
     * The results of the query execution.
     */
    ResultSet?: ResultSet;
    /**
     * A token to be used by the next request if this request is truncated.
     */
    NextToken?: Token;
  }
  export type IdempotencyToken = string;
  export type Integer = number;
  export interface ListNamedQueriesInput {
    /**
     * The token that specifies where to start pagination if a previous request was truncated.
     */
    NextToken?: Token;
    /**
     * The maximum number of queries to return in this request.
     */
    MaxResults?: MaxNamedQueriesCount;
  }
  export interface ListNamedQueriesOutput {
    /**
     * The list of unique query IDs.
     */
    NamedQueryIds?: NamedQueryIdList;
    /**
     * A token to be used by the next request if this request is truncated.
     */
    NextToken?: Token;
  }
  export interface ListQueryExecutionsInput {
    /**
     * The token that specifies where to start pagination if a previous request was truncated.
     */
    NextToken?: Token;
    /**
     * The maximum number of query executions to return in this request.
     */
    MaxResults?: MaxQueryExecutionsCount;
  }
  export interface ListQueryExecutionsOutput {
    /**
     * The unique IDs of each query execution as an array of strings.
     */
    QueryExecutionIds?: QueryExecutionIdList;
    /**
     * A token to be used by the next request if this request is truncated.
     */
    NextToken?: Token;
  }
  export type Long = number;
  export type MaxNamedQueriesCount = number;
  export type MaxQueryExecutionsCount = number;
  export type MaxQueryResults = number;
  export type NameString = string;
  export interface NamedQuery {
    /**
     * The plain-language name of the query.
     */
    Name: NameString;
    /**
     * A brief description of the query.
     */
    Description?: DescriptionString;
    /**
     * The database to which the query belongs.
     */
    Database: DatabaseString;
    /**
     * The SQL query statements that comprise the query.
     */
    QueryString: QueryString;
    /**
     * The unique identifier of the query.
     */
    NamedQueryId?: NamedQueryId;
  }
  export type NamedQueryId = string;
  export type NamedQueryIdList = NamedQueryId[];
  export type NamedQueryList = NamedQuery[];
  export interface QueryExecution {
    /**
     * The unique identifier for each query execution.
     */
    QueryExecutionId?: QueryExecutionId;
    /**
     * The SQL query statements which the query execution ran.
     */
    Query?: QueryString;
    /**
     * The location in Amazon S3 where query results were stored and the encryption option, if any, used for query results.
     */
    ResultConfiguration?: ResultConfiguration;
    /**
     * The database in which the query execution occurred.
     */
    QueryExecutionContext?: QueryExecutionContext;
    /**
     * The completion date, current state, submission time, and state change reason (if applicable) for the query execution.
     */
    Status?: QueryExecutionStatus;
    /**
     * The amount of data scanned during the query execution and the amount of time that it took to execute.
     */
    Statistics?: QueryExecutionStatistics;
  }
  export interface QueryExecutionContext {
    /**
     * The name of the database.
     */
    Database?: DatabaseString;
  }
  export type QueryExecutionId = string;
  export type QueryExecutionIdList = QueryExecutionId[];
  export type QueryExecutionList = QueryExecution[];
  export type QueryExecutionState = "QUEUED"|"RUNNING"|"SUCCEEDED"|"FAILED"|"CANCELLED"|string;
  export interface QueryExecutionStatistics {
    /**
     * The number of milliseconds that the query took to execute.
     */
    EngineExecutionTimeInMillis?: Long;
    /**
     * The number of bytes in the data that was queried.
     */
    DataScannedInBytes?: Long;
  }
  export interface QueryExecutionStatus {
    /**
     * The state of query execution. SUBMITTED indicates that the query is queued for execution. RUNNING indicates that the query is scanning data and returning results. SUCCEEDED indicates that the query completed without error. FAILED indicates that the query experienced an error and did not complete processing. CANCELLED indicates that user input interrupted query execution.
     */
    State?: QueryExecutionState;
    /**
     * Further detail about the status of the query.
     */
    StateChangeReason?: String;
    /**
     * The date and time that the query was submitted.
     */
    SubmissionDateTime?: _Date;
    /**
     * The date and time that the query completed.
     */
    CompletionDateTime?: _Date;
  }
  export type QueryString = string;
  export interface ResultConfiguration {
    /**
     * The location in S3 where query results are stored.
     */
    OutputLocation: String;
    /**
     * If query results are encrypted in S3, indicates the S3 encryption option used (for example, SSE-KMS or CSE-KMS and key information.
     */
    EncryptionConfiguration?: EncryptionConfiguration;
  }
  export interface ResultSet {
    /**
     * The rows in the table.
     */
    Rows?: RowList;
    /**
     * The metadata that describes the column structure and data types of a table of query results.
     */
    ResultSetMetadata?: ResultSetMetadata;
  }
  export interface ResultSetMetadata {
    /**
     * Information about the columns in a query execution result.
     */
    ColumnInfo?: ColumnInfoList;
  }
  export interface Row {
    /**
     * The data that populates a row in a query result table.
     */
    Data?: datumList;
  }
  export type RowList = Row[];
  export interface StartQueryExecutionInput {
    /**
     * The SQL query statements to be executed.
     */
    QueryString: QueryString;
    /**
     * A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another StartQueryExecution request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString, an error is returned. This token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail.
     */
    ClientRequestToken?: IdempotencyToken;
    /**
     * The database within which the query executes.
     */
    QueryExecutionContext?: QueryExecutionContext;
    /**
     * Specifies information about where and how to save the results of the query execution.
     */
    ResultConfiguration: ResultConfiguration;
  }
  export interface StartQueryExecutionOutput {
    /**
     * The unique ID of the query that ran as a result of this request.
     */
    QueryExecutionId?: QueryExecutionId;
  }
  export interface StopQueryExecutionInput {
    /**
     * The unique ID of the query execution to stop.
     */
    QueryExecutionId: QueryExecutionId;
  }
  export interface StopQueryExecutionOutput {
  }
  export type String = string;
  export type ThrottleReason = "CONCURRENT_QUERY_LIMIT_EXCEEDED"|string;
  export type Token = string;
  export interface UnprocessedNamedQueryId {
    /**
     * The unique identifier of the named query.
     */
    NamedQueryId?: NamedQueryId;
    /**
     * The error code returned when the processing request for the named query failed, if applicable.
     */
    ErrorCode?: ErrorCode;
    /**
     * The error message returned when the processing request for the named query failed, if applicable.
     */
    ErrorMessage?: ErrorMessage;
  }
  export type UnprocessedNamedQueryIdList = UnprocessedNamedQueryId[];
  export interface UnprocessedQueryExecutionId {
    /**
     * The unique identifier of the query execution.
     */
    QueryExecutionId?: QueryExecutionId;
    /**
     * The error code returned when the query execution failed to process, if applicable.
     */
    ErrorCode?: ErrorCode;
    /**
     * The error message returned when the query execution failed to process, if applicable.
     */
    ErrorMessage?: ErrorMessage;
  }
  export type UnprocessedQueryExecutionIdList = UnprocessedQueryExecutionId[];
  export type datumList = Datum[];
  export type datumString = string;
  /**
   * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
   */
  export type apiVersion = "2017-05-18"|"latest"|string;
  export interface ClientApiVersions {
    /**
     * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
     */
    apiVersion?: apiVersion;
  }
  export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
  /**
   * Contains interfaces for use with the Athena client.
   */
  export import Types = Athena;
}
export = Athena;
the_stack
import type { APIGatewayEvent, Context } from 'aws-lambda'

import { AuthenticationError } from '../../errors'

import { getCurrentUser } from './fixtures/auth'

// Minimal Redwood user shape: arbitrary claims plus an optional roles list.
type RedwoodUser = Record<string, unknown> & { roles?: string[] }

// Builds a fully-populated fake API Gateway event; only `headers` varies per test,
// everything else is fixed MOCKED_* placeholder data.
export const mockedAuthenticationEvent = ({
  headers = {},
}): APIGatewayEvent => {
  return {
    body: 'MOCKED_BODY',
    headers,
    multiValueHeaders: {},
    httpMethod: 'POST',
    isBase64Encoded: false,
    path: '/MOCK_PATH',
    pathParameters: null,
    queryStringParameters: null,
    multiValueQueryStringParameters: null,
    stageVariables: null,
    requestContext: {
      accountId: 'MOCKED_ACCOUNT',
      apiId: 'MOCKED_API_ID',
      authorizer: { name: 'MOCKED_AUTHORIZER' },
      protocol: 'HTTP',
      identity: {
        accessKey: null,
        accountId: null,
        apiKey: null,
        apiKeyId: null,
        caller: null,
        clientCert: null,
        cognitoAuthenticationProvider: null,
        cognitoAuthenticationType: null,
        cognitoIdentityId: null,
        cognitoIdentityPoolId: null,
        principalOrgId: null,
        sourceIp: '123.123.123.123',
        user: null,
        userAgent: null,
        userArn: null,
      },
      httpMethod: 'POST',
      path: '/MOCK_PATH',
      stage: 'MOCK_STAGE',
      requestId: 'MOCKED_REQUEST_ID',
      requestTimeEpoch: 1,
      resourceId: 'MOCKED_RESOURCE_ID',
      resourcePath: 'MOCKED_RESOURCE_PATH',
    },
    resource: 'MOCKED_RESOURCE',
  }
}

// Test handler that echoes whatever currentUser the auth middleware put on the
// global context (or 'NO_CURRENT_USER' when none was set).
const handler = async (
  _event: APIGatewayEvent,
  _context: Context
): Promise<any> => {
  // @MARK
  // Don't use globalContext until beforeAll runs
  const globalContext = require('../../globalContext').context
  const currentUser = globalContext.currentUser

  return {
    statusCode: 200,
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ currentUser: currentUser || 'NO_CURRENT_USER' }),
  }
}

// Test handler exercising the fixture auth helpers (isAuthenticated/hasRole/requireAuth).
const handlerWithAuthChecks = async (
  _event: APIGatewayEvent,
  _context: Context
): Promise<unknown> => {
  // TODO: Add requireAuth('role') here
  // or isAuthenticated()
  const { hasRole, isAuthenticated, requireAuth } = require('./fixtures/auth')

  const body = {
    message: '',
  }

  if (!isAuthenticated()) {
    body.message = 'Not authenticated'
  } else if (hasRole('admin'))
{ body.message = 'User is an admin' } else { requireAuth({ roles: 'editor' }) body.message = 'User is an editor' } return { statusCode: 200, headers: { 'Content-Type': 'application/json', }, body: JSON.stringify(body), } } const handlerWithError = async ( _event: APIGatewayEvent, _context: Context ): Promise<any> => { // @MARK // Don't use globalContext until beforeAll runs const globalContext = require('../../globalContext').context const currentUser = globalContext.currentUser try { throw new AuthenticationError('An error occurred in the handler') return { statusCode: 200, headers: { 'Content-Type': 'application/json', }, body: JSON.stringify(currentUser), } } catch (error) { return { statusCode: 500, headers: { 'Content-Type': 'application/json', }, body: JSON.stringify({ error: error.message }), } } } const getCurrentUserWithError = async ( _decoded, { _token } ): Promise<RedwoodUser> => { throw Error('Something went wrong getting the user info') } describe('useRequireAuth', () => { beforeAll(() => { process.env.DISABLE_CONTEXT_ISOLATION = '1' }) afterAll(() => { process.env.DISABLE_CONTEXT_ISOLATION = '0' }) afterEach(() => { // Clean up after test cases const globalContext = require('../../globalContext').context delete globalContext.currentUser }) it('Updates context with output of current user', async () => { // @MARK // Because we use context inside useRequireAuth, we only want to import this function // once we disable context isolation for our test const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) const headers = { 'auth-provider': 'custom', authorization: 'Bearer myToken', } const output = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers }), {} ) const response = JSON.parse(output.body) expect(response.currentUser.token).toEqual('myToken') }) it('Updates context with output of current user with roles', async () => { // 
@MARK // Because we use context inside useRequireAuth, we only want to import this function // once we disable context isolation for our test const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) // The authorization JWT is valid and has roles in app metadata // { // "sub": "1234567891", // "name": "John Editor", // "iat": 1516239022, // "app_metadata": { // "roles": ["editor"] // } // } const headersWithRoles = { 'auth-provider': 'netlify', authorization: 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkxIiwibmFtZSI6IkpvaG4gRWRpdG9yIiwiaWF0IjoxNTE2MjM5MDIyLCJhcHBfbWV0YWRhdGEiOnsicm9sZXMiOlsiZWRpdG9yIl19fQ.Fhxe58-7BcjJDoYQAZluJYGwPTPLU0x6K5yA3zXKaX8', } const output = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: headersWithRoles }), {} ) // ? const response = JSON.parse(output.body) expect(response.currentUser.name).toEqual('John Editor') expect(response.currentUser.roles).toContain('editor') }) it('is 200 status if an error occurs when getting current user info', async () => { const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser: getCurrentUserWithError, }) const customHeaders = { 'auth-provider': 'custom', authorization: 'Bearer myToken', } const output = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: customHeaders }), {} ) expect(output.statusCode).toEqual(200) const response = JSON.parse(output.body) expect(response.currentUser).toEqual('NO_CURRENT_USER') }) it('is 200 status if no auth headers present', async () => { const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) const missingHeaders = null const output = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ 
headers: missingHeaders }), {} ) expect(output.statusCode).toEqual(200) const response = JSON.parse(output.body) expect(response.currentUser).toEqual('NO_CURRENT_USER') }) it('is 200 status with token if the auth provider is unsupported', async () => { const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) const unsupportedProviderHeaders = { 'auth-provider': 'this-auth-provider-is-unsupported', authorization: 'Basic myToken', } const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: unsupportedProviderHeaders }), {} ) const body = JSON.parse(response.body) expect(response.statusCode).toEqual(200) expect(body.currentUser.token).toEqual('myToken') }) it('returns 200 if decoding JWT succeeds for netlify', async () => { const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) // Note: The Bearer token JWT contains: // { // "sub": "1234567890", // "name": "John Doe", // "iat": 1516239022 // } const netlifyJWTHeaders = { 'auth-provider': 'netlify', authorization: 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c', } const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: netlifyJWTHeaders }), {} ) const body = JSON.parse(response.body) expect(response.statusCode).toEqual(200) expect(body.currentUser.sub).toEqual('1234567890') expect(body.currentUser.name).toEqual('John Doe') }) it('is 200 status if decoding JWT fails for netlify', async () => { const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) const invalidJWTHeaders = { 'auth-provider': 'netlify', authorization: 'Bearer 
this-is-an-invalid-jwt', } const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: invalidJWTHeaders }), {} ) const body = JSON.parse(response.body) expect(response.statusCode).toEqual(200) expect(body.currentUser).toEqual('NO_CURRENT_USER') }) it('is 200 status if decoding JWT fails for supabase', async () => { const { useRequireAuth } = require('../useRequireAuth') const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handler, getCurrentUser, }) const invalidJWTHeaders = { 'auth-provider': 'supabase', authorization: 'Bearer this-is-an-invalid-jwt', } const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: invalidJWTHeaders }), {} ) const body = JSON.parse(response.body) expect(response.statusCode).toEqual(200) expect(body.currentUser).toEqual('NO_CURRENT_USER') }) it('is 500 Server Error status if handler errors', async () => { const { useRequireAuth } = require('../useRequireAuth') const customHeaders = { 'auth-provider': 'custom', authorization: 'Bearer myToken', } const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handlerWithError, getCurrentUser, }) const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: customHeaders }), {} ) const message = JSON.parse(response.body).error expect(response.statusCode).toEqual(500) expect(message).toEqual('An error occurred in the handler') }) it('enables the use of auth functions inside the handler to check that isAuthenticated blocks unauthenticated users', async () => { const { useRequireAuth } = require('../useRequireAuth') const netlifyJWTHeaders = { 'auth-provider': 'netlify', authorization: 'Bearer eyJhbGciOi.eyJzdWI.Sfl_expired_token', } const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handlerWithAuthChecks, getCurrentUser, }) const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: netlifyJWTHeaders }), {} ) 
const body = JSON.parse(response.body) expect(response.statusCode).toEqual(200) expect(body.message).toEqual('Not authenticated') }) it("enables the use of auth functions inside the handler to check that requireAuth throws if the user doesn't have the required role", async () => { const { useRequireAuth } = require('../useRequireAuth') // Note: The Bearer token JWT contains: // { // "sub": "1234567890", // "name": "John Doe", // "iat": 1516239022 // } const netlifyJWTHeaders = { 'auth-provider': 'netlify', authorization: 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c', } const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handlerWithAuthChecks, getCurrentUser, }) await expect( handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: netlifyJWTHeaders }), {} ) ).rejects.toThrow("You don't have access to do that.") }) it('enables the use of auth functions inside the handler to check editor role', async () => { const { useRequireAuth } = require('../useRequireAuth') // The authorization JWT is valid and has roles in app metadata // { // "sub": "1234567891", // "name": "John Editor", // "iat": 1516239022, // "app_metadata": { // "roles": ["editor"] // } // } const headersWithRoles = { 'auth-provider': 'netlify', authorization: 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkxIiwibmFtZSI6IkpvaG4gRWRpdG9yIiwiaWF0IjoxNTE2MjM5MDIyLCJhcHBfbWV0YWRhdGEiOnsicm9sZXMiOlsiZWRpdG9yIl19fQ.Fhxe58-7BcjJDoYQAZluJYGwPTPLU0x6K5yA3zXKaX8', } const handlerEnrichedWithAuthentication = useRequireAuth({ handlerFn: handlerWithAuthChecks, getCurrentUser, }) const response = await handlerEnrichedWithAuthentication( mockedAuthenticationEvent({ headers: headersWithRoles }), {} ) const body = JSON.parse(response.body) expect(response.statusCode).toEqual(200) expect(body.message).toEqual('User is an editor') }) })
the_stack
import {Mutable, Proto, Identifiers} from "@swim/util";
import {Affinity} from "./Affinity";
import {FastenerContextClass, FastenerContext} from "./FastenerContext";

// Maps an owner type O to the subset of its keys whose values are Fasteners of type F.
/** @internal */
export type MemberFasteners<O, F extends Fastener<any> = Fastener<any>> = {[K in keyof O as O[K] extends F ? K : never]: O[K]};

// The fastener type at key K of owner O (never if K is not a fastener member).
/** @internal */
export type MemberFastener<O, K extends keyof MemberFasteners<O, F>, F extends Fastener<any> = Fastener<any>> = MemberFasteners<O, F>[K] extends F ? MemberFasteners<O, F>[K] : never;

// The FastenerClass of the fastener at key K of owner O.
/** @internal */
export type MemberFastenerClass<O, K extends keyof MemberFasteners<O, F>, F extends Fastener<any> = Fastener<any>> = MemberFasteners<O, F>[K] extends F ? FastenerClass<MemberFasteners<O, F>[K]> : never;

// Extracts the owner type O from a Fastener<O>.
/** @public */
export type FastenerOwner<F> = F extends Fastener<infer O> ? O : never;

// Bit field; low bits hold the Affinity (masked by Affinity.Mask), higher bits
// hold the Mounted/Inherits/Inherited/Decoherent flags defined at module bottom.
/** @public */
export type FastenerFlags = number;

// Init object accepted by the Fastener decorator/define; the will*/did* members
// are lifecycle-hook overrides mixed into the generated fastener class.
/** @public */
export interface FastenerInit {
  extends?: {prototype: Fastener<any>} | string | boolean | null;
  implements?: unknown;
  name?: string;
  lazy?: boolean;
  static?: string | boolean;
  affinity?: Affinity;
  inherits?: string | boolean;
  init?(): void;
  willSetAffinity?(newAffinity: Affinity, oldAffinity: Affinity): void;
  didSetAffinity?(newAffinity: Affinity, oldAffinity: Affinity): void;
  willSetInherits?(inherits: boolean, superName: string | undefined): void;
  didSetInherits?(inherits: boolean, superName: string | undefined): void;
  willInherit?(superFastener: Fastener): void;
  didInherit?(superFastener: Fastener): void;
  willUninherit?(superFastener: Fastener): void;
  didUninherit?(superFastener: Fastener): void;
  willBindSuperFastener?(superFastener: Fastener): void;
  didBindSuperFastener?(superFastener: Fastener): void;
  willUnbindSuperFastener?(superFastener: Fastener): void;
  didUnbindSuperFastener?(superFastener: Fastener): void;
  willMount?(): void;
  didMount?(): void;
  willUnmount?(): void;
  didUnmount?(): void;
}

/** @public */
export type FastenerDescriptor<O = unknown, I = {}> = ThisType<Fastener<O> & I> & FastenerInit & Partial<I>;

// Static shape shared by all fastener classes: construction plus the flag constants.
/** @public */
export interface FastenerClass<F extends Fastener<any> = Fastener<any>> extends Function {
  /** @internal */
  prototype: F;

  /** @internal */
  contextClass?: FastenerContextClass;

  create(owner: FastenerOwner<F>): F;

  construct(fastenerClass: {prototype: F}, fastener: F | null, owner: FastenerOwner<F>): F;

  /** @internal */
  readonly MountedFlag: FastenerFlags;
  /** @internal */
  readonly InheritsFlag: FastenerFlags;
  /** @internal */
  readonly InheritedFlag: FastenerFlags;
  /** @internal */
  readonly DecoherentFlag: FastenerFlags;

  /** @internal */
  readonly FlagShift: number;
  /** @internal */
  readonly FlagMask: FastenerFlags;
}

// A FastenerClass that can also subclass itself (extend/define) and be invoked
// directly as a property decorator factory.
/** @public */
export interface FastenerFactory<F extends Fastener<any> = Fastener<any>> extends FastenerClass<F> {
  extend<I = {}>(className: string, classMembers?: Partial<I> | null): FastenerFactory<F> & I;

  define<O>(className: string, descriptor: FastenerDescriptor<O>): FastenerFactory<Fastener<any>>;
  define<O, I = {}>(className: string, descriptor: {implements: unknown} & FastenerDescriptor<O, I>): FastenerFactory<Fastener<any> & I>;

  <O>(descriptor: FastenerDescriptor<O>): PropertyDecorator;
  <O, I = {}>(descriptor: {implements: unknown} & FastenerDescriptor<O, I>): PropertyDecorator;
}

// Instance interface. A fastener belongs to an owner, carries packed state in
// `flags`, and may inherit state from a same-named "super" fastener obtained
// through its owner's FastenerContext.
/** @public */
export interface Fastener<O = unknown> {
  readonly owner: O;

  /** @internal */
  init(): void;

  /** @override */
  get fastenerType(): Proto<Fastener<any>>;

  get name(): string;

  /** @internal */
  readonly flags: FastenerFlags;

  /** @internal */
  setFlags(flags: FastenerFlags): void;

  get affinity(): Affinity;

  hasAffinity(affinity: Affinity): boolean;

  /** @internal */
  initAffinity(affinity: Affinity): void;

  /** @internal */
  minAffinity(affinity: Affinity): boolean;

  setAffinity(affinity: Affinity): void;

  /** @protected */
  willSetAffinity(newAffinity: Affinity, oldAffinity: Affinity): void;

  /** @protected */
  onSetAffinity(newAffinity: Affinity, oldAffinity: Affinity): void;

  /** @protected */
  didSetAffinity(newAffinity: Affinity, oldAffinity: Affinity): void;

  get inherits(): boolean;

  /** @internal */
  initInherits(inherits: string | boolean): void;

  setInherits(inherits: string | boolean): void;

  /** @protected */
  willSetInherits(inherits: boolean, superName: string | undefined): void;

  /** @protected */
  onSetInherits(inherits: boolean, superName: string | undefined): void;

  /** @protected */
  didSetInherits(inherits: boolean, superName: string | undefined): void;

  get inherited(): boolean;

  /** @internal */
  setInherited(inherited: boolean, superFastener: Fastener): void;

  /** @protected */
  willInherit(superFastener: Fastener): void;

  /** @protected */
  onInherit(superFastener: Fastener): void;

  /** @protected */
  didInherit(superFastener: Fastener): void;

  /** @protected */
  willUninherit(superFastener: Fastener): void;

  /** @protected */
  onUninherit(superFastener: Fastener): void;

  /** @protected */
  didUninherit(superFastener: Fastener): void;

  get superName(): string | undefined;

  get superFastener(): Fastener | null;

  /** @internal */
  getSuperFastener(): Fastener | null;

  /** @internal */
  bindSuperFastener(): void;

  /** @protected */
  willBindSuperFastener(superFastener: Fastener): void;

  /** @protected */
  onBindSuperFastener(superFastener: Fastener): void;

  /** @protected */
  didBindSuperFastener(superFastener: Fastener): void;

  /** @internal */
  unbindSuperFastener(): void;

  /** @protected */
  willUnbindSuperFastener(superFastener: Fastener): void;

  /** @protected */
  onUnbindSuperFastener(superFastener: Fastener): void;

  /** @protected */
  didUnbindSuperFastener(superFastener: Fastener): void;

  /** @internal */
  attachSubFastener(subFastener: Fastener): void;

  /** @internal */
  detachSubFastener(subFastener: Fastener): void;

  get coherent(): boolean;

  /** @internal */
  setCoherent(coherent: boolean): void;

  /** @internal */
  decohere(): void;

  /** @internal */
  recohere(t: number): void;

  get mounted(): boolean;

  /** @internal */
  mount(): void;

  /** @protected */
  willMount(): void;

  /** @protected */
  onMount(): void;

  /** @protected */
  didMount(): void;

  /** @internal */
  unmount(): void;

  /** @protected */
  willUnmount(): void;

  /** @protected */
  onUnmount(): void;

  /** @protected */
  didUnmount(): void;

  /** @override */
  toString(): string;

  /** @internal */
  get lazy(): boolean; // prototype property

  /** @internal */
  get static(): string | boolean; // prototype property

  /** @internal @protected */
  get binds(): boolean | undefined; // optional prototype property
}

// Implementation. The IIFE builds the Fastener "class" as a callable decorator
// factory whose prototype is populated imperatively; subclassing is done via
// extend/define rather than `class` syntax so generated classes can keep the
// callable-decorator form.
/** @public */
export const Fastener = (function (_super: typeof Object) {
  const Fastener = function (descriptor: FastenerDescriptor): PropertyDecorator {
    return FastenerContext.decorator(Fastener, descriptor);
  } as FastenerFactory;

  Fastener.prototype = Object.create(_super.prototype);
  Fastener.prototype.constructor = Fastener;

  Fastener.prototype.init = function (this: Fastener): void {
    // hook
  };

  Object.defineProperty(Fastener.prototype, "fastenerType", {
    get: function (this: Fastener): Proto<Fastener<any>> {
      return Fastener;
    },
    configurable: true,
  });

  Object.defineProperty(Fastener.prototype, "name", {
    value: "",
    configurable: true,
  });

  Fastener.prototype.setFlags = function (this: Fastener, flags: FastenerFlags): void {
    (this as Mutable<typeof this>).flags = flags;
  };

  // Affinity occupies the low bits of `flags` (Affinity.Mask).
  Object.defineProperty(Fastener.prototype, "affinity", {
    get(this: Fastener): Affinity {
      return this.flags & Affinity.Mask;
    },
    configurable: true,
  });

  Fastener.prototype.hasAffinity = function (this: Fastener, affinity: Affinity): boolean {
    return affinity >= (this.flags & Affinity.Mask);
  };

  Fastener.prototype.initAffinity = function (this: Fastener, affinity: Affinity): void {
    (this as Mutable<typeof this>).flags = this.flags & ~Affinity.Mask | affinity & Affinity.Mask;
  };

  // Raises affinity to at least `newAffinity`; Affinity.Reflexive means "keep
  // current". Returns true when the requested affinity is now in effect.
  Fastener.prototype.minAffinity = function (this: Fastener, newAffinity: Affinity): boolean {
    const oldAffinity = this.flags & Affinity.Mask;
    if (newAffinity === Affinity.Reflexive) {
      newAffinity = oldAffinity;
    } else if ((newAffinity & ~Affinity.Mask) !== 0) {
      throw new Error("invalid affinity: " + newAffinity);
    }
    if (newAffinity > oldAffinity) {
      this.willSetAffinity(newAffinity, oldAffinity);
      this.setFlags(this.flags & ~Affinity.Mask | newAffinity);
      this.onSetAffinity(newAffinity, oldAffinity);
      this.didSetAffinity(newAffinity, oldAffinity);
    }
    return newAffinity >= oldAffinity;
  };

  Fastener.prototype.setAffinity = function (this: Fastener, newAffinity: Affinity): void {
    if ((newAffinity & ~Affinity.Mask) !== 0) {
      throw new Error("invalid affinity: " + newAffinity);
    }
    const oldAffinity = this.flags & Affinity.Mask;
    if (newAffinity !== oldAffinity) {
      this.willSetAffinity(newAffinity, oldAffinity);
      this.setFlags(this.flags & ~Affinity.Mask | newAffinity);
      this.onSetAffinity(newAffinity, oldAffinity);
      this.didSetAffinity(newAffinity, oldAffinity);
    }
  };

  Fastener.prototype.willSetAffinity = function (this: Fastener, newAffinity: Affinity, oldAffinity: Affinity): void {
    // hook
  };

  // Re-evaluates inheritance when affinity changes: stop inheriting when this
  // fastener now outranks its super; resume when it no longer does.
  Fastener.prototype.onSetAffinity = function (this: Fastener, newAffinity: Affinity, oldAffinity: Affinity): void {
    if (newAffinity > oldAffinity && (this.flags & Fastener.InheritedFlag) !== 0) {
      const superFastener = this.superFastener;
      if (superFastener !== null && Math.min(superFastener.flags & Affinity.Mask, Affinity.Intrinsic) < newAffinity) {
        this.setInherited(false, superFastener);
      }
    } else if (newAffinity < oldAffinity && (this.flags & Fastener.InheritsFlag) !== 0) {
      const superFastener = this.superFastener;
      if (superFastener !== null && Math.min(superFastener.flags & Affinity.Mask, Affinity.Intrinsic) >= newAffinity) {
        this.setInherited(true, superFastener);
      }
    }
  };

  Fastener.prototype.didSetAffinity = function (this: Fastener, newAffinity: Affinity, oldAffinity: Affinity): void {
    // hook
  };

  Object.defineProperty(Fastener.prototype, "inherits", {
    get: function (this: Fastener): boolean {
      return (this.flags & Fastener.InheritsFlag) !== 0;
    },
    configurable: true,
  });

  // Like setInherits, but without lifecycle hooks or super-fastener rebinding;
  // used during construction. A string argument also renames the fastener.
  Fastener.prototype.initInherits = function (this: Fastener, inherits: string | boolean): void {
    let superName: string | undefined;
    if (typeof inherits === "string") {
      superName = inherits;
      inherits = true;
    }
    if (inherits) {
      if (superName !== void 0) {
        Object.defineProperty(this, "name", {
          value: superName,
          enumerable: true,
          configurable: true,
        });
      }
      (this as Mutable<typeof this>).flags = this.flags | Fastener.InheritsFlag;
    } else {
      (this as Mutable<typeof this>).flags = this.flags & ~Fastener.InheritsFlag;
    }
  };

  Fastener.prototype.setInherits = function (this: Fastener, inherits: string | boolean): void {
    let superName: string | undefined;
    if (typeof inherits === "string") {
      if (inherits !== this.name) {
        superName = inherits;
      }
      inherits = true;
    }
    if (inherits !== ((this.flags & Fastener.InheritsFlag) !== 0) || superName !== void 0) {
      // Unbind from the old super fastener before flipping state, rebind after.
      this.unbindSuperFastener();
      this.willSetInherits(inherits, superName);
      if (inherits) {
        if (superName !== void 0) {
          Object.defineProperty(this, "name", {
            value: superName,
            enumerable: true,
            configurable: true,
          });
        }
        this.setFlags(this.flags | Fastener.InheritsFlag);
      } else {
        this.setFlags(this.flags & ~Fastener.InheritsFlag);
      }
      this.onSetInherits(inherits, superName);
      this.didSetInherits(inherits, superName);
      this.bindSuperFastener();
    }
  };

  Fastener.prototype.willSetInherits = function (this: Fastener, inherits: boolean, superName: string | undefined): void {
    // hook
  };

  Fastener.prototype.onSetInherits = function (this: Fastener, inherits: boolean, superName: string | undefined): void {
    // hook
  };

  Fastener.prototype.didSetInherits = function (this: Fastener, inherits: boolean, superName: string | undefined): void {
    // hook
  };

  Object.defineProperty(Fastener.prototype, "inherited", {
    get: function (this: Fastener): boolean {
      return (this.flags & Fastener.InheritedFlag) !== 0;
    },
    configurable: true,
  });

  Fastener.prototype.setInherited = function (this: Fastener, inherited: boolean, superFastener: Fastener): void {
    if (inherited && (this.flags & Fastener.InheritedFlag) === 0) {
      this.willInherit(superFastener);
      this.setFlags(this.flags | Fastener.InheritedFlag);
      this.onInherit(superFastener);
      this.didInherit(superFastener);
    } else if (!inherited && (this.flags & Fastener.InheritedFlag) !== 0) {
      this.willUninherit(superFastener);
      this.setFlags(this.flags & ~Fastener.InheritedFlag);
      this.onUninherit(superFastener);
      this.didUninherit(superFastener);
    }
  };

  Fastener.prototype.willInherit = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.onInherit = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.didInherit = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.willUninherit = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.onUninherit = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.didUninherit = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  // The super fastener shares this fastener's name (only defined when inheriting).
  Object.defineProperty(Fastener.prototype, "superName", {
    get: function (this: Fastener): string | undefined {
      return (this.flags & Fastener.InheritsFlag) !== 0 ? this.name : void 0;
    },
    configurable: true,
  });

  Object.defineProperty(Fastener.prototype, "superFastener", {
    get: function (this: Fastener): Fastener | null {
      return this.getSuperFastener();
    },
    configurable: true,
  });

  // Resolves the super fastener through the owner's FastenerContext, if any.
  Fastener.prototype.getSuperFastener = function (this: Fastener): Fastener | null {
    const superName = this.superName;
    if (superName !== void 0) {
      const fastenerContext = this.owner;
      if (FastenerContext.is(fastenerContext)) {
        const superFastener = fastenerContext.getSuperFastener(superName, this.fastenerType);
        if (superFastener !== null) {
          return superFastener;
        }
      }
    }
    return null;
  }

  Fastener.prototype.bindSuperFastener = function (this: Fastener): void {
    const superName = this.superName;
    if (superName !== void 0) {
      const fastenerContext = this.owner;
      if (FastenerContext.is(fastenerContext)) {
        const superFastener = fastenerContext.getSuperFastener(superName, this.fastenerType);
        if (superFastener !== null) {
          this.willBindSuperFastener(superFastener);
          superFastener.attachSubFastener(this);
          this.onBindSuperFastener(superFastener);
          this.didBindSuperFastener(superFastener);
        }
      }
    }
  };

  Fastener.prototype.willBindSuperFastener = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  // Start inheriting immediately if the super fastener's affinity is at least ours.
  Fastener.prototype.onBindSuperFastener = function (this: Fastener, superFastener: Fastener): void {
    if ((superFastener.flags & Affinity.Mask) >= (this.flags & Affinity.Mask)) {
      this.setInherited(true, superFastener);
    }
  };

  Fastener.prototype.didBindSuperFastener = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.unbindSuperFastener = function (this: Fastener): void {
    const superFastener = this.superFastener;
    if (superFastener !== null) {
      this.willUnbindSuperFastener(superFastener);
      superFastener.detachSubFastener(this);
      this.onUnbindSuperFastener(superFastener);
      this.didUnbindSuperFastener(superFastener);
    }
  };

  Fastener.prototype.willUnbindSuperFastener = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.onUnbindSuperFastener = function (this: Fastener, superFastener: Fastener): void {
    this.setInherited(false, superFastener);
  };

  Fastener.prototype.didUnbindSuperFastener = function (this: Fastener, superFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.attachSubFastener = function (this: Fastener, subFastener: Fastener): void {
    // hook
  };

  Fastener.prototype.detachSubFastener = function (this: Fastener, subFastener: Fastener): void {
    // hook
  };

  // "Coherent" means no recomputation is pending (DecoherentFlag clear).
  Object.defineProperty(Fastener.prototype, "coherent", {
    get: function (this: Fastener): boolean {
      return (this.flags & Fastener.DecoherentFlag) === 0;
    },
    configurable: true,
  });

  Fastener.prototype.setCoherent = function (this: Fastener, coherent: boolean): void {
    if (coherent) {
      this.setFlags(this.flags & ~Fastener.DecoherentFlag);
    } else {
      this.setFlags(this.flags | Fastener.DecoherentFlag);
    }
  };

  // Notifies the owner (if it implements decohereFastener) that this fastener
  // needs recomputation; owners presumably schedule recohere(t) later.
  Fastener.prototype.decohere = function (this: Fastener): void {
    const fastenerContext = this.owner;
    if (FastenerContext.has(fastenerContext, "decohereFastener")) {
      fastenerContext.decohereFastener(this);
    }
  };

  Fastener.prototype.recohere = function (this: Fastener, t: number): void {
    // hook
  };

  Object.defineProperty(Fastener.prototype, "mounted", {
    get: function (this: Fastener): boolean {
      return (this.flags & Fastener.MountedFlag) !== 0;
    },
    configurable: true,
  });

  Fastener.prototype.mount = function (this: Fastener): void {
    if ((this.flags & Fastener.MountedFlag) === 0) {
      this.willMount();
      this.setFlags(this.flags | Fastener.MountedFlag);
      this.onMount();
      this.didMount();
    }
  };

  Fastener.prototype.willMount = function (this: Fastener): void {
    // hook
  };

  // Mounting binds to the super fastener; unmounting unbinds (see onUnmount).
  Fastener.prototype.onMount = function (this: Fastener): void {
    this.bindSuperFastener();
  };

  Fastener.prototype.didMount = function (this: Fastener): void {
    // hook
  };

  Fastener.prototype.unmount = function (this: Fastener): void {
    if ((this.flags & Fastener.MountedFlag) !== 0) {
      this.willUnmount();
      this.setFlags(this.flags & ~Fastener.MountedFlag);
      this.onUnmount();
      this.didUnmount();
    }
  };

  Fastener.prototype.willUnmount = function (this: Fastener): void {
    // hook
  };

  Fastener.prototype.onUnmount = function (this: Fastener): void {
    this.unbindSuperFastener();
  };

  Fastener.prototype.didUnmount = function (this: Fastener): void {
    // hook
  };

  Fastener.prototype.toString = function (this: Fastener): string {
    return this.name;
  };

  Object.defineProperty(Fastener.prototype, "lazy", {
    get: function (this: Fastener): boolean {
      return true;
    },
    configurable: true,
  });

  Object.defineProperty(Fastener.prototype, "static", {
    get: function (this: Fastener): string | boolean {
      return false;
    },
    configurable: true,
  });

  Fastener.create = function <F extends Fastener<any>>(this: FastenerClass<F>, owner: FastenerOwner<F>): F {
    const fastener = this.construct(this, null, owner);
    fastener.init();
    return fastener;
  };

  Fastener.construct = function <F extends Fastener<any>>(fastenerClass: {prototype: F}, fastener: F | null, owner: FastenerOwner<F>): F {
    if (fastener === null) {
      fastener = Object.create(fastenerClass.prototype) as F;
    }
    (fastener as Mutable<typeof fastener>).owner = owner;
    (fastener as Mutable<typeof fastener>).flags = 0;
    return fastener;
  };

  // Creates a subclass. When a valid identifier is available the constructor is
  // generated via `new Function` so the subclass gets a real function name;
  // otherwise the name is patched with defineProperty. classMembers are copied
  // onto the subclass prototype as property descriptors.
  Fastener.extend = function <I>(className: string, classMembers?: {readonly name?: string} & Partial<I> | null): FastenerFactory & I {
    let classIdentifier: string | undefined;
    if (classMembers !== void 0 && classMembers !== null && typeof classMembers.name === "string" && Identifiers.isValid(classMembers.name)) {
      classIdentifier = classMembers.name;
      className = classIdentifier;
    } else if (Identifiers.isValid(className)) {
      classIdentifier = className;
    }
    let fastenerClass: FastenerFactory & I;
    if (classIdentifier !== void 0) {
      fastenerClass = new Function("FastenerContext",
        "return function " + className + "(descriptor) { return FastenerContext.decorator(" + className + ", descriptor); }"
      )(FastenerContext);
    } else {
      fastenerClass = function (descriptor: FastenerDescriptor): PropertyDecorator {
        return FastenerContext.decorator(fastenerClass, descriptor);
      } as FastenerFactory & I;
      Object.defineProperty(fastenerClass, "name", {
        value: className,
        configurable: true,
      });
    }
    const classProperties: PropertyDescriptorMap = {};
    if (classMembers !== void 0 && classMembers !== null) {
      classProperties.name = {
        value: className,
        configurable: true,
      };
      const classMemberNames = Object.getOwnPropertyNames(classMembers);
      for (let i = 0; i < classMemberNames.length; i += 1) {
        const classMemberName = classMemberNames[i]!;
        classProperties[classMemberName] = Object.getOwnPropertyDescriptor(classMembers, classMemberName)!;
      }
    } else {
      classProperties.name = {
        value: "",
        configurable: true,
      };
    }
    Object.setPrototypeOf(fastenerClass, this);
    fastenerClass.prototype = Object.create(this.prototype, classProperties);
    fastenerClass.prototype.constructor = fastenerClass;
    return fastenerClass;
  }

  // Creates a subclass from a descriptor. NOTE(review): the extends/implements/
  // affinity/inherits keys are deleted from the caller's descriptor object —
  // callers should not reuse the descriptor afterwards.
  Fastener.define = function <O>(className: string, descriptor: FastenerDescriptor<O>): FastenerFactory<Fastener<any>> {
    let superClass = descriptor.extends as FastenerFactory | null | undefined;
    const affinity = descriptor.affinity;
    const inherits = descriptor.inherits;
    delete descriptor.extends;
    delete descriptor.implements;
    delete descriptor.affinity;
    delete descriptor.inherits;

    if (superClass === void 0 || superClass === null) {
      superClass = this;
    }

    const fastenerClass = superClass.extend(className, descriptor);

    fastenerClass.construct = function (fastenerClass: {prototype: Fastener<any>}, fastener: Fastener<O> | null, owner: O): Fastener<O> {
      fastener = superClass!.construct(fastenerClass, fastener, owner);
      if (affinity !== void 0) {
        fastener.initAffinity(affinity);
      }
      if (inherits !== void 0) {
        fastener.initInherits(inherits);
      }
      return fastener;
    };

    return fastenerClass;
  };

  // Flag bit layout: affinity occupies bits [0, Affinity.Shift); the four
  // lifecycle/state flags occupy the next four bits.
  (Fastener as Mutable<typeof Fastener>).MountedFlag = 1 << (Affinity.Shift + 0);
  (Fastener as Mutable<typeof Fastener>).InheritsFlag = 1 << (Affinity.Shift + 1);
  (Fastener as Mutable<typeof Fastener>).InheritedFlag = 1 << (Affinity.Shift + 2);
  (Fastener as Mutable<typeof Fastener>).DecoherentFlag = 1 << (Affinity.Shift + 3);

  (Fastener as Mutable<typeof Fastener>).FlagShift = Affinity.Shift + 4;
  (Fastener as Mutable<typeof Fastener>).FlagMask = (1 << Fastener.FlagShift) - 1;

  return Fastener;
})(Object);
the_stack
// Directly import @sentry/electron main process code. // See: https://docs.sentry.io/platforms/javascript/guides/electron/#webpack-configuration import * as sentry from '@sentry/electron/dist/main'; import {app, BrowserWindow, ipcMain, Menu, MenuItemConstructorOptions, nativeImage, shell, Tray} from 'electron'; import * as promiseIpc from 'electron-promise-ipc'; import {autoUpdater} from 'electron-updater'; import * as os from 'os'; import * as path from 'path'; import * as process from 'process'; import * as url from 'url'; import autoLaunch = require('auto-launch'); // tslint:disable-line import * as connectivity from './connectivity'; import * as errors from '../www/model/errors'; import {ShadowsocksConfig} from '../www/app/config'; import {TunnelStatus} from '../www/app/tunnel'; import {GoVpnTunnel} from './go_vpn_tunnel'; import {RoutingDaemon} from './routing_service'; import {ShadowsocksLibevBadvpnTunnel} from './sslibev_badvpn_tunnel'; import {TunnelStore, SerializableTunnel} from './tunnel_store'; import {VpnTunnel} from './vpn_tunnel'; // Used for the auto-connect feature. There will be a tunnel in store // if the user was connected at shutdown. const tunnelStore = new TunnelStore(app.getPath('userData')); // Keep a global reference of the window object, if you don't, the window will // be closed automatically when the JavaScript object is garbage collected. let mainWindow: Electron.BrowserWindow|null; let tray: Tray; let isAppQuitting = false; // Default to English strings in case we fail to retrieve them from the renderer process. let localizedStrings: {[key: string]: string} = { 'tray-open-window': 'Open', 'connected-server-state': 'Connected', 'disconnected-server-state': 'Disconnected', 'quit': 'Quit' }; const debugMode = process.env.OUTLINE_DEBUG === 'true'; // Build-time constant defined by webpack and set to the value of $NETWORK_STACK, // or 'libevbadvpn' by default. 
declare const NETWORK_STACK: string; const TRAY_ICON_IMAGES = { connected: createTrayIconImage('connected.png'), disconnected: createTrayIconImage('disconnected.png') }; const enum Options { AUTOSTART = '--autostart' } const REACHABILITY_TIMEOUT_MS = 10000; let currentTunnel: VpnTunnel|undefined; function setupMenu(): void { if (debugMode) { Menu.setApplicationMenu(Menu.buildFromTemplate([{ label: 'Developer', submenu: Menu.buildFromTemplate( [{role: 'reload'}, {role: 'forceReload'}, {role: 'toggleDevTools'}]) }])); } else { // Hide standard menu. Menu.setApplicationMenu(null); } } function setupTray(): void { tray = new Tray(TRAY_ICON_IMAGES.disconnected); // On Linux, the click event is never fired: https://github.com/electron/electron/issues/14941 tray.on('click', () => { mainWindow?.show(); }); tray.setToolTip('Outline'); updateTray(TunnelStatus.DISCONNECTED); } function setupWindow(): void { // Create the browser window. mainWindow = new BrowserWindow( {width: 360, height: 640, resizable: false, webPreferences: {nodeIntegration: true}}); const pathToIndexHtml = path.join(app.getAppPath(), 'www', 'electron_index.html'); const webAppUrl = new url.URL(`file://${pathToIndexHtml}`); // Debug mode, etc. const queryParams = new url.URLSearchParams(); if (debugMode) { queryParams.set('debug', 'true'); } queryParams.set('appName', app.getName()); webAppUrl.search = queryParams.toString(); const webAppUrlAsString = webAppUrl.toString(); console.info(`loading web app from ${webAppUrlAsString}`); mainWindow.loadURL(webAppUrlAsString); mainWindow.on('close', (event: Event) => { if (isAppQuitting) { // Actually close the window if we are quitting. return; } // Hide instead of close so we don't need to create a new one. event.preventDefault(); mainWindow.hide(); }); if (os.platform() === 'win32') { // On Windows we hide the app from the taskbar. 
mainWindow.on('minimize', (event: Event) => { event.preventDefault(); mainWindow.hide(); }); } // TODO: is this the most appropriate event? mainWindow.webContents.on('did-finish-load', () => { mainWindow.webContents.send('localizationRequest', Object.keys(localizedStrings)); interceptShadowsocksLink(process.argv); }); // The client is a single page app - loading any other page means the // user clicked on one of the Privacy, Terms, etc., links. These should // open in the user's browser. mainWindow.webContents.on('will-navigate', (event: Event, url: string) => { shell.openExternal(url); event.preventDefault(); }); } function updateTray(status: TunnelStatus) { const isConnected = status === TunnelStatus.CONNECTED; tray.setImage(isConnected ? TRAY_ICON_IMAGES.connected : TRAY_ICON_IMAGES.disconnected); // Retrieve localized strings, falling back to the pre-populated English default. const statusString = isConnected ? localizedStrings['connected-server-state'] : localizedStrings['disconnected-server-state']; let menuTemplate = [ {label: statusString, enabled: false}, {type: 'separator'} as MenuItemConstructorOptions, {label: localizedStrings['quit'], click: quitApp} ]; if (os.platform() === 'linux') { // Because the click event is never fired on Linux, we need an explicit open option. menuTemplate = [ {label: localizedStrings['tray-open-window'], click: () => mainWindow.show()}, ...menuTemplate ]; } tray.setContextMenu(Menu.buildFromTemplate(menuTemplate)); } function createTrayIconImage(imageName: string) { const image = nativeImage.createFromPath(path.join(app.getAppPath(), 'resources', 'tray', imageName)); if (image.isEmpty()) { throw new Error(`cannot find ${imageName} tray icon image`); } return image; } // Signals that the app is quitting and quits the app. This is necessary because we override the // window 'close' event to support minimizing to the system tray. 
/**
 * Stops any active VPN tunnel, then quits for real. Setting isAppQuitting
 * lets the window 'close' handler know it must not intercept this close.
 */
async function quitApp() {
  isAppQuitting = true;
  await stopVpn();
  app.quit();
}

/**
 * If argv carries an ss:// access key (as happens when the OS dispatches a
 * protocol link to the app), forwards it to the UI as an 'add-server' event.
 * Requires mainWindow to be open; otherwise the link is dropped with an error.
 */
function interceptShadowsocksLink(argv: string[]) {
  if (argv.length > 1) {
    const protocol = 'ss://';
    let url = argv[1];
    if (url.startsWith(protocol)) {
      if (mainWindow) {
        // The system adds a trailing slash to the intercepted URL (before the fragment).
        // Remove it before sending to the UI.
        url = `${protocol}${url.substr(protocol.length).replace(/\//g, '')}`;
        mainWindow.webContents.send('add-server', url);
      } else {
        console.error('called with URL but mainWindow not open');
      }
    }
  }
}

// Set the app to launch at startup to connect automatically in case of a shutdown while
// proxying.
async function setupAutoLaunch(args: SerializableTunnel): Promise<void> {
  try {
    // Persist the tunnel so the auto-started instance knows what to reconnect to.
    await tunnelStore.save(args);
    if (os.platform() === 'linux') {
      // Auto-launch on Linux is only supported for AppImage builds, where the
      // launcher needs the path to the AppImage itself.
      if (process.env.APPIMAGE) {
        const outlineAutoLauncher = new autoLaunch({
          name: 'OutlineClient',
          path: process.env.APPIMAGE,
        });
        outlineAutoLauncher.enable();
      }
    } else {
      app.setLoginItemSettings({openAtLogin: true, args: [Options.AUTOSTART]});
    }
  } catch (e) {
    // Best-effort: failing to register auto-launch must not break connecting.
    console.error(`Failed to set up auto-launch: ${e.message}`);
  }
}

/**
 * Disables launch-at-startup and clears the persisted tunnel, so the app does
 * not auto-reconnect after the next boot. Best-effort: errors are logged only.
 */
async function tearDownAutoLaunch() {
  try {
    if (os.platform() === 'linux') {
      const outlineAutoLauncher = new autoLaunch({
        name: 'OutlineClient',
      });
      outlineAutoLauncher.disable();
    } else {
      app.setLoginItemSettings({openAtLogin: false});
    }
    await tunnelStore.clear();
  } catch (e) {
    console.error(`Failed to tear down auto-launch: ${e.message}`);
  }
}

// Factory function to create a VPNTunnel instance backed by a network stack
// specified at build time.
function createVpnTunnel(config: ShadowsocksConfig, isAutoConnect: boolean): VpnTunnel {
  const routing = new RoutingDaemon(config.host || '', isAutoConnect);
  let tunnel: VpnTunnel;
  // NETWORK_STACK is a build-time constant (see declaration above).
  if (NETWORK_STACK === 'go') {
    console.log('Using Go network stack');
    tunnel = new GoVpnTunnel(routing, config);
  } else {
    tunnel = new ShadowsocksLibevBadvpnTunnel(routing, config);
  }
  // Let the tunnel react to network changes detected by the routing daemon.
  routing.onNetworkChange = tunnel.networkChanged.bind(tunnel);
  return tunnel;
}

// Invoked by both the start-proxying event handler and auto-connect.
async function startVpn(config: ShadowsocksConfig, id: string, isAutoConnect = false) {
  // Only one tunnel may be active at a time; callers must disconnect first.
  if (currentTunnel) {
    throw new Error('already connected');
  }

  currentTunnel = createVpnTunnel(config, isAutoConnect);
  if (debugMode) {
    currentTunnel.enableDebugMode();
  }

  // Keep the UI in sync with the tunnel's lifecycle events.
  currentTunnel.onceDisconnected.then(() => {
    console.log(`disconnected from ${id}`);
    currentTunnel = undefined;
    setUiTunnelStatus(TunnelStatus.DISCONNECTED, id);
  });

  currentTunnel.onReconnecting(() => {
    console.log(`reconnecting to ${id}`);
    setUiTunnelStatus(TunnelStatus.RECONNECTING, id);
  });

  currentTunnel.onReconnected(() => {
    console.log(`reconnected to ${id}`);
    setUiTunnelStatus(TunnelStatus.CONNECTED, id);
  });

  // Don't check connectivity on boot: if the key was revoked or network connectivity is not ready,
  // we want the system to stay "connected" so that traffic doesn't leak.
  await currentTunnel.connect(!isAutoConnect);
  setUiTunnelStatus(TunnelStatus.CONNECTED, id);
}

// Invoked by both the stop-proxying event and quit handler.
async function stopVpn() { if (!currentTunnel) { return; } currentTunnel.disconnect(); await tearDownAutoLaunch(); await currentTunnel.onceDisconnected; } function setUiTunnelStatus(status: TunnelStatus, tunnelId: string) { let statusString; switch (status) { case TunnelStatus.CONNECTED: statusString = 'connected'; break; case TunnelStatus.DISCONNECTED: statusString = 'disconnected'; break; case TunnelStatus.RECONNECTING: statusString = 'reconnecting'; break; default: console.error(`Cannot send unknown proxy status: ${status}`); return; } const event = `proxy-${statusString}-${tunnelId}`; if (mainWindow) { mainWindow.webContents.send(event); } else { console.warn(`received ${event} event but no mainWindow to notify`); } updateTray(status); } function checkForUpdates() { try { autoUpdater.checkForUpdates(); } catch (e) { console.error(`Failed to check for updates`, e); } } function main() { if (!app.requestSingleInstanceLock()) { console.log('another instance is running - exiting'); app.quit(); } app.setAsDefaultProtocolClient('ss'); // This method will be called when Electron has finished // initialization and is ready to create browser windows. // Some APIs can only be used after this event occurs. app.on('ready', async () => { setupMenu(); setupTray(); // TODO(fortuna): Start the app with the window hidden on auto-start? setupWindow(); let tunnelAtShutdown: SerializableTunnel; try { tunnelAtShutdown = await tunnelStore.load(); } catch (e) { // No tunnel at shutdown, or failure - either way, no need to start. // TODO: Instead of quitting, how about creating the system tray icon? 
console.warn(`Could not load active tunnel: `, e); await tunnelStore.clear(); } if (tunnelAtShutdown) { console.info(`was connected at shutdown, reconnecting to ${tunnelAtShutdown.id}`); setUiTunnelStatus(TunnelStatus.RECONNECTING, tunnelAtShutdown.id); try { await startVpn(tunnelAtShutdown.config, tunnelAtShutdown.id, true); console.log(`reconnected to ${tunnelAtShutdown.id}`); } catch (e) { console.error(`could not reconnect: ${e.name} (${e.message})`); } } if (!debugMode) { checkForUpdates(); // Check every six hours setInterval(checkForUpdates, 6 * 60 * 60 * 1000); } }); app.on('second-instance', (event: Event, argv: string[]) => { interceptShadowsocksLink(argv); // Someone tried to run a second instance, we should focus our window. mainWindow?.show(); }); app.on('activate', () => { // On OS X it's common to re-create a window in the app when the // dock icon is clicked and there are no other windows open. mainWindow?.show(); }); // This event fires whenever the app's window receives focus. app.on('browser-window-focus', () => { mainWindow?.webContents.send('push-clipboard'); }); promiseIpc.on('is-server-reachable', async (args: {hostname: string, port: number}) => { try { await connectivity.isServerReachable( args.hostname || '', args.port || 0, REACHABILITY_TIMEOUT_MS); return true; } catch { return false; } }); // Connects to the specified server, if that server is reachable and the credentials are valid. promiseIpc.on('start-proxying', async (args: {config: ShadowsocksConfig, id: string}) => { // TODO: Rather than first disconnecting, implement a more efficient switchover (as well as // being faster, this would help prevent traffic leaks - the Cordova clients already do // this). 
if (currentTunnel) { console.log('disconnecting from current server...'); currentTunnel.disconnect(); await currentTunnel.onceDisconnected; } console.log(`connecting to ${args.id}...`); try { // Rather than repeadedly resolving a hostname in what may be a fingerprint-able way, // resolve it just once, upfront. args.config.host = await connectivity.lookupIp(args.config.host || ''); await connectivity.isServerReachable( args.config.host || '', args.config.port || 0, REACHABILITY_TIMEOUT_MS); await startVpn(args.config, args.id); console.log(`connected to ${args.id}`); await setupAutoLaunch(args); // Auto-connect requires IPs; the hostname in here has already been resolved (see above). tunnelStore.save(args).catch((e) => { console.error('Failed to store tunnel.'); }); } catch (e) { console.error(`could not connect: ${e.name} (${e.message})`); throw errors.toErrorCode(e); } }); // Disconnects from the current server, if any. promiseIpc.on('stop-proxying', stopVpn); // Error reporting. // This config makes console (log/info/warn/error - no debug!) output go to breadcrumbs. ipcMain.on('environment-info', (event: Event, info: {appVersion: string, dsn: string}) => { if (info.dsn) { sentry.init({dsn: info.dsn, release: info.appVersion, maxBreadcrumbs: 100}); } // To clearly identify app restarts in Sentry. console.info(`Outline is starting`); }); ipcMain.on('quit-app', quitApp); ipcMain.on( 'localizationResponse', (event: Event, localizationResult: {[key: string]: string}) => { if (!!localizationResult) { localizedStrings = localizationResult; } updateTray(TunnelStatus.DISCONNECTED); }); // Notify the UI of updates. autoUpdater.on('update-downloaded', (ev, info) => { mainWindow?.webContents.send('update-downloaded'); }); } main();
the_stack
import { Match, Template } from '@aws-cdk/assertions'; import { AccountRootPrincipal, Role, } from '@aws-cdk/aws-iam'; import * as kms from '@aws-cdk/aws-kms'; import { testFutureBehavior, testLegacyBehavior } from '@aws-cdk/cdk-build-tools/lib/feature-flag'; import * as cdk from '@aws-cdk/core'; import * as cxapi from '@aws-cdk/cx-api'; import { AmazonLinuxGeneration, EbsDeviceVolumeType, Instance, InstanceType, MachineImage, Volume, Vpc, } from '../lib'; describe('volume', () => { test('basic volume', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), volumeName: 'MyVolume', }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { AvailabilityZone: 'us-east-1a', MultiAttachEnabled: false, Size: 8, VolumeType: 'gp2', Tags: [ { Key: 'Name', Value: 'MyVolume', }, ], }); Template.fromStack(stack).hasResource('AWS::EC2::Volume', { DeletionPolicy: 'Retain', }); }); test('fromVolumeAttributes', () => { // GIVEN const stack = new cdk.Stack(); const encryptionKey = new kms.Key(stack, 'Key'); const volumeId = 'vol-000000'; const availabilityZone = 'us-east-1a'; // WHEN const volume = Volume.fromVolumeAttributes(stack, 'Volume', { volumeId, availabilityZone, encryptionKey, }); // THEN expect(volume.volumeId).toEqual(volumeId); expect(volume.availabilityZone).toEqual(availabilityZone); expect(volume.encryptionKey).toEqual(encryptionKey); }); test('tagged volume', () => { // GIVEN const stack = new cdk.Stack(); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN cdk.Tags.of(volume).add('TagKey', 'TagValue'); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { AvailabilityZone: 'us-east-1a', MultiAttachEnabled: false, Size: 8, VolumeType: 'gp2', Tags: [{ Key: 'TagKey', Value: 'TagValue', }], }); }); test('autoenableIO', () => { // GIVEN const stack = new cdk.Stack(); 
// WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), autoEnableIo: true, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { AutoEnableIO: true, }); }); test('encryption', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), encrypted: true, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { Encrypted: true, }); }); test('encryption with kms', () => { // GIVEN const stack = new cdk.Stack(); const encryptionKey = new kms.Key(stack, 'Key'); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), encrypted: true, encryptionKey, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { Encrypted: true, KmsKeyId: { 'Fn::GetAtt': [ 'Key961B73FD', 'Arn', ], }, }); Template.fromStack(stack).hasResourceProperties('AWS::KMS::Key', { KeyPolicy: { Statement: Match.arrayWith([{ Effect: 'Allow', Principal: { AWS: { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':iam::', { Ref: 'AWS::AccountId', }, ':root', ], ], }, }, Resource: '*', Action: [ 'kms:DescribeKey', 'kms:GenerateDataKeyWithoutPlainText', ], Condition: { StringEquals: { 'kms:ViaService': { 'Fn::Join': [ '', [ 'ec2.', { Ref: 'AWS::Region', }, '.amazonaws.com', ], ], }, 'kms:CallerAccount': { Ref: 'AWS::AccountId', }, }, }, }]), }, }); }); // only enable for legacy behaviour // see https://github.com/aws/aws-cdk/issues/12962 testLegacyBehavior('encryption with kms from snapshot', cdk.App, (app) => { // GIVEN const stack = new cdk.Stack(app); const encryptionKey = new kms.Key(stack, 'Key'); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), encrypted: true, encryptionKey, snapshotId: 'snap-1234567890', }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::KMS::Key', { KeyPolicy: { 
Statement: Match.arrayWith([Match.objectLike({ Action: [ 'kms:DescribeKey', 'kms:GenerateDataKeyWithoutPlainText', 'kms:ReEncrypt*', ], })]), }, }); }); test('iops', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), iops: 500, volumeType: EbsDeviceVolumeType.PROVISIONED_IOPS_SSD, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { Iops: 500, VolumeType: 'io1', }); }); test('multi-attach', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), iops: 500, volumeType: EbsDeviceVolumeType.PROVISIONED_IOPS_SSD, enableMultiAttach: true, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { MultiAttachEnabled: true, }); }); test('snapshotId', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', snapshotId: 'snap-00000000', }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { SnapshotId: 'snap-00000000', }); }); test('volume: standard', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.MAGNETIC, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'standard', }); }); test('volume: io1', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.PROVISIONED_IOPS_SSD, iops: 300, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'io1', }); }); test('volume: io2', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: 
cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2, iops: 300, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'io2', }); }); test('volume: gp2', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.GENERAL_PURPOSE_SSD, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'gp2', }); }); test('volume: gp3', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.GENERAL_PURPOSE_SSD_GP3, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'gp3', }); }); test('volume: st1', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.THROUGHPUT_OPTIMIZED_HDD, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'st1', }); }); test('volume: sc1', () => { // GIVEN const stack = new cdk.Stack(); // WHEN new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(500), volumeType: EbsDeviceVolumeType.COLD_HDD, }); // THEN Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { VolumeType: 'sc1', }); }); test('grantAttachVolume to any instance', () => { // GIVEN const stack = new cdk.Stack(); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantAttachVolume(role); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 
'ec2:AttachVolume', Effect: 'Allow', Resource: [ { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':volume/', { Ref: 'VolumeA92988D3', }, ], ], }, { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/*', ], ], }, ], }], }, }); }); describe('grantAttachVolume to any instance with encryption', () => { // This exact assertions here are only applicable when 'aws-kms:defaultKeyPolicies' feature flag is disabled. // See subsequent test case for the updated behaviour testLegacyBehavior('legacy', cdk.App, (app) => { // GIVEN const stack = new cdk.Stack(app); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const encryptionKey = new kms.Key(stack, 'Key'); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), encrypted: true, encryptionKey, }); // WHEN volume.grantAttachVolume(role); // THEN Template.fromStack(stack).hasResourceProperties('AWS::KMS::Key', { KeyPolicy: { Statement: Match.arrayWith([{ Effect: 'Allow', Principal: { AWS: { 'Fn::GetAtt': [ 'Role1ABCC5F0', 'Arn', ], }, }, Action: 'kms:CreateGrant', Condition: { Bool: { 'kms:GrantIsForAWSResource': true, }, StringEquals: { 'kms:ViaService': { 'Fn::Join': [ '', [ 'ec2.', { Ref: 'AWS::Region', }, '.amazonaws.com', ], ], }, 'kms:GrantConstraintType': 'EncryptionContextSubset', }, }, Resource: '*', }]), }, }); }); testFutureBehavior('with future flag aws-kms:defaultKeyPolicies', { [cxapi.KMS_DEFAULT_KEY_POLICIES]: true }, cdk.App, (app) => { // GIVEN const stack = new cdk.Stack(app); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const encryptionKey = new kms.Key(stack, 'Key'); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), encrypted: true, encryptionKey, }); // WHEN 
volume.grantAttachVolume(role); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Statement: Match.arrayWith([{ Effect: 'Allow', Action: 'kms:CreateGrant', Condition: { Bool: { 'kms:GrantIsForAWSResource': true, }, StringEquals: { 'kms:ViaService': { 'Fn::Join': [ '', [ 'ec2.', { Ref: 'AWS::Region', }, '.amazonaws.com', ], ], }, 'kms:GrantConstraintType': 'EncryptionContextSubset', }, }, Resource: { 'Fn::GetAtt': [ 'Key961B73FD', 'Arn', ], }, }]), }, }); }); }); test('grantAttachVolume to any instance with KMS.fromKeyArn() encryption', () => { // GIVEN const stack = new cdk.Stack(); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const kmsKey = new kms.Key(stack, 'Key'); // kmsKey policy is not strictly necessary for the test. // Demonstrating how to properly construct the Key. const principal = new kms.ViaServicePrincipal(`ec2.${stack.region}.amazonaws.com`, new AccountRootPrincipal()).withConditions({ StringEquals: { 'kms:CallerAccount': stack.account, }, }); kmsKey.grant(principal, // Describe & Generate are required to be able to create the CMK-encrypted Volume. 'kms:DescribeKey', 'kms:GenerateDataKeyWithoutPlainText', // ReEncrypt is required for when the CMK is rotated. 
'kms:ReEncrypt*', ); const encryptionKey = kms.Key.fromKeyArn(stack, 'KeyArn', kmsKey.keyArn); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), encrypted: true, encryptionKey, }); // WHEN volume.grantAttachVolume(role); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: Match.arrayWith([{ Effect: 'Allow', Action: 'kms:CreateGrant', Resource: { 'Fn::GetAtt': [ 'Key961B73FD', 'Arn', ], }, Condition: { Bool: { 'kms:GrantIsForAWSResource': true, }, StringEquals: { 'kms:ViaService': { 'Fn::Join': [ '', [ 'ec2.', { Ref: 'AWS::Region', }, '.amazonaws.com', ], ], }, 'kms:GrantConstraintType': 'EncryptionContextSubset', }, }, }]), }, }); }); test('grantAttachVolume to specific instances', () => { // GIVEN const stack = new cdk.Stack(); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const vpc = new Vpc(stack, 'Vpc'); const instance1 = new Instance(stack, 'Instance1', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const instance2 = new Instance(stack, 'Instance2', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantAttachVolume(role, [instance1, instance2]); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:AttachVolume', Effect: 'Allow', Resource: Match.arrayWith([{ 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/', { Ref: 
'Instance14BC3991D', }, ], ], }, { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/', { Ref: 'Instance255F35265', }, ], ], }]), }], }, }); }); test('grantAttachVolume to instance self', () => { // GIVEN const stack = new cdk.Stack(); const vpc = new Vpc(stack, 'Vpc'); const instance = new Instance(stack, 'Instance', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantAttachVolumeByResourceTag(instance.grantPrincipal, [instance]); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:AttachVolume', Effect: 'Allow', Resource: Match.arrayWith([{ 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/*', ], ], }]), Condition: { 'ForAnyValue:StringEquals': { 'ec2:ResourceTag/VolumeGrantAttach-B2376B2BDA': 'b2376b2bda65cb40f83c290dd844c4aa', }, }, }], }, }); Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { Tags: [ { Key: 'VolumeGrantAttach-B2376B2BDA', Value: 'b2376b2bda65cb40f83c290dd844c4aa', }, ], }); Template.fromStack(stack).hasResourceProperties('AWS::EC2::Instance', { Tags: Match.arrayWith([{ Key: 'VolumeGrantAttach-B2376B2BDA', Value: 'b2376b2bda65cb40f83c290dd844c4aa', }]), }); }); test('grantAttachVolume to instance self with suffix', () => { // GIVEN const stack = new cdk.Stack(); const vpc = new Vpc(stack, 'Vpc'); const instance = new Instance(stack, 'Instance', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), 
availabilityZone: 'us-east-1a', }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantAttachVolumeByResourceTag(instance.grantPrincipal, [instance], 'TestSuffix'); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:AttachVolume', Effect: 'Allow', Resource: Match.arrayWith([{ 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/*', ], ], }]), Condition: { 'ForAnyValue:StringEquals': { 'ec2:ResourceTag/VolumeGrantAttach-TestSuffix': 'b2376b2bda65cb40f83c290dd844c4aa', }, }, }], }, }); Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { Tags: [ { Key: 'VolumeGrantAttach-TestSuffix', Value: 'b2376b2bda65cb40f83c290dd844c4aa', }, ], }); Template.fromStack(stack).hasResourceProperties('AWS::EC2::Instance', { Tags: Match.arrayWith([{ Key: 'VolumeGrantAttach-TestSuffix', Value: 'b2376b2bda65cb40f83c290dd844c4aa', }]), }); }); test('grantDetachVolume to any instance', () => { // GIVEN const stack = new cdk.Stack(); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantDetachVolume(role); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:DetachVolume', Effect: 'Allow', Resource: [ { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':volume/', { Ref: 'VolumeA92988D3', }, ], ], }, { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/*', ], ], }, ], }], }, }); }); test('grantDetachVolume from specific 
instance', () => { // GIVEN const stack = new cdk.Stack(); const role = new Role(stack, 'Role', { assumedBy: new AccountRootPrincipal() }); const vpc = new Vpc(stack, 'Vpc'); const instance1 = new Instance(stack, 'Instance1', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const instance2 = new Instance(stack, 'Instance2', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantDetachVolume(role, [instance1, instance2]); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:DetachVolume', Effect: 'Allow', Resource: Match.arrayWith([{ 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/', { Ref: 'Instance14BC3991D', }, ], ], }, { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/', { Ref: 'Instance255F35265', }, ], ], }]), }], }, }); }); test('grantDetachVolume from instance self', () => { // GIVEN const stack = new cdk.Stack(); const vpc = new Vpc(stack, 'Vpc'); const instance = new Instance(stack, 'Instance', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantDetachVolumeByResourceTag(instance.grantPrincipal, [instance]); // THEN 
Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:DetachVolume', Effect: 'Allow', Resource: Match.arrayWith([{ 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/*', ], ], }]), Condition: { 'ForAnyValue:StringEquals': { 'ec2:ResourceTag/VolumeGrantDetach-B2376B2BDA': 'b2376b2bda65cb40f83c290dd844c4aa', }, }, }], }, }); Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', { Tags: [ { Key: 'VolumeGrantDetach-B2376B2BDA', Value: 'b2376b2bda65cb40f83c290dd844c4aa', }, ], }); Template.fromStack(stack).hasResourceProperties('AWS::EC2::Instance', { Tags: Match.arrayWith([{ Key: 'VolumeGrantDetach-B2376B2BDA', Value: 'b2376b2bda65cb40f83c290dd844c4aa', }]), }); }); test('grantDetachVolume from instance self with suffix', () => { // GIVEN const stack = new cdk.Stack(); const vpc = new Vpc(stack, 'Vpc'); const instance = new Instance(stack, 'Instance', { vpc, instanceType: new InstanceType('t3.small'), machineImage: MachineImage.latestAmazonLinux({ generation: AmazonLinuxGeneration.AMAZON_LINUX_2 }), availabilityZone: 'us-east-1a', }); const volume = new Volume(stack, 'Volume', { availabilityZone: 'us-east-1a', size: cdk.Size.gibibytes(8), }); // WHEN volume.grantDetachVolumeByResourceTag(instance.grantPrincipal, [instance], 'TestSuffix'); // THEN Template.fromStack(stack).hasResourceProperties('AWS::IAM::Policy', { PolicyDocument: { Version: '2012-10-17', Statement: [{ Action: 'ec2:DetachVolume', Effect: 'Allow', Resource: Match.arrayWith([ { 'Fn::Join': [ '', [ 'arn:', { Ref: 'AWS::Partition', }, ':ec2:', { Ref: 'AWS::Region', }, ':', { Ref: 'AWS::AccountId', }, ':instance/*', ], ], }, ]), Condition: { 'ForAnyValue:StringEquals': { 'ec2:ResourceTag/VolumeGrantDetach-TestSuffix': 'b2376b2bda65cb40f83c290dd844c4aa', }, }, }], }, }); 
// NOTE(review): tail of the preceding grantDetach test (its `test(...)` opener is above
// this chunk): both the Volume and the Instance must carry the matching
// VolumeGrantDetach tag that conditions the detach permission.
Template.fromStack(stack).hasResourceProperties('AWS::EC2::Volume', {
  Tags: [
    {
      Key: 'VolumeGrantDetach-TestSuffix',
      Value: 'b2376b2bda65cb40f83c290dd844c4aa',
    },
  ],
});
Template.fromStack(stack).hasResourceProperties('AWS::EC2::Instance', {
  Tags: Match.arrayWith([{
    Key: 'VolumeGrantDetach-TestSuffix',
    Value: 'b2376b2bda65cb40f83c290dd844c4aa',
  }]),
});
});

// Volume.fromVolumeAttributes must accept a Token or a well-formed `vol-<hex>` id and
// reject ids with leading/trailing junk.
// NOTE(review): the expected messages (including the `hexadecmial` typo) mirror the
// implementation's literal error strings — do not "fix" them here without changing volume.ts.
test('validation fromVolumeAttributes', () => {
  // GIVEN
  let idx: number = 0;
  const stack = new cdk.Stack();
  const volume = new Volume(stack, 'Volume', {
    availabilityZone: 'us-east-1a',
    size: cdk.Size.gibibytes(8),
  });

  // THEN
  // A Token-valued volumeId must be accepted without pattern validation.
  expect(() => {
    Volume.fromVolumeAttributes(stack, `Volume${idx++}`, {
      volumeId: volume.volumeId,
      availabilityZone: volume.availabilityZone,
    });
  }).not.toThrow();
  expect(() => {
    Volume.fromVolumeAttributes(stack, `Volume${idx++}`, {
      volumeId: 'vol-0123456789abcdefABCDEF',
      availabilityZone: 'us-east-1a',
    });
  }).not.toThrow();
  expect(() => {
    Volume.fromVolumeAttributes(stack, `Volume${idx++}`, {
      volumeId: ' vol-0123456789abcdefABCDEF', // leading invalid character(s)
      availabilityZone: 'us-east-1a',
    });
  }).toThrow('`volumeId` does not match expected pattern. Expected `vol-<hexadecmial value>` (ex: `vol-05abe246af`) or a Token');
  expect(() => {
    Volume.fromVolumeAttributes(stack, `Volume${idx++}`, {
      volumeId: 'vol-0123456789abcdefABCDEF ', // trailing invalid character(s)
      availabilityZone: 'us-east-1a',
    });
  }).toThrow('`volumeId` does not match expected pattern. Expected `vol-<hexadecmial value>` (ex: `vol-05abe246af`) or a Token');
});

// Constructor prop combinations: at least one of size/snapshotId is required, and
// `encryptionKey` demands `encrypted: true`.
test('validation required props', () => {
  // GIVEN
  const stack = new cdk.Stack();
  const key = new kms.Key(stack, 'Key');
  let idx: number = 0;

  // THEN
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
    });
  }).toThrow('Must provide at least one of `size` or `snapshotId`');
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      size: cdk.Size.gibibytes(8),
    });
  }).not.toThrow();
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      snapshotId: 'snap-000000000',
    });
  }).not.toThrow();
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      size: cdk.Size.gibibytes(8),
      snapshotId: 'snap-000000000',
    });
  }).not.toThrow();
  // Providing a key without (or with a false) `encrypted` flag is rejected.
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      size: cdk.Size.gibibytes(8),
      encryptionKey: key,
    });
  }).toThrow('`encrypted` must be true when providing an `encryptionKey`.');
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      size: cdk.Size.gibibytes(8),
      encrypted: false,
      encryptionKey: key,
    });
  }).toThrow('`encrypted` must be true when providing an `encryptionKey`.');
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      size: cdk.Size.gibibytes(8),
      encrypted: true,
      encryptionKey: key,
    });
  }).not.toThrow();
});

// snapshotId must be a Token or match `snap-<hex>`; junk characters are rejected.
// NOTE(review): the expected message reads "does match" (sic) — presumably mirroring
// the implementation string; confirm against volume.ts before editing.
test('validation snapshotId', () => {
  // GIVEN
  const stack = new cdk.Stack();
  const volume = new Volume(stack, 'ForToken', {
    availabilityZone: 'us-east-1a',
    size: cdk.Size.gibibytes(8),
  });
  let idx: number = 0;

  // THEN
  expect(() => {
    // Should not throw if we provide a Token for the snapshotId
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      snapshotId: volume.volumeId,
    });
  }).not.toThrow();
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      snapshotId: 'snap-0123456789abcdefABCDEF',
    });
  }).not.toThrow();
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      snapshotId: ' snap-1234', // leading extra character(s)
    });
  }).toThrow('`snapshotId` does match expected pattern. Expected `snap-<hexadecmial value>` (ex: `snap-05abe246af`) or Token');
  expect(() => {
    new Volume(stack, `Volume${idx++}`, {
      availabilityZone: 'us-east-1a',
      snapshotId: 'snap-1234 ', // trailing extra character(s)
    });
  }).toThrow('`snapshotId` does match expected pattern. Expected `snap-<hexadecmial value>` (ex: `snap-05abe246af`) or Token');
});

// iops rules: required for io1/io2, allowed for gp3, forbidden elsewhere; bounded
// per volume type; and capped at a per-GiB ratio.
test('validation iops', () => {
  // GIVEN
  const stack = new cdk.Stack();
  let idx: number = 0;

  // THEN
  // Test: Type of volume
  for (const volumeType of [
    EbsDeviceVolumeType.PROVISIONED_IOPS_SSD,
    EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2,
    EbsDeviceVolumeType.GENERAL_PURPOSE_SSD_GP3,
  ]) {
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(500),
        iops: 3000,
        volumeType,
      });
    }).not.toThrow();
  }
  for (const volumeType of [
    EbsDeviceVolumeType.PROVISIONED_IOPS_SSD,
    EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2,
  ]) {
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(500),
        volumeType,
      });
    }).toThrow(/`iops` must be specified if the `volumeType` is/);
  }
  for (const volumeType of [
    EbsDeviceVolumeType.GENERAL_PURPOSE_SSD,
    EbsDeviceVolumeType.THROUGHPUT_OPTIMIZED_HDD,
    EbsDeviceVolumeType.COLD_HDD,
    EbsDeviceVolumeType.MAGNETIC,
  ]) {
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(500),
        iops: 100,
        volumeType,
      });
    }).toThrow(/`iops` may only be specified if the `volumeType` is/);
  }

  // Test: iops in range — probe both boundaries (min-1, min, max, max+1).
  for (const testData of [
    [EbsDeviceVolumeType.GENERAL_PURPOSE_SSD_GP3, 3000, 16000],
    [EbsDeviceVolumeType.PROVISIONED_IOPS_SSD, 100, 64000],
    [EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2, 100, 64000],
  ]) {
    const volumeType = testData[0] as EbsDeviceVolumeType;
    const min = testData[1] as number;
    const max = testData[2] as number;
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.tebibytes(10),
        volumeType,
        iops: min - 1,
      });
    }).toThrow(/iops must be between/);
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.tebibytes(10),
        volumeType,
        iops: min,
      });
    }).not.toThrow();
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.tebibytes(10),
        volumeType,
        iops: max,
      });
    }).not.toThrow();
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.tebibytes(10),
        volumeType,
        iops: max + 1,
      });
    }).toThrow(/iops must be between/);
  }

  // Test: iops ratio — max IOPS per GiB of volume size.
  for (const testData of [
    [EbsDeviceVolumeType.GENERAL_PURPOSE_SSD_GP3, 500],
    [EbsDeviceVolumeType.PROVISIONED_IOPS_SSD, 50],
    [EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2, 500],
  ]) {
    const volumeType = testData[0] as EbsDeviceVolumeType;
    const max = testData[1] as number;
    const size = 10;
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(size),
        volumeType,
        iops: max * size,
      });
    }).not.toThrow();
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(size),
        volumeType,
        iops: max * size + 1,
      });
    }).toThrow(/iops has a maximum ratio of/);
  }
});

// Multi-attach is accepted only for provisioned-IOPS volume types (io1/io2).
test('validation multi-attach', () => {
  // GIVEN
  const stack = new cdk.Stack();
  let idx: number = 0;

  // THEN
  for (const volumeType of [
    EbsDeviceVolumeType.GENERAL_PURPOSE_SSD,
    EbsDeviceVolumeType.GENERAL_PURPOSE_SSD_GP3,
    EbsDeviceVolumeType.PROVISIONED_IOPS_SSD,
    EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2,
    EbsDeviceVolumeType.THROUGHPUT_OPTIMIZED_HDD,
    EbsDeviceVolumeType.COLD_HDD,
    EbsDeviceVolumeType.MAGNETIC,
  ]) {
    if (
      [
        EbsDeviceVolumeType.PROVISIONED_IOPS_SSD,
        EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2,
      ].includes(volumeType)
    ) {
      expect(() => {
        new Volume(stack, `Volume${idx++}`, {
          availabilityZone: 'us-east-1a',
          size: cdk.Size.gibibytes(500),
          enableMultiAttach: true,
          volumeType,
          iops: 100, // iops is mandatory for io1/io2 (see 'validation iops')
        });
      }).not.toThrow();
    } else {
      expect(() => {
        new Volume(stack, `Volume${idx++}`, {
          availabilityZone: 'us-east-1a',
          size: cdk.Size.gibibytes(500),
          enableMultiAttach: true,
          volumeType,
        });
      }).toThrow(/multi-attach is supported exclusively/);
    }
  }
});

// Size bounds per volume type; boundaries probed at min-1/min/max/max+1 (GiB).
test('validation size in range', () => {
  // GIVEN
  const stack = new cdk.Stack();
  let idx: number = 0;

  // THEN
  for (const testData of [
    [EbsDeviceVolumeType.GENERAL_PURPOSE_SSD, 1, 16384],
    [EbsDeviceVolumeType.GENERAL_PURPOSE_SSD_GP3, 1, 16384],
    [EbsDeviceVolumeType.PROVISIONED_IOPS_SSD, 4, 16384],
    [EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2, 4, 16384],
    [EbsDeviceVolumeType.THROUGHPUT_OPTIMIZED_HDD, 125, 16384],
    [EbsDeviceVolumeType.COLD_HDD, 125, 16384],
    [EbsDeviceVolumeType.MAGNETIC, 1, 1024],
  ]) {
    const volumeType = testData[0] as EbsDeviceVolumeType;
    const min = testData[1] as number;
    const max = testData[2] as number;
    // io1/io2 additionally require iops, so supply a valid value only for them.
    const iops = [
      EbsDeviceVolumeType.PROVISIONED_IOPS_SSD,
      EbsDeviceVolumeType.PROVISIONED_IOPS_SSD_IO2,
    ].includes(volumeType) ? 100 : null;
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(min - 1),
        volumeType,
        ...iops ? { iops } : {},
      });
    }).toThrow(/volumes must be between/);
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(min),
        volumeType,
        ...iops ? { iops } : {},
      });
    }).not.toThrow();
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(max),
        volumeType,
        ...iops ? { iops } : {},
      });
    }).not.toThrow();
    expect(() => {
      new Volume(stack, `Volume${idx++}`, {
        availabilityZone: 'us-east-1a',
        size: cdk.Size.gibibytes(max + 1),
        volumeType,
        ...iops ? { iops } : {},
      });
    }).toThrow(/volumes must be between/);
  }
});
});
the_stack
import * as fs from 'fs-extra'; import * as os from 'os'; import * as path from 'path'; import * as plist from 'plist'; import compareVersion from 'compare-version'; import { debugLog, debugWarn, getAppContentsPath, execFileAsync, validateOptsApp, validateOptsPlatform, walkAsync } from './util'; import { Identity, findIdentities } from './util-identities'; import { preEmbedProvisioningProfile, getProvisioningProfile } from './util-provisioning-profiles'; import { preAutoEntitlements } from './util-entitlements'; import { ElectronMacPlatform, PerFileSignOptions, SignOptions, ValidatedSignOptions } from './types'; const pkgVersion: string = require('../../package.json').version; const osRelease = os.release(); /** * This function returns a promise validating opts.binaries, the additional binaries to be signed along with the discovered enclosed components. */ async function validateOptsBinaries (opts: SignOptions) { if (opts.binaries) { if (!Array.isArray(opts.binaries)) { throw new Error('Additional binaries should be an Array.'); } // TODO: Presence check for binary files, reject if any does not exist } } function validateOptsIgnore (ignore: SignOptions['ignore']): ValidatedSignOptions['ignore'] { if (ignore && !(ignore instanceof Array)) { return [ignore]; } } /** * This function returns a promise validating all options passed in opts. 
*/ async function validateSignOpts (opts: SignOptions): Promise<Readonly<ValidatedSignOptions>> { await validateOptsBinaries(opts); await validateOptsApp(opts); if (opts.provisioningProfile && typeof opts.provisioningProfile !== 'string') { throw new Error('Path to provisioning profile should be a string.'); } if (opts.type && opts.type !== 'development' && opts.type !== 'distribution') { throw new Error('Type must be either `development` or `distribution`.'); } const platform = await validateOptsPlatform(opts); const cloned: ValidatedSignOptions = { ...opts, ignore: validateOptsIgnore(opts.ignore), type: opts.type || 'distribution', platform }; return cloned; } /** * This function returns a promise verifying the code sign of application bundle. */ async function verifySignApplication (opts: ValidatedSignOptions) { // Verify with codesign debugLog('Verifying application bundle with codesign...'); await execFileAsync( 'codesign', ['--verify', '--deep'].concat( opts.strictVerify !== false && compareVersion(osRelease, '15.0.0') >= 0 // Strict flag since darwin 15.0.0 --> OS X 10.11.0 El Capitan ? [ '--strict' + (opts.strictVerify ? '=' + opts.strictVerify // Array should be converted to a comma separated string : '') ] : [], ['--verbose=2', opts.app] ) ); } function defaultOptionsForFile (filePath: string, platform: ElectronMacPlatform) { const entitlementsFolder = path.resolve(__dirname, '..', '..', 'entitlements'); let entitlementsFile: string; if (platform === 'darwin') { // Default Entitlements // c.f. https://source.chromium.org/chromium/chromium/src/+/main:chrome/app/app-entitlements.plist // Also include JIT for main process V8 entitlementsFile = path.resolve(entitlementsFolder, 'default.darwin.plist'); // Plugin helper // c.f. 
https://source.chromium.org/chromium/chromium/src/+/main:chrome/app/helper-plugin-entitlements.plist if (filePath.includes('(Plugin).app')) { entitlementsFile = path.resolve(entitlementsFolder, 'default.darwin.plugin.plist'); // GPU Helper // c.f. https://source.chromium.org/chromium/chromium/src/+/main:chrome/app/helper-gpu-entitlements.plist } else if (filePath.includes('(GPU).app')) { entitlementsFile = path.resolve(entitlementsFolder, 'default.darwin.gpu.plist'); // Renderer Helper // c.f. https://source.chromium.org/chromium/chromium/src/+/main:chrome/app/helper-renderer-entitlements.plist } else if (filePath.includes('(Renderer).app')) { entitlementsFile = path.resolve(entitlementsFolder, 'default.darwin.renderer.plist'); } } else { // Default entitlements // TODO: Can these be more scoped like the non-mas variant? entitlementsFile = path.resolve(entitlementsFolder, 'default.mas.plist'); // If it is not the top level app bundle, we sign with inherit if (filePath.includes('.app/')) { entitlementsFile = path.resolve(entitlementsFolder, 'default.mas.child.plist'); } } return { entitlements: entitlementsFile, hardenedRuntime: true, requirements: undefined as string | undefined, signatureFlags: undefined as string | string[] | undefined, timestamp: undefined as string | undefined }; } async function mergeOptionsForFile ( opts: PerFileSignOptions | null, defaults: ReturnType<typeof defaultOptionsForFile> ) { const mergedPerFileOptions = { ...defaults }; if (opts) { if (opts.entitlements !== undefined) { if (Array.isArray(opts.entitlements)) { const entitlements = opts.entitlements.reduce<Record<string, any>>((dict, entitlementKey) => ({ ...dict, [entitlementKey]: true }), {}); const dir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'tmp-entitlements-')); const entitlementsPath = path.join(dir, 'entitlements.plist'); await fs.writeFile(entitlementsPath, plist.build(entitlements), 'utf8'); opts.entitlements = entitlementsPath; } mergedPerFileOptions.entitlements = 
opts.entitlements; } if (opts.hardenedRuntime !== undefined) { mergedPerFileOptions.hardenedRuntime = opts.hardenedRuntime; } if (opts.requirements !== undefined) mergedPerFileOptions.requirements = opts.requirements; if (opts.signatureFlags !== undefined) { mergedPerFileOptions.signatureFlags = opts.signatureFlags; } if (opts.timestamp !== undefined) mergedPerFileOptions.timestamp = opts.timestamp; } return mergedPerFileOptions; } /** * This function returns a promise codesigning only. */ async function signApplication (opts: ValidatedSignOptions, identity: Identity) { function shouldIgnoreFilePath (filePath: string) { if (opts.ignore) { return opts.ignore.some(function (ignore) { if (typeof ignore === 'function') { return ignore(filePath); } return filePath.match(ignore); }); } return false; } const children = await walkAsync(getAppContentsPath(opts)); if (opts.binaries) children.push(...opts.binaries); const args = ['--sign', identity.hash || identity.name, '--force']; if (opts.keychain) { args.push('--keychain', opts.keychain); } /** * Sort the child paths by how deep they are in the file tree. Some arcane apple * logic expects the deeper files to be signed first otherwise strange errors get * thrown our way */ children.sort((a, b) => { const aDepth = a.split(path.sep).length; const bDepth = b.split(path.sep).length; return bDepth - aDepth; }); for (const filePath of [...children, opts.app]) { if (shouldIgnoreFilePath(filePath)) { debugLog('Skipped... ' + filePath); continue; } const perFileOptions = await mergeOptionsForFile( opts.optionsForFile ? opts.optionsForFile(filePath) : null, defaultOptionsForFile(filePath, opts.platform) ); if (opts.preAutoEntitlements === false) { debugWarn('Pre-sign operation disabled for entitlements automation.'); } else { debugLog( 'Pre-sign operation enabled for entitlements automation with versions >= `1.1.1`:', '\n', '* Disable by setting `pre-auto-entitlements` to `false`.' 
); if (!opts.version || compareVersion(opts.version, '1.1.1') >= 0) { // Enable Mac App Store sandboxing without using temporary-exception, introduced in Electron v1.1.1. Relates to electron#5601 const newEntitlements = await preAutoEntitlements(opts, perFileOptions, { identity, provisioningProfile: opts.provisioningProfile ? await getProvisioningProfile(opts.provisioningProfile, opts.keychain) : undefined }); // preAutoEntitlements may provide us new entitlements, if so we update our options // and ensure that entitlements-loginhelper has a correct default value if (newEntitlements) { perFileOptions.entitlements = newEntitlements; } } } debugLog('Signing... ' + filePath); if (perFileOptions.requirements) { args.push('--requirements', perFileOptions.requirements); } if (perFileOptions.timestamp) { args.push('--timestamp=' + perFileOptions.timestamp); } else { args.push('--timestamp'); } let optionsArguments: string[] = []; if (perFileOptions.signatureFlags) { if (Array.isArray(perFileOptions.signatureFlags)) { optionsArguments.push(...perFileOptions.signatureFlags); } else { const flags = perFileOptions.signatureFlags.split(',').map(function (flag) { return flag.trim(); }); optionsArguments.push(...flags); } } if (perFileOptions.hardenedRuntime || optionsArguments.includes('runtime')) { // Hardened runtime since darwin 17.7.0 --> macOS 10.13.6 if (compareVersion(osRelease, '17.7.0') >= 0) { optionsArguments.push('runtime'); } else { // Remove runtime if passed in with --signature-flags debugLog( 'Not enabling hardened runtime, current macOS version too low, requires 10.13.6 and higher' ); optionsArguments = optionsArguments.filter((arg) => { return arg !== 'runtime'; }); } } if (optionsArguments.length) { args.push('--options', [...new Set(optionsArguments)].join(',')); } await execFileAsync( 'codesign', args.concat('--entitlements', perFileOptions.entitlements, filePath) ); } // Verify code sign debugLog('Verifying...'); await verifySignApplication(opts); 
debugLog('Verified.'); // Check entitlements if applicable debugLog('Displaying entitlements...'); const result = await execFileAsync('codesign', [ '--display', '--entitlements', ':-', // Write to standard output and strip off the blob header opts.app ]); debugLog('Entitlements:', '\n', result); } /** * This function returns a promise signing the application. */ export async function signApp (_opts: SignOptions) { debugLog('electron-osx-sign@%s', pkgVersion); const validatedOpts = await validateSignOpts(_opts); let identities: Identity[] = []; let identityInUse: Identity | null = null; // Determine identity for signing if (validatedOpts.identity) { debugLog('`identity` passed in arguments.'); if (validatedOpts.identityValidation === false) { identityInUse = new Identity(validatedOpts.identity); } else { identities = await findIdentities(validatedOpts.keychain || null, validatedOpts.identity); } } else { debugWarn('No `identity` passed in arguments...'); if (validatedOpts.platform === 'mas') { if (validatedOpts.type === 'distribution') { debugLog( 'Finding `3rd Party Mac Developer Application` certificate for signing app distribution in the Mac App Store...' ); identities = await findIdentities( validatedOpts.keychain || null, '3rd Party Mac Developer Application:' ); } else { debugLog( 'Finding `Mac Developer` certificate for signing app in development for the Mac App Store signing...' ); identities = await findIdentities(validatedOpts.keychain || null, 'Mac Developer:'); } } else { debugLog( 'Finding `Developer ID Application` certificate for distribution outside the Mac App Store...' 
); identities = await findIdentities( validatedOpts.keychain || null, 'Developer ID Application:' ); } } if (!identityInUse) { if (identities.length > 0) { // Identity(/ies) found if (identities.length > 1) { debugWarn('Multiple identities found, will use the first discovered.'); } else { debugLog('Found 1 identity.'); } identityInUse = identities[0]; } else { // No identity found throw new Error('No identity found for signing.'); } } // Pre-sign operations if (validatedOpts.preEmbedProvisioningProfile === false) { debugWarn( 'Pre-sign operation disabled for provisioning profile embedding:', '\n', '* Enable by setting `pre-embed-provisioning-profile` to `true`.' ); } else { debugLog( 'Pre-sign operation enabled for provisioning profile:', '\n', '* Disable by setting `pre-embed-provisioning-profile` to `false`.' ); await preEmbedProvisioningProfile( validatedOpts, validatedOpts.provisioningProfile ? await getProvisioningProfile(validatedOpts.provisioningProfile, validatedOpts.keychain) : null ); } debugLog( 'Signing application...', '\n', '> Application:', validatedOpts.app, '\n', '> Platform:', validatedOpts.platform, '\n', '> Additional binaries:', validatedOpts.binaries, '\n', '> Identity:', validatedOpts.identity ); await signApplication(validatedOpts, identityInUse); // Post-sign operations debugLog('Application signed.'); } /** * This function is a legacy callback implementation. * * @deprecated Please use the promise based "signApp" method */ export const sign = (opts: SignOptions, cb?: (error?: Error) => void) => { signApp(opts) .then(() => { debugLog('Application signed: ' + opts.app); if (cb) cb(); }) .catch((err) => { if (err.message) debugLog(err.message); else if (err.stack) debugLog(err.stack); else debugLog(err); if (cb) cb(err); }); };
the_stack
export interface CreateVocabLibResponse {
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * Face-retrieval statistics for one person.
 */
export interface FaceIdentifyStatistic {
  /** Duration */
  Duration?: number;
  /** End timestamp */
  EndTs?: number;
  /** Unique person identifier */
  PersonId?: string;
  /** Similarity score */
  Similarity?: number;
  /** Start timestamp */
  StartTs?: number;
}
/**
 * Start/end time information for the sentences in which a keyword occurred.
 */
export interface DetailInfo {
  /** Start/end timestamps of each sentence in the audio containing the keyword, one pair per occurrence. */
  Value?: Array<WordTimePair>;
  /** The keyword from the vocabulary library. */
  Keyword?: string;
}
/**
 * SubmitImageTaskPlus request structure.
 */
export interface SubmitImageTaskPlusRequest {
  /** Content to analyze; format depends on FileType. */
  FileContent: Array<string>;
  /** Input type: picture (base64 of binary image), picture_url, vod_url (video), live_url (live stream). */
  FileType: string;
  /** Task control options. */
  Functions?: ImageTaskFunction;
  /** Lighting standards. */
  LightStandardSet?: Array<LightStandard>;
  /** Frame sampling interval in ms, default 1000. Reserved; currently unsupported. */
  FrameInterval?: number;
  /** Person libraries to query. */
  LibrarySet?: Array<string>;
  /** Max evaluation time in seconds; VOD default 2h (or full video), live default 10min (or until stream ends). */
  MaxVideoDuration?: number;
  /** Face-recognition similarity threshold, default 0.89. Reserved; currently unsupported. */
  SimThreshold?: number;
}
/**
 * FacePoseResult
 */
export interface FacePoseResult {
  /** Frontal or profile face indicator. */
  Direction?: string;
  /** Rotation around the Z axis (pitch). */
  Pitch?: number;
  /** Rotation around the X axis (roll). */
  Roll?: number;
  /** Rotation around the Y axis (yaw). */
  Yaw?: number;
}
/**
 * DeleteVocabLib request structure.
 */
export interface DeleteVocabLibRequest {
  /** Vocabulary library name. */
  VocabLibName: string;
}
/**
 * DescribeAITaskResult request structure.
 */
export interface DescribeAITaskResultRequest {
  /** Task id returned when the URL-mode request was submitted; use it to poll results. */
  TaskId: number;
  /** Maximum number of entries to return. */
  Limit?: number;
  /** Pagination offset. */
  Offset?: number;
}
/**
 * Duration statistics entry.
 */
export interface ActionDurationStatistic {
  /** Duration */
  Duration: number;
  /** Name */
  Name: string;
}
/**
 * CreatePerson request structure.
 */
export interface CreatePersonRequest {
  /** Unique person-library identifier. */
  LibraryId: string;
  /** Person name. */
  PersonName: string;
  /** Base64 image data; provide either Images or Urls. */
  Images?: Array<string>;
  /** Employee/job number. */
  JobNumber?: string;
  /** Email address. */
  Mail?: string;
  /** Gender: 0 unknown, 1 male, 2 female. */
  Male?: number;
  /** Custom person id; must not use the `tci_person_` prefix. */
  PersonId?: string;
  /** Phone number. */
  PhoneNumber?: string;
  /** Student number. */
  StudentNumber?: string;
  /** Image download URLs; provide either Images or Urls. */
  Urls?: Array<string>;
}
/**
 * Lighting statistics.
 */
export interface LightStatistic {
  /** Light value at each point in time. */
  LightDistribution?: Array<LightDistributionStatistic>;
  /** Ratio statistics per lighting level. */
  LightLevelRatio?: Array<LightLevelRatioStatistic>;
}
/**
 * DescribePersons request structure.
 */
export interface DescribePersonsRequest {
  /** Unique person-library identifier. */
  LibraryId: string;
  /** Maximum number of entries to return. */
  Limit?: number;
  /** Pagination offset. */
  Offset?: number;
}
/**
 * SubmitOneByOneClassTask request structure.
 */
export interface SubmitOneByOneClassTaskRequest {
  /** Content to analyze; format depends on FileType. */
  FileContent: string;
  /** Input type: picture_url, vod_url, live_url, or picture (base64 of binary image). */
  FileType: string;
  /** Audio language: 0 English (default), 1 Chinese. */
  Lang?: number;
  /** Person libraries to query (e.g. where student registration photos live). */
  LibrarySet?: Array<string>;
  /** Max evaluation time in seconds; VOD default 2h (or full video), live default 10min (or until stream ends). */
  MaxVideoDuration?: number;
  /** Vocabulary library names holding keywords whose usage by students is evaluated. */
  VocabLibNameList?: Array<string>;
  /** Audio encoding type, 1: pcm; required when FileType is vod_url or live_url. */
  VoiceEncodeType?: number;
  /** Audio file type, 10: video (supported audio formats currently only 16k sample rate, 16 bit); required when FileType is vod_url or live_url. */
  VoiceFileType?: number;
}
/**
 * CreateVocab response structure.
 */
export interface CreateVocabResponse {
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * SubmitConversationTask response structure.
 */
export interface SubmitConversationTaskResponse {
  /** Job id to use when querying results; returned in URL mode when the request is submitted. */
  JobId?: number;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * DescribeVocabLib request structure (no parameters).
 */
export declare type DescribeVocabLibRequest = null;
/**
 * Action/behavior sub-type.
 */
export interface ActionType {
  /** Confidence. */
  Confidence?: number;
  /** Action category. */
  Type?: string;
}
/**
 * DescribePersons response structure.
 */
export interface DescribePersonsResponse {
  /** Person list. */
  PersonSet?: Array<Person>;
  /** Total number of persons. */
  TotalCount?: number;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * Retrieval feature switches.
 */
export interface HLFunction {
  /** Enable face detection. */
  EnableFaceDetect?: boolean;
  /** Enable facial-expression recognition. */
  EnableFaceExpression?: boolean;
  /** Enable face retrieval. */
  EnableFaceIdent?: boolean;
  /** Enable highlights: teacher-keyword recognition. */
  EnableKeywordWonderfulTime?: boolean;
  /** Enable highlights: smile recognition. */
  EnableSmileWonderfulTime?: boolean;
}
/**
 * CreateFace request structure.
 */
export interface CreateFaceRequest {
  /** Unique person identifier. */
  PersonId: string;
  /** Base64 image data; provide either Images or Urls. */
  Images?: Array<string>;
  /** Unique person-library identifier. */
  LibraryId?: string;
  /** Image download URLs; provide either Images or Urls. */
  Urls?: Array<string>;
}
/**
 * DescribeImageTaskStatistic request structure.
 */
export interface DescribeImageTaskStatisticRequest {
  /** Image task identifier. */
  JobId: number;
}
/**
 * SubmitCheckAttendanceTaskPlus response structure.
 */
export interface SubmitCheckAttendanceTaskPlusResponse {
  /** Task identifier. */
  JobId?: number;
  /** Ids of people not registered. */
  NotRegisteredSet?: string;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * SubmitImageTask request structure.
 */
export interface SubmitImageTaskRequest {
  /** Content to analyze; format depends on FileType. */
  FileContent: string;
  /** Input type: picture (base64 of binary image), picture_url, vod_url (video), live_url (live stream). */
  FileType: string;
  /** Task control options. */
  Functions?: ImageTaskFunction;
  /** Lighting standards. */
  LightStandardSet?: Array<LightStandard>;
  /** Callback URL for result updates. */
  EventsCallBack?: string;
  /** Frame sampling interval in ms, default 1000. Reserved; currently unsupported. */
  FrameInterval?: number;
  /** Person libraries to query. */
  LibrarySet?: Array<string>;
  /** Max evaluation time in seconds; VOD default 2h (or full video), live default 10min (or until stream ends). */
  MaxVideoDuration?: number;
  /** Face-recognition similarity threshold, default 0.89. Reserved; currently unsupported. */
  SimThreshold?: number;
}
/**
 * Start/end time of the sentence in which a keyword occurred.
 */
export interface WordTimePair {
  /** Start time of the sentence containing the word. */
  Mbtm?: number;
  /** End time of the sentence containing the word. */
  Metm?: number;
}
/**
 * ModifyLibrary request structure.
 */
export interface ModifyLibraryRequest {
  /** Unique person-library identifier. */
  LibraryId: string;
  /** Person-library name. */
  LibraryName: string;
}
/**
 * Face operation information.
 */
export interface FaceInfo {
  /** Face-operation error code. */
  ErrorCode?: string;
  /** Face-operation result message. */
  ErrorMsg?: string;
  /** Unique face identifier. */
  FaceId?: string;
  /** URL where the face image is stored. */
  FaceUrl?: string;
  /** Unique person identifier. */
  PersonId?: string;
}
/**
 * Light-intensity statistics entry.
 */
export interface LightDistributionStatistic {
  /** Point in time. */
  Time: number;
  /** Light value. */
  Value: number;
}
/**
 * DeleteLibrary response structure.
 */
export interface DeleteLibraryResponse {
  /** Unique person-library identifier. */
  LibraryId?: string;
  /** Person-library name. */
  LibraryName?: string;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * SubmitFullBodyClassTask request structure.
 */
export interface SubmitFullBodyClassTaskRequest {
  /** Content to analyze; format depends on FileType. */
  FileContent: string;
  /** Input type: picture_url, vod_url, live_url, or picture (base64 of binary image). */
  FileType: string;
  /** Audio language: 0 English (default), 1 Chinese. */
  Lang?: number;
  /** Person libraries to query (e.g. where teacher registration photos live). */
  LibrarySet?: Array<string>;
  /** Max evaluation time in seconds; VOD default 2h (or full video), live default 10min (or until stream ends). */
  MaxVideoDuration?: number;
  /** Vocabulary library names holding keywords whose usage by the teacher is evaluated. */
  VocabLibNameList?: Array<string>;
  /** Audio encoding type, 1: pcm; required when FileType is vod_url or live_url. */
  VoiceEncodeType?: number;
  /** Audio file type, 10: video (supported audio formats currently only 16k sample rate, 16 bit); required when FileType is vod_url or live_url. */
  VoiceFileType?: number;
}
/**
 * LightResult
 */
export interface LightResult {
  /** Lighting level; refers to the Name given in LightStandard when the task was submitted. */
  LightLevel?: string;
  /** Light brightness value. */
  LightValue?: number;
}
/**
 * DescribeAttendanceResult request structure.
 */
export interface DescribeAttendanceResultRequest {
  /** Unique task identifier. */
  JobId: number;
}
/**
 * SubmitOpenClassTask response structure.
 */
export interface SubmitOpenClassTaskResponse {
  /** Directly returned image results: FaceAttr, FaceExpression, FaceIdentify, FaceInfo, FacePose, StudentBodyMovement, TimeInfo. */
  ImageResults?: Array<ImageTaskResult>;
  /** Task id. */
  TaskId?: number;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * Sentence information including speech speed.
 */
export interface WholeTextItem {
  /** Current sentence information. */
  TextItem?: TextItem;
  /** Average VAD volume. */
  AvgVolume?: number;
  /** Maximum VAD volume. */
  MaxVolume?: number;
  /** Minimum VAD volume. */
  MinVolume?: number;
  /** Speech speed of the current sentence. */
  Speed?: number;
}
/**
 * SubmitCheckAttendanceTaskPlus request structure.
 */
export interface SubmitCheckAttendanceTaskPlusRequest {
  /** Input data. */
  FileContent: Array<string>;
  /** Stream type: vod_url (VOD URL) or live_url (live URL); default vod_url. */
  FileType: string;
  /** Person-library id list. */
  LibraryIds: Array<string>;
  /** Attendance-confirmation threshold; default 0.92. */
  AttendanceThreshold?: number;
  /** Enable stranger mode: also report stats for faces not in the registered libraries; off by default. */
  EnableStranger?: boolean;
  /** Attendance end time (seconds into the video); default 900. For live streams, an absolute timestamp in seconds, default now + 12h. */
  EndTime?: number;
  /** Notification callback URL; must accept POST with application/json. */
  NoticeUrl?: string;
  /** Attendance start time (seconds into the video); default 0. For live streams, an absolute timestamp in seconds, default now. */
  StartTime?: number;
  /** Recognition threshold; default 0.8. */
  Threshold?: number;
}
/**
 * DescribeConversationTask response structure.
 */
export interface DescribeConversationTaskResponse {
  /** Statistics for the current audio; returned when progress reaches 100. */
  AsrStat?: ASRStat;
  /** Details of the current audio stream (stream mode) or of the queried seq segment (URL mode). */
  Texts?: Array<WholeTextItem>;
  /** Detailed occurrence times of vocabulary-library keywords. */
  VocabAnalysisDetailInfo?: Array<VocabDetailInfomation>;
  /** Occurrence counts of vocabulary-library keywords. */
  VocabAnalysisStatInfo?: Array<VocabStatInfomation>;
  /** Full text of the entire audio stream. */
  AllTexts?: string;
  /** Unique audio job id; returned in URL mode when submitting, used to poll results. */
  JobId?: number;
  /** Current processing progress. */
  Progress?: number;
  /** Total result count. */
  TotalCount?: number;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * CreatePerson response structure.
 */
export interface CreatePersonResponse {
  /** Face-operation result information. */
  FaceInfoSet?: Array<FaceInfo>;
  /** Unique person-library identifier. */
  LibraryId?: string;
  /** Unique person identifier. */
  PersonId?: string;
  /** Person name. */
  PersonName?: string;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * DescribeImageTask request structure.
 */
export interface DescribeImageTaskRequest {
  /** Task identifier. */
  JobId: number;
  /** Maximum number of entries to return. */
  Limit?: number;
  /** Pagination offset. */
  Offset?: number;
}
/**
 * Highlights information.
 */
export interface HighlightsInfomation {
  /** Start/end times of concentration segments. */
  Concentration?: Array<TimeType>;
  /** Start/end times of smile segments. */
  Smile?: Array<TimeType>;
  /** URL of the edited highlights video. */
  HighlightsUrl?: string;
  /** Face id recognized in the segment. */
  PersonId?: string;
}
/**
 * Start/end time pair.
 */
export interface TimeType {
  /** End timestamp. */
  EndTime?: number;
  /** Start timestamp. */
  StartTime?: number;
}
/**
 * SubmitTraditionalClassTask request structure.
 */
export interface SubmitTraditionalClassTaskRequest {
  /** Content to analyze; format depends on FileType. */
  FileContent: string;
  /** Input type: picture_url, vod_url, live_url, or picture (base64 of binary image). */
  FileType: string;
  /** Person libraries to query (e.g. where students' registration photos live). */
  LibrarySet?: Array<string>;
  /** Max evaluation time in seconds; VOD default 2h (or full video), live default 10min (or until stream ends). */
  MaxVideoDuration?: number;
}
/**
 * DeleteFace response structure.
 */
export interface DeleteFaceResponse {
  /** Face-operation results. */
  FaceInfoSet?: Array<FaceInfo>;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * SubmitDoubleVideoHighlights request structure.
 */
export interface SubmitDoubleVideoHighlightsRequest {
  /** Student video URL. */
  FileContent: string;
  /** Face libraries to search; faces outside them are excluded from highlights. Currently only one library is supported. */
  LibIds: Array<string>;
  /** Detailed feature switches. */
  Functions?: DoubleVideoFunction;
  /** List of persons to match. */
  PersonInfoList?: Array<PersonInfo>;
  /** Frame sampling interval in ms; recommended to leave empty. */
  FrameInterval?: number;
  /** Legacy list of persons to match. */
  PersonIds?: Array<string>;
  /** Face-retrieval similarity threshold, default 0.89; recommended to leave empty. */
  SimThreshold?: number;
  /** Teacher video URL. */
  TeacherFileContent?: string;
}
/**
 * Expression ratio statistics.
 */
export interface ExpressRatioStatistic {
  /** Occurrence count. */
  Count?: number;
  /** Expression. */
  Express?: string;
  /** Ratio of this expression's duration to the total duration of all expressions. */
  Ratio?: number;
  /** Ratio of this expression's duration to the total video duration. */
  RatioUseDuration?: number;
}
/**
 * TransmitAudioStream response structure.
 */
export interface TransmitAudioStreamResponse {
  /** Statistics for the current audio; returned when progress reaches 100. */
  AsrStat?: ASRStat;
  /** Details of the current audio stream (stream mode) or of the queried seq segment (URL mode). */
  Texts?: Array<WholeTextItem>;
  /** Detailed occurrence times of vocabulary-library keywords. */
  VocabAnalysisDetailInfo?: Array<VocabDetailInfomation>;
  /** Occurrence counts of vocabulary-library keywords. */
  VocabAnalysisStatInfo?: Array<VocabStatInfomation>;
  /** Full text of the audio. */
  AllTexts?: string;
  /** Temporarily stored audio URL. */
  AudioUrl?: string;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * DeletePerson request structure.
 */
export interface DeletePersonRequest {
  /** Unique person-library identifier. */
  LibraryId: string;
  /** Unique person identifier. */
  PersonId: string;
}
/**
 * DescribeLibraries response structure.
 */
export interface DescribeLibrariesResponse {
  /** Person-library list. */
  LibrarySet?: Array<Library>;
  /** Total number of person libraries. */
  TotalCount?: number;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * DeleteFace request structure.
 */
export interface DeleteFaceRequest {
  /** Array of face identifiers. */
  FaceIdSet: Array<string>;
  /** Unique person identifier. */
  PersonId: string;
  /** Unique person-library identifier. */
  LibraryId?: string;
}
/**
 * DeleteLibrary request structure.
 */
export interface DeleteLibraryRequest {
  /** Unique person-library identifier. */
  LibraryId: string;
}
/**
 * DescribePerson response structure.
 */
export interface DescribePersonResponse {
  /** Person's face list. */
  FaceSet?: Array<Face>;
  /** Creation time. */
  CreateTime?: string;
  /** Employee/job number. */
  JobNumber?: string;
  /** Unique person-library identifier. */
  LibraryId?: string;
  /** Email address. */
  Mail?: string;
  /** Gender. */
  Male?: number;
  /** Unique person identifier. */
  PersonId?: string;
  /** Person name. */
  PersonName?: string;
  /** Phone number. */
  PhoneNumber?: string;
  /** Student number. */
  StudentNumber?: string;
  /** Last-modified time. */
  UpdateTime?: string;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * Standardized image-analysis result.
 */
export interface StandardImageResult {
  /** Detailed results. */
  ResultSet?: Array<ImageTaskResult>;
  /** Statistics computed after analysis completes. */
  Statistic?: ImageTaskStatistic;
  /** Status description. */
  Message?: string;
  /** Task status. */
  Status?: string;
  /** Total result count. */
  TotalCount?: number;
}
/**
 * DescribeVocabLib response structure.
 */
export interface DescribeVocabLibResponse {
  /** All vocabulary-library names under this appid. */
  VocabLibNameSet?: Array<string>;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * Recognized person information.
 */
export interface AttendanceInfo {
  /** Recognized face frame information. */
  Face?: FrameInfo;
  /** Recognized person id. */
  PersonId?: string;
}
/**
 * SubmitCheckAttendanceTask response structure.
 */
export interface SubmitCheckAttendanceTaskResponse {
  /** Task identifier. */
  JobId?: number;
  /** Ids of people not registered. */
  NotRegisteredSet?: Array<string>;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * DeleteVocab response structure.
 */
export interface DeleteVocabResponse {
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * CheckFacePhoto request structure.
 */
export interface CheckFacePhotoRequest {
  /** Content to analyze. */
  FileContent: string;
  /** Input type: picture_url (image URL). */
  FileType: string;
}
/**
 * Result of whether the teacher is on screen.
 */
export interface TeacherOutScreenResult {
  /** Recognition result: InScreen (on screen) or OutScreen (off screen). */
  Class?: string;
  /** Detection box height. */
  Height?: number;
  /** Detection box left coordinate. */
  Left?: number;
  /** Detection box top coordinate. */
  Top?: number;
  /** Detection box width. */
  Width?: number;
}
/**
 * Information about each word in the current sentence.
 */
export interface Word {
  /** Confidence of the current word. */
  Confidence?: number;
  /** Start time of the word's audio, in ms. */
  Mbtm?: number;
  /** End time of the word's audio, in ms. */
  Metm?: number;
  /** The word itself. */
  Text?: string;
  /** Byte size of the word. */
  Wsize?: number;
}
/**
 * DeleteVocab request structure.
 */
export interface DeleteVocabRequest {
  /** Name of the vocabulary library to delete words from. */
  VocabLibName: string;
  /** List of words to delete. */
  VocabList: Array<string>;
}
/**
 * Body-action recognition for the large-classroom scene.
 */
export interface ActionInfo {
  /** Body posture result: sit, stand, or sleep. */
  BodyPosture?: ActionType;
  /** Hand-raising result: hand or nothand. */
  Handup?: ActionType;
  /** Head-down result: lookingahead or notlookingahead. */
  LookHead?: ActionType;
  /** Writing result: write or notlookingahead (sic, per API docs — TODO confirm). */
  Writing?: ActionType;
  /** Action image height. */
  Height?: number;
  /** Left coordinate where the action appears. */
  Left?: number;
  /** Top coordinate where the action appears. */
  Top?: number;
  /** Action image width. */
  Width?: number;
}
/**
 * FaceExpressionResult
 */
export interface FaceExpressionResult {
  /** Expression confidence. */
  Confidence?: number;
  /** Expression result: "neutral", "happiness", "angry", "disgust", "fear", "sadness", "surprise", "contempt". */
  Expression?: string;
}
/**
 * Keyword occurrence-count information.
 */
export interface StatInfo {
  /** The keyword from the vocabulary library. */
  Keyword?: string;
  /** Total number of occurrences in the audio. */
  Value?: number;
}
/**
 * Count statistics entry.
 */
export interface ActionCountStatistic {
  /** Count */
  Count: number;
  /** Name */
  Name: string;
}
/**
 * GestureResult
 */
export interface GestureResult {
  /** Result: "USPEAK" (you speak), "LISTEN" (listen to me), "GOOD", "TOOLS" (pick up teaching aid), "OTHERS". */
  Class?: string;
  /** Confidence. */
  Confidence?: number;
  /** Detection box height. */
  Height?: number;
  /** Detection box left coordinate. */
  Left?: number;
  /** Detection box top coordinate. */
  Top?: number;
  /** Detection box width. */
  Width?: number;
}
/**
 * CreateFace response structure.
 */
export interface CreateFaceResponse {
  /** Face-operation result information. */
  FaceInfoSet?: Array<FaceInfo>;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * Teacher body-movement recognition result.
 */
export interface BodyMovementResult {
  /** Confidence. */
  Confidence?: number;
  /** Detection box height. */
  Height?: number;
  /** Detection box left coordinate. */
  Left?: number;
  /** Teacher movement: 1 teach_on_positive_attitude, 2 point_to_the_blackboard, 3 writing_blackboard, 4 other. */
  Movements?: string;
  /** Detection box top coordinate. */
  Top?: number;
  /** Detection box width. */
  Width?: number;
}
/**
 * DescribeAITaskResult response structure.
 */
export interface DescribeAITaskResultResponse {
  /** Audio analysis result. */
  AudioResult?: StandardAudioResult;
  /** Image analysis result. */
  ImageResult?: StandardImageResult;
  /** Video analysis result. */
  VideoResult?: StandardVideoResult;
  /** Task status. */
  Status?: string;
  /** Unique task id; returned in URL mode when submitting, used to poll results. */
  TaskId?: number;
  /** Unique request id, returned with every request; provide it when reporting issues. */
  RequestId?: string;
}
/**
 * DescribeAudioTask返回参数结构体
 */
export interface DescribeAudioTaskResponse {
  /**
   * 如果请求中开启了静音检测开关,则会返回所有的静音片段(静音时长超过阈值的片段)。
   */
  AllMuteSlice?: AllMuteSlice;
  /**
   * 返回的当前音频的统计信息。当进度为100时返回。
   */
  AsrStat?: ASRStat;
  /**
   * 返回当前音频流的详细信息,如果是流模式,返回的是对应流的详细信息,如果是
URL模式,返回的是查询的那一段seq对应的音频的详细信息。 */ Texts?: Array<WholeTextItem>; /** * 返回词汇库中的单词出现的详细时间信息。 */ VocabAnalysisDetailInfo?: Array<VocabDetailInfomation>; /** * 返回词汇库中的单词出现的次数信息。 */ VocabAnalysisStatInfo?: Array<VocabStatInfomation>; /** * 返回音频全部文本。 */ AllTexts?: string; /** * 音频任务唯一id。在URL方式时提交请求后会返回一个jobid,后续查询该url的结果时使用这个jobid进行查询。 */ JobId?: number; /** * 返回的当前处理进度。 */ Progress?: number; /** * 结果总数 */ TotalCount?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 时长占比统计结果 */ export interface ActionDurationRatioStatistic { /** * 名称 */ Name: string; /** * 比例 */ Ratio: number; } /** * DescribePerson请求参数结构体 */ export interface DescribePersonRequest { /** * 人员库唯一标识符 */ LibraryId: string; /** * 人员唯一标识符 */ PersonId: string; } /** * 标准化接口图像分析结果 */ export interface StandardVideoResult { /** * 分析完成后的统计结果 */ HighlightsInfo?: Array<HighlightsInfomation>; /** * 状态描述 */ Message?: string; /** * 任务状态 */ Status?: string; } /** * ModifyLibrary返回参数结构体 */ export interface ModifyLibraryResponse { /** * 人员库唯一标识符 */ LibraryId?: string; /** * 人员库名称 */ LibraryName?: string; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * TimeInfoResult */ export interface TimeInfoResult { /** * 持续时间,单位毫秒 */ Duration?: number; /** * 结束时间戳,单位毫秒 */ EndTs?: number; /** * 开始时间戳,单位毫秒 */ StartTs?: number; } /** * SubmitPartialBodyClassTask返回参数结构体 */ export interface SubmitPartialBodyClassTaskResponse { /** * 图像任务直接返回结果,包括: FaceAttr、 FaceExpression、 FaceIdentify、 FaceInfo、 FacePose、 Gesture 、 Light、 TimeInfo */ ImageResults?: Array<ImageTaskResult>; /** * 任务ID */ TaskId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 当前音频的统计结果 */ export interface ASRStat { /** * 当前音频的平均语速 */ AvgSpeed?: number; /** * Vad的平均音量 */ AvgVolume?: number; /** * Vad的最大音量 */ MaxVolume?: number; /** * Vad的最小音量 */ MinVolume?: number; /** * 当前音频的非发音时长 */ MuteDuration?: number; /** * 当前音频的发音时长 */ SoundDuration?: number; /** * 当前音频的总时长 
*/ TotalDuration?: number; /** * 当前音频的句子总数 */ VadNum?: number; /** * 当前音频的单词总数 */ WordNum?: number; } /** * 当前句子的信息 */ export interface TextItem { /** * 当前句子包含的所有单词信息 */ Words?: Array<Word>; /** * 当前句子的置信度 */ Confidence?: number; /** * 当前句子语音的起始时间点,单位为ms */ Mbtm?: number; /** * 当前句子语音的终止时间点,单位为ms */ Metm?: number; /** * 保留参数,暂无意义 */ Tag?: number; /** * 当前句子 */ Text?: string; /** * 当前句子的字节数 */ TextSize?: number; } /** * CancelTask请求参数结构体 */ export interface CancelTaskRequest { /** * 待取消任务标志符。 */ JobId?: number; } /** * 如果请求中开启了静音检测开关,则会返回所有的静音片段(静音时长超过阈值的片段)。 */ export interface AllMuteSlice { /** * 所有静音片段。 */ MuteSlice?: Array<MuteSlice>; /** * 静音时长占比。 */ MuteRatio?: number; /** * 静音总时长。 */ TotalMuteDuration?: number; } /** * 双路混流视频集锦开关项 */ export interface DoubleVideoFunction { /** * 片头片尾增加图片开关 */ EnableCoverPictures?: boolean; } /** * SubmitPartialBodyClassTask请求参数结构体 */ export interface SubmitPartialBodyClassTaskRequest { /** * 输入分析对象内容,输入数据格式参考FileType参数释义 */ FileContent: string; /** * 输入分析对象类型,picture_url:图片地址,vod_url:视频地址,live_url:直播地址,picture: 图片二进制数据的BASE64编码 */ FileType: string; /** * 音频源的语言,默认0为英文,1为中文 */ Lang?: number; /** * 查询人员库列表,可填写老师的注册照所在人员库 */ LibrarySet?: Array<string>; /** * 视频评估时间,单位秒,点播场景默认值为2小时(无法探测长度时)或完整视频,直播场景默认值为10分钟或直播提前结束 */ MaxVideoDuration?: number; /** * 识别词库名列表,这些词汇库用来维护关键词,评估老师授课过程中,对这些关键词的使用情况 */ VocabLibNameList?: Array<string>; /** * 语音编码类型 1:pcm,当FileType为vod_url或live_url时为必填 */ VoiceEncodeType?: number; /** * 语音文件类型 10:视频(三种音频格式目前仅支持16k采样率16bit),当FileType为vod_url或live_url时为必填 */ VoiceFileType?: number; } /** * DescribeHighlightResult返回参数结构体 */ export interface DescribeHighlightResultResponse { /** * 精彩集锦详细信息。 */ HighlightsInfo?: Array<HighlightsInfomation>; /** * 精彩集锦任务唯一id。在URL方式时提交请求后会返回一个JobId,后续查询该url的结果时使用这个JobId进行查询。 */ JobId?: number; /** * 任务的进度百分比,100表示任务已完成。 */ Progress?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * SubmitOneByOneClassTask返回参数结构体 */ export interface 
SubmitOneByOneClassTaskResponse { /** * 图像任务直接返回结果,包括:FaceAttr、 FaceExpression、 FaceIdentify、 FaceInfo、 FacePose、TimeInfo */ ImageResults?: Array<ImageTaskResult>; /** * 任务ID */ TaskId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 光照标准,结构的相关示例为: { "Name":"dark", "Range":[0,30] } 当光照的区间落入在0到30的范围时,就会命中dark的光照标准 */ export interface LightStandard { /** * 光照名称 */ Name: string; /** * 范围 */ Range: Array<number>; } /** * SubmitHighlights请求参数结构体 */ export interface SubmitHighlightsRequest { /** * 表情配置开关项。 */ Functions: HLFunction; /** * 视频url。 */ FileContent: string; /** * 视频类型及来源,目前只支持点播类型:"vod_url"。 */ FileType: string; /** * 需要检索的人脸合集库,不在库中的人脸将不参与精彩集锦。 */ LibIds: Array<string>; /** * 视频处理的抽帧间隔,单位毫秒。建议留空。 */ FrameInterval?: number; /** * 关键词语言类型,0为英文,1为中文。 */ KeywordsLanguage?: number; /** * 关键词数组,当且仅当Funtions中的EnableKeywordWonderfulTime为true时有意义,匹配相应的关键字。 */ KeywordsStrings?: Array<string>; /** * 处理视频的总时长,单位毫秒。该值为0或未设置时,默认值两小时生效;当该值大于视频实际时长时,视频实际时长生效;当该值小于视频实际时长时,该值生效;当获取视频实际时长失败时,若该值设置则生效,否则默认值生效。建议留空。 */ MaxVideoDuration?: number; /** * 人脸检索的相似度阈值,默认值0.89。建议留空。 */ SimThreshold?: number; } /** * ModifyPerson返回参数结构体 */ export interface ModifyPersonResponse { /** * 人脸信息 */ FaceInfoSet?: Array<FaceInfo>; /** * 人员所属人员库标识符 */ LibraryId?: string; /** * 人员唯一标识符 */ PersonId?: string; /** * 人员名称 */ PersonName?: string; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 人员信息 */ export interface PersonInfo { /** * 需要匹配的人员的ID列表。 */ PersonId: string; /** * 视频集锦开始封面照片。 */ CoverBeginUrl?: string; /** * 视频集锦结束封面照片。 */ CoverEndUrl?: string; } /** * 人脸描述 */ export interface Face { /** * 人脸唯一标识符 */ FaceId: string; /** * 人脸图片 URL */ FaceUrl: string; /** * 人员唯一标识符 */ PersonId: string; } /** * 人员描述 */ export interface Person { /** * 人员库唯一标识符 */ LibraryId: string; /** * 人员唯一标识符 */ PersonId: string; /** * 人员名称 */ PersonName: string; /** * 创建时间 */ CreateTime?: string; /** * 工作号码 */ JobNumber?: string; /** * 邮箱 */ Mail?: 
string; /** * 性别 */ Male?: number; /** * 电话号码 */ PhoneNumber?: string; /** * 学生号码 */ StudentNumber?: string; /** * 修改时间 */ UpdateTime?: string; } /** * 光照强度比例统计结果 */ export interface LightLevelRatioStatistic { /** * 名称 */ Level: string; /** * 比例 */ Ratio: number; } /** * DescribeVocab返回参数结构体 */ export interface DescribeVocabResponse { /** * 词汇列表 */ VocabNameSet?: Array<string>; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * DescribeImageTaskStatistic返回参数结构体 */ export interface DescribeImageTaskStatisticResponse { /** * 任务统计信息 */ Statistic?: ImageTaskStatistic; /** * 图像任务唯一标识符 */ JobId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * SubmitCheckAttendanceTask请求参数结构体 */ export interface SubmitCheckAttendanceTaskRequest { /** * 输入数据 */ FileContent: string; /** * 视频流类型,vod_url表示点播URL,live_url表示直播URL,默认vod_url */ FileType: string; /** * 人员库 ID列表 */ LibraryIds: Array<string>; /** * 确定出勤阈值;默认为0.92 */ AttendanceThreshold?: number; /** * 是否开启陌生人模式,陌生人模式是指在任务中发现的非注册人脸库中的人脸也返回相关统计信息,默认不开启 */ EnableStranger?: boolean; /** * 考勤结束时间(到视频的第几秒结束考勤),单位秒;默认为900 对于直播场景,使用绝对时间戳,单位秒,默认当前时间往后12小时 */ EndTime?: number; /** * 通知回调地址,要求方法为post,application/json格式 */ NoticeUrl?: string; /** * 考勤开始时间(从视频的第几秒开始考勤),单位秒;默认为0 对于直播场景,使用绝对时间戳,单位秒,默认当前时间 */ StartTime?: number; /** * 识别阈值;默认为0.8 */ Threshold?: number; } /** * CancelTask返回参数结构体 */ export interface CancelTaskResponse { /** * 取消任务标志符。 */ JobId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 学生肢体动作结果 */ export interface StudentBodyMovementResult { /** * 置信度(已废弃) */ Confidence?: number; /** * 举手识别结果置信度 */ HandupConfidence?: number; /** * 举手识别结果,包含举手(handup)和未举手(nothandup) */ HandupStatus?: string; /** * 识别结果高度 */ Height?: number; /** * 识别结果左坐标 */ Left?: number; /** * 动作识别结果(已废弃) */ Movements?: string; /** * 站立识别结果置信度 */ StandConfidence?: number; /** * 站立识别结果,包含站立(stand)和坐着(sit) */ StandStatus?: string; /** * 识别结果顶坐标 */ Top?: 
number; /** * 识别结果宽度 */ Width?: number; } /** * 缺勤人员信息 */ export interface AbsenceInfo { /** * 识别到的人员所在的库id */ LibraryIds?: string; /** * 识别到的人员id */ PersonId?: string; } /** * 图像任务控制选项 */ export interface ImageTaskFunction { /** * 大教室场景学生肢体动作识别选项 */ EnableActionClass?: boolean; /** * 人脸检测选项(默认为true,目前不可编辑) */ EnableFaceDetect?: boolean; /** * 人脸表情识别选项 */ EnableFaceExpression?: boolean; /** * 人脸检索选项(默认为true,目前不可编辑) */ EnableFaceIdentify?: boolean; /** * 手势选项 */ EnableGesture?: boolean; /** * 优图手势选项(该功能尚未支持) */ EnableHandTracking?: boolean; /** * 光照选项 */ EnableLightJudge?: boolean; /** * 小班课场景学生肢体动作识别选项 */ EnableStudentBodyMovements?: boolean; /** * 教师动作选项(该功能尚未支持) */ EnableTeacherBodyMovements?: boolean; /** * 判断老师是否在屏幕中(该功能尚未支持) */ EnableTeacherOutScreen?: boolean; } /** * 人员信息 */ export interface FrameInfo { /** * 相似度 */ Similarity?: number; /** * 截图的存储地址 */ SnapshotUrl?: string; /** * 相对于视频起始时间的时间戳,单位秒 */ Ts?: number; } /** * 人员库描述 */ export interface Library { /** * 人员库创建时间 */ CreateTime: string; /** * 人员库唯一标识符 */ LibraryId: string; /** * 人员库名称 */ LibraryName: string; /** * 人员库人员数量 */ PersonCount?: number; /** * 人员库修改时间 */ UpdateTime?: string; } /** * SubmitConversationTask请求参数结构体 */ export interface SubmitConversationTaskRequest { /** * 音频源的语言,默认0为英文,1为中文 */ Lang: number; /** * 学生音频流 */ StudentUrl: string; /** * 教师音频流 */ TeacherUrl: string; /** * 语音编码类型 1:pcm */ VoiceEncodeType: number; /** * 语音文件类型 1:raw, 2:wav, 3:mp3(三种格式目前仅支持16k采样率16bit) */ VoiceFileType: number; /** * 功能开关列表,表示是否需要打开相应的功能,返回相应的信息 */ Functions?: Function; /** * 识别词库名列表,评估过程使用这些词汇库中的词汇进行词汇使用行为分析 */ VocabLibNameList?: Array<string>; } /** * SubmitHighlights返回参数结构体 */ export interface SubmitHighlightsResponse { /** * 视频拆条任务ID,用来唯一标识视频拆条任务。 */ JobId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * DeletePerson返回参数结构体 */ export interface DeletePersonResponse { /** * 人脸信息 */ FaceInfoSet?: Array<FaceInfo>; /** * 人员库唯一标识符 */ LibraryId?: string; /** * 人员唯一标识符 
*/ PersonId?: string; /** * 人员名称 */ PersonName?: string; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 图像任务统计结果 */ export interface ImageTaskStatistic { /** * 人员检测统计信息 */ FaceDetect?: Array<FaceDetectStatistic>; /** * 人脸表情统计信息 */ FaceExpression?: Array<FaceExpressStatistic>; /** * 人脸检索统计信息 */ FaceIdentify?: Array<FaceIdentifyStatistic>; /** * 姿势识别统计信息 */ Gesture?: ActionStatistic; /** * 手势识别统计信息 */ Handtracking?: ActionStatistic; /** * 光照统计信息 */ Light?: LightStatistic; /** * 学生动作统计信息 */ StudentMovement?: ActionStatistic; /** * 教师动作统计信息 */ TeacherMovement?: ActionStatistic; } /** * 疑似出席人员 */ export interface SuspectedInfo { /** * TopN匹配信息列表 */ FaceSet?: Array<FrameInfo>; /** * 识别到的人员id */ PersonId?: string; } /** * CheckFacePhoto返回参数结构体 */ export interface CheckFacePhotoResponse { /** * 人脸检查结果,0:通过检查,1:图片模糊 */ CheckResult?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 统计结果 */ export interface ActionStatistic { /** * 数量统计 */ ActionCount?: Array<ActionCountStatistic>; /** * 时长统计 */ ActionDuration?: Array<ActionDurationStatistic>; /** * 时长比例统计 */ ActionDurationRatio?: Array<ActionDurationRatioStatistic>; } /** * DescribeHighlightResult请求参数结构体 */ export interface DescribeHighlightResultRequest { /** * 精彩集锦任务唯一id。在URL方式时提交请求后会返回一个JobId,后续查询该url的结果时使用这个JobId进行查询。 */ JobId: number; } /** * DescribeVocab请求参数结构体 */ export interface DescribeVocabRequest { /** * 要查询词汇的词汇库名 */ VocabLibName: string; } /** * DeleteVocabLib返回参数结构体 */ export interface DeleteVocabLibResponse { /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 图像任务结果 */ export interface ImageTaskResult { /** * 大教室场景学生肢体动作识别信息 */ ActionInfo?: ActionInfo; /** * 属性识别结果 */ FaceAttr?: FaceAttrResult; /** * 表情识别结果 */ FaceExpression?: FaceExpressionResult; /** * 人脸检索结果 */ FaceIdentify?: FaceIdentifyResult; /** * 人脸检测结果 */ FaceInfo?: FaceInfoResult; /** * 姿势识别结果 */ FacePose?: FacePoseResult; /** * 动作分类结果 */ Gesture?: 
GestureResult; /** * 手势分类结果 */ HandTracking?: HandTrackingResult; /** * 光照识别结果 */ Light?: LightResult; /** * 学生肢体动作识别结果 */ StudentBodyMovement?: StudentBodyMovementResult; /** * 老师肢体动作识别结果 */ TeacherBodyMovement?: BodyMovementResult; /** * 教师是否在屏幕内判断结果 */ TeacherOutScreen?: TeacherOutScreenResult; /** * 时间统计结果 */ TimeInfo?: TimeInfoResult; } /** * SubmitImageTaskPlus返回参数结构体 */ export interface SubmitImageTaskPlusResponse { /** * 识别结果 */ ResultSet?: Array<ImageTaskResult>; /** * 任务标识符 */ JobId?: number; /** * 任务进度 */ Progress?: number; /** * 结果总数目 */ TotalCount?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 词汇库中的单词出现在音频中的总次数信息 */ export interface VocabStatInfomation { /** * 单词出现在该音频中总次数 */ VocabDetailInfo?: Array<StatInfo>; /** * 词汇库名称 */ VocabLibName?: string; } /** * SubmitDoubleVideoHighlights返回参数结构体 */ export interface SubmitDoubleVideoHighlightsResponse { /** * 视频拆条任务ID,用来唯一标识视频拆条任务。 */ JobId?: number; /** * 未注册的人员ID列表。若出现此项,代表评估出现了问题,输入的PersonId中有不在库中的人员ID。 */ NotRegistered?: Array<string>; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 功能开关列表,表示是否需要打开相应的功能,返回相应的信息 */ export interface Function { /** * 输出全部文本标识,当该值设置为true时,会输出当前音频的全部文本 */ EnableAllText?: boolean; /** * 输出关键词信息标识,当该值设置为true时,会输出当前音频的关键词信息。 */ EnableKeyword?: boolean; /** * 静音检测标识,当设置为 true 时,需要设置静音时间阈值字段mute_threshold,统计结果中会返回静音片段。 */ EnableMuteDetect?: boolean; /** * 输出音频统计信息标识,当设置为 true 时,任务查询结果会输出音频的统计信息(AsrStat) */ EnableVadInfo?: boolean; /** * 输出音频音量信息标识,当设置为 true 时,会输出当前音频音量信息。 */ EnableVolume?: boolean; } /** * DescribeAudioTask请求参数结构体 */ export interface DescribeAudioTaskRequest { /** * 音频任务唯一id。在URL方式时提交请求后会返回一个jobid,后续查询该url的结果时使用这个jobid进行查询。 */ JobId: number; /** * 限制数目 */ Limit?: number; /** * 偏移量 */ Offset?: number; } /** * DescribeAttendanceResult返回参数结构体 */ export interface DescribeAttendanceResultResponse { /** * 缺失人员的ID列表(只针对请求中的libids字段) */ AbsenceSetInLibs?: Array<AbsenceInfo>; /** * 确定出勤人员列表 */ 
AttendanceSet?: Array<AttendanceInfo>; /** * 疑似出勤人员列表 */ SuspectedSet?: Array<SuspectedInfo>; /** * 缺失人员的ID列表(只针对请求中的personids字段) */ AbsenceSet?: Array<string>; /** * 请求处理进度 */ Progress?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 人脸表情统计结果 */ export interface FaceExpressStatistic { /** * 人员唯一标识符 */ PersonId: string; /** * 表情统计结果 */ ExpressRatio?: Array<ExpressRatioStatistic>; } /** * AIAssistant请求参数结构体 */ export interface AIAssistantRequest { /** * 输入分析对象内容,输入数据格式参考FileType参数释义 */ FileContent: string; /** * 输入分析对象类型,picture_url:图片地址,vod_url:视频地址,live_url:直播地址,audio_url: 音频文件,picture:图片二进制数据的BASE64编码 */ FileType: string; /** * 音频源的语言,默认0为英文,1为中文 */ Lang?: number; /** * 查询人员库列表 */ LibrarySet?: Array<string>; /** * 视频评估时间,单位秒,点播场景默认值为2小时(无法探测长度时)或完整视频,直播场景默认值为10分钟或直播提前结束 */ MaxVideoDuration?: number; /** * 标准化模板选择:0:AI助教基础版本,1:AI评教基础版本,2:AI评教标准版本。AI 助教基础版本功能包括:人脸检索、人脸检测、人脸表情识别、学生动作选项,音频信息分析,微笑识别。AI 评教基础版本功能包括:人脸检索、人脸检测、人脸表情识别、音频信息分析。AI 评教标准版功能包括人脸检索、人脸检测、人脸表情识别、手势识别、音频信息分析、音频关键词分析、视频精彩集锦分析。 */ Template?: number; /** * 识别词库名列表,评估过程使用这些词汇库中的词汇进行词汇使用行为分析 */ VocabLibNameList?: Array<string>; /** * 语音编码类型 1:pcm */ VoiceEncodeType?: number; /** * 语音文件类型 1:raw, 2:wav, 3:mp3,10:视频(三种音频格式目前仅支持16k采样率16bit) */ VoiceFileType?: number; } /** * DescribeLibraries请求参数结构体 */ export declare type DescribeLibrariesRequest = null; /** * SubmitFullBodyClassTask返回参数结构体 */ export interface SubmitFullBodyClassTaskResponse { /** * 图像任务直接返回结果,包括: FaceAttr、 FaceExpression、 FaceIdentify、 FaceInfo、 FacePose、 TeacherBodyMovement、TimeInfo */ ImageResults?: Array<ImageTaskResult>; /** * 任务ID */ TaskId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * ModifyPerson请求参数结构体 */ export interface ModifyPersonRequest { /** * 人员库唯一标识符 */ LibraryId: string; /** * 人员唯一标识符 */ PersonId: string; /** * 人员工作号码 */ JobNumber?: string; /** * 人员邮箱 */ Mail?: string; /** * 人员性别 */ Male?: number; /** * 人员名称 */ PersonName?: string; /** * 
人员电话号码 */ PhoneNumber?: string; /** * 人员学生号码 */ StudentNumber?: string; } /** * DescribeImageTask返回参数结构体 */ export interface DescribeImageTaskResponse { /** * 任务处理结果 */ ResultSet?: Array<ImageTaskResult>; /** * 任务唯一标识 */ JobId?: number; /** * 任务执行进度 */ Progress?: number; /** * 任务结果数目 */ TotalCount?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 标准化接口图像分析结果 */ export interface StandardAudioResult { /** * 返回的当前音频的统计信息。当进度为100时返回。 */ AsrStat?: ASRStat; /** * 返回当前音频流的详细信息,如果是流模式,返回的是对应流的详细信息,如果是 URL模式,返回的是查询的那一段seq对应的音频的详细信息。 */ Texts?: Array<WholeTextItem>; /** * 返回词汇库中的单词出现的详细时间信息。 */ VocabAnalysisDetailInfo?: Array<VocabDetailInfomation>; /** * 返回词汇库中的单词出现的次数信息。 */ VocabAnalysisStatInfo?: Array<VocabStatInfomation>; /** * 状态描述 */ Message?: string; /** * 任务状态 */ Status?: string; /** * 结果数量 */ TotalCount?: number; } /** * HandTrackingResult */ export interface HandTrackingResult { /** * 识别结果 */ Class?: string; /** * 置信度 */ Confidence?: number; /** * 识别结果高度 */ Height?: number; /** * 识别结果左坐标 */ Left?: number; /** * 识别结果顶坐标 */ Top?: number; /** * 识别结果宽度 */ Width?: number; } /** * SubmitOpenClassTask请求参数结构体 */ export interface SubmitOpenClassTaskRequest { /** * 输入分析对象内容,输入数据格式参考FileType参数释义 */ FileContent: string; /** * 输入分析对象类型,picture_url:图片地址,vod_url:视频地址,live_url:直播地址,picture: 图片二进制数据的BASE64编码 */ FileType: string; /** * 查询人员库列表,可填写学生们的注册照所在人员库 */ LibrarySet?: Array<string>; /** * 视频评估时间,单位秒,点播场景默认值为2小时(无法探测长度时)或完整视频,直播场景默认值为10分钟或直播提前结束 */ MaxVideoDuration?: number; } /** * SubmitAudioTask请求参数结构体 */ export interface SubmitAudioTaskRequest { /** * 音频源的语言,默认0为英文,1为中文 */ Lang: number; /** * 音频URL。客户请求为URL方式时必须带此字段指名音频的url。 */ Url: string; /** * 语音编码类型 1:pcm */ VoiceEncodeType: number; /** * 语音文件类型 1:raw, 2:wav, 3:mp3,10:视频(三种音频格式目前仅支持16k采样率16bit) */ VoiceFileType: number; /** * 功能开关列表,表示是否需要打开相应的功能,返回相应的信息 */ Functions?: Function; /** * 视频文件类型,默认点播,直播填 live_url */ FileType?: string; /** * 
静音阈值设置,如果静音检测开关开启,则静音时间超过这个阈值认为是静音片段,在结果中会返回, 没给的话默认值为3s */ MuteThreshold?: number; /** * 识别词库名列表,评估过程使用这些词汇库中的词汇进行词汇使用行为分析 */ VocabLibNameList?: Array<string>; } /** * CreateVocab请求参数结构体 */ export interface CreateVocabRequest { /** * 要添加词汇的词汇库名 */ VocabLibName: string; /** * 要添加的词汇列表 */ VocabList: Array<string>; } /** * TransmitAudioStream请求参数结构体 */ export interface TransmitAudioStreamRequest { /** * 功能开关列表,表示是否需要打开相应的功能,返回相应的信息 */ Functions: Function; /** * 流式数据包的序号,从1开始,当IsEnd字段为1后后续序号无意义。 */ SeqId: number; /** * 语音段唯一标识,一个完整语音一个SessionId。 */ SessionId: string; /** * 当前数据包数据, 流式模式下数据包大小可以按需设置,在网络良好的情况下,建议设置为0.5k,且必须保证分片帧完整(16bit的数据必须保证音频长度为偶数),编码格式要求为BASE64。 */ UserVoiceData: string; /** * 语音编码类型 1:pcm。 */ VoiceEncodeType: number; /** * 语音文件类型 1: raw, 2: wav, 3: mp3 (语言文件格式目前仅支持 16k 采样率 16bit 编码单声道,如有不一致可能导致评估不准确或失败)。 */ VoiceFileType: number; /** * 是否传输完毕标志,若为0表示未完毕,若为1则传输完毕开始评估,非流式模式下无意义。 */ IsEnd?: number; /** * 音频源的语言,默认0为英文,1为中文 */ Lang?: number; /** * 是否临时保存 音频链接 */ StorageMode?: number; /** * 识别词库名列表,评估过程使用这些词汇库中的词汇进行词汇使用行为分析 */ VocabLibNameList?: Array<string>; } /** * FaceAttrResult */ export interface FaceAttrResult { /** * 年龄 */ Age?: number; /** * 性别 */ Sex?: string; } /** * SubmitTraditionalClassTask返回参数结构体 */ export interface SubmitTraditionalClassTaskResponse { /** * 图像任务直接返回结果,包括: ActionInfo、FaceAttr、 FaceExpression、 FaceIdentify、 FaceInfo、 FacePose、 TimeInfo */ ImageResults?: Array<ImageTaskResult>; /** * 任务ID */ TaskId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * AIAssistant返回参数结构体 */ export interface AIAssistantResponse { /** * 图像任务直接返回结果 */ ImageResults?: Array<ImageTaskResult>; /** * 任务ID */ TaskId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * SubmitImageTask返回参数结构体 */ export interface SubmitImageTaskResponse { /** * 识别结果 */ ResultSet?: Array<ImageTaskResult>; /** * 任务标识符 */ JobId?: number; /** * 任务进度 */ Progress?: number; /** * 结果总数目 */ TotalCount?: 
number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * CreateVocabLib请求参数结构体 */ export interface CreateVocabLibRequest { /** * 词汇库名称 */ VocabLibName: string; } /** * CreateLibrary返回参数结构体 */ export interface CreateLibraryResponse { /** * 人员库唯一标识符 */ LibraryId?: string; /** * 人员库名称 */ LibraryName?: string; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * FaceIdentifyResult */ export interface FaceIdentifyResult { /** * 人脸标识符 */ FaceId: string; /** * 人员库标识符 */ LibraryId: string; /** * 人员标识符 */ PersonId: string; /** * 相似度 */ Similarity: number; } /** * SubmitAudioTask返回参数结构体 */ export interface SubmitAudioTaskResponse { /** * 查询结果时指名的jobid。在URL方式时提交请求后会返回一个jobid,后续查询该url的结果时使用这个jobid进行查询。 */ JobId?: number; /** * 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 */ RequestId?: string; } /** * 所有静音片段。 */ export interface MuteSlice { /** * 起始时间。 */ MuteBtm?: number; /** * 终止时间。 */ MuteEtm?: number; } /** * DescribeConversationTask请求参数结构体 */ export interface DescribeConversationTaskRequest { /** * 音频任务唯一id。在URL方式时提交请求后会返回一个jobid,后续查询该url的结果时使用这个jobid进行查询。 */ JobId: number; /** * 要查询明细的流的身份,1 老师 2 学生 */ Identity?: number; /** * 限制数目 */ Limit?: number; /** * 偏移量 */ Offset?: number; } /** * 词汇库中的单词出现在音频中的那个句子的起始时间和结束时间信息 */ export interface VocabDetailInfomation { /** * 词汇库中的单词出现在该音频中的那个句子的时间戳,出现了几次,就返回对应次数的起始和结束时间戳 */ VocabDetailInfo?: Array<DetailInfo>; /** * 词汇库名 */ VocabLibName?: string; } /** * CreateLibrary请求参数结构体 */ export interface CreateLibraryRequest { /** * 人员库名称 */ LibraryName: string; /** * 人员库唯一标志符,为空则系统自动生成。 */ LibraryId?: string; } /** * FaceInfoResult */ export interface FaceInfoResult { /** * 人脸尺寸的占比 */ FaceRatio?: number; /** * 帧高度 */ FrameHeight?: number; /** * 帧宽度 */ FrameWidth?: number; /** * 人脸高度 */ Height?: number; /** * 人脸左坐标 */ Left?: number; /** * 人脸顶坐标 */ Top?: number; /** * 人脸宽度 */ Width?: number; } /** * 人脸监测统计信息 */ export interface FaceDetectStatistic { /** * 人脸大小占画面平均占比 */ 
FaceSizeRatio?: number; /** * 检测到正脸次数 */ FrontalFaceCount?: number; /** * 正脸时长占比 */ FrontalFaceRatio?: number; /** * 正脸时长在总出现时常占比 */ FrontalFaceRealRatio?: number; /** * 人员唯一标识符 */ PersonId?: string; /** * 检测到侧脸次数 */ SideFaceCount?: number; /** * 侧脸时长占比 */ SideFaceRatio?: number; /** * 侧脸时长在总出现时常占比 */ SideFaceRealRatio?: number; }
the_stack
import { EarningsTree } from "../../lib/earningsTree";
import { utils } from "ethers";

// Pick the earnings snapshot for the configured network. The fallback empty
// string is only safe because encodeClaimSnapshotAndStakingAction returns
// early (before touching the snapshot) for delegators with no pre-LIP-52
// claim rounds.
let earningsSnapshot;
if (process.env.NEXT_PUBLIC_NETWORK === "mainnet") {
  earningsSnapshot = require("../../data/earningsTree");
} else if (process.env.NEXT_PUBLIC_NETWORK === "rinkeby") {
  earningsSnapshot = require("../../data/earningsTree_rinkeby");
} else {
  earningsSnapshot = "";
}

/**
 * Approve an amount for an ERC20 token transfer
 * @param obj
 * @param {string} type - The approval type ("bond" or "createPoll")
 * @param {string} amount - The approval amount
 * @return {Promise} gas estimate, tx hash and the original input args
 */
export async function approve(_obj, _args, _ctx) {
  const { type, amount } = _args;
  let gas;
  let txHash;
  switch (type) {
    case "bond":
      // Approve the BondingManager to spend `amount` LPT.
      gas = await _ctx.livepeer.rpc.estimateGas("LivepeerToken", "approve", [
        _ctx.livepeer.config.contracts.BondingManager.address,
        amount,
      ]);
      txHash = await _ctx.livepeer.rpc.approveTokenBondAmount(amount, {
        gas,
        returnTxHash: true,
      });
      return {
        gas,
        txHash,
        inputData: {
          ..._args,
        },
      };
    case "createPoll":
      // Approve the PollCreator to spend the poll creation cost.
      gas = await _ctx.livepeer.rpc.estimateGas("LivepeerToken", "approve", [
        _ctx.livepeer.config.contracts.PollCreator.address,
        amount,
      ]);
      txHash = await _ctx.livepeer.rpc.approveTokenPollCreationCost(amount, {
        gas,
        returnTxHash: true,
      });
      return {
        gas,
        txHash,
        inputData: {
          ..._args,
        },
      };
    default:
      throw new Error(`Approval type "${type}" is not supported.`);
  }
}

/**
 * Builds calldata that claims a delegator's LIP-52 snapshot earnings and then
 * performs `stakingAction` in the same transaction.
 * @param {Object} _args - Resolver args; reads `lastClaimRound` and `delegator`
 * @param {string} stakingAction - ABI-encoded BondingManager calldata to chain
 *   after the snapshot claim
 * @param {Object} _ctx - Resolver context holding the livepeer SDK
 * @return {Promise<string|null>} combined calldata, or null when no snapshot
 *   claim is needed (already claimed past the LIP-52 round) or the merkle
 *   proof does not verify on-chain
 */
async function encodeClaimSnapshotAndStakingAction(_args, stakingAction, _ctx) {
  const { lastClaimRound, delegator } = _args;
  // `!lastClaimRound` covers undefined, null and 0 — nothing to claim.
  if (!lastClaimRound) {
    return null;
  }
  const LIP52Round = (
    await _ctx.livepeer.rpc.getLipUpgradeRound(52)
  ).toNumber();
  // Delegator already claimed through the LIP-52 round; no snapshot needed.
  if (lastClaimRound > LIP52Round) {
    return null;
  }
  // get pendingStake and pendingFees for delegator
  const [pendingStake, pendingFees] = await Promise.all([
    _ctx.livepeer.rpc.getPendingStake(delegator, LIP52Round),
    _ctx.livepeer.rpc.getPendingFees(delegator, LIP52Round),
  ]);
  // generate the merkle tree from JSON
  const tree = EarningsTree.fromJSON(earningsSnapshot);
  // generate the proof for the (delegator, stake, fees) leaf
  const leaf = utils.defaultAbiCoder.encode(
    ["address", "uint256", "uint256"],
    [delegator, pendingStake, pendingFees]
  );
  const proof = tree.getHexProof(leaf);
  // Verify the proof on-chain before committing to it in the transaction.
  if (
    !(await _ctx.livepeer.rpc.verifySnapshot(
      utils.keccak256(utils.toUtf8Bytes("LIP-52")),
      proof,
      utils.keccak256(leaf)
    ))
  )
    return null;
  return _ctx.livepeer.rpc.getCalldata(
    "BondingManager",
    "claimSnapshotEarnings",
    [pendingStake, pendingFees, proof, stakingAction]
  );
}

/**
 * Shared tail for all BondingManager mutations: optionally wraps the staking
 * calldata in a LIP-52 snapshot claim, estimates gas for the raw call, and
 * broadcasts the transaction.
 * @param {Object} _args - Resolver args (used for claim encoding; echoed back)
 * @param {Object} _ctx - Resolver context holding the livepeer SDK
 * @param {string} stakingCalldata - ABI-encoded BondingManager calldata
 * @return {Promise} gas estimate, tx hash and the original input args
 */
async function sendBondingManagerTx(_args, _ctx, stakingCalldata) {
  const claimData = await encodeClaimSnapshotAndStakingAction(
    _args,
    stakingCalldata,
    _ctx
  );
  const data = claimData ? claimData : stakingCalldata;
  const txParams = {
    ..._ctx.livepeer.config.defaultTx,
    to: _ctx.livepeer.config.contracts["BondingManager"].address,
    data,
  };
  const gas = await _ctx.livepeer.rpc.estimateGasRaw(txParams);
  const txHash = await _ctx.livepeer.rpc.sendTransaction({
    ...txParams,
    gas,
    returnTxHash: true,
  });
  return {
    gas,
    txHash,
    inputData: {
      ..._args,
    },
  };
}

/**
 * Submits a bond transaction for a previously approved amount
 * @param obj
 * @param {string} to - The ETH address of the delegate to bond to
 * @param {string} amount - The approval amount
 * @return {Promise}
 */
export async function bond(_obj, _args, _ctx) {
  const {
    amount,
    to,
    oldDelegateNewPosPrev,
    oldDelegateNewPosNext,
    currDelegateNewPosPrev,
    currDelegateNewPosNext,
  } = _args;
  const data = _ctx.livepeer.rpc.getCalldata("BondingManager", "bondWithHint", [
    amount,
    to,
    oldDelegateNewPosPrev,
    oldDelegateNewPosNext,
    currDelegateNewPosPrev,
    currDelegateNewPosNext,
  ]);
  return sendBondingManagerTx(_args, _ctx, data);
}

/**
 * Submits an unbond transaction
 * @param obj
 * @return {Promise}
 */
export async function unbond(_obj, _args, _ctx) {
  const { amount } = _args;
  const data = _ctx.livepeer.rpc.getCalldata("BondingManager", "unbond", [
    amount,
  ]);
  return sendBondingManagerTx(_args, _ctx, data);
}

/**
 * Submits a rebond transaction
 * @param obj
 * @return {Promise}
 */
export async function rebond(_obj, _args, _ctx) {
  const { unbondingLockId, newPosPrev, newPosNext } = _args;
  const data = _ctx.livepeer.rpc.getCalldata(
    "BondingManager",
    "rebondWithHint",
    [unbondingLockId, newPosPrev, newPosNext]
  );
  return sendBondingManagerTx(_args, _ctx, data);
}

/**
 * Submits a withdrawStake transaction
 * @param obj
 * @return {Promise}
 */
export async function withdrawStake(_obj, _args, _ctx) {
  const { unbondingLockId } = _args;
  const gas = await _ctx.livepeer.rpc.estimateGas(
    "BondingManager",
    "withdrawStake",
    [unbondingLockId]
  );
  const txHash = await _ctx.livepeer.rpc.withdrawStake(unbondingLockId, {
    ..._ctx.livepeer.config.defaultTx,
    gas,
    returnTxHash: true,
  });
  return {
    gas,
    txHash,
    inputData: {
      ..._args,
    },
  };
}

/**
 * Submits a withdrawFees transaction
 * @param obj
 * @return {Promise}
 */
export async function withdrawFees(_obj, _args, _ctx) {
  const data = _ctx.livepeer.rpc.getCalldata(
    "BondingManager",
    "withdrawFees",
    []
  );
  return sendBondingManagerTx(_args, _ctx, data);
}

/**
 * Submits a rebondFromUnbonded transaction
 * @param obj
 * @return {Promise}
 */
export async function rebondFromUnbonded(_obj, _args, _ctx) {
  const { delegate, unbondingLockId, newPosPrev, newPosNext } = _args;
  const data = _ctx.livepeer.rpc.getCalldata(
    "BondingManager",
    "rebondFromUnbondedWithHint",
    [delegate, unbondingLockId, newPosPrev, newPosNext]
  );
  return sendBondingManagerTx(_args, _ctx, data);
}

/**
 * Submits a round initialization transaction
 * @param obj
 * @return {Promise}
 */
export async function initializeRound(_obj, _args, _ctx) {
  const gas = await _ctx.livepeer.rpc.estimateGas(
    "RoundsManager",
    "initializeRound",
    []
  );
  const txHash = await _ctx.livepeer.rpc.initializeRound({
    gas,
    returnTxHash: true,
  });
  return {
    gas,
    txHash,
    inputData: {
      ..._args,
    },
  };
}

/**
 * Creates a poll
 * @param obj
 * @return {Promise}
 */
export async function createPoll(_obj, _args, _ctx) {
  const Utils = require("web3-utils");
  const { proposal } = _args;
  const gas = await _ctx.livepeer.rpc.estimateGas("PollCreator", "createPoll", [
    Utils.fromAscii(proposal),
  ]);
  const txHash = await _ctx.livepeer.rpc.createPoll(Utils.fromAscii(proposal), {
    ..._ctx.livepeer.config.defaultTx,
    gas,
    returnTxHash: true,
  });
  return {
    gas,
    txHash,
    inputData: {
      ..._args,
    },
  };
}

/**
 * Vote in a poll
 * @param obj
 * @return {Promise}
 */
export async function vote(_obj, _args, _ctx) {
  const { pollAddress, choiceId } = _args;
  const gas = await _ctx.livepeer.rpc.estimateGas("Poll", "vote", [choiceId]);
  const txHash = await _ctx.livepeer.rpc.vote(pollAddress, choiceId, {
    ..._ctx.livepeer.config.defaultTx,
    gas,
    returnTxHash: true,
  });
  return {
    gas,
    txHash,
    inputData: {
      ..._args,
    },
  };
}

/**
 * Update's a user's 3box space
 * @param obj
 * @return {Promise}
 */
export async function updateProfile(_obj, _args, _ctx) {
  const address = _ctx.address.toLowerCase();
  const box = _ctx.box;
  const space = await box.openSpace("livepeer");
  if (_args.proof) {
    await box.linkAddress({
      proof: _args.proof,
    });
  }
  // Whitelist the profile fields a caller is allowed to set.
  const allowed = ["name", "website", "description", "image", "defaultProfile"];
  const filtered = Object.keys(_args)
    .filter((key) => allowed.includes(key))
    .reduce((obj, key) => {
      obj[key] = _args[key];
      return obj;
    }, {});
  await space.public.setMultiple(
    Object.keys(filtered),
    Object.values(filtered)
  );
  return {
    id: address,
    ...filtered,
  };
}

/**
 * Unlink an external account from a user's 3box
 * @param obj
 * @return {Promise}
 */
export async function removeAddressLink(_obj, _args, _ctx) {
  const address = _args.address.toLowerCase();
  const box = _ctx.box;
  await box.removeAddressLink(address);
}
the_stack
// Type definitions for libsodium-wrappers 0.7 // Project: https://github.com/jedisct1/libsodium.js // Definitions by: Florian Keller <https://github.com/ffflorian> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 2.1 // Copyright 2020-present the denosaurs team. All rights reserved. MIT license. export type Uint8ArrayOutputFormat = "uint8array"; export type StringOutputFormat = "text" | "hex" | "base64"; export type KeyType = "curve25519" | "ed25519" | "x25519"; export enum base64_variants { ORIGINAL, ORIGINAL_NO_PADDING, URLSAFE, URLSAFE_NO_PADDING, } export interface CryptoBox { ciphertext: Uint8Array; mac: Uint8Array; } export interface StringCryptoBox { ciphertext: string; mac: string; } export interface CryptoKX { sharedRx: Uint8Array; sharedTx: Uint8Array; } export interface StringCryptoKX { sharedRx: string; sharedTx: string; } export interface KeyPair { keyType: KeyType; privateKey: Uint8Array; publicKey: Uint8Array; } export interface StringKeyPair { keyType: KeyType; privateKey: string; publicKey: string; } export interface SecretBox { cipher: Uint8Array; mac: Uint8Array; } export interface StringSecretBox { cipher: string; mac: string; } export interface StateAddress { name: string; } export interface MessageTag { message: Uint8Array; tag: number; } export interface StringMessageTag { message: string; tag: number; } export interface Sodium { readonly crypto_aead_chacha20poly1305_ABYTES: number; readonly crypto_aead_chacha20poly1305_ietf_ABYTES: number; readonly crypto_aead_chacha20poly1305_ietf_KEYBYTES: number; readonly crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX: number; readonly crypto_aead_chacha20poly1305_ietf_NPUBBYTES: number; readonly crypto_aead_chacha20poly1305_ietf_NSECBYTES: number; readonly crypto_aead_chacha20poly1305_KEYBYTES: number; readonly crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX: number; readonly crypto_aead_chacha20poly1305_NPUBBYTES: number; readonly 
crypto_aead_chacha20poly1305_NSECBYTES: number; readonly crypto_aead_xchacha20poly1305_ietf_ABYTES: number; readonly crypto_aead_xchacha20poly1305_ietf_KEYBYTES: number; readonly crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX: number; readonly crypto_aead_xchacha20poly1305_ietf_NPUBBYTES: number; readonly crypto_aead_xchacha20poly1305_ietf_NSECBYTES: number; readonly crypto_auth_BYTES: number; readonly crypto_auth_KEYBYTES: number; readonly crypto_box_BEFORENMBYTES: number; readonly crypto_box_MACBYTES: number; readonly crypto_box_MESSAGEBYTES_MAX: number; readonly crypto_box_NONCEBYTES: number; readonly crypto_box_PUBLICKEYBYTES: number; readonly crypto_box_SEALBYTES: number; readonly crypto_box_SECRETKEYBYTES: number; readonly crypto_box_SEEDBYTES: number; readonly crypto_generichash_BYTES_MAX: number; readonly crypto_generichash_BYTES_MIN: number; readonly crypto_generichash_BYTES: number; readonly crypto_generichash_KEYBYTES_MAX: number; readonly crypto_generichash_KEYBYTES_MIN: number; readonly crypto_generichash_KEYBYTES: number; readonly crypto_hash_BYTES: number; readonly crypto_kdf_BYTES_MAX: number; readonly crypto_kdf_BYTES_MIN: number; readonly crypto_kdf_CONTEXTBYTES: number; readonly crypto_kdf_KEYBYTES: number; readonly crypto_kx_PUBLICKEYBYTES: number; readonly crypto_kx_SECRETKEYBYTES: number; readonly crypto_kx_SEEDBYTES: number; readonly crypto_kx_SESSIONKEYBYTES: number; readonly crypto_pwhash_ALG_ARGON2I13: number; readonly crypto_pwhash_ALG_ARGON2ID13: number; readonly crypto_pwhash_ALG_DEFAULT: number; readonly crypto_pwhash_BYTES_MAX: number; readonly crypto_pwhash_BYTES_MIN: number; readonly crypto_pwhash_MEMLIMIT_INTERACTIVE: number; readonly crypto_pwhash_MEMLIMIT_MAX: number; readonly crypto_pwhash_MEMLIMIT_MIN: number; readonly crypto_pwhash_MEMLIMIT_MODERATE: number; readonly crypto_pwhash_MEMLIMIT_SENSITIVE: number; readonly crypto_pwhash_OPSLIMIT_INTERACTIVE: number; readonly crypto_pwhash_OPSLIMIT_MAX: number; readonly 
crypto_pwhash_OPSLIMIT_MIN: number; readonly crypto_pwhash_OPSLIMIT_MODERATE: number; readonly crypto_pwhash_OPSLIMIT_SENSITIVE: number; readonly crypto_pwhash_PASSWD_MAX: number; readonly crypto_pwhash_PASSWD_MIN: number; readonly crypto_pwhash_SALTBYTES: number; readonly crypto_pwhash_STR_VERIFY: number; readonly crypto_pwhash_STRBYTES: number; readonly crypto_pwhash_STRPREFIX: string; readonly crypto_scalarmult_BYTES: number; readonly crypto_scalarmult_SCALARBYTES: number; readonly crypto_secretbox_KEYBYTES: number; readonly crypto_secretbox_MACBYTES: number; readonly crypto_secretbox_MESSAGEBYTES_MAX: number; readonly crypto_secretbox_NONCEBYTES: number; readonly crypto_secretstream_xchacha20poly1305_ABYTES: number; readonly crypto_secretstream_xchacha20poly1305_HEADERBYTES: number; readonly crypto_secretstream_xchacha20poly1305_KEYBYTES: number; readonly crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX: number; readonly crypto_secretstream_xchacha20poly1305_TAG_FINAL: number; readonly crypto_secretstream_xchacha20poly1305_TAG_MESSAGE: number; readonly crypto_secretstream_xchacha20poly1305_TAG_PUSH: number; readonly crypto_secretstream_xchacha20poly1305_TAG_REKEY: number; readonly crypto_shorthash_BYTES: number; readonly crypto_shorthash_KEYBYTES: number; readonly crypto_sign_BYTES: number; readonly crypto_sign_MESSAGEBYTES_MAX: number; readonly crypto_sign_PUBLICKEYBYTES: number; readonly crypto_sign_SECRETKEYBYTES: number; readonly crypto_sign_SEEDBYTES: number; readonly randombytes_SEEDBYTES: number; readonly SODIUM_LIBRARY_VERSION_MAJOR: number; readonly SODIUM_LIBRARY_VERSION_MINOR: number; readonly SODIUM_VERSION_STRING: string; readonly ready: Promise<void>; add(a: Uint8Array, b: Uint8Array): void; compare(b1: Uint8Array, b2: Uint8Array): number; crypto_aead_chacha20poly1305_decrypt( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, 
outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_decrypt( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_decrypt_detached( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, mac: Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_decrypt_detached( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, mac: Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_encrypt( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_encrypt( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_encrypt_detached( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): CryptoBox; crypto_aead_chacha20poly1305_encrypt_detached( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): StringCryptoBox; crypto_aead_chacha20poly1305_ietf_decrypt( secret_nonce: string | Uint8Array | null, ciphertext: 
string | Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_ietf_decrypt( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_ietf_decrypt_detached( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, mac: Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_ietf_decrypt_detached( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, mac: Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_ietf_encrypt( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_ietf_encrypt( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_ietf_encrypt_detached( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): CryptoBox; crypto_aead_chacha20poly1305_ietf_encrypt_detached( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, 
outputFormat: StringOutputFormat, ): StringCryptoBox; crypto_aead_chacha20poly1305_ietf_keygen( outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_ietf_keygen( outputFormat: StringOutputFormat, ): string; crypto_aead_chacha20poly1305_keygen( outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_chacha20poly1305_keygen(outputFormat: StringOutputFormat): string; crypto_aead_xchacha20poly1305_ietf_decrypt( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_xchacha20poly1305_ietf_decrypt( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_xchacha20poly1305_ietf_decrypt_detached( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, mac: Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_xchacha20poly1305_ietf_decrypt_detached( secret_nonce: string | Uint8Array | null, ciphertext: string | Uint8Array, mac: Uint8Array, additional_data: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_aead_xchacha20poly1305_ietf_encrypt( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_xchacha20poly1305_ietf_encrypt( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: 
StringOutputFormat, ): string; crypto_aead_xchacha20poly1305_ietf_encrypt_detached( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): CryptoBox; crypto_aead_xchacha20poly1305_ietf_encrypt_detached( message: string | Uint8Array, additional_data: string | Uint8Array | null, secret_nonce: string | Uint8Array | null, public_nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): StringCryptoBox; crypto_aead_xchacha20poly1305_ietf_keygen( outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_aead_xchacha20poly1305_ietf_keygen( outputFormat: StringOutputFormat, ): string; crypto_auth( message: string | Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_auth( message: string | Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_auth_keygen(outputFormat?: Uint8ArrayOutputFormat | null): Uint8Array; crypto_auth_keygen(outputFormat: StringOutputFormat): string; crypto_auth_verify( tag: Uint8Array, message: string | Uint8Array, key: Uint8Array, ): boolean; crypto_box_beforenm( publicKey: Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_beforenm( publicKey: Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_detached( message: string | Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): CryptoBox; crypto_box_detached( message: string | Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): StringCryptoBox; crypto_box_easy( message: string | Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; 
crypto_box_easy( message: string | Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_easy_afternm( message: string | Uint8Array, nonce: Uint8Array, sharedKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_easy_afternm( message: string | Uint8Array, nonce: Uint8Array, sharedKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_keypair(outputFormat?: Uint8ArrayOutputFormat | null): KeyPair; crypto_box_keypair(outputFormat: StringOutputFormat): StringKeyPair; crypto_box_open_detached( ciphertext: string | Uint8Array, mac: Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_open_detached( ciphertext: string | Uint8Array, mac: Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_open_easy( ciphertext: string | Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_open_easy( ciphertext: string | Uint8Array, nonce: Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_open_easy_afternm( ciphertext: string | Uint8Array, nonce: Uint8Array, sharedKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_open_easy_afternm( ciphertext: string | Uint8Array, nonce: Uint8Array, sharedKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_seal( message: string | Uint8Array, publicKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_seal( message: string | Uint8Array, publicKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_seal_open( ciphertext: string | Uint8Array, publicKey: Uint8Array, privateKey: 
Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_box_seal_open( ciphertext: string | Uint8Array, publicKey: Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_box_seed_keypair( seed: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): KeyPair; crypto_box_seed_keypair( seed: Uint8Array, outputFormat: StringOutputFormat, ): StringKeyPair; crypto_generichash( hash_length: number, message: string | Uint8Array, key?: string | Uint8Array | null, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_generichash( hash_length: number, message: string | Uint8Array, key: string | Uint8Array | null, outputFormat: StringOutputFormat, ): string; crypto_generichash_final( state_address: StateAddress, hash_length: number, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_generichash_final( state_address: StateAddress, hash_length: number, outputFormat: StringOutputFormat, ): string; crypto_generichash_init( key: string | Uint8Array | null, hash_length: number, ): StateAddress; crypto_generichash_keygen( outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_generichash_keygen(outputFormat: StringOutputFormat): string; crypto_generichash_update( state_address: StateAddress, message_chunk: string | Uint8Array, ): void; crypto_hash( message: string | Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_hash( message: string | Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_kdf_derive_from_key( subkey_len: number, subkey_id: number, ctx: string, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_kdf_derive_from_key( subkey_len: number, subkey_id: number, ctx: string, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_kdf_keygen(outputFormat?: Uint8ArrayOutputFormat | null): Uint8Array; crypto_kdf_keygen(outputFormat: StringOutputFormat): string; 
crypto_kx_client_session_keys( clientPublicKey: Uint8Array, clientSecretKey: Uint8Array, serverPublicKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): CryptoKX; crypto_kx_client_session_keys( clientPublicKey: Uint8Array, clientSecretKey: Uint8Array, serverPublicKey: Uint8Array, outputFormat: StringOutputFormat, ): StringCryptoKX; crypto_kx_keypair(outputFormat?: Uint8ArrayOutputFormat | null): KeyPair; crypto_kx_keypair(outputFormat: StringOutputFormat): StringKeyPair; crypto_kx_seed_keypair( seed: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): KeyPair; crypto_kx_seed_keypair( seed: Uint8Array, outputFormat: StringOutputFormat, ): StringKeyPair; crypto_kx_server_session_keys( serverPublicKey: Uint8Array, serverSecretKey: Uint8Array, clientPublicKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): CryptoKX; crypto_kx_server_session_keys( serverPublicKey: Uint8Array, serverSecretKey: Uint8Array, clientPublicKey: Uint8Array, outputFormat: StringOutputFormat, ): StringCryptoKX; crypto_pwhash( keyLength: number, password: string | Uint8Array, salt: Uint8Array, opsLimit: number, memLimit: number, algorithm: number, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_pwhash( keyLength: number, password: string | Uint8Array, salt: Uint8Array, opsLimit: number, memLimit: number, algorithm: number, outputFormat: StringOutputFormat, ): string; crypto_pwhash_str( password: string | Uint8Array, opsLimit: number, memLimit: number, ): string; crypto_pwhash_str_needs_rehash( hashed_password: string | Uint8Array, opsLimit: number, memLimit: number, ): boolean; crypto_pwhash_str_verify( hashed_password: string, password: string | Uint8Array, ): boolean; crypto_scalarmult( privateKey: Uint8Array, publicKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_scalarmult( privateKey: Uint8Array, publicKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_scalarmult_base( privateKey: 
Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_scalarmult_base( privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_secretbox_detached( message: string | Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): SecretBox; crypto_secretbox_detached( message: string | Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): StringSecretBox; crypto_secretbox_easy( message: string | Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_secretbox_easy( message: string | Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_secretbox_keygen( outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_secretbox_keygen(outputFormat: StringOutputFormat): string; crypto_secretbox_open_detached( ciphertext: string | Uint8Array, mac: Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_secretbox_open_detached( ciphertext: string | Uint8Array, mac: Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_secretbox_open_easy( ciphertext: string | Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_secretbox_open_easy( ciphertext: string | Uint8Array, nonce: Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_secretstream_xchacha20poly1305_init_pull( header: Uint8Array, key: Uint8Array, ): StateAddress; crypto_secretstream_xchacha20poly1305_init_push( key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): { state: StateAddress; header: Uint8Array }; crypto_secretstream_xchacha20poly1305_init_push( key: Uint8Array, outputFormat: StringOutputFormat, ): { state: StateAddress; header: string }; crypto_secretstream_xchacha20poly1305_keygen( 
outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_secretstream_xchacha20poly1305_keygen( outputFormat: StringOutputFormat, ): string; crypto_secretstream_xchacha20poly1305_pull( state_address: StateAddress, cipher: string | Uint8Array, ad?: string | Uint8Array | null, outputFormat?: Uint8ArrayOutputFormat | null, ): MessageTag; crypto_secretstream_xchacha20poly1305_pull( state_address: StateAddress, cipher: string | Uint8Array, ad: string | Uint8Array | null, outputFormat: StringOutputFormat, ): StringMessageTag; crypto_secretstream_xchacha20poly1305_push( state_address: StateAddress, message_chunk: string | Uint8Array, ad: string | Uint8Array | null, tag: number, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_secretstream_xchacha20poly1305_push( state_address: StateAddress, message_chunk: string | Uint8Array, ad: string | Uint8Array | null, tag: number, outputFormat: StringOutputFormat, ): string; crypto_secretstream_xchacha20poly1305_rekey( state_address: StateAddress, ): true; crypto_shorthash( message: string | Uint8Array, key: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_shorthash( message: string | Uint8Array, key: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_shorthash_keygen( outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_shorthash_keygen(outputFormat: StringOutputFormat): string; crypto_sign( message: string | Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_sign( message: string | Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_sign_detached( message: string | Uint8Array, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_sign_detached( message: string | Uint8Array, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_sign_ed25519_pk_to_curve25519( edPk: Uint8Array, outputFormat?: 
Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_sign_ed25519_pk_to_curve25519( edPk: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_sign_ed25519_sk_to_curve25519( edSk: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_sign_ed25519_sk_to_curve25519( edSk: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_sign_final_create( state_address: StateAddress, privateKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_sign_final_create( state_address: StateAddress, privateKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_sign_final_verify( state_address: StateAddress, signature: Uint8Array, publicKey: Uint8Array, ): boolean; crypto_sign_init(): StateAddress; crypto_sign_keypair(outputFormat?: Uint8ArrayOutputFormat | null): KeyPair; crypto_sign_keypair(outputFormat: StringOutputFormat): StringKeyPair; crypto_sign_open( signedMessage: string | Uint8Array, publicKey: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; crypto_sign_open( signedMessage: string | Uint8Array, publicKey: Uint8Array, outputFormat: StringOutputFormat, ): string; crypto_sign_seed_keypair( seed: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): KeyPair; crypto_sign_seed_keypair( seed: Uint8Array, outputFormat: StringOutputFormat, ): StringKeyPair; crypto_sign_update( state_address: StateAddress, message_chunk: string | Uint8Array, ): void; crypto_sign_verify_detached( signature: Uint8Array, message: string | Uint8Array, publicKey: Uint8Array, ): boolean; from_base64(input: string, variant?: base64_variants): Uint8Array; from_hex(input: string): Uint8Array; from_string(str: string): Uint8Array; increment(bytes: Uint8Array): void; is_zero(bytes: Uint8Array): boolean; memcmp(b1: Uint8Array, b2: Uint8Array): boolean; memzero(bytes: Uint8Array): void; output_formats(): Array<Uint8ArrayOutputFormat | StringOutputFormat>; pad(buf: Uint8Array, blocksize: number): 
Uint8Array; randombytes_buf( length: number, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; randombytes_buf(length: number, outputFormat: StringOutputFormat): string; randombytes_buf_deterministic( length: number, seed: Uint8Array, outputFormat?: Uint8ArrayOutputFormat | null, ): Uint8Array; randombytes_buf_deterministic( length: number, seed: Uint8Array, outputFormat: StringOutputFormat, ): string; randombytes_close(): void; randombytes_random(): number; randombytes_stir(): void; randombytes_uniform(upper_bound: number): number; sodium_version_string(): string; symbols(): string[]; to_base64(input: string | Uint8Array, variant?: base64_variants): string; to_hex(input: string | Uint8Array): string; to_string(bytes: Uint8Array): string; unpad(buf: Uint8Array, blocksize: number): Uint8Array; }
the_stack
* @file Support for defining user-selectable tools. */ import './tool.css'; import debounce from 'lodash/debounce'; import {MouseSelectionState, UserLayer, UserLayerConstructor} from 'neuroglancer/layer'; import {StatusMessage} from 'neuroglancer/status'; import {TrackableValueInterface} from 'neuroglancer/trackable_value'; import {animationFrameDebounce} from 'neuroglancer/util/animation_frame_debounce'; import {Borrowed, Owned, RefCounted} from 'neuroglancer/util/disposable'; import {ActionEvent, EventActionMap, registerActionListener} from 'neuroglancer/util/event_action_map'; import {verifyObject, verifyObjectProperty, verifyString} from 'neuroglancer/util/json'; import {AnyConstructor} from 'neuroglancer/util/mixin'; import {Signal} from 'neuroglancer/util/signal'; const TOOL_KEY_PATTERN = /^[A-Z]$/; export type InputEventMapBinder = (eventActionMap: EventActionMap, context: RefCounted) => void; export class ToolActivation<ToolType extends Tool = Tool> extends RefCounted { constructor(public tool: ToolType, public inputEventMapBinder: InputEventMapBinder) { super(); } bindAction<Info>(action: string, listener: (event: ActionEvent<Info>) => void) { this.registerDisposer(registerActionListener(window, action, listener)); } bindInputEventMap(inputEventMap: EventActionMap) { this.inputEventMapBinder(inputEventMap, this); } cancel() { if (this == this.tool.layer.manager.root.toolBinder.activeTool_) { this.tool.layer.manager.root.toolBinder.deactivate_(); } } } export abstract class Tool<LayerType extends UserLayer = UserLayer> extends RefCounted { changed = new Signal(); keyBinding: string|undefined = undefined; constructor(public layer: LayerType, public toggle: boolean = false) { super(); } get mouseState() { return this.layer.manager.root.layerSelectedValues.mouseState; } abstract activate(activation: ToolActivation<this>): void; abstract toJSON(): any; abstract description: string; unbind() { const {layer} = this; const {keyBinding} = this; if (keyBinding !== 
undefined) { layer.toolBinder.set(keyBinding, undefined); } } } export abstract class LegacyTool<LayerType extends UserLayer = UserLayer> extends RefCounted { changed = new Signal(); constructor(public layer: LayerType) { super(); } get mouseState() { return this.layer.manager.root.layerSelectedValues.mouseState; } abstract trigger(mouseState: MouseSelectionState): void; abstract toJSON(): any; deactivate(): void {} abstract description: string; unbind() { const {layer} = this; if (layer.tool.value === this) { layer.tool.value = undefined; } } } export function restoreTool(layer: UserLayer, obj: any) { if (obj === undefined) { return undefined; } if (typeof obj === 'string') { obj = {'type': obj}; } verifyObject(obj); const type = verifyObjectProperty(obj, 'type', verifyString); // First look for layer-specific tool. let getter: ToolGetter|undefined = layerTools.get(layer.constructor as UserLayerConstructor)?.get(type); if (getter === undefined) { // Look for layer-independent tool. getter = tools.get(type); } if (getter === undefined) { throw new Error(`Invalid tool type: ${JSON.stringify(obj)}.`); } return getter(layer, obj); } export function restoreLegacyTool(layer: UserLayer, obj: any) { if (obj === undefined) { return undefined; } if (typeof obj === 'string') { obj = {'type': obj}; } verifyObject(obj); const type = verifyObjectProperty(obj, 'type', verifyString); const getter = legacyTools.get(type); if (getter === undefined) { throw new Error(`Invalid tool type: ${JSON.stringify(obj)}.`); } return getter(layer, obj); } export type ToolGetter<LayerType extends UserLayer = UserLayer> = (layer: LayerType, options: any) => Owned<Tool>|undefined; export type LegacyToolGetter<LayerType extends UserLayer = UserLayer> = (layer: LayerType, options: any) => Owned<LegacyTool>|undefined; const legacyTools = new Map<string, LegacyToolGetter>(); const tools = new Map<string, ToolGetter>(); const layerTools = new Map<UserLayerConstructor, Map<string, ToolGetter>>(); export 
function registerLegacyTool(type: string, getter: LegacyToolGetter) { legacyTools.set(type, getter); } export function registerTool(type: string, getter: ToolGetter) { tools.set(type, getter); } export function registerLayerTool<LayerType extends UserLayer>( layerType: UserLayerConstructor&AnyConstructor<LayerType>, type: string, getter: ToolGetter<LayerType>) { let tools = layerTools.get(layerType); if (tools === undefined) { tools = new Map(); layerTools.set(layerType, tools); } tools.set(type, getter); } export class SelectedLegacyTool extends RefCounted implements TrackableValueInterface<LegacyTool|undefined> { changed = new Signal(); private value_: Owned<LegacyTool>|undefined; get value() { return this.value_; } set value(newValue: Owned<LegacyTool>|undefined) { if (newValue === this.value_) return; this.unregister(); if (newValue !== undefined) { newValue.changed.add(this.changed.dispatch); this.value_ = newValue; } this.changed.dispatch(); } private unregister() { const existingValue = this.value_; if (existingValue !== undefined) { existingValue.changed.remove(this.changed.dispatch); existingValue.dispose(); this.value_ = undefined; } } disposed() { this.unregister(); super.disposed(); } restoreState(obj: unknown) { this.value = restoreLegacyTool(this.layer, obj); } reset() { this.value = undefined; } toJSON() { const value = this.value_; if (value === undefined) return undefined; return value.toJSON(); } constructor(public layer: UserLayer) { super(); } } export class ToolBinder extends RefCounted { bindings = new Map<string, Borrowed<Tool>>(); changed = new Signal(); activeTool_: Owned<ToolActivation>|undefined; // For internal use only- should only be called by ToolBinder and ToolActivation.cancel() private queuedTool: Tool|undefined; private debounceDeactivate = this.registerCancellable(debounce(() => this.deactivate_(), 100)); private debounceReactivate = this.registerCancellable(debounce(() => this.reactivateQueuedTool(), 100)); constructor(private 
inputEventMapBinder: InputEventMapBinder) { super(); } get(key: string): Borrowed<Tool>|undefined { return this.bindings.get(key); } set(key: string, tool: Owned<Tool>|undefined) { const {bindings} = this; const existingTool = bindings.get(key); if (existingTool !== undefined) { existingTool.keyBinding = undefined; bindings.delete(key); const layerToolBinder = existingTool.layer.toolBinder; layerToolBinder.bindings.delete(key); layerToolBinder.jsonToKey.delete(JSON.stringify(existingTool.toJSON())); this.destroyTool(existingTool); layerToolBinder.changed.dispatch(); } if (tool !== undefined) { const layerToolBinder = tool.layer.toolBinder; const json = JSON.stringify(tool.toJSON()); const existingKey = layerToolBinder.jsonToKey.get(json); if (existingKey !== undefined) { const existingTool = layerToolBinder.bindings.get(existingKey)!; existingTool.keyBinding = undefined; bindings.delete(existingKey); layerToolBinder.bindings.delete(existingKey); layerToolBinder.jsonToKey.delete(json); this.destroyTool(existingTool); } layerToolBinder.bindings.set(key, tool); tool.keyBinding = key; layerToolBinder.jsonToKey.set(json, key); bindings.set(key, tool); layerToolBinder.changed.dispatch(); } this.changed.dispatch(); } activate(key: string): Borrowed<Tool>|undefined { const tool = this.get(key); if (tool === undefined) { this.deactivate_(); return; } this.debounceDeactivate.cancel(); this.debounceReactivate.cancel(); const activeTool = this.activeTool_; if (tool === activeTool?.tool) { if (tool.toggle) { this.deactivate_(); } return; } else if (activeTool !== undefined) { if (activeTool.tool.toggle && !tool.toggle) { this.queuedTool = activeTool.tool; } this.deactivate_(); } const activation = new ToolActivation(tool, this.inputEventMapBinder); this.activeTool_ = activation; if (!tool.toggle) { const expectedCode = `Key${key}`; activation.registerEventListener(window, 'keyup', (event: KeyboardEvent) => { if (event.code === expectedCode) { this.debounceDeactivate(); 
this.debounceReactivate(); } }); activation.registerEventListener(window, 'blur', () => { this.debounceDeactivate(); this.debounceReactivate(); }); } tool.activate(activation); return tool; } private reactivateQueuedTool() { if (this.queuedTool) { const activation = new ToolActivation(this.queuedTool, this.inputEventMapBinder); this.activeTool_ = activation; this.queuedTool.activate(activation); this.queuedTool = undefined; } } destroyTool(tool: Owned<Tool>) { if (this.queuedTool === tool) { this.queuedTool = undefined; } if (this.activeTool_?.tool === tool) { this.deactivate_(); } tool.dispose(); } disposed() { this.deactivate_(); super.disposed(); } deactivate_() { // For internal use only- should only be called by ToolBinder and ToolActivation.cancel() this.debounceDeactivate.cancel(); const activation = this.activeTool_; if (activation === undefined) return; this.activeTool_ = undefined; activation.dispose(); } } export class LayerToolBinder { // Maps the the tool key (i.e. "A", "B", ...) to the bound tool. bindings = new Map<string, Owned<Tool>>(); // Maps the serialized json representation of the tool to the tool key. 
jsonToKey = new Map<string, string>(); changed = new Signal(); private get globalBinder() { return this.layer.manager.root.toolBinder; } constructor(public layer: UserLayer) { layer.registerDisposer(() => this.clear()); } get(key: string): Borrowed<Tool>|undefined { return this.bindings.get(key); } set(key: string, tool: Owned<Tool>|undefined) { this.globalBinder.set(key, tool); } setJson(key: string, toolJson: any) { const tool = restoreTool(this.layer, toolJson); if (tool === undefined) return; this.set(key, tool); } removeJsonString(toolJsonString: string) { const key = this.jsonToKey.get(toolJsonString); if (key === undefined) return; this.set(key, undefined); } toJSON(): any { const {bindings} = this; if (bindings.size === 0) return undefined; const obj: any = {}; for (const [key, value] of bindings) { obj[key] = value.toJSON(); } return obj; } clear() { const {globalBinder, bindings} = this; if (bindings.size !== 0) { for (const [key, tool] of bindings) { tool.keyBinding = undefined; globalBinder.bindings.delete(key); globalBinder.destroyTool(tool); } bindings.clear(); this.jsonToKey.clear(); globalBinder.changed.dispatch(); this.changed.dispatch(); } } reset() { this.clear(); } restoreState(obj: any) { if (obj === undefined) return; verifyObject(obj); for (const [key, value] of Object.entries(obj)) { if (!key.match(TOOL_KEY_PATTERN)) { throw new Error(`Invalid tool key: ${JSON.stringify(key)}`); } const tool = restoreTool(this.layer, value); if (tool === undefined) return; this.set(key, tool); } } } export class ToolBindingWidget<LayerType extends UserLayer> extends RefCounted { element = document.createElement('div'); private toolJsonString = JSON.stringify(this.toolJson); constructor(public layer: LayerType, public toolJson: any) { super(); const {element} = this; element.classList.add('neuroglancer-tool-key-binding'); this.registerDisposer(layer.toolBinder.changed.add( this.registerCancellable(animationFrameDebounce(() => this.updateView())))); 
this.updateView(); element.title = 'click → bind key, dbclick → unbind'; element.addEventListener('dblclick', () => { this.layer.toolBinder.removeJsonString(this.toolJsonString); }); addToolKeyBindHandlers(this, element, key => this.layer.toolBinder.setJson(key, this.toolJson)); } private updateView() { const {toolBinder} = this.layer; const key = toolBinder.jsonToKey.get(this.toolJsonString); this.element.textContent = key ?? ' '; } } export function addToolKeyBindHandlers( context: RefCounted, element: HTMLElement, bindKey: (key: string) => void) { let mousedownContext: RefCounted|undefined; element.addEventListener('mousedown', event => { if (event.button !== 0 || mousedownContext !== undefined) return; event.preventDefault(); event.stopPropagation(); mousedownContext = new RefCounted(); context.registerDisposer(mousedownContext); const message = mousedownContext.registerDisposer(new StatusMessage(false)); message.setText('Press A-Z to bind key'); mousedownContext.registerEventListener(window, 'keydown', (event: KeyboardEvent) => { const {code} = event; const m = code.match(/^Key([A-Z])$/); if (m === null) return; event.stopPropagation(); event.preventDefault(); const key = m[1]; bindKey(key); }, {capture: true}); mousedownContext.registerEventListener(window, 'mouseup', (event: MouseEvent) => { if (event.button !== 0 || mousedownContext === undefined) return; event.preventDefault(); event.stopPropagation(); context.unregisterDisposer(mousedownContext); mousedownContext.dispose(); mousedownContext = undefined; }); }); element.addEventListener('click', event => { event.preventDefault(); event.stopPropagation(); }); } export function makeToolButton( context: RefCounted, layer: UserLayer, options: {toolJson: any, label: string, title?: string}) { const element = document.createElement('div'); element.classList.add('neuroglancer-tool-button'); element.appendChild( context.registerDisposer(new ToolBindingWidget(layer, options.toolJson)).element); const labelElement = 
document.createElement('div'); labelElement.classList.add('neuroglancer-tool-button-label'); labelElement.textContent = options.label; if (options.title) { labelElement.title = options.title; } element.appendChild(labelElement); return element; } export function makeToolActivationStatusMessage(activation: ToolActivation) { const message = activation.registerDisposer(new StatusMessage(false)); message.element.classList.add('neuroglancer-tool-status'); const content = document.createElement('div'); content.classList.add('neuroglancer-tool-status-content'); message.element.appendChild(content); const {inputEventMapBinder} = activation; activation.inputEventMapBinder = (inputEventMap: EventActionMap, context: RefCounted) => { const bindingHelp = document.createElement('div'); bindingHelp.textContent = inputEventMap.describe(); bindingHelp.classList.add('neuroglancer-tool-status-bindings'); message.element.appendChild(bindingHelp); inputEventMapBinder(inputEventMap, context); }; return {message, content}; } export function makeToolActivationStatusMessageWithHeader(activation: ToolActivation) { const {message, content} = makeToolActivationStatusMessage(activation); const header = document.createElement('div'); header.classList.add('neuroglancer-tool-status-header'); const headerContainer = document.createElement('div'); headerContainer.classList.add('neuroglancer-tool-status-header-container'); headerContainer.appendChild(header); content.appendChild(headerContainer); const body = document.createElement('div'); body.classList.add('neuroglancer-tool-status-body'); content.appendChild(body); return {message, body, header}; }
the_stack
export interface paths { "/": { get: { responses: { /** OK */ 200: unknown; }; }; }; "/auth_key": { get: { parameters: { query: { id?: parameters["rowFilter.auth_key.id"]; name?: parameters["rowFilter.auth_key.name"]; secret?: parameters["rowFilter.auth_key.secret"]; user_id?: parameters["rowFilter.auth_key.user_id"]; inserted_at?: parameters["rowFilter.auth_key.inserted_at"]; updated_at?: parameters["rowFilter.auth_key.updated_at"]; deleted_at?: parameters["rowFilter.auth_key.deleted_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["auth_key"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** auth_key */ auth_key?: definitions["auth_key"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.auth_key.id"]; name?: parameters["rowFilter.auth_key.name"]; secret?: parameters["rowFilter.auth_key.secret"]; user_id?: parameters["rowFilter.auth_key.user_id"]; inserted_at?: parameters["rowFilter.auth_key.inserted_at"]; updated_at?: parameters["rowFilter.auth_key.updated_at"]; deleted_at?: parameters["rowFilter.auth_key.deleted_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.auth_key.id"]; name?: parameters["rowFilter.auth_key.name"]; secret?: parameters["rowFilter.auth_key.secret"]; 
user_id?: parameters["rowFilter.auth_key.user_id"]; inserted_at?: parameters["rowFilter.auth_key.inserted_at"]; updated_at?: parameters["rowFilter.auth_key.updated_at"]; deleted_at?: parameters["rowFilter.auth_key.deleted_at"]; }; body: { /** auth_key */ auth_key?: definitions["auth_key"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/backup": { get: { parameters: { query: { id?: parameters["rowFilter.backup.id"]; upload_id?: parameters["rowFilter.backup.upload_id"]; url?: parameters["rowFilter.backup.url"]; inserted_at?: parameters["rowFilter.backup.inserted_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["backup"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** backup */ backup?: definitions["backup"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.backup.id"]; upload_id?: parameters["rowFilter.backup.upload_id"]; url?: parameters["rowFilter.backup.url"]; inserted_at?: parameters["rowFilter.backup.inserted_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.backup.id"]; upload_id?: parameters["rowFilter.backup.upload_id"]; url?: parameters["rowFilter.backup.url"]; inserted_at?: 
parameters["rowFilter.backup.inserted_at"]; }; body: { /** backup */ backup?: definitions["backup"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/content": { get: { parameters: { query: { cid?: parameters["rowFilter.content.cid"]; dag_size?: parameters["rowFilter.content.dag_size"]; inserted_at?: parameters["rowFilter.content.inserted_at"]; updated_at?: parameters["rowFilter.content.updated_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["content"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** content */ content?: definitions["content"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { cid?: parameters["rowFilter.content.cid"]; dag_size?: parameters["rowFilter.content.dag_size"]; inserted_at?: parameters["rowFilter.content.inserted_at"]; updated_at?: parameters["rowFilter.content.updated_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { cid?: parameters["rowFilter.content.cid"]; dag_size?: parameters["rowFilter.content.dag_size"]; inserted_at?: parameters["rowFilter.content.inserted_at"]; updated_at?: parameters["rowFilter.content.updated_at"]; }; body: { /** content */ content?: definitions["content"]; }; header: { /** Preference */ 
Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/migration_tracker": { get: { parameters: { query: { id?: parameters["rowFilter.migration_tracker.id"]; cid?: parameters["rowFilter.migration_tracker.cid"]; duration?: parameters["rowFilter.migration_tracker.duration"]; dump_started_at?: parameters["rowFilter.migration_tracker.dump_started_at"]; dump_ended_at?: parameters["rowFilter.migration_tracker.dump_ended_at"]; inserted_at?: parameters["rowFilter.migration_tracker.inserted_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["migration_tracker"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** migration_tracker */ migration_tracker?: definitions["migration_tracker"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.migration_tracker.id"]; cid?: parameters["rowFilter.migration_tracker.cid"]; duration?: parameters["rowFilter.migration_tracker.duration"]; dump_started_at?: parameters["rowFilter.migration_tracker.dump_started_at"]; dump_ended_at?: parameters["rowFilter.migration_tracker.dump_ended_at"]; inserted_at?: parameters["rowFilter.migration_tracker.inserted_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: 
parameters["rowFilter.migration_tracker.id"]; cid?: parameters["rowFilter.migration_tracker.cid"]; duration?: parameters["rowFilter.migration_tracker.duration"]; dump_started_at?: parameters["rowFilter.migration_tracker.dump_started_at"]; dump_ended_at?: parameters["rowFilter.migration_tracker.dump_ended_at"]; inserted_at?: parameters["rowFilter.migration_tracker.inserted_at"]; }; body: { /** migration_tracker */ migration_tracker?: definitions["migration_tracker"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/name": { get: { parameters: { query: { key?: parameters["rowFilter.name.key"]; record?: parameters["rowFilter.name.record"]; has_v2_sig?: parameters["rowFilter.name.has_v2_sig"]; seqno?: parameters["rowFilter.name.seqno"]; validity?: parameters["rowFilter.name.validity"]; inserted_at?: parameters["rowFilter.name.inserted_at"]; updated_at?: parameters["rowFilter.name.updated_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["name"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** name */ name?: definitions["name"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { key?: parameters["rowFilter.name.key"]; record?: parameters["rowFilter.name.record"]; has_v2_sig?: parameters["rowFilter.name.has_v2_sig"]; seqno?: parameters["rowFilter.name.seqno"]; 
validity?: parameters["rowFilter.name.validity"]; inserted_at?: parameters["rowFilter.name.inserted_at"]; updated_at?: parameters["rowFilter.name.updated_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { key?: parameters["rowFilter.name.key"]; record?: parameters["rowFilter.name.record"]; has_v2_sig?: parameters["rowFilter.name.has_v2_sig"]; seqno?: parameters["rowFilter.name.seqno"]; validity?: parameters["rowFilter.name.validity"]; inserted_at?: parameters["rowFilter.name.inserted_at"]; updated_at?: parameters["rowFilter.name.updated_at"]; }; body: { /** name */ name?: definitions["name"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/pin": { get: { parameters: { query: { id?: parameters["rowFilter.pin.id"]; status?: parameters["rowFilter.pin.status"]; content_cid?: parameters["rowFilter.pin.content_cid"]; pin_location_id?: parameters["rowFilter.pin.pin_location_id"]; inserted_at?: parameters["rowFilter.pin.inserted_at"]; updated_at?: parameters["rowFilter.pin.updated_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["pin"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** pin */ pin?: definitions["pin"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { 
query: { id?: parameters["rowFilter.pin.id"]; status?: parameters["rowFilter.pin.status"]; content_cid?: parameters["rowFilter.pin.content_cid"]; pin_location_id?: parameters["rowFilter.pin.pin_location_id"]; inserted_at?: parameters["rowFilter.pin.inserted_at"]; updated_at?: parameters["rowFilter.pin.updated_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.pin.id"]; status?: parameters["rowFilter.pin.status"]; content_cid?: parameters["rowFilter.pin.content_cid"]; pin_location_id?: parameters["rowFilter.pin.pin_location_id"]; inserted_at?: parameters["rowFilter.pin.inserted_at"]; updated_at?: parameters["rowFilter.pin.updated_at"]; }; body: { /** pin */ pin?: definitions["pin"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/pin_location": { get: { parameters: { query: { id?: parameters["rowFilter.pin_location.id"]; peer_id?: parameters["rowFilter.pin_location.peer_id"]; peer_name?: parameters["rowFilter.pin_location.peer_name"]; region?: parameters["rowFilter.pin_location.region"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["pin_location"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** pin_location */ pin_location?: definitions["pin_location"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; 
responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.pin_location.id"]; peer_id?: parameters["rowFilter.pin_location.peer_id"]; peer_name?: parameters["rowFilter.pin_location.peer_name"]; region?: parameters["rowFilter.pin_location.region"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.pin_location.id"]; peer_id?: parameters["rowFilter.pin_location.peer_id"]; peer_name?: parameters["rowFilter.pin_location.peer_name"]; region?: parameters["rowFilter.pin_location.region"]; }; body: { /** pin_location */ pin_location?: definitions["pin_location"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/pin_request": { get: { parameters: { query: { id?: parameters["rowFilter.pin_request.id"]; content_cid?: parameters["rowFilter.pin_request.content_cid"]; attempts?: parameters["rowFilter.pin_request.attempts"]; inserted_at?: parameters["rowFilter.pin_request.inserted_at"]; updated_at?: parameters["rowFilter.pin_request.updated_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["pin_request"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** pin_request */ pin_request?: definitions["pin_request"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** 
Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.pin_request.id"]; content_cid?: parameters["rowFilter.pin_request.content_cid"]; attempts?: parameters["rowFilter.pin_request.attempts"]; inserted_at?: parameters["rowFilter.pin_request.inserted_at"]; updated_at?: parameters["rowFilter.pin_request.updated_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.pin_request.id"]; content_cid?: parameters["rowFilter.pin_request.content_cid"]; attempts?: parameters["rowFilter.pin_request.attempts"]; inserted_at?: parameters["rowFilter.pin_request.inserted_at"]; updated_at?: parameters["rowFilter.pin_request.updated_at"]; }; body: { /** pin_request */ pin_request?: definitions["pin_request"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/pin_sync_request": { get: { parameters: { query: { id?: parameters["rowFilter.pin_sync_request.id"]; pin_id?: parameters["rowFilter.pin_sync_request.pin_id"]; inserted_at?: parameters["rowFilter.pin_sync_request.inserted_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["pin_sync_request"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** pin_sync_request */ pin_sync_request?: definitions["pin_sync_request"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: 
parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.pin_sync_request.id"]; pin_id?: parameters["rowFilter.pin_sync_request.pin_id"]; inserted_at?: parameters["rowFilter.pin_sync_request.inserted_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.pin_sync_request.id"]; pin_id?: parameters["rowFilter.pin_sync_request.pin_id"]; inserted_at?: parameters["rowFilter.pin_sync_request.inserted_at"]; }; body: { /** pin_sync_request */ pin_sync_request?: definitions["pin_sync_request"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/upload": { get: { parameters: { query: { id?: parameters["rowFilter.upload.id"]; user_id?: parameters["rowFilter.upload.user_id"]; auth_key_id?: parameters["rowFilter.upload.auth_key_id"]; content_cid?: parameters["rowFilter.upload.content_cid"]; source_cid?: parameters["rowFilter.upload.source_cid"]; type?: parameters["rowFilter.upload.type"]; name?: parameters["rowFilter.upload.name"]; inserted_at?: parameters["rowFilter.upload.inserted_at"]; updated_at?: parameters["rowFilter.upload.updated_at"]; deleted_at?: parameters["rowFilter.upload.deleted_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["upload"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** upload */ upload?: 
definitions["upload"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.upload.id"]; user_id?: parameters["rowFilter.upload.user_id"]; auth_key_id?: parameters["rowFilter.upload.auth_key_id"]; content_cid?: parameters["rowFilter.upload.content_cid"]; source_cid?: parameters["rowFilter.upload.source_cid"]; type?: parameters["rowFilter.upload.type"]; name?: parameters["rowFilter.upload.name"]; inserted_at?: parameters["rowFilter.upload.inserted_at"]; updated_at?: parameters["rowFilter.upload.updated_at"]; deleted_at?: parameters["rowFilter.upload.deleted_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.upload.id"]; user_id?: parameters["rowFilter.upload.user_id"]; auth_key_id?: parameters["rowFilter.upload.auth_key_id"]; content_cid?: parameters["rowFilter.upload.content_cid"]; source_cid?: parameters["rowFilter.upload.source_cid"]; type?: parameters["rowFilter.upload.type"]; name?: parameters["rowFilter.upload.name"]; inserted_at?: parameters["rowFilter.upload.inserted_at"]; updated_at?: parameters["rowFilter.upload.updated_at"]; deleted_at?: parameters["rowFilter.upload.deleted_at"]; }; body: { /** upload */ upload?: definitions["upload"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/user": { get: { parameters: { query: { id?: parameters["rowFilter.user.id"]; name?: parameters["rowFilter.user.name"]; picture?: parameters["rowFilter.user.picture"]; email?: parameters["rowFilter.user.email"]; issuer?: parameters["rowFilter.user.issuer"]; github?: parameters["rowFilter.user.github"]; public_address?: parameters["rowFilter.user.public_address"]; 
inserted_at?: parameters["rowFilter.user.inserted_at"]; updated_at?: parameters["rowFilter.user.updated_at"]; /** Filtering Columns */ select?: parameters["select"]; /** Ordering */ order?: parameters["order"]; /** Limiting and Pagination */ offset?: parameters["offset"]; /** Limiting and Pagination */ limit?: parameters["limit"]; }; header: { /** Limiting and Pagination */ Range?: parameters["range"]; /** Limiting and Pagination */ "Range-Unit"?: parameters["rangeUnit"]; /** Preference */ Prefer?: parameters["preferCount"]; }; }; responses: { /** OK */ 200: { schema: definitions["user"][]; }; /** Partial Content */ 206: unknown; }; }; post: { parameters: { body: { /** user */ user?: definitions["user"]; }; query: { /** Filtering Columns */ select?: parameters["select"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** Created */ 201: unknown; }; }; delete: { parameters: { query: { id?: parameters["rowFilter.user.id"]; name?: parameters["rowFilter.user.name"]; picture?: parameters["rowFilter.user.picture"]; email?: parameters["rowFilter.user.email"]; issuer?: parameters["rowFilter.user.issuer"]; github?: parameters["rowFilter.user.github"]; public_address?: parameters["rowFilter.user.public_address"]; inserted_at?: parameters["rowFilter.user.inserted_at"]; updated_at?: parameters["rowFilter.user.updated_at"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; patch: { parameters: { query: { id?: parameters["rowFilter.user.id"]; name?: parameters["rowFilter.user.name"]; picture?: parameters["rowFilter.user.picture"]; email?: parameters["rowFilter.user.email"]; issuer?: parameters["rowFilter.user.issuer"]; github?: parameters["rowFilter.user.github"]; public_address?: parameters["rowFilter.user.public_address"]; inserted_at?: parameters["rowFilter.user.inserted_at"]; updated_at?: parameters["rowFilter.user.updated_at"]; }; body: { /** user */ user?: 
definitions["user"]; }; header: { /** Preference */ Prefer?: parameters["preferReturn"]; }; }; responses: { /** No Content */ 204: never; }; }; }; "/rpc/pin_dag_size_total": { post: { parameters: { body: { args: { [key: string]: unknown }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/pgrst_watch": { post: { parameters: { body: { args: { [key: string]: unknown }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/user_auth_keys_list": { post: { parameters: { body: { args: { query_user_id: number; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/pin_from_status_total": { post: { parameters: { body: { args: { query_status: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/find_deals_by_content_cids": { post: { parameters: { body: { args: { cids: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/upsert_pin": { post: { parameters: { body: { args: { data: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/json_arr_to_text_arr": { post: { parameters: { body: { args: { _json: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/publish_name_record": { post: { parameters: { body: { args: { data: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/content_dag_size_total": { post: { parameters: { body: { args: { [key: string]: unknown }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { 
/** OK */ 200: unknown; }; }; }; "/rpc/create_key": { post: { parameters: { body: { args: { data: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/json_arr_to_json_element_array": { post: { parameters: { body: { args: { _json: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/user_used_storage": { post: { parameters: { body: { args: { query_user_id: number; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; "/rpc/create_upload": { post: { parameters: { body: { args: { data: string; }; }; header: { /** Preference */ Prefer?: parameters["preferParams"]; }; }; responses: { /** OK */ 200: unknown; }; }; }; }
// ---------------------------------------------------------------------------
// NOTE(review): the interfaces in this file appear to be machine-generated
// (openapi-typescript-style output for a PostgREST OpenAPI schema) — TODO
// confirm. If so, do not edit by hand; regenerate from the schema instead.
//
// `definitions` below describes the row/record shape of each database table
// exposed by the REST API; the `<pk/>` / `<fk .../>` notes are emitted by the
// generator from the database's primary/foreign key metadata.
// ---------------------------------------------------------------------------
export interface definitions { auth_key: { /** * Note: * This is a Primary Key.<pk/> */ id: number; name: string; secret: string; /** * Note: * This is a Foreign Key to `user.id`.<fk table='user' column='id'/> */ user_id: number; inserted_at: string; updated_at: string; deleted_at?: string; }; backup: { /** * Note: * This is a Primary Key.<pk/> */ id: number; /** * Note: * This is a Foreign Key to `upload.id`.<fk table='upload' column='id'/> */ upload_id: number; url: string; inserted_at: string; }; content: { /** * Note: * This is a Primary Key.<pk/> */ cid: string; dag_size?: number; inserted_at: string; updated_at: string; }; migration_tracker: { /** * Note: * This is a Primary Key.<pk/> */ id: number; cid: string; duration?: number; dump_started_at?: string; dump_ended_at: string; inserted_at: string; }; name: { /** * Note: * This is a Primary Key.<pk/> */ key: string; record: string; has_v2_sig: boolean; seqno: number; validity: number; inserted_at: string; updated_at: string; }; pin: { /** * Note: * This is a Primary Key.<pk/> */ id: number; status: | "Undefined" | "ClusterError" | "PinError" | "UnpinError" |
"Pinned" | "Pinning" | "Unpinning" | "Unpinned" | "Remote" | "PinQueued" | "UnpinQueued" | "Sharded"; /** * Note: * This is a Foreign Key to `content.cid`.<fk table='content' column='cid'/> */ content_cid: string; /** * Note: * This is a Foreign Key to `pin_location.id`.<fk table='pin_location' column='id'/> */ pin_location_id: number; inserted_at: string; updated_at: string; }; pin_location: { /** * Note: * This is a Primary Key.<pk/> */ id: number; peer_id: string; peer_name?: string; region?: string; }; pin_request: { /** * Note: * This is a Primary Key.<pk/> */ id: number; /** * Note: * This is a Foreign Key to `content.cid`.<fk table='content' column='cid'/> */ content_cid: string; attempts?: number; inserted_at: string; updated_at: string; }; pin_sync_request: { /** * Note: * This is a Primary Key.<pk/> */ id: number; /** * Note: * This is a Foreign Key to `pin.id`.<fk table='pin' column='id'/> */ pin_id: number; inserted_at: string; }; upload: { /** * Note: * This is a Primary Key.<pk/> */ id: number; /** * Note: * This is a Foreign Key to `user.id`.<fk table='user' column='id'/> */ user_id: number; /** * Note: * This is a Foreign Key to `auth_key.id`.<fk table='auth_key' column='id'/> */ auth_key_id?: number; /** * Note: * This is a Foreign Key to `content.cid`.<fk table='content' column='cid'/> */ content_cid: string; source_cid: string; type: "Car" | "Upload" | "Blob" | "Multipart"; name?: string; inserted_at: string; updated_at: string; deleted_at?: string; }; user: { /** * Note: * This is a Primary Key.<pk/> */ id: number; name: string; picture?: string; email: string; issuer: string; github?: string; public_address: string; inserted_at: string; updated_at: string; }; }
// Reusable request parameters referenced by the path definitions above:
// header/query preferences plus, per table, a "body.<table>" payload type and
// one "rowFilter.<table>.<column>" filter string per column.
export interface parameters { /** Preference */ preferParams: "params=single-object"; /** Preference */ preferReturn: "return=representation" | "return=minimal" | "return=none"; /** Preference */ preferCount: "count=none"; /** Filtering Columns */ select: string; /** On Conflict */
on_conflict: string; /** Ordering */ order: string; /** Limiting and Pagination */ range: string; /** Limiting and Pagination */ rangeUnit: string; /** Limiting and Pagination */ offset: string; /** Limiting and Pagination */ limit: string; /** auth_key */ "body.auth_key": definitions["auth_key"]; "rowFilter.auth_key.id": string; "rowFilter.auth_key.name": string; "rowFilter.auth_key.secret": string; "rowFilter.auth_key.user_id": string; "rowFilter.auth_key.inserted_at": string; "rowFilter.auth_key.updated_at": string; "rowFilter.auth_key.deleted_at": string; /** backup */ "body.backup": definitions["backup"]; "rowFilter.backup.id": string; "rowFilter.backup.upload_id": string; "rowFilter.backup.url": string; "rowFilter.backup.inserted_at": string; /** content */ "body.content": definitions["content"]; "rowFilter.content.cid": string; "rowFilter.content.dag_size": string; "rowFilter.content.inserted_at": string; "rowFilter.content.updated_at": string; /** migration_tracker */ "body.migration_tracker": definitions["migration_tracker"]; "rowFilter.migration_tracker.id": string; "rowFilter.migration_tracker.cid": string; "rowFilter.migration_tracker.duration": string; "rowFilter.migration_tracker.dump_started_at": string; "rowFilter.migration_tracker.dump_ended_at": string; "rowFilter.migration_tracker.inserted_at": string; /** name */ "body.name": definitions["name"]; "rowFilter.name.key": string; "rowFilter.name.record": string; "rowFilter.name.has_v2_sig": string; "rowFilter.name.seqno": string; "rowFilter.name.validity": string; "rowFilter.name.inserted_at": string; "rowFilter.name.updated_at": string; /** pin */ "body.pin": definitions["pin"]; "rowFilter.pin.id": string; "rowFilter.pin.status": string; "rowFilter.pin.content_cid": string; "rowFilter.pin.pin_location_id": string; "rowFilter.pin.inserted_at": string; "rowFilter.pin.updated_at": string; /** pin_location */ "body.pin_location": definitions["pin_location"]; "rowFilter.pin_location.id": string;
"rowFilter.pin_location.peer_id": string; "rowFilter.pin_location.peer_name": string; "rowFilter.pin_location.region": string; /** pin_request */ "body.pin_request": definitions["pin_request"]; "rowFilter.pin_request.id": string; "rowFilter.pin_request.content_cid": string; "rowFilter.pin_request.attempts": string; "rowFilter.pin_request.inserted_at": string; "rowFilter.pin_request.updated_at": string; /** pin_sync_request */ "body.pin_sync_request": definitions["pin_sync_request"]; "rowFilter.pin_sync_request.id": string; "rowFilter.pin_sync_request.pin_id": string; "rowFilter.pin_sync_request.inserted_at": string; /** upload */ "body.upload": definitions["upload"]; "rowFilter.upload.id": string; "rowFilter.upload.user_id": string; "rowFilter.upload.auth_key_id": string; "rowFilter.upload.content_cid": string; "rowFilter.upload.source_cid": string; "rowFilter.upload.type": string; "rowFilter.upload.name": string; "rowFilter.upload.inserted_at": string; "rowFilter.upload.updated_at": string; "rowFilter.upload.deleted_at": string; /** user */ "body.user": definitions["user"]; "rowFilter.user.id": string; "rowFilter.user.name": string; "rowFilter.user.picture": string; "rowFilter.user.email": string; "rowFilter.user.issuer": string; "rowFilter.user.github": string; "rowFilter.user.public_address": string; "rowFilter.user.inserted_at": string; "rowFilter.user.updated_at": string; }
// Empty placeholders emitted by the generator (no named operations / external refs).
export interface operations {} export interface external {}
the_stack
import resolveInternalRoute from './internalRouteMapReader'; import InternalRoute from '../server/InternalRoute'; import URLBar from './URLBar'; import BrowserBar from './BrowserBar'; import Viewport from './Viewport'; import StatusIndicator from './StatusIndicator'; import ResponseRendererFactory from './ResponseRendererFactory'; import Dialog from './Dialog'; import IReadonlyHistory from './IReadonlyHistory'; import History from './History'; import HistoryEntry from './HistoryEntry'; import IFrameBindings from './IFrameBindings'; import IPrivilegedFrameBindings from './IPrivilegedFrameBindings'; import IBrowserConfiguration from '../server/IBrowserConfiguration'; import WritableBrowserConfig from './WritableBrowserConfig'; import * as configSection from './BrowserConfigSection'; import { internalConfirm, initialize as initializePrompts } from './webapi/prompts'; declare function escape(str: string): string; declare function unescape(str: string): string; /** * The complete browser window, including browser bar and viewport. 
*/ export default class BrowserWindow { constructor( private readonly config = new WritableBrowserConfig(), private readonly browserBar?: BrowserBar, private readonly viewport?: Viewport ) { this.browserBar = this.browserBar || new BrowserBar( new URLBar(this.config), dialog => this.renderDialog(dialog), url => this.load(url) ); this.history.push(new HistoryEntry('about://start', Date.now())); this.viewport = this.viewport || new Viewport(() => this.createFrameBindings()); this.viewport.onBeginNavigation.bind(this.handleViewportBeginningNavigation.bind(this)); this.viewport.onAfterNavigation.bind(this.handleViewportNavigating.bind(this)); } public async render(): Promise<void> { // status indicator await this.statusIndicator.render(); document.body.appendChild(this.statusIndicator.getDOM()); const statusIndicatorTicket = this.statusIndicator.show('initializing'); // browser bar this.browserBar.urlBar.onChange.bind(async () => { this.load(await this.browserBar.urlBar.getURL()); }); this.browserBar.onHomeButtonPressed.bind(() => { this.load('about://home'); }); this.browserBar.onRefreshButtonPressed.bind(() => { this.load(this.history.getCurrent().uri); }); this.browserBar.onNoCacheRefreshButtonPressed.bind(() => { this.load(this.history.getCurrent().uri); }); this.browserBar.onBackButtonPressed.bind(async () => { await this.history.goBack(); this.load(this.history.getCurrent().uri); }); this.browserBar.onForwardButtonPressed.bind(async () => { await this.history.goForward(); this.load(this.history.getCurrent().uri); }); await this.browserBar.render(); this.updateHistoryButtons(); document.body.appendChild(this.browserBar.getDOM()); // browser viewport await this.viewport.render(); document.body.appendChild(this.viewport.getDOM()); this.updateViewportHeight(false); this.viewport.onScroll.bind(this.handleViewportScroll.bind(this)); // hide the status indicator this.statusIndicator.hide(statusIndicatorTicket); // resize the viewport when the window size changes 
window.addEventListener('resize', () => this.expandBrowserBar(false)); } public getHistory(): IReadonlyHistory<HistoryEntry> { return this.history; } public async loadInitialPage(): Promise<void> { // load the initial page let initialUrl = 'about://home'; if (await this.config.get(configSection.showWelcomePage)) { initialUrl = 'about://welcome'; } await this.load(initialUrl); await this.updateBrowserConfigField('showWelcomePage', false); } /** * Loads a URI and renders it in the browser. * @param uri The URI to load. */ public async load(uri: string, deferHistoryUdpate = false): Promise<void> { if (uri.trim() !== 'about://welcome' && !(await this.ensureDisclaimerAccepted())) { return; } if (deferHistoryUdpate) { this.browserBar.showLoadingIndicator(); this.statusIndicator.show(`loading...`); } else { this.history.push(new HistoryEntry(uri, Date.now())); this.updateHistoryButtons(); await this.browserBar.urlBar.setURL(uri); this.statusIndicator.show(`loading ${uri}`); await this.browserBar.showLoadingProgress(10); } // refresh the `autoToggleAddressBar` config this.autoToggleAddressBar = await this.config.get(configSection.autoToggleAddressBar); const collapseBrowserBar = this.isBrowserBarCollapsed() && this.autoToggleAddressBar; if (collapseBrowserBar) { this.expandBrowserBar(true); } const response = await this.request(uri); const renderer = ResponseRendererFactory.getRenderer(this.viewport, response); const responseURI = response.getResponseHeader('actual-uri') || uri; let statusIndicatorTicket = this.statusIndicator.show(`rendering ${responseURI}`); // update the browser bar to the actual URL of the page we're now on if (deferHistoryUdpate) { this.browserBar.urlBar.setURL(responseURI, false); this.history.push(new HistoryEntry(responseURI, Date.now())); this.updateHistoryButtons(); } else if (responseURI !== uri) { this.browserBar.urlBar.setURL(responseURI, false); } // render the actual response await renderer.renderResponse(responseURI, response); // render 
the favicon const icon = await renderer.generateFavicon(responseURI, response); if (typeof icon === 'string') { this.browserBar.urlBar.setFavicon(icon); } else { this.browserBar.urlBar.setFavicon(undefined); } await this.browserBar.showLoadingProgress(100); await this.browserBar.hideLoadingIndicator(); this.statusIndicator.hide(statusIndicatorTicket); // collapse the browser bar if it was collapsed before loading started if (collapseBrowserBar) { await this.collapseBrowserBar(); } } /** * Checks whether the browser bar is currently collapsed or expanded. */ public isBrowserBarCollapsed(): boolean { return this.browserBar.isCollapsed(); } /** * Collapses the browser bar and returns when the animation is complete. */ public async collapseBrowserBar(): Promise<void> { await Promise.all([ this.browserBar.collapse(), this.viewport.updateHeight(document.body.getBoundingClientRect().height, true) ]); } /** * Expands the browser bar and returns when the animation is complete. * @param overlayMode When `true`, the browser bar will open as an overlay. */ public async expandBrowserBar(overlayMode = false): Promise<void> { const updateViewportHeight = overlayMode ? 
// in overlay mode, the viewport is at 100% height: () => this.viewport.updateHeight(document.body.getBoundingClientRect().height, true) : // if not in overlay mode, fit the viewport into available horizontal space: () => this.updateViewportHeight(true); await Promise.all([ this.browserBar.expand(), updateViewportHeight() ]); } public async renderDialog(dialog: Dialog): Promise<void> { await dialog.render(); document.body.appendChild(dialog.getDOM()); } private async request(uri: string) { return new Promise<XMLHttpRequest>((resolve, reject) => { const request = new XMLHttpRequest(); request.onerror = reject; request.onreadystatechange = async () => { if (request.readyState === XMLHttpRequest.DONE) { await this.browserBar.showLoadingProgress(90); resolve(request); } }; request.onprogress = e => { if (e.lengthComputable) { this.browserBar.showLoadingProgress(((e.loaded / e.total) * 100) - 20); } else { this.browserBar.showLoadingIndicator(); } }; request.open('GET', `${resolveInternalRoute(InternalRoute.LoadBase)}?${escape(uri)}`, true); request.send(); }); } /** * Presents the disclaimer to the user and asks to accept it. * Returns `true` when the user accepts it, `false` if not. */ private async askToAcceptDisclaimer(): Promise<boolean> { if (this.disclaimerPromptVisible) { return; } this.disclaimerPromptVisible = true; const response = await this.request('about://disclaimer'); const accepted = await internalConfirm( this, `Accept 'Chrome VS Code' Terms of Use to continue browsing`, response.responseText, true, 'Accept Terms of Use', 'Don\'t accept' ); this.disclaimerPromptVisible = false; return accepted; } /** * Returns `true` when the user has accepted the disclaimer, `false` if not. 
*/ private async ensureDisclaimerAccepted(): Promise<boolean> { const notAccepted = () => { this.viewport.renderHTML(''); this.browserBar.urlBar.setURL('about://welcome'); }; // disclaimer was already accepted if (await this.config.get(configSection.disclaimerReadAndAccepted)) { return true; } // disclaimer was not accepted yet const accepted = await this.askToAcceptDisclaimer(); // update the browser config await this.updateBrowserConfigField('disclaimerReadAndAccepted', accepted); if (!accepted) { notAccepted(); return; } // Don't return the `accepted` value from above, but rather refresh the browser config // and return the config value from 'disclaimerReadAndAccepted'. This way, we can make // sure the config file is in sync. if (!(await this.config.get(configSection.disclaimerReadAndAccepted))) { notAccepted(); return false; } return true; } private updateHistoryButtons(): void { // forward button if (this.history.canGoForward()) { this.browserBar.enableHistoryForwardButton(); } else { this.browserBar.disableHistoryForwardButton(); } // back button if (this.history.canGoBackward()) { this.browserBar.enableHistoryBackButton(); } else { this.browserBar.disableHistoryBackButton(); } } private updateViewportHeight(animated: boolean): void { const bodyHeight = document.body.getBoundingClientRect().height; const browserBarHeight = this.browserBar.getDOM().getBoundingClientRect().height; this.viewport.updateHeight(bodyHeight - browserBarHeight, animated); } private async handleViewportScroll(): Promise<void> { if (!this.autoToggleAddressBar) { return; } const now = Date.now(); if (now - this.lastViewportScroll.recordedTime <= 300) { return; } const currentScrollY = this.viewport.getScroll().y; const threshold = this.viewport.getDOM().getBoundingClientRect().height / 10; if (Math.abs(currentScrollY - this.lastViewportScroll.scrollY) < threshold) { return; } // scrolling down: if (currentScrollY > this.lastViewportScroll.scrollY) { this.collapseBrowserBar(); } else { 
// scrolling up: this.expandBrowserBar(); } this.lastViewportScroll.recordedTime = now; this.lastViewportScroll.scrollY = currentScrollY; } private isInternalURL(url: string): boolean { const getInternalRouteRegex = (routeIdentifier: InternalRoute) => { const asString = resolveInternalRoute(routeIdentifier) // remove any leading slashes .replace(/^\//, '') // escape all slashes (except the leading one removed above) .replace(/\//, '\\/'); return new RegExp(`${window.location.host}\/+${asString}`); }; return ( getInternalRouteRegex(InternalRoute.Load).test(url) || getInternalRouteRegex(InternalRoute.LoadBase).test(url) ); } private async handleViewportBeginningNavigation(): Promise<void> { this.expandBrowserBar(); this.browserBar.showLoadingIndicator(); } private async handleViewportNavigating(uri: string): Promise<void> { uri = unescape((<string>uri) || ''); if (this.isInternalURL(uri)) { uri = uri.replace(/^.*?\?/, ''); } await this.load(uri, true); } /** * Loads and returns the current browser configuration from the back end. 
*/ private async loadBrowserConfig(): Promise<IBrowserConfiguration> { return new Promise<IBrowserConfiguration>((resolve, reject) => { const request = new XMLHttpRequest(); request.onerror = reject; request.onreadystatechange = () => { if (request.readyState === XMLHttpRequest.DONE) { resolve(JSON.parse(request.responseText)); } }; request.open('GET', resolveInternalRoute(InternalRoute.ConfigRead), true); request.send(); }); } private async updateConfig(config: { [section: string]: { [key: string]: any; }; }): Promise<void> { await new Promise<void>((resolve, reject) => { const request = new XMLHttpRequest(); request.onerror = reject; request.onreadystatechange = () => { if (request.readyState === XMLHttpRequest.DONE) { resolve(); } }; request.open('GET', `${resolveInternalRoute(InternalRoute.ConfigWrite)}?${escape(JSON.stringify(config))}`, true); request.send(); }); } private async updateConfigField(section: string, key: string, value: any): Promise<void> { const object: any = {}; object[section] = {}; object[section][key] = value; return this.updateConfig(object); } private async updateBrowserConfigField(key: string, value: any): Promise<void> { return this.updateConfigField('chromevscode', key, value); } private createFrameBindings(): IFrameBindings { const browserWindow = this; class FrameBindings implements IFrameBindings { /** * Initializes the frame's web API bindings. */ public async initializeWebAPIs(frameWindow: Window): Promise<void> { await initializePrompts(browserWindow, frameWindow); } /** * Updates the browser location to another URI. * @param uri The URI to open. */ public async load(uri: string): Promise<void> { return browserWindow.load(uri); } /** * Attempts to show the address bar. Returns `true` when successful, `false` if not. */ public async showAddressBar(): Promise<boolean> { await browserWindow.expandBrowserBar(); return true; } /** * Attempts to hide the address bar. Returns `true` when successful, `false` if not. 
*/ public async hideAddressBar(): Promise<boolean> { await browserWindow.collapseBrowserBar(); return true; } } class PrivilegedFrameBindings extends FrameBindings implements IPrivilegedFrameBindings { /** * Returns the browser configuration as an object. */ public async getConfiguration(): Promise<IBrowserConfiguration> { return browserWindow.loadBrowserConfig(); } } if (/^about:\/\//.test(this.history.getCurrent().uri)) { return new PrivilegedFrameBindings(); } else { return new FrameBindings(); } } private readonly statusIndicator = new StatusIndicator(); private readonly history = new History(); private autoToggleAddressBar = true; private lastViewportScroll: { recordedTime: number; scrollY: number; } = { recordedTime: Date.now(), scrollY: 0 }; /** * This is `true` if the disclaimer prompt is currently visible. */ private disclaimerPromptVisible = false; }
the_stack
import { ScatterplotLayer, LineLayer, ArcLayer } from '@deck.gl/layers';
import { LayerProps, CompositeLayer } from '@deck.gl/core';
import { SimpleMeshLayer, ScenegraphLayer } from '@deck.gl/mesh-layers';
import { CubeGeometry } from '@luma.gl/engine'
import CubeGraphLayer from '../cubegraph-layer';
import { onHoverClick, pickParams, checkClickedObjectToBeRemoved } from '../../library';
import { COLOR1 } from '../../constants/settings';
import { RoutePaths, MovedData, Movesbase, ClickedObject, LayerTypes, IconDesignation } from '../../types';
import * as Actions from '../../actions';
import {registerLoaders} from '@loaders.gl/core';
import {GLTFLoader} from '@loaders.gl/gltf';

// Register the glTF loader globally so ScenegraphLayer can resolve .glb assets.
registerLoaders([GLTFLoader]);

// Vertex positions of an elongated (height 4 in z) cuboid used as the default mesh icon.
// prettier-ignore
const CUBE_POSITIONS = new Float32Array([
  -1,-1,2,1,-1,2,1,1,2,-1,1,2,
  -1,-1,-2,-1,1,-2,1,1,-2,1,-1,-2,
  -1,1,-2,-1,1,2,1,1,2,1,1,-2,
  -1,-1,-2,1,-1,-2,1,-1,2,-1,-1,2,
  1,-1,-2,1,1,-2,1,1,2,1,-1,2,
  -1,-1,-2,-1,-1,2,-1,1,2,-1,1,-2
]);
const ATTRIBUTES = {
  POSITION: {size: 3, value: new Float32Array(CUBE_POSITIONS)},
};
// Default geometry for SimpleMeshLayer icons.
const defaultmesh = new CubeGeometry({attributes: ATTRIBUTES});
// Default model for ScenegraphLayer icons (deck.gl sample airplane).
const defaultScenegraph = 'https://raw.githubusercontent.com/uber-common/deck.gl-data/master/examples/scenegraph-layer/airplane.glb';

// Props accepted by MovesLayer (declaration continues past this chunk boundary).
interface Props extends LayerProps {
  routePaths?: RoutePaths[],
  layerRadiusScale?: number,
  layerOpacity?: number,
  movedData: MovedData[],
  movesbase?: Movesbase[],
  clickedObject?: null | ClickedObject[],
  actions?: typeof Actions,
  optionVisible?: boolean,
  optionArcVisible?: boolean,
  optionLineVisible?: boolean,
  optionChange?: boolean,
  optionOpacity?: number,
  optionCellSize?: number,
  optionElevationScale?: number,
  optionCentering?: boolean,
  optionDisplayPosition?: number,
  iconlayer?: LayerTypes,
  iconChange?: boolean, // Invalid if there is iconDesignations definition
  iconCubeType?: number, // Invalid if there is iconDesignations definition
  iconDesignations?: IconDesignation[],
  getRouteColor?: (x: MovedData) => number[],
  getRouteWidth?: (x: 
MovedData) => number,
  getRadius?: (x: MovedData) => number,
  getCubeColor?: (x: MovedData) => number[][],
  getCubeElevation?: (x: MovedData) => number[],
  getArchWidth?: (x: MovedData) => number,
  getLinehWidth?: (x: MovedData) => number,
  scenegraph?: any,
  mesh?: any,
  sizeScale?: number,
  getOrientation?: (x: MovedData) => number[],
  getScale?: (x: MovedData) => number[],
  getTranslation?: (x: MovedData) => number[],
}

/**
 * Composite layer rendering moving objects (icons) together with optional
 * route paths, stacked cube graphs and arc/line overlays.
 */
export default class MovesLayer extends CompositeLayer<Props> {
  constructor(props: Props) { super(props); };

  static defaultProps = {
    id: 'MovesLayer',
    layerRadiusScale: 1,
    layerOpacity: 0.75,
    optionVisible: true,
    optionLineVisible: false,
    optionChange: false,
    optionOpacity: 0.25,
    optionCellSize: 12,
    optionElevationScale: 1,
    optionCentering: false,
    optionDisplayPosition: 30,
    visible: true,
    iconChange: true,
    iconCubeType: 0,
    getRouteColor: (x: MovedData) => x.routeColor || x.color || COLOR1,
    getRouteWidth: (x: MovedData) => x.routeWidth || 10,
    getRadius: (x: MovedData) => x.radius || 20,
    // FIX: was `x.optColor || [x.color] || [COLOR1]`. The literal `[x.color]`
    // is always truthy, so `[COLOR1]` was unreachable and a datum without
    // `color` produced `[undefined]`. Default the element instead, matching
    // the getRouteColor fallback chain above.
    getCubeColor: (x: MovedData) => x.optColor || [x.color || COLOR1],
    getCubeElevation: (x: MovedData) => x.optElevation,
    getArchWidth: (x: MovedData) => x.archWidth || 10,
    getLinehWidth: (x: MovedData) => 10,
    scenegraph: defaultScenegraph,
    mesh: defaultmesh,
    sizeScale: 20,
    getOrientation: (x: MovedData) => x.direction ? 
[0,-x.direction,90] : [0,0,90],
    getScale: (x: MovedData) => x.scale || [1,1,1],
    getTranslation: [0,0,0],
    pickable: true,
  };

  static layerName = 'MovesLayer';

  // Forwards picking events (hover/click) to the shared handler.
  getPickingInfo(pickParams: pickParams):void {
    const { getRouteColor, getRouteWidth, iconDesignations } = this.props;
    onHoverClick(pickParams, getRouteColor, getRouteWidth, iconDesignations);
  }

  // Builds one icon sub-layer per icon designation. When no designations are
  // supplied, a single one is derived from iconlayer/iconChange/iconCubeType.
  getIconLayer(movedData:MovedData[]):any[] {
    const { id, layerRadiusScale, layerOpacity, pickable, getRadius, iconlayer, iconChange, iconCubeType, visible,
      scenegraph, mesh, sizeScale, getOrientation, getScale, getTranslation,
      iconDesignations:propIconDesignations } = this.props;
    const selectlayer = iconlayer || (!iconChange ? 'Scatterplot':
      iconCubeType === 0 ? 'SimpleMesh':iconCubeType === 1 ? 'Scenegraph':'Scatterplot');
    const defaultIconDesignations = [{'type':undefined,'layer':selectlayer}];
    const iconDesignations = propIconDesignations || defaultIconDesignations;
    const getColor = (x: MovedData) => x.color || COLOR1;
    return iconDesignations.map((iconDesignation:IconDesignation, idx:number)=>{
      // Per-designation overrides take precedence over the layer-wide props.
      const {type, layer, radiusScale:overradiusScale, getColor:overgetColor,
        getOrientation:overgetOrientation, getScale:overgetScale, getTranslation:overgetTranslation,
        getRadius:overgetRadius, sizeScale:oversizeScale, mesh:overmesh, scenegraph:overscenegraph} = iconDesignation;
      if(layer && layer === 'Scatterplot'){
        return new ScatterplotLayer({
          id: id + '-moves-Scatterplot-' + String(idx),
          data: movedData,
          radiusScale: overradiusScale || layerRadiusScale,
          // Returning null hides data whose `type` does not match this designation.
          getPosition:(x: MovedData) => !type || !x.type || (x.type && x.type === type) ? x.position : null,
          getFillColor: overgetColor || getColor,
          getRadius: overgetRadius || getRadius,
          visible,
          opacity: layerOpacity,
          pickable,
          radiusMinPixels: 1
        });
      }else if(layer && layer === 'SimpleMesh'){
        return new SimpleMeshLayer({
          id: id + '-moves-SimpleMesh-' + String(idx),
          data: movedData,
          mesh: overmesh || mesh,
          sizeScale: oversizeScale || sizeScale,
          getPosition:(x: MovedData) => !type || !x.type || (x.type && x.type === type) ? x.position : null,
          getColor: overgetColor || getColor,
          getOrientation: overgetOrientation || getOrientation,
          getScale: overgetScale || getScale,
          getTranslation: overgetTranslation || getTranslation,
          visible,
          opacity: layerOpacity,
          pickable,
        });
      }else if(layer && layer === 'Scenegraph'){
        return new ScenegraphLayer({
          id: id + '-moves-Scenegraph-' + String(idx),
          data: movedData,
          scenegraph: overscenegraph || scenegraph,
          sizeScale: oversizeScale || sizeScale,
          getPosition:(x: MovedData) => !type || !x.type || (x.type && x.type === type) ? x.position : null,
          getColor: overgetColor || getColor,
          getOrientation: overgetOrientation || getOrientation,
          getScale: overgetScale || getScale,
          getTranslation: overgetTranslation || getTranslation,
          visible,
          opacity: layerOpacity,
          pickable,
        });
      }else{
        // NOTE(review): an unknown designation yields a null entry; deck.gl
        // tolerates null sub-layers, so this only logs.
        console.log('iconDesignations layer undefined.');
        return null;
      }
    });
  }

  // Assembles the icon, route, cube-graph, arc and line sub-layers.
  renderLayers():any[] {
    const { id, routePaths, layerOpacity, movedData, pickable, clickedObject, actions,
      optionElevationScale, optionOpacity, optionCellSize, optionDisplayPosition,
      optionVisible, optionArcVisible, optionLineVisible, optionChange, iconChange, visible,
      getCubeColor, getCubeElevation, getArchWidth, getLinehWidth, optionCentering, } = this.props;
    if (!movedData || movedData.length === 0 || !visible) {
      return null;
    }
    const stacking1 = visible && optionVisible && optionChange;
    // Cube graphs are lifted above the icons when icons are displayed.
    const optPlacement = visible && iconChange ? ()=>optionDisplayPosition : ()=>0;
    // Arc/line visibility falls back to the general option visibility when unset.
    const arcVisible = optionArcVisible !== undefined ? optionArcVisible : optionVisible;
    const lineVisible = optionLineVisible !== undefined ? optionLineVisible : optionVisible;
    const movedDataPosition = movedData.filter((x)=>x.position);
    const arcData = movedData.filter((data)=>data.sourcePosition);
    checkClickedObjectToBeRemoved(movedDataPosition, clickedObject, routePaths, actions);
    const iconLayers = this.getIconLayer(movedDataPosition);
    return [
      iconLayers,
      routePaths && routePaths.length > 0 ?
      new LineLayer({
        id: id + '-route-paths',
        data: routePaths,
        widthUnits: 'meters',
        getWidth: (x: MovedData) => x.routeWidth,
        widthMinPixels: 0.1,
        getColor: (x: MovedData) => x.routeColor,
        visible,
        pickable: false
      }) : null,
      optionVisible ?
      new CubeGraphLayer({
        id: id + '-moves-opt-cube',
        optionData: movedDataPosition,
        visible: optionVisible,
        optionCentering,
        stacking1,
        getCubeColor,
        getCubeElevation,
        getRadius: optPlacement,
        opacity: optionOpacity,
        pickable,
        cellSize: optionCellSize,
        elevationScale: optionElevationScale,
      }) : null,
      arcData.length > 0 && arcVisible ?
      new ArcLayer({
        id: id + '-moves-opt-arc',
        data: arcData,
        visible: arcVisible,
        pickable,
        widthUnits: 'meters',
        widthMinPixels: 0.1,
        getSourcePosition: (x: MovedData) => x.sourcePosition,
        getTargetPosition: (x: MovedData) => x.targetPosition,
        getSourceColor: (x: MovedData) => x.sourceColor || x.color || COLOR1,
        getTargetColor: (x: MovedData) => x.targetColor || x.color || COLOR1,
        getWidth: getArchWidth,
        opacity: layerOpacity
      }) : null,
      arcData.length > 0 && lineVisible ?
      new LineLayer({
        id: id + '-moves-opt-line',
        data: arcData,
        visible: lineVisible,
        pickable,
        widthUnits: 'meters',
        widthMinPixels: 0.1,
        getSourcePosition: (x: MovedData) => x.sourcePosition,
        getTargetPosition: (x: MovedData) => x.targetPosition,
        getColor: (x: MovedData) => x.sourceColor || x.color || COLOR1,
        getWidth: getLinehWidth,
        opacity: layerOpacity
      }) : null,
    ];
  }
}
the_stack
import { Component, OnDestroy, ChangeDetectorRef, NgZone } from '@angular/core';
import { BsModalService } from 'ngx-bootstrap/modal';
import { WidgetService } from '../../../widget.service';
import { NotificationService, Base64Service, Principal, WidgetEventBusService } from '../../../../../shared';
import { JhiEventManager } from 'ng-jhipster';
import { AbstractSecondaryBarChartWidgetComponent } from './abstract-secondary-bar-chart-widget.component';
import { DataSourceService } from '../../../../data-source/data-source.service';
import { SecondaryWidget } from '../../secondary-widget';
import { Router } from '@angular/router';

/**
 * This component allows a tabular analysis of data fetched from the datasource
 * through queries, full text search, class scan loading.
 */
@Component({
    selector: 'query-secondary-bar-chart-widget',
    templateUrl: './query-bar-chart-widget.component.html',
    styleUrls: ['./query-bar-chart-widget.component.scss']
})
export class QuerySecondaryBarChartWidgetComponent extends AbstractSecondaryBarChartWidgetComponent implements SecondaryWidget, OnDestroy {

    // Single-series configuration: columns feeding the X axis (category) and Y axis (value).
    categoryProperty: string;
    valueProperty: string;

    // multi series nodes fetching
    multiSeriesSelectedCategoryProperty: string;
    multiSeriesSelectedValueProperty: string;
    multiSeriesSelectedClassProperties: string[];
    // multiSeriesLimitEnabled: boolean = true;
    // multiSeriesLimitForNodeFetching: number = 100;

    // Columns of the category/value picker table shown in the sidebar.
    tableColumns: Object[] = [
        { id: 'categoryproperty', text: 'Category', width: '45%' },
        { id: 'valueProperty', text: 'Value', width: '45%' },
        // { id: 'Count', text: 'Count', width: '25%' },
        { id: 'button', text: '' }
    ];

    // Maps a series name ("<category>_<value>") to its {categoryProperty, valueProperty} pair.
    multiSeriesName2info: Object = {};

    constructor(
        protected ngZone: NgZone,
        protected principal: Principal,
        protected widgetService: WidgetService,
        protected notificationService: NotificationService,
        protected dataSourceService: DataSourceService,
        protected eventManager: JhiEventManager,
        protected cdr: ChangeDetectorRef,
        protected modalService: BsModalService,
        protected base64Service: Base64Service,
        protected widgetEventBusService: WidgetEventBusService,
        protected router: Router) {
        super(ngZone, principal, widgetService, notificationService, dataSourceService,
            eventManager, cdr, modalService, base64Service, widgetEventBusService, router);
        this.selectedClass = 'Table';
        this.multiSeriesSelectedClass = 'Table';
    }

    // NOTE(review): implemented as a lifecycle hook but AfterViewInit is not in
    // the `implements` clause; Angular still invokes it by name — confirm intended.
    ngAfterViewInit() {
        this.performAdditionalInit();
        // sidebar height
        if (!this.embedded) {
            this.maxSidebarHeight = this.widgetHeight;
        } else {
            this.adjustWidgetHeightToEmbeddingIframeHeight();
        }
        if (this.minimizedView) {
            this.requestDatasetPropagation();
        }
    }

    ngOnDestroy() {
        this.eventManager.destroy(this.dashboardPanelResizedSubscriber);
        this.unsubscribeToEventBus();
    }

    // Recomputes the single series after the user changes Category/Value.
    handleSelectedPropertyModelChanging() {
        this.startSpinner();
        this.multiSeriesMode = false;
        this.performSeriesComputationForCurrentDataset();
    }

    // Template entry point; `mode` switches between single/multi series.
    // NOTE(review): `saveAfterUpdate` is accepted but never forwarded — confirm
    // whether saving was intended here.
    runSeriesComputation(saveAfterUpdate?: boolean, mode?: string) {
        if (mode === 'single') {
            this.multiSeriesMode = false;
        } else if (mode === 'multi') {
            this.multiSeriesMode = true;
        }
        this.performSeriesComputationForCurrentDataset();
    }

    // Override
    updateBarChartWidgetFromSnapshot(snapshot) {
        if (snapshot['categoryProperty']) {
            this.categoryProperty = snapshot['categoryProperty'];
        }
        if (snapshot['valueProperty']) {
            this.valueProperty = snapshot['valueProperty'];
        }
        super.updateBarChartWidgetFromSnapshot(snapshot);
        this.updateMultiSeriesSelectedClassProperties();
    }

    // Override
    addNewMultiSeries() {
        const seriesName = this.multiSeriesSelectedCategoryProperty + '_' + this.multiSeriesSelectedValueProperty;
        this.multiSeriesName2info[seriesName] = {
            categoryProperty: this.multiSeriesSelectedCategoryProperty,
            valueProperty: this.multiSeriesSelectedValueProperty
        };
        this.multiSeriesSelectedCategoryProperty = undefined;
        this.multiSeriesSelectedValueProperty = undefined;
        this.multiSeriesMode = true;
    }

    // Override
    removeMultiSeries(seriesName) {
        delete this.multiSeriesName2info[seriesName];
        this.multiSeriesMode = true;
    }

    // Override
    updateMultiSeriesSelectedClassProperties() {
        this.multiSeriesSelectedProperty = undefined;
        this.multiSeriesSelectedClassProperties = [];
        this.multiSeriesSelectedClassProperties = this.getClassProperties(this.multiSeriesSelectedClass);
        if (this.multiSeriesSelectedClassProperties.length === 0) {
            this.notificationService.push('warning', 'No Property found', 'No property found for the current flat table.');
        }
    }

    // Override
    onDatasetUpdate(data: Object, metadata: Object) {
        this.stopSpinner();
        this.updateWidgetDataset(data);
        this.updateSecondaryMetadataFromPrimaryMetadata(metadata);
        // updating the class properties, as after metadata update could be some properties not present before
        super.updateSelectedClassProperties();
        let saved: boolean = false;
        if (this.currentDataset['elements'].length > 0) {
            if (!this.multiSeriesMode) {
                // trying to perform single series computation
                if (this.categoryProperty && this.valueProperty) {
                    this.performSeriesComputationForCurrentDataset(true);
                    saved = true;
                } else {
                    console.log('[BarChartWidget-id: ' + this.widget.id + ']: cannot perform single series computation because Category and/or Value are not defined.');
                }
            } else {
                // trying to perform multi series computation
                if (Object.keys(this.multiSeriesName2info).length > 0) {
                    this.performSeriesComputationForCurrentDataset(true);
                    saved = true;
                } else {
                    console.log('[BarChartWidget-id: ' + this.widget.id + ']: cannot perform multi series computation because no category-value rules are defined.');
                }
            }
        } else {
            // clean the pie chart
            this.series = [];
            this.xAxisCategories = [];
            this.updateBarChart(true);
        }
        if (!saved && this.principal.hasAnyAuthorityDirect(['ROLE_ADMIN', 'ROLE_EDITOR'])) {
            // even though we did not save the widget as we did not perform the series computation, we have to save the new current dataset and metadata
            this.saveAll(true);
        }
        this.updateMultiSeriesSelectedClassProperties();
    }

    /**
     * It performs the faceting for the current dataset.
     * NOTE(review): the early `return`s below (missing distribution) skip
     * `stopSpinner()`, so a spinner started by the caller stays visible — confirm.
     * @param saveAfterUpdate
     */
    performSeriesComputationForCurrentDataset(saveAfterUpdate?: boolean): void {
        this.series = [];
        this.xAxisCategories = [];
        this.barChartLegendData = [];
        this.barChartLegendDataSelected = {};
        if (this.multiSeriesMode) {
            // single series settings reset
            this.resetSingleSeriesSettings();
            const distribution = this.buildDistribution();
            for (const currSeriesName of Object.keys(this.multiSeriesName2info)) {
                const currSeriesBarChartData = [];
                // legend data updating
                this.barChartLegendData.push(currSeriesName);
                this.barChartLegendDataSelected[currSeriesName] = true;
                const seriesDistribution = distribution[currSeriesName];
                if (!seriesDistribution) {
                    let message = 'No distribution is present for the \'' + currSeriesName + '\' series.';
                    message += this.checkThresholdsMessage;
                    console.log(message);
                    this.notificationService.push('warning', 'Distribution computation', message);
                    return;
                }
                const currSeries: Object = {
                    name: currSeriesName,
                    type: 'bar',
                    label: this.labelOptions,
                    emphasis: this.emphasisOptions
                };
                for (const currValue of Object.keys(seriesDistribution)) {
                    const currBarChartItem = seriesDistribution[currValue];
                    currSeriesBarChartData.push(currBarChartItem);
                    if (this.xAxisCategories.indexOf(currValue) < 0) {
                        this.xAxisCategories.push(currValue);
                    }
                }
                // injecting data into the current series
                currSeries['data'] = currSeriesBarChartData;
                // adding the single series
                this.series.push(currSeries);
            }
        } else {
            // just a series: categoryProperty_valueProperty
            const distribution = this.buildDistribution();
            const seriesBarChartData = [];
            const seriesName = this.buildSingleSeriesName();
            // legend data updating
            this.barChartLegendData.push(seriesName);
            this.barChartLegendDataSelected[seriesName] = true;
            const seriesDistribution = distribution[seriesName];
            if (!seriesDistribution) {
                let message = 'No distribution is present for the \'' + seriesName + '\' series.';
                message += this.checkThresholdsMessage;
                console.log(message);
                this.notificationService.push('warning', 'Distribution computation', message);
                return;
            }
            const currSeries: Object = {
                name: seriesName,
                type: 'bar',
                label: this.labelOptions,
                emphasis: this.emphasisOptions
            };
            for (const currValue of Object.keys(seriesDistribution)) {
                const currBarChartItem = seriesDistribution[currValue];
                seriesBarChartData.push(currBarChartItem);
                if (this.xAxisCategories.indexOf(currValue) < 0) {
                    this.xAxisCategories.push(currValue);
                }
            }
            // injecting data into the current series
            currSeries['data'] = seriesBarChartData;
            // adding the single series
            this.series.push(currSeries);
        }
        this.stopSpinner();
        this.updateBarChart(true);
        // updating to-save flag
        this.toSave = true;
        if (saveAfterUpdate) {
            this.saveAll(true);
        }
    }

    // Builds a map: seriesName -> { categoryValue -> value } from the current dataset.
    // NOTE(review): duplicate category values overwrite each other (last wins) — confirm.
    buildDistribution(): Object {
        const globalDistribution = {};
        if (this.multiSeriesMode) {
            for (const seriesName of Object.keys(this.multiSeriesName2info)) {
                const currSeriesInfo = this.multiSeriesName2info[seriesName];
                const currCategoryProperty = currSeriesInfo['categoryProperty'];
                const currValueyProperty = currSeriesInfo['valueProperty'];
                const currSeriesDistribution = {};
                for (const elem of this.currentDataset['elements']) {
                    const record = elem['data']['record'];
                    currSeriesDistribution[record[currCategoryProperty]] = record[currValueyProperty];
                }
                globalDistribution[seriesName] = currSeriesDistribution;
            }
        } else {
            const seriesName = this.buildSingleSeriesName();
            const currSeriesDistribution = {};
            for (const elem of this.currentDataset['elements']) {
                const record = elem['data']['record'];
                currSeriesDistribution[record[this.categoryProperty]] = record[this.valueProperty];
            }
            globalDistribution[seriesName] = currSeriesDistribution;
        }
        return globalDistribution;
    }

    /**
     * Builds the name for a single series according to a specific logic:
     * seriesName = <categoryProperty>_<valueProperty>
     */
    buildSingleSeriesName(): string {
        return this.categoryProperty + '_' + this.valueProperty;
    }

    /**
     * Modals handling
     */

    // Override
    resetSingleSeriesSettings() {
        this.categoryProperty = undefined;
        this.valueProperty = undefined;
    }

    /**
     * Saving
     */

    // Override
    buildSnapshotObject(): Object {
        const jsonForSnapshotSaving = {
            series: this.series,
            xAxisCategories: this.xAxisCategories,
            barChartLegendData: this.barChartLegendData,
            barChartLegendDataSelected: this.barChartLegendDataSelected,
            multiSeriesName2info: this.multiSeriesName2info,
            currentDataset: this.currentDataset,
            dataSourceMetadata: this.dataSourceMetadata,
            currentFaceting: this.currentFaceting,
            selectedClass: this.selectedClass,
            limitEnabled: this.limitEnabled,
            limitForNodeFetching: this.limitForNodeFetching,
            multiSeriesMode: this.multiSeriesMode,
            selectedClassProperties: this.selectedClassProperties,
            categoryProperty: this.categoryProperty,
            valueProperty: this.valueProperty,
            multiSeriesLimitEnabled: this.multiSeriesLimitEnabled,
            multiSeriesLimitForNodeFetching: this.multiSeriesLimitForNodeFetching,
            showLegend: this.showLegend,
            yAxisType: this.yAxisType,
            labelOptions: this.labelOptions,
            xAxisLabelOptions: this.xAxisLabelOptions,
            yAxisLabelOptions: this.yAxisLabelOptions,
        };
        const perspective: Object = {
            barChartTabActive: this.barChartTabActive,
            datasourceTabActive: this.datasourceTabActive,
        };
        jsonForSnapshotSaving['perspective'] = perspective;
        return jsonForSnapshotSaving;
    }
}
the_stack
import {debounceTime, map} from "rxjs/operators";
import {Subject, Subscription} from "rxjs";
import {EventEmitter} from "@angular/core";
import {HttpClient} from "@angular/common/http";
import {
    ComponentDataHelper, DataFilterInfo, DataReviser, DataSortInfo, HttpClientOptions, IAjaxComponentData,
    IEmittable, IFilterable, IPageable, IServerSidePageable, ISortable, PagingInfo, PreparedHttpClientOptions,
    SortAs, SortOrder, serializeFilterFunction
} from "./component-data";
import {TableData} from "./table-data";
import {CallbackRemoval, CommonUtils} from "../utils/common-utils";
import {JigsawArray} from "../utils/data-collection-utils";

// Normalizes `source` to an array, replaces the whole content of `dest` with it,
// and reports whether anything actually changed (i.e. whether a refresh is needed).
function _fromArray(dest: ArrayCollection<any>, source: any[]): boolean {
    source = source instanceof Array || (source as any) instanceof ArrayCollection ? source :
        CommonUtils.isDefined(source) ? [source] : [];
    const needRefresh = dest.length > 0 || source.length > 0;
    dest.splice(0, dest.length);
    if (source.length > 0) {
        source.forEach(item => dest.push(item));
    }
    return needRefresh;
}

/**
 * One of the two major branches of the Jigsaw data system: the base class for
 * array-like data.
 *
 * See `IComponentData` for a detailed introduction to the Jigsaw data system.
 */
export class ArrayCollection<T> extends JigsawArray<T> implements IAjaxComponentData, IEmittable {
    /**
     * Used to issue network requests; it must be set before `fromAjax()` is called.
     */
    public http: HttpClient;
    // Optional hook that post-processes raw ajax responses before they are consumed.
    public dataReviser: DataReviser;

    public concat(...items: any[]): ArrayCollection<T> {
        const acArr = [];
        for (let i = 0; i < this.length; i++) {
            acArr.push(this[i])
        }
        let itemArr = [];
        items.forEach(item => {
            itemArr = itemArr.concat(item);
        });
        return new ArrayCollection<T>(acArr.concat(itemArr));
    }

    public slice(start?: number, end?: number): ArrayCollection<T> {
        const acArr = [];
        for (let i = 0; i < this.length; i++) {
            acArr.push(this[i])
        }
        return new ArrayCollection<T>(acArr.slice(start, end));
    }

    constructor(source?: T[]) {
        super();
        _fromArray(this, source);
    }

    // True while an ajax request issued by this object is in flight.
    protected _busy: boolean = false;

    get busy(): boolean {
        return this._busy;
    }

    /**
     * Invokes all callbacks registered via `onAjaxStart`.
     */
    protected ajaxStartHandler(): void {
        this._busy = true;
        this.componentDataHelper.invokeAjaxStartCallback();
    }

    /**
     * Invokes all callbacks registered via `onAjaxSuccess`.
     */
    protected ajaxSuccessHandler(data: T[]): void {
        console.log('get data from paging server success!!');
        if (data instanceof Array) {
            this.fromArray(data);
        } else {
            console.error('invalid data type: ' + typeof (data) + ', need Array.');
            this.fromArray([]);
        }
        this._busy = false;
        this.componentDataHelper.invokeAjaxSuccessCallback(data);
    }

    /**
     * Invokes all callbacks registered via `onAjaxError`.
     */
    protected ajaxErrorHandler(error: Response): void {
        if (!error) {
            // Called with null when a request was rejected because we are still busy.
            const reason = 'the array collection is busy now!';
            console.error('get data from paging server error!! detail: ' + reason);
            error = new Response(reason, {status: 409, statusText: reason});
        } else {
            console.error('get data from paging server error!! detail: ' + error['message']);
            this.fromArray([]);
            this._busy = false;
        }
        this.componentDataHelper.invokeAjaxErrorCallback(error);
    }

    /**
     * Invokes all callbacks registered via `onAjaxComplete`.
     */
    protected ajaxCompleteHandler(): void {
        console.log('get data from paging server complete!!');
        this._busy = false;
        this.componentDataHelper.invokeAjaxCompleteCallback();
    }

    /**
     * Safely invokes the `dataReviser` function; falls back to the original
     * data when the reviser returns undefined or throws.
     *
     * @param originData
     */
    protected reviseData(originData: any): any {
        if (!this.dataReviser) {
            return originData;
        }
        try {
            const revisedData = this.dataReviser(originData);
            if (revisedData == undefined) {
                console.error('a dataReviser function should NOT return undefined,' +
                    'use null is you do not have any valid value!' +
                    'Jigsaw is ignoring this result and using the original value.');
                return originData;
            } else {
                return revisedData;
            }
        } catch (e) {
            console.error('revise data error: ' + e);
            console.error(e.stack);
            return originData;
        }
    }

    public fromAjax(url?: string): void;
    public fromAjax(options?: HttpClientOptions): void;
    /**
     * @internal
     */
    public fromAjax(optionsOrUrl?: HttpClientOptions | string): void {
        if (!this.http) {
            console.error('set a valid Http instance to ArrayCollection.http before invoking ArrayCollection.fromAjax()!');
            return;
        }
        if (this._busy) {
            this.ajaxErrorHandler(null);
            return;
        }
        this.ajaxStartHandler();
        const op = HttpClientOptions.prepare(optionsOrUrl);
        this.http.request(op.method, op.url, op)
            .pipe(map(res => this.reviseData(res) as T[]))
            .subscribe(
                data => this.ajaxSuccessHandler(data),
                error => this.ajaxErrorHandler(error),
                () => this.ajaxCompleteHandler()
            );
    }

    /**
     * Shallow-copies all elements of a plain array `source` into this object.
     *
     * ```
     * const ac = new ArrayCollection<number>();
     * ac.fromArray([1, 2, 3]);
     * console.log(ac); // [1, 2, 3]
     * ```
     *
     * @param source the source data
     * @returns a reference to this data object
     */
    public fromArray(source: T[]): ArrayCollection<T> {
        if (_fromArray(this, source)) {
            this.refresh();
            this.componentDataHelper.invokeChangeCallback();
        }
        return this;
    }

    protected componentDataHelper: ComponentDataHelper = new ComponentDataHelper();

    public refresh(): void {
        this.componentDataHelper.invokeRefreshCallback();
    }

    public onRefresh(callback: (thisData: ArrayCollection<T>) => void, context?: any): CallbackRemoval {
        return this.componentDataHelper.getRefreshRemoval({fn: callback, context: context});
    }

    public onChange(callback: (thisData: ArrayCollection<T>) => void, context?: any): CallbackRemoval {
        return this.componentDataHelper.getChangeRemoval({fn: callback, context: context});
    }

    public onAjaxStart(callback: () => void, context?: any): CallbackRemoval {
        return this.componentDataHelper.getAjaxStartRemoval({fn: callback, context: context});
    }

    public onAjaxSuccess(callback: (data: any) => void, context?: any): CallbackRemoval {
        return this.componentDataHelper.getAjaxSuccessRemoval({fn: callback, context: context});
    }

    public onAjaxError(callback: (error: Response) => void, context?: any): CallbackRemoval {
        return this.componentDataHelper.getAjaxErrorRemoval({fn: callback, context: context});
    }

    public onAjaxComplete(callback: () => void, context?: any): CallbackRemoval {
        return this.componentDataHelper.getAjaxCompleteRemoval({fn: callback, context: context});
    }

    public destroy(): void {
        console.log('destroying ArrayCollection....');
        this.splice(0, this.length);
        this.componentDataHelper && this.componentDataHelper.clearCallbacks();
        this.componentDataHelper = null;
        this.dataReviser = null;
        this._emitter && this._emitter.unsubscribe();
        this._emitter = null;
    }

    private _emitter = new EventEmitter<any>();

    public emit(value?: any): void {
        this._emitter.emit(value);
    }

    public subscribe(callback?: (value: any) => void): Subscription {
        return this._emitter.subscribe(callback);
    }

    public unsubscribe() {
        this._emitter.unsubscribe();
    }
}

/**
 * The array object most commonly used in practice: it supports server-side
 * paging, server-side sorting and server-side filtering.
 * Note: it must be backed by a unified REST service that provides those
 * capabilities — see `PagingInfo.pagingServerUrl` for more information.
 *
 * For practical usage see the data-encapsulation/array-ssp demo.
 *
 * See `IComponentData` for a detailed introduction to the Jigsaw data system.
 */
export class PageableArray extends ArrayCollection<any> implements IServerSidePageable, ISortable, IFilterable {
    public pagingInfo: PagingInfo;
    public filterInfo: DataFilterInfo;
    public sortInfo: DataSortInfo;
    public pagingServerUrl: string;
    /**
     * See the documentation of `PageableTableData.sourceRequestOptions`.
     */
    public sourceRequestOptions: HttpClientOptions;

    private _filterSubject = new Subject<DataFilterInfo>();
    private _sortSubject = new Subject<DataSortInfo>();
    // Set when the data source changes so change callbacks fire after the next load.
    private _dataSourceChanged: boolean = false;

    constructor(public http: HttpClient, requestOptionsOrUrl: HttpClientOptions | string) {
        super();
        if (!http) {
            throw new Error('invalid http!');
        }
        this.pagingInfo = new PagingInfo();
        // Re-query whenever the paging info changes.
        this.pagingInfo.subscribe(() => {
            this._ajax();
        });
        this.sourceRequestOptions = typeof requestOptionsOrUrl === 'string' ? {url: requestOptionsOrUrl} : requestOptionsOrUrl;
        this._initSubjects();
    }

    // Debounces filter/sort requests so rapid UI changes produce a single query.
    private _initSubjects(): void {
        this._filterSubject.pipe(debounceTime(300)).subscribe(filter => {
            this.filterInfo = filter;
            this._ajax();
        });
        this._sortSubject.pipe(debounceTime(300)).subscribe(sort => {
            this.sortInfo = sort;
            this._ajax();
        });
    }

    public updateDataSource(options: HttpClientOptions): void
    public updateDataSource(url: string): void;
    /**
     * @internal
     */
    public updateDataSource(optionsOrUrl: HttpClientOptions | string): void {
        this.sourceRequestOptions = typeof optionsOrUrl === 'string' ? {url: optionsOrUrl} : optionsOrUrl;
        this.pagingInfo.currentPage = 1;
        this.pagingInfo.totalRecord = 0;
        this.filterInfo = null;
        this.sortInfo = null;
        this._dataSourceChanged = true;
    }

    public fromAjax(url?: string): void;
    public fromAjax(options?: HttpClientOptions): void;
    /**
     * @internal
     */
    public fromAjax(optionsOrUrl?: HttpClientOptions | string): void {
        if (optionsOrUrl instanceof HttpClientOptions) {
            this.updateDataSource(<HttpClientOptions>optionsOrUrl);
        } else if (!!optionsOrUrl) {
            this.updateDataSource(<string>optionsOrUrl);
        } else {
            this._dataSourceChanged = true;
        }
        this._ajax();
    }

    // Issues the actual request to the paging server, folding paging/filter/sort
    // info into the query parameters (GET) or the body (other methods).
    protected _ajax(): void {
        if (this._busy) {
            this.ajaxErrorHandler(null);
            return;
        }
        const options = HttpClientOptions.prepare(this.sourceRequestOptions);
        if (!options) {
            console.error('invalid source request options, use updateDataSource() to reset the option.');
            return;
        }
        this._busy = true;
        this.ajaxStartHandler();
        const method = this.sourceRequestOptions.method ? this.sourceRequestOptions.method.toLowerCase() : 'get';
        const paramProperty = method == 'get' ? 'params' : 'body';
        let originParams = this.sourceRequestOptions[paramProperty];
        delete options.params;
        delete options.body;
        options[paramProperty] = {
            service: options.url, paging: this.pagingInfo.valueOf()
        };
        if (CommonUtils.isDefined(originParams)) {
            options[paramProperty].peerParam = originParams;
        }
        if (CommonUtils.isDefined(this.filterInfo)) {
            options[paramProperty].filter = this.filterInfo;
        }
        if (CommonUtils.isDefined(this.sortInfo)) {
            options[paramProperty].sort = this.sortInfo;
        }
        if (paramProperty == 'params') {
            options.params = PreparedHttpClientOptions.prepareParams(options.params)
        }
        const pagingService = this.pagingServerUrl || PagingInfo.pagingServerUrl;
        this.http.request(options.method, pagingService, options)
            .pipe(
                map(res => this.reviseData(res)),
                map(data => {
                    this._updatePagingInfo(data);
                    const tableData: TableData = new TableData();
                    if (TableData.isTableData(data)) {
                        tableData.fromObject(data);
                    } else {
                        console.error('invalid data format, need a TableData object.');
                    }
                    return tableData;
                }))
            .subscribe(
                tableData => this.ajaxSuccessHandler(tableData),
                error => this.ajaxErrorHandler(error),
                () => this.ajaxCompleteHandler()
            );
    }

    // Pulls the total record count out of the server reply, if present.
    protected _updatePagingInfo(data: any): void {
        if (!data.hasOwnProperty('paging')) {
            return;
        }
        const paging = data.paging;
        this.pagingInfo.totalRecord = paging.hasOwnProperty('totalRecord') ? paging.totalRecord : this.pagingInfo.totalRecord;
    }

    protected ajaxSuccessHandler(data: any): void {
        console.log('get data from paging server success!!');
        if (_fromArray(this, data.toArray())) {
            this.refresh();
            if (this._dataSourceChanged) {
                this.componentDataHelper.invokeChangeCallback();
            }
        }
        this._dataSourceChanged = false;
        this.componentDataHelper.invokeAjaxSuccessCallback(data);
    }

    public filter(callback: (value: any, index: number, array: any[]) => any, thisArg?: any): PageableArray;
    public filter(term: string, fields?: string[] | number[]): PageableArray;
    public filter(term: DataFilterInfo): PageableArray;
    /**
     * @internal
     */
    public filter(term: string | DataFilterInfo | Function, fields?: string[] | number[]): PageableArray {
        let pfi: DataFilterInfo;
        if (term instanceof DataFilterInfo) {
            pfi = term;
        } else if (term instanceof Function) {
            // here `fields` plays the role of `thisArg`, i.e. the context the function runs in
            pfi = new DataFilterInfo(undefined, undefined, serializeFilterFunction(term), fields);
        } else {
            pfi = new DataFilterInfo(term, fields);
        }
        this._filterSubject.next(pfi);
        return this;
    }

    public sort(compareFn?: (a: any, b: any) => number): PageableArray;
    public sort(as: SortAs, order: SortOrder, field: string | number): PageableArray;
    public sort(sort: DataSortInfo): PageableArray;
    /**
     * @internal
     */
    public sort(as, order?: SortOrder, field?: string | number): PageableArray {
        if (as instanceof Function) {
            throw 'compare function is NOT accepted by this class!';
        }
        const psi = as instanceof DataSortInfo ? as : new DataSortInfo(as, order, field);
        this._sortSubject.next(psi);
        return this;
    }

    public changePage(currentPage: number, pageSize?: number): void;
    public changePage(info: PagingInfo): void;
    /**
     * @internal
     */
    public changePage(currentPage, pageSize?: number): void {
        pageSize = isNaN(+pageSize) ? this.pagingInfo.pageSize : pageSize;
        const pi: PagingInfo = currentPage instanceof PagingInfo ? 
currentPage : new PagingInfo(currentPage, +pageSize);
        let needRefresh: boolean = false;
        if (pi.currentPage >= 1 && pi.currentPage <= this.pagingInfo.totalPage) {
            this.pagingInfo.currentPage = pi.currentPage;
            needRefresh = true;
        } else {
            console.error(`invalid currentPage[${pi.currentPage}], it should be between in [1, ${this.pagingInfo.totalPage}]`);
        }
        if (pi.pageSize > 0) {
            this.pagingInfo.pageSize = pi.pageSize;
            needRefresh = true;
        } else {
            console.error(`invalid pageSize[${pi.pageSize}], it should be greater than 0`);
        }
        if (needRefresh) {
            this._ajax();
        }
    }

    // Jump to the first page.
    public firstPage(): void {
        this.changePage(1);
    }

    // Jump to the previous page; changePage() validates the bounds.
    public previousPage(): void {
        this.changePage(this.pagingInfo.currentPage - 1);
    }

    // Jump to the next page; changePage() validates the bounds.
    public nextPage(): void {
        this.changePage(this.pagingInfo.currentPage + 1);
    }

    // Jump to the last page.
    public lastPage(): void {
        // BUG FIX: previously called changePage(this.pagingInfo.pageSize), which
        // navigated to the page whose NUMBER equals the page size rather than the
        // last page. Use totalPage, consistent with LocalPageableArray.lastPage().
        this.changePage(this.pagingInfo.totalPage);
    }

    // Tear down subscriptions and drop references so the instance can be GC'd.
    public destroy(): void {
        super.destroy();
        this.http = null;
        this.sourceRequestOptions = null;
        this.pagingInfo && this.pagingInfo.unsubscribe();
        this.pagingInfo = null;
        this.filterInfo = null;
        this.sortInfo = null;
        this._filterSubject && this._filterSubject.unsubscribe();
        this._filterSubject = null;
        this._sortSubject && this._sortSubject.unsubscribe();
        this._sortSubject = null;
    }
}

/**
 * Use this data object when you do NOT have a unified server-side
 * paging/filtering/sorting service; the data-providing service you request must
 * handle paging, filtering and sorting by itself.
 *
 * Jigsaw has not implemented this feature yet. If you need it, please
 * [file an issue](https://github.com/rdkmaster/jigsaw/issues/new).
 *
 * For a detailed introduction to Jigsaw's data system, refer to the
 * documentation of `IComponentData`.
 */
export class DirectPageableArray extends PageableArray {
    protected _ajax(): void {
        // Reject re-entrant requests while a previous one is still in flight.
        if (this._busy) {
            this.ajaxErrorHandler(null);
            return;
        }
        const options = HttpClientOptions.prepare(this.sourceRequestOptions);
        if (!options) {
            console.error('invalid source request options, use updateDataSource() to reset the option.');
            return;
        }
        this._busy = true;
        this.ajaxStartHandler();
        // Unlike PageableArray._ajax, the request goes straight to options.url —
        // the remote service itself is expected to page/filter/sort.
        this.http.request(options.method, options.url, options)
            .pipe(
                map(res => this.reviseData(res)),
                map(data => {
                    this._updatePagingInfo(data);
                    const tableData: TableData = new TableData();
                    if
(TableData.isTableData(data)) { tableData.fromObject(data); } else { console.error('invalid data format, need a TableData object.'); } return tableData; })) .subscribe( tableData => this.ajaxSuccessHandler(tableData), error => this.ajaxErrorHandler(error), () => this.ajaxCompleteHandler() ); } } /** * 在本地分页、排序、过滤的数组。 * * 关于Jigsaw数据体系详细介绍,请参考`IComponentData`的说明 */ export class LocalPageableArray<T> extends ArrayCollection<T> implements IPageable { public pagingInfo: PagingInfo; private _bakData: T[] = []; private _filterSubject = new Subject<DataFilterInfo>(); private _sortSubject = new Subject<DataSortInfo>(); private _filteredData: T[]; public get filteredData(): T[] { return this._filteredData; } public set filteredData(value: T[]) { this._filteredData = value; if (this._filteredData instanceof Array || (this._filteredData as any) instanceof ArrayCollection) { this.pagingInfo.totalRecord = this._filteredData.length; } } private _dataSourceChanged: boolean = false; constructor(source?: T[]) { super(source); this._bakData = source; this.pagingInfo = new PagingInfo(); this.pagingInfo.subscribe(() => { if (!this.filteredData) { return; } this._setDataByPageInfo(); this.refresh(); if (this._dataSourceChanged) { this.componentDataHelper.invokeChangeCallback(); this._dataSourceChanged = false; } }); this._initSubjects(); } public fromArray(source: T[]): ArrayCollection<T> { this._bakData = source; this.filteredData = source; this.firstPage(); this._dataSourceChanged = true; return this; } /** * @internal * @param item * @param keyword * @param fields * */ public static filterItemByKeyword(item: any, keyword: string, fields: any[]): boolean { if (typeof item == 'string') { return item.toLowerCase().includes(keyword.toLowerCase()) } else if (fields) { return fields.find(field => { const value: string = !item || item[field] === undefined || item[field] === null ? 
'' : item[field].toString();
            return value.toLowerCase().includes(keyword.toLowerCase())
        // BUG FIX: Array.prototype.find returns the matching ELEMENT (a field name
        // or index), not a boolean. A matching field named 0 or '' is falsy, so
        // returning it directly made this predicate wrongly report "no match".
        // Compare against undefined to produce the declared boolean.
        }) !== undefined
    } else {
        return false
    }
}

/**
 * Wire up the debounced filter/sort pipelines: filtering rebuilds `filteredData`
 * from the backup data, sorting re-orders `filteredData` in place; both jump
 * back to the first page afterwards.
 */
private _initSubjects(): void {
    this._filterSubject.pipe(debounceTime(300)).subscribe(filter => {
        this.filteredData = this._bakData.filter(
            item => LocalPageableArray.filterItemByKeyword(item, filter.key, filter.field));
        this.firstPage();
    });
    this._sortSubject.pipe(debounceTime(300)).subscribe((sortInfo: DataSortInfo) => {
        const orderFlag = sortInfo.order == SortOrder.asc ? 1 : -1;
        if (sortInfo.as == SortAs.number) {
            this.filteredData.sort((a, b) => orderFlag * (Number(sortInfo.field ? a[sortInfo.field] : a) - Number(sortInfo.field ? b[sortInfo.field] : b)));
        } else {
            this.filteredData.sort((a, b) => orderFlag * String(sortInfo.field ? a[sortInfo.field] : a).localeCompare(String(sortInfo.field ? b[sortInfo.field] : b)));
        }
        this.firstPage();
    })
}

public filter(callback: (value: any, index: number, array: any[]) => any, context?: any): LocalPageableArray<T>;
public filter(term: string, fields?: string[] | number[]): LocalPageableArray<T>;
public filter(term: DataFilterInfo): LocalPageableArray<T>;
/**
 * @internal
 */
public filter(term, fields?: string[] | number[]): LocalPageableArray<T> {
    if (!this._bakData) {
        return this;
    }
    if (term instanceof Function) {
        // For the callback overload, `fields` acts as the thisArg of the callback
        // (see the `.bind(fields)` call), mirroring PageableArray.filter.
        this.filteredData = this._bakData.filter(term.bind(fields));
        this.firstPage();
    } else {
        const pfi = term instanceof DataFilterInfo ? term : new DataFilterInfo(term, fields);
        this._filterSubject.next(pfi);
    }
    return this;
}

public sort(compareFn?: (a: any, b: any) => number): LocalPageableArray<T>;
public sort(as: SortAs, order: SortOrder, field?: string | number): LocalPageableArray<T>;
public sort(sort: DataSortInfo): LocalPageableArray<T>;
/**
 * @internal
 */
public sort(as, order?: SortOrder, field?: string | number): LocalPageableArray<T> {
    if (!this.filteredData) {
        return this;
    }
    if (as instanceof Function) {
        this.filteredData.sort(as);
        this.firstPage();
        // BUG FIX: return here. Previously execution fell through, wrapped the
        // compare function in a DataSortInfo and pushed it into _sortSubject,
        // whose debounced subscriber would later re-sort the data and clobber
        // the result of the compare-function sort above.
        return this;
    }
    const psi = as instanceof DataSortInfo ?
as : new DataSortInfo(as, order, field); this._sortSubject.next(psi); return this; } public changePage(currentPage: number, pageSize?: number): void; public changePage(info: PagingInfo): void; /** * @internal */ public changePage(currentPage, pageSize?: number): void { if (!this.filteredData) { return; } if (!isNaN(pageSize) && +pageSize > 0) { this.pagingInfo.pageSize = pageSize; } let cp: number = 0; if (currentPage instanceof PagingInfo) { this.pagingInfo.pageSize = currentPage.pageSize; cp = currentPage.currentPage; } else if (!isNaN(+currentPage)) { cp = +currentPage; } if (cp >= 1 && cp <= this.pagingInfo.totalPage) { this.pagingInfo.currentPage = cp; } else { console.error(`invalid currentPage[${cp}], it should be between in [1, ${this.pagingInfo.totalPage}]`); } } private _setDataByPageInfo() { let source: T[]; if (this.pagingInfo.pageSize == Infinity) { source = this.filteredData; } else { const begin = (this.pagingInfo.currentPage - 1) * this.pagingInfo.pageSize; const end = this.pagingInfo.currentPage * this.pagingInfo.pageSize < this.pagingInfo.totalRecord ? this.pagingInfo.currentPage * this.pagingInfo.pageSize : this.pagingInfo.totalRecord; source = this.filteredData.slice(begin, end); } if (_fromArray(this, source)) { this.refresh(); } } protected ajaxErrorHandler(error: Response): void { if (!error) { super.ajaxErrorHandler(error); return; } console.error('get data from paging server error!! 
detail: ' + error['message']);
        // On error, replace the contents with an empty list so consumers do not
        // keep rendering stale rows.
        if (_fromArray(this, [])) {
            this.refresh();
            this.componentDataHelper.invokeChangeCallback();
        }
        this._busy = false;
        this.componentDataHelper.invokeAjaxErrorCallback(error);
    }

    // Jump to the first page.
    public firstPage(): void {
        this.changePage(1);
    }

    // Jump to the previous page; changePage() validates the bounds.
    public previousPage(): void {
        this.changePage(this.pagingInfo.currentPage - 1);
    }

    // Jump to the next page; changePage() validates the bounds.
    public nextPage(): void {
        this.changePage(this.pagingInfo.currentPage + 1);
    }

    // Jump to the last page.
    public lastPage(): void {
        this.changePage(this.pagingInfo.totalPage);
    }

    // Tear down subscriptions and drop references so the instance can be GC'd.
    public destroy() {
        super.destroy();
        this._filterSubject && this._filterSubject.unsubscribe();
        this._sortSubject && this._sortSubject.unsubscribe();
        this.pagingInfo && this.pagingInfo.unsubscribe();
        this._bakData = null;
        this.filteredData = null;
        this.pagingInfo = null;
        this._filterSubject = null;
        this._sortSubject = null;
    }
}
the_stack
import React, { FunctionComponent, useEffect, useRef } from "react"; import { Text, View } from "react-native"; import { BIP44HDPath, ExportKeyRingData, KeyRingStatus, } from "@keplr-wallet/background"; import { DrawerActions, NavigationContainer, NavigationContainerRef, useNavigation, } from "@react-navigation/native"; import { useStore } from "./stores"; import { observer } from "mobx-react-lite"; import { HomeScreen } from "./screens/home"; import { createBottomTabNavigator } from "@react-navigation/bottom-tabs"; import { createStackNavigator, TransitionPresets, } from "@react-navigation/stack"; import { SendScreen } from "./screens/send"; import { GovernanceDetailsScreen, GovernanceScreen, } from "./screens/governance"; import { createDrawerNavigator, useIsDrawerOpen, } from "@react-navigation/drawer"; import { DrawerContent } from "./components/drawer"; import { useStyle } from "./styles"; import { BorderlessButton } from "react-native-gesture-handler"; import { createSmartNavigatorProvider, SmartNavigator } from "./hooks"; import { SettingScreen } from "./screens/setting"; import { SettingSelectAccountScreen } from "./screens/setting/screens/select-account"; import { ViewPrivateDataScreen } from "./screens/setting/screens/view-private-data"; import { SettingChainListScreen } from "./screens/setting/screens/chain-list"; import { WebScreen } from "./screens/web"; import { RegisterIntroScreen } from "./screens/register"; import { NewMnemonicConfig, NewMnemonicScreen, RecoverMnemonicScreen, VerifyMnemonicScreen, } from "./screens/register/mnemonic"; import { RegisterEndScreen } from "./screens/register/end"; import { RegisterNewUserScreen } from "./screens/register/new-user"; import { RegisterNotNewUserScreen } from "./screens/register/not-new-user"; import { AddressBookConfig, AddressBookData, IMemoConfig, IRecipientConfig, RegisterConfig, } from "@keplr-wallet/hooks"; import { DelegateScreen, StakingDashboardScreen, ValidatorDetailsScreen, ValidatorListScreen, 
} from "./screens/stake"; import { OpenDrawerIcon, ScanIcon } from "./components/icon"; import { AddAddressBookScreen, AddressBookScreen, } from "./screens/setting/screens/address-book"; import { NewLedgerScreen } from "./screens/register/ledger"; import { PageScrollPositionProvider } from "./providers/page-scroll-position"; import { BlurredHeaderScreenOptionsPreset, getPlainHeaderScreenOptionsPresetWithBackgroundColor, HeaderLeftButton, HeaderRightButton, PlainHeaderScreenOptionsPreset, } from "./components/header"; import { TokensScreen } from "./screens/tokens"; import { UndelegateScreen } from "./screens/stake/undelegate"; import { RedelegateScreen } from "./screens/stake/redelegate"; import { CameraScreen } from "./screens/camera"; import { FocusedScreenProvider, useFocusedScreen, } from "./providers/focused-screen"; import Svg, { Path, Rect } from "react-native-svg"; import { TxFailedResultScreen, TxPendingResultScreen, TxSuccessResultScreen, } from "./screens/tx-result"; import { TorusSignInScreen } from "./screens/register/torus"; import { HeaderAddIcon, HeaderWalletConnectIcon, } from "./components/header/icon"; import { BlurredBottomTabBar } from "./components/bottom-tabbar"; import { UnlockScreen } from "./screens/unlock"; import { KeplrVersionScreen } from "./screens/setting/screens/version"; import { ManageWalletConnectScreen } from "./screens/manage-wallet-connect"; import { ImportFromExtensionIntroScreen, ImportFromExtensionScreen, ImportFromExtensionSetPasswordScreen, } from "./screens/register/import-from-extension"; import { OsmosisWebpageScreen, StargazeWebpageScreen, } from "./screens/web/webpages"; import { WebpageScreenScreenOptionsPreset } from "./screens/web/components/webpage-screen"; import Bugsnag from "@bugsnag/react-native"; const { SmartNavigatorProvider, useSmartNavigation, } = createSmartNavigatorProvider( new SmartNavigator({ "Register.Intro": { upperScreenName: "Register", }, "Register.NewUser": { upperScreenName: "Register", }, 
"Register.NotNewUser": { upperScreenName: "Register", }, "Register.NewMnemonic": { upperScreenName: "Register", }, "Register.VerifyMnemonic": { upperScreenName: "Register", }, "Register.RecoverMnemonic": { upperScreenName: "Register", }, "Register.NewLedger": { upperScreenName: "Register", }, "Register.TorusSignIn": { upperScreenName: "Register", }, "Register.ImportFromExtension.Intro": { upperScreenName: "Register", }, "Register.ImportFromExtension": { upperScreenName: "Register", }, "Register.ImportFromExtension.SetPassword": { upperScreenName: "Register", }, "Register.End": { upperScreenName: "Register", }, Home: { upperScreenName: "Main", }, Send: { upperScreenName: "Others", }, Tokens: { upperScreenName: "Others", }, Camera: { upperScreenName: "Others", }, ManageWalletConnect: { upperScreenName: "Others", }, "Staking.Dashboard": { upperScreenName: "Others", }, "Validator.Details": { upperScreenName: "Others", }, "Validator.List": { upperScreenName: "Others", }, Delegate: { upperScreenName: "Others", }, Undelegate: { upperScreenName: "Others", }, Redelegate: { upperScreenName: "Others", }, Governance: { upperScreenName: "Others", }, "Governance Details": { upperScreenName: "Others", }, Setting: { upperScreenName: "Settings", }, SettingSelectAccount: { upperScreenName: "Settings", }, "Setting.ViewPrivateData": { upperScreenName: "Settings", }, "Setting.Version": { upperScreenName: "Settings", }, "Setting.ChainList": { upperScreenName: "ChainList", }, AddressBook: { upperScreenName: "AddressBooks", }, AddAddressBook: { upperScreenName: "AddressBooks", }, Result: { upperScreenName: "Others", }, TxPendingResult: { upperScreenName: "Others", }, TxSuccessResult: { upperScreenName: "Others", }, TxFailedResult: { upperScreenName: "Others", }, "Web.Intro": { upperScreenName: "Web", }, "Web.Osmosis": { upperScreenName: "Web", }, "Web.Stargaze": { upperScreenName: "Web", }, }).withParams<{ "Register.NewMnemonic": { registerConfig: RegisterConfig; }; 
"Register.VerifyMnemonic": { registerConfig: RegisterConfig; newMnemonicConfig: NewMnemonicConfig; bip44HDPath: BIP44HDPath; }; "Register.RecoverMnemonic": { registerConfig: RegisterConfig; }; "Register.NewLedger": { registerConfig: RegisterConfig; }; "Register.TorusSignIn": { registerConfig: RegisterConfig; type: "google" | "apple"; }; "Register.ImportFromExtension.Intro": { registerConfig: RegisterConfig; }; "Register.ImportFromExtension": { registerConfig: RegisterConfig; }; "Register.ImportFromExtension.SetPassword": { registerConfig: RegisterConfig; exportKeyRingDatas: ExportKeyRingData[]; addressBooks: { [chainId: string]: AddressBookData[] | undefined }; }; "Register.End": { password?: string; }; Send: { chainId?: string; currency?: string; recipient?: string; }; "Validator.Details": { validatorAddress: string; }; "Validator.List": { validatorSelector?: (validatorAddress: string) => void; }; Delegate: { validatorAddress: string; }; Undelegate: { validatorAddress: string; }; Redelegate: { validatorAddress: string; }; "Governance Details": { proposalId: string; }; "Setting.ViewPrivateData": { privateData: string; privateDataType: string; }; AddressBook: { recipientConfig?: IRecipientConfig; memoConfig?: IMemoConfig; }; AddAddressBook: { chainId: string; addressBookConfig: AddressBookConfig; }; TxPendingResult: { chainId?: string; txHash: string; }; TxSuccessResult: { chainId?: string; txHash: string; }; TxFailedResult: { chainId?: string; txHash: string; }; }>() ); export { useSmartNavigation }; const Stack = createStackNavigator(); const Drawer = createDrawerNavigator(); const Tab = createBottomTabNavigator(); const HomeScreenHeaderLeft: FunctionComponent = observer(() => { const { chainStore } = useStore(); const style = useStyle(); const navigation = useNavigation(); return ( <HeaderLeftButton onPress={() => { navigation.dispatch(DrawerActions.toggleDrawer()); }} > <View style={style.flatten(["flex-row", "items-center"])}> <OpenDrawerIcon size={28} 
color={style.get("color-primary").color} /> <Text style={style.flatten([ "h4", "color-text-black-high", "margin-left-4", ])} > {chainStore.current.chainName} </Text> </View> </HeaderLeftButton> ); }); const HomeScreenHeaderRight: FunctionComponent = observer(() => { const { walletConnectStore } = useStore(); const style = useStyle(); const navigation = useNavigation(); return ( <React.Fragment> <HeaderRightButton onPress={() => { navigation.navigate("Others", { screen: "Camera", }); }} > <ScanIcon size={28} color={style.get("color-primary").color} /> </HeaderRightButton> {walletConnectStore.sessions.length > 0 ? ( <HeaderRightButton style={{ right: 42, }} onPress={() => { navigation.navigate("Others", { screen: "ManageWalletConnect", }); }} > <HeaderWalletConnectIcon /> </HeaderRightButton> ) : null} </React.Fragment> ); }); export const MainNavigation: FunctionComponent = () => { return ( <Stack.Navigator screenOptions={{ ...BlurredHeaderScreenOptionsPreset, headerTitle: "", }} initialRouteName="Home" headerMode="screen" > <Stack.Screen options={{ headerLeft: () => <HomeScreenHeaderLeft />, headerRight: () => <HomeScreenHeaderRight />, }} name="Home" component={HomeScreen} /> </Stack.Navigator> ); }; export const RegisterNavigation: FunctionComponent = () => { const style = useStyle(); return ( <Stack.Navigator screenOptions={{ ...PlainHeaderScreenOptionsPreset, headerTitleStyle: style.flatten(["h5", "color-text-black-high"]), }} initialRouteName="Register.Intro" headerMode="screen" > <Stack.Screen options={{ title: "", }} name="Register.Intro" component={RegisterIntroScreen} /> <Stack.Screen options={{ title: "Create a New Wallet", }} name="Register.NewUser" component={RegisterNewUserScreen} /> <Stack.Screen options={{ title: "Import Existing Wallet", }} name="Register.NotNewUser" component={RegisterNotNewUserScreen} /> <Stack.Screen options={{ title: "Create New Mnemonic", }} name="Register.NewMnemonic" component={NewMnemonicScreen} /> <Stack.Screen options={{ 
title: "Verify Mnemonic", }} name="Register.VerifyMnemonic" component={VerifyMnemonicScreen} /> <Stack.Screen options={{ title: "Import Existing Wallet", }} name="Register.RecoverMnemonic" component={RecoverMnemonicScreen} /> <Stack.Screen options={{ title: "Import Hardware Wallet", }} name="Register.NewLedger" component={NewLedgerScreen} /> <Stack.Screen name="Register.TorusSignIn" component={TorusSignInScreen} /> <Stack.Screen options={{ // Only show the back button. title: "", }} name="Register.ImportFromExtension.Intro" component={ImportFromExtensionIntroScreen} /> <Stack.Screen options={{ headerShown: false, }} name="Register.ImportFromExtension" component={ImportFromExtensionScreen} /> <Stack.Screen options={{ title: "Import Extension", }} name="Register.ImportFromExtension.SetPassword" component={ImportFromExtensionSetPasswordScreen} /> <Stack.Screen options={{ headerShown: false, }} name="Register.End" component={RegisterEndScreen} /> </Stack.Navigator> ); }; export const OtherNavigation: FunctionComponent = () => { const style = useStyle(); return ( <Stack.Navigator screenOptions={{ ...BlurredHeaderScreenOptionsPreset, headerTitleStyle: style.flatten(["h5", "color-text-black-high"]), }} headerMode="screen" > <Stack.Screen options={{ title: "Send", }} name="Send" component={SendScreen} /> <Stack.Screen options={{ title: "Tokens", }} name="Tokens" component={TokensScreen} /> <Stack.Screen options={{ headerShown: false, }} name="Camera" component={CameraScreen} /> <Stack.Screen options={{ title: "WalletConnect", }} name="ManageWalletConnect" component={ManageWalletConnectScreen} /> <Stack.Screen options={{ title: "Validator List", }} name="Validator List" component={ValidatorListScreen} /> <Stack.Screen options={{ title: "Validator Details", }} name="Validator Details" component={ValidatorDetailsScreen} /> <Stack.Screen options={{ title: "Governance", }} name="Governance" component={GovernanceScreen} /> <Stack.Screen options={{ title: "Proposal", }} 
name="Governance Details" component={GovernanceDetailsScreen} /> <Stack.Screen options={{ title: "Staking Dashboard", }} name="Staking.Dashboard" component={StakingDashboardScreen} /> <Stack.Screen options={{ title: "Validator Details", }} name="Validator.Details" component={ValidatorDetailsScreen} /> <Stack.Screen options={{ title: "All Active Validators", }} name="Validator.List" component={ValidatorListScreen} /> <Stack.Screen options={{ title: "Stake", }} name="Delegate" component={DelegateScreen} /> <Stack.Screen options={{ title: "Unstake", }} name="Undelegate" component={UndelegateScreen} /> <Stack.Screen options={{ title: "Switch Validator", }} name="Redelegate" component={RedelegateScreen} /> <Stack.Screen options={{ gestureEnabled: false, headerShown: false, }} name="TxPendingResult" component={TxPendingResultScreen} /> <Stack.Screen options={{ gestureEnabled: false, headerShown: false, }} name="TxSuccessResult" component={TxSuccessResultScreen} /> <Stack.Screen options={{ gestureEnabled: false, headerShown: false, }} name="TxFailedResult" component={TxFailedResultScreen} /> </Stack.Navigator> ); }; export const SettingStackScreen: FunctionComponent = () => { const style = useStyle(); const navigation = useNavigation(); const { analyticsStore } = useStore(); return ( <Stack.Navigator screenOptions={{ ...PlainHeaderScreenOptionsPreset, headerTitleStyle: style.flatten(["h5", "color-text-black-high"]), }} headerMode="screen" > <Stack.Screen options={{ title: "Settings", ...getPlainHeaderScreenOptionsPresetWithBackgroundColor( style.get("color-setting-screen-background").color ), headerTitleStyle: style.flatten(["h3", "color-text-black-high"]), }} name="Setting" component={SettingScreen} /> <Stack.Screen name="SettingSelectAccount" options={{ title: "Select Account", headerRight: () => ( <HeaderRightButton onPress={() => { analyticsStore.logEvent("Add additional account started"); navigation.navigate("Register", { screen: "Register.Intro", }); }} > 
<HeaderAddIcon /> </HeaderRightButton> ), ...BlurredHeaderScreenOptionsPreset, }} component={SettingSelectAccountScreen} /> <Stack.Screen name="Setting.ViewPrivateData" component={ViewPrivateDataScreen} /> <Stack.Screen options={{ title: "Version", }} name="Setting.Version" component={KeplrVersionScreen} /> </Stack.Navigator> ); }; export const AddressBookStackScreen: FunctionComponent = () => { const style = useStyle(); return ( <Stack.Navigator screenOptions={{ ...BlurredHeaderScreenOptionsPreset, headerTitleStyle: style.flatten(["h5", "color-text-black-high"]), }} headerMode="screen" > <Stack.Screen options={{ title: "Address Book", }} name="AddressBook" component={AddressBookScreen} /> <Stack.Screen options={{ title: "New Address Book", }} name="AddAddressBook" component={AddAddressBookScreen} /> </Stack.Navigator> ); }; export const ChainListStackScreen: FunctionComponent = () => { const style = useStyle(); return ( <Stack.Navigator screenOptions={{ ...BlurredHeaderScreenOptionsPreset, headerTitleStyle: style.flatten(["h5", "color-text-black-high"]), }} headerMode="screen" > <Stack.Screen options={{ title: "Chain List", }} name="Setting.ChainList" component={SettingChainListScreen} /> </Stack.Navigator> ); }; export const WebNavigation: FunctionComponent = () => { return ( <Stack.Navigator initialRouteName="Web.Intro" screenOptions={{ ...WebpageScreenScreenOptionsPreset, }} headerMode="screen" > <Stack.Screen options={{ headerShown: false }} name="Web.Intro" component={WebScreen} /> <Stack.Screen name="Web.Osmosis" component={OsmosisWebpageScreen} /> <Stack.Screen name="Web.Stargaze" component={StargazeWebpageScreen} /> </Stack.Navigator> ); }; export const MainTabNavigation: FunctionComponent = () => { const style = useStyle(); const navigation = useNavigation(); const focusedScreen = useFocusedScreen(); const isDrawerOpen = useIsDrawerOpen(); useEffect(() => { // When the focused screen is not "Home" screen and the drawer is open, // try to close the drawer 
forcely. if (focusedScreen.name !== "Home" && isDrawerOpen) { navigation.dispatch(DrawerActions.toggleDrawer()); } }, [focusedScreen.name, isDrawerOpen, navigation]); return ( <Tab.Navigator screenOptions={({ route }) => ({ tabBarIcon: ({ color }) => { const size = 24; switch (route.name) { case "Main": return ( <Svg width={size} height={size} fill="none" viewBox="0 0 24 24"> <Rect width="8" height="8" x="3" y="3" fill={color} rx="1.5" /> <Rect width="8" height="8" x="3" y="13" fill={color} rx="1.5" /> <Rect width="8" height="8" x="13" y="3" fill={color} rx="1.5" /> <Rect width="8" height="8" x="13" y="13" fill={color} rx="1.5" /> </Svg> ); case "Web": return ( <Svg width={size} height={size} fill="none" viewBox="0 0 24 24"> <Path fill={color} d="M12 2C8.741 2 5.849 3.577 4.021 6H4v.027A9.931 9.931 0 002 12c0 5.511 4.489 10 10 10s10-4.489 10-10S17.511 2 12 2zm3 2.584A7.98 7.98 0 0120 12c0 2.088-.8 3.978-2.102 5.4A1.993 1.993 0 0016 16a1 1 0 01-1-1v-2a1 1 0 00-1-1h-4a1 1 0 010-2 1 1 0 001-1V8a1 1 0 011-1h1a2 2 0 002-2v-.416zM4.207 10.207L9 15v1a2 2 0 002 2v1.932a7.979 7.979 0 01-6.793-9.725z" /> </Svg> ); case "Settings": return ( <Svg width={size} height={size} fill="none" viewBox="0 0 24 24"> <Path fill={color} d="M12 2c-.528 0-1.046.045-1.55.131l-.311 1.302c-.484 2.023-2.544 3.225-4.52 2.635l-1.084-.325A10.124 10.124 0 003 8.598l.805.781a3.663 3.663 0 010 5.242L3 15.402c.36 1.043.882 2.006 1.535 2.855l1.084-.325c1.976-.59 4.036.612 4.52 2.635l.31 1.302a9.187 9.187 0 003.101 0l.311-1.302c.484-2.023 2.544-3.225 4.52-2.635l1.084.325A10.124 10.124 0 0021 15.402l-.805-.781a3.663 3.663 0 010-5.242L21 8.598a10.113 10.113 0 00-1.535-2.855l-1.084.325c-1.976.59-4.036-.612-4.52-2.635l-.31-1.302A9.184 9.184 0 0012 2zm0 7.273c1.491 0 2.7 1.22 2.7 2.727 0 1.506-1.209 2.727-2.7 2.727S9.3 13.507 9.3 12c0-1.506 1.209-2.727 2.7-2.727z" /> </Svg> ); } }, tabBarButton: (props) => ( <View style={{ display: "flex", flex: 1, justifyContent: "center", alignItems: "center", }} > {/* 
eslint-disable-next-line @typescript-eslint/ban-ts-comment */} {/* @ts-ignore */} <BorderlessButton {...props} activeOpacity={1} style={{ height: "100%", aspectRatio: 1.9, maxWidth: "100%", }} /> </View> ), })} tabBarOptions={{ activeTintColor: style.get("color-primary").color, inactiveTintColor: style.get("color-text-black-very-very-low").color, style: { borderTopWidth: 0.5, borderTopColor: style.get("border-color-border-white").borderColor, shadowColor: style.get("color-transparent").color, elevation: 0, paddingLeft: 30, paddingRight: 30, }, showLabel: false, }} tabBar={(props) => ( <BlurredBottomTabBar {...props} enabledScreens={["Home"]} /> )} > <Tab.Screen name="Main" component={MainNavigation} /> <Tab.Screen name="Web" component={WebNavigation} /> <Tab.Screen name="Settings" component={SettingStackScreen} options={{ unmountOnBlur: true, }} /> </Tab.Navigator> ); }; export const MainTabNavigationWithDrawer: FunctionComponent = () => { const focused = useFocusedScreen(); return ( <Drawer.Navigator drawerType="slide" drawerContent={(props) => <DrawerContent {...props} />} screenOptions={{ // If the focused screen is not "Home" screen, // disable the gesture to open drawer. swipeEnabled: focused.name === "Home", gestureEnabled: focused.name === "Home", }} gestureHandlerProps={{ hitSlop: {}, }} > <Drawer.Screen name="MainTab" component={MainTabNavigation} /> </Drawer.Navigator> ); }; const BugsnagNavigationContainerPlugin = Bugsnag.getPlugin("reactNavigation"); // The returned BugsnagNavigationContainer has exactly the same usage // except now it tracks route information to send with your error reports const BugsnagNavigationContainer = (() => { if (BugsnagNavigationContainerPlugin) { console.log("BugsnagNavigationContainerPlugin found"); return BugsnagNavigationContainerPlugin.createNavigationContainer( NavigationContainer ); } else { console.log( "WARNING: BugsnagNavigationContainerPlugin is null. 
Fallback to use basic NavigationContainer" ); return NavigationContainer; } })(); export const AppNavigation: FunctionComponent = observer(() => { const { keyRingStore, analyticsStore } = useStore(); const navigationRef = useRef<NavigationContainerRef | null>(null); const routeNameRef = useRef<string | null>(null); return ( <PageScrollPositionProvider> <FocusedScreenProvider> <SmartNavigatorProvider> <BugsnagNavigationContainer ref={navigationRef} onReady={() => { const routerName = navigationRef.current?.getCurrentRoute(); if (routerName) { routeNameRef.current = routerName.name; analyticsStore.logPageView(routerName.name); } }} onStateChange={() => { const routerName = navigationRef.current?.getCurrentRoute(); if (routerName) { const previousRouteName = routeNameRef.current; const currentRouteName = routerName.name; if (previousRouteName !== currentRouteName) { analyticsStore.logPageView(currentRouteName); } routeNameRef.current = currentRouteName; } }} > <Stack.Navigator initialRouteName={ keyRingStore.status !== KeyRingStatus.UNLOCKED ? "Unlock" : "MainTabDrawer" } screenOptions={{ headerShown: false, ...TransitionPresets.SlideFromRightIOS, }} headerMode="screen" > <Stack.Screen name="Unlock" component={UnlockScreen} /> <Stack.Screen name="MainTabDrawer" component={MainTabNavigationWithDrawer} /> <Stack.Screen name="Register" component={RegisterNavigation} /> <Stack.Screen name="Others" component={OtherNavigation} /> <Stack.Screen name="AddressBooks" component={AddressBookStackScreen} /> <Stack.Screen name="ChainList" component={ChainListStackScreen} /> </Stack.Navigator> </BugsnagNavigationContainer> {/* <ModalsRenderer /> */} </SmartNavigatorProvider> </FocusedScreenProvider> </PageScrollPositionProvider> ); });
the_stack
import BaseStore from "#SRC/js/stores/BaseStore";

import * as ActionTypes from "../constants/ActionTypes";
import * as EventTypes from "../constants/EventTypes";
import ACLServiceAccountActions from "../actions/ACLServiceAccountActions";
import ServiceAccount from "../structs/ServiceAccount";
import ServiceAccountList from "../structs/ServiceAccountList";
import { getSDK } from "../../../SDK";
import { ServiceAccountFormData } from "../../../utils/ServiceAccountFormUtil";

/**
 * Flux-style store for ACL service accounts.
 *
 * Holds list/detail/fetch-progress state in the plugin SDK store (see the
 * getters below, which all read `getSDK().Store.getOwnState()`), translates
 * dispatched REQUEST_* actions into EventTypes events, and coordinates the
 * three-part detail fetch (account, groups, permissions) for a single
 * service account.
 */
class ACLServiceAccountsStore extends BaseStore {
  constructor() {
    super();

    // Register this store with the SDK so listeners can subscribe via the
    // symbolic event names below (change/error/fetch/create/delete/update).
    getSDK().addStoreConfig({
      store: this,
      storeID: "aclServiceAccounts",
      events: {
        // ServiceAccounts change
        change: EventTypes.ACL_SERVICE_ACCOUNTS_CHANGE,
        error: EventTypes.ACL_SERVICE_ACCOUNTS_ERROR,

        // Individual ServiceAccount events
        fetchSuccess:
          EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_SERVICE_ACCOUNT_CHANGE,
        fetchError:
          EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_SERVICE_ACCOUNT_ERROR,
        createSuccess: EventTypes.ACL_SERVICE_ACCOUNT_CREATE_SUCCESS,
        createError: EventTypes.ACL_SERVICE_ACCOUNT_CREATE_ERROR,
        deleteSuccess: EventTypes.ACL_SERVICE_ACCOUNT_DELETE_SUCCESS,
        deleteError: EventTypes.ACL_SERVICE_ACCOUNT_DELETE_ERROR,
        fetchedDetailsSuccess:
          EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_SUCCESS,
        fetchedDetailsError:
          EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_ERROR,
        groupsSuccess: EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_GROUPS_CHANGE,
        groupsError: EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_GROUPS_ERROR,
        permissionsSuccess:
          EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_PERMISSIONS_CHANGE,
        permissionsError:
          EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_PERMISSIONS_ERROR,
        updateSuccess: EventTypes.ACL_SERVICE_ACCOUNT_UPDATE_SUCCESS,
        updateError: EventTypes.ACL_SERVICE_ACCOUNT_UPDATE_ERROR,
      },
      unmountWhen: () => false,
    });

    // Map dispatched actions onto store processing / event emission.
    // Create/delete successes additionally trigger a full refetch so the
    // cached list stays consistent with the server.
    getSDK().onDispatch((action: any) => {
      switch (action.type) {
        // Get serviceAccount details
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_SUCCESS:
          this.processServiceAccount(action.data);
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_ERROR:
          this.processServiceAccountError(action.serviceAccountID);
          break;
        // Delete serviceAccount
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_DELETE_SUCCESS:
          this.fetchAll();
          this.emit(
            EventTypes.ACL_SERVICE_ACCOUNT_DELETE_SUCCESS,
            action.serviceAccountID
          );
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_DELETE_ERROR:
          this.emit(
            EventTypes.ACL_SERVICE_ACCOUNT_DELETE_ERROR,
            action.data,
            action.serviceAccountID,
            action.xhr
          );
          break;
        // Create serviceAccount
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_CREATE_SUCCESS:
          this.fetchAll();
          this.emit(
            EventTypes.ACL_SERVICE_ACCOUNT_CREATE_SUCCESS,
            action.serviceAccountID
          );
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_CREATE_ERROR:
          this.emit(
            EventTypes.ACL_SERVICE_ACCOUNT_CREATE_ERROR,
            action.data,
            action.serviceAccountID,
            action.xhr
          );
          break;
        // Get groups for serviceAccount
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_GROUPS_SUCCESS:
          this.processServiceAccountGroups(
            action.data,
            action.serviceAccountID
          );
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_GROUPS_ERROR:
          this.processServiceAccountGroupsError(action.serviceAccountID);
          break;
        // Get ACLs for serviceAccount
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_PERMISSIONS_SUCCESS:
          this.processServiceAccountPermissions(
            action.data,
            action.serviceAccountID
          );
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_PERMISSIONS_ERROR:
          this.processServiceAccountPermissionsError(action.serviceAccountID);
          break;
        // Update serviceAccount
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_UPDATE_SUCCESS:
          this.processServiceAccountUpdateSuccess(
            action.serviceAccountID,
            action.patch
          );
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNT_UPDATE_ERROR:
          this.emit(
            EventTypes.ACL_SERVICE_ACCOUNT_UPDATE_ERROR,
            action.data,
            action.serviceAccountID,
            action.xhr
          );
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNTS_SUCCESS:
          this.processServiceAccounts(action.data);
          break;
        case ActionTypes.REQUEST_ACL_SERVICE_ACCOUNTS_ERROR:
          this.emit(
            EventTypes.ACL_SERVICE_ACCOUNTS_ERROR,
            action.data,
            action.serviceAccountID
          );
          break;
      }
    });
  }

  // EventEmitter pass-throughs, re-declared for typing purposes only.
  public addListener(event: string, callback: () => void) {
    return super.addListener(event, callback);
  }

  public removeListener(event: string, callback: () => void) {
    return super.removeListener(event, callback);
  }

  public emit(event: string | symbol, ...args: any[]) {
    return super.emit(event, ...args);
  }

  // Create a new service account from form data (fires REQUEST_* actions).
  public add(formData: ServiceAccountFormData) {
    return ACLServiceAccountActions.add(formData);
  }

  // Direct delegations to the action creators.
  public delete = ACLServiceAccountActions.delete;
  public fetch = ACLServiceAccountActions.fetch;
  public fetchAll = ACLServiceAccountActions.fetchAll;
  public update = ACLServiceAccountActions.update;

  // Wrap the raw list state in the ServiceAccountList struct.
  public getServiceAccounts() {
    const items = getSDK().Store.getOwnState().serviceAccounts.serviceAccounts;

    return new ServiceAccountList({ items });
  }

  // Raw detail map keyed by service account ID.
  public getServiceAccountsDetail() {
    return getSDK().Store.getOwnState().serviceAccounts.serviceAccountsDetail;
  }

  // Per-ID fetch-progress flags (serviceAccount/groups/permissions booleans).
  public getServiceAccountsFetching() {
    return getSDK().Store.getOwnState().serviceAccounts.serviceAccountsFetching;
  }

  // Raw detail object for one account; `{}` when unknown.
  public getServiceAccountRaw(serviceAccountID: string) {
    return this.getServiceAccountsDetail()[serviceAccountID] || {};
  }

  // Struct-wrapped detail for one account, or null when nothing is cached.
  public getServiceAccount(serviceAccountID: string) {
    const serviceAccount = this.getServiceAccountRaw(serviceAccountID);

    if (!Object.keys(serviceAccount).length) {
      return null;
    }

    return new ServiceAccount(serviceAccount);
  }

  // Write one account's detail back into the SDK store.
  // NOTE(review): this mutates the map returned by getServiceAccountsDetail()
  // in place before dispatching it — presumably the reducer tolerates that;
  // verify against the reducer before restructuring.
  public setServiceAccount(serviceAccountID: string, serviceAccount: any) {
    const serviceAccounts = this.getServiceAccountsDetail();
    serviceAccounts[serviceAccountID] = serviceAccount;

    getSDK().dispatch({
      type: EventTypes.ACL_SERVICE_ACCOUNT_SET_SERVICE_ACCOUNT,
      serviceAccounts,
    });
  }

  /**
   * Fetch serviceAccount details and all associated data
   *
   * Marks all three detail parts as pending, then kicks off the three
   * requests in parallel; completion is tracked by
   * validateServiceAccountWithDetailsFetch().
   *
   * @param {Number} serviceAccountID
   */
  public fetchServiceAccountWithDetails(serviceAccountID: string) {
    const serviceAccountsFetching = this.getServiceAccountsFetching();

    serviceAccountsFetching[serviceAccountID] = {
      serviceAccount: false,
      groups: false,
      permissions: false,
    };

    getSDK().dispatch({
      type: EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCH_START,
      serviceAccountsFetching,
    });
    ACLServiceAccountActions.fetch(serviceAccountID);
    ACLServiceAccountActions.fetchGroups(serviceAccountID);
    ACLServiceAccountActions.fetchPermissions(serviceAccountID);
  }

  /**
   * Validates the details for a serviceAccount have been successfully fetched
   *
   * Marks `type` as received; once every part is true the progress entry is
   * removed and ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_SUCCESS is emitted.
   * No-op when no fetch is in progress for this ID.
   *
   * @param {Number} serviceAccountID
   * @param {String} type The type of detail that has been successfully
   * received
   */
  public validateServiceAccountWithDetailsFetch(
    serviceAccountID: string,
    type: string
  ) {
    const serviceAccountsFetching = this.getServiceAccountsFetching();
    if (serviceAccountsFetching[serviceAccountID] == null) {
      return;
    }

    serviceAccountsFetching[serviceAccountID][type] = true;

    // fetchedAll === "no part is still false".
    const fetchedAll = !Object.keys(
      serviceAccountsFetching[serviceAccountID]
    ).some((key) => {
      return !serviceAccountsFetching[serviceAccountID][key];
    });

    if (fetchedAll) {
      delete serviceAccountsFetching[serviceAccountID];
      getSDK().dispatch({
        type: EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_SUCCESS,
        serviceAccountsFetching,
      });
      this.emit(
        EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_SUCCESS,
        serviceAccountID
      );
    }
  }

  /**
   * Emits error if we're in the process of fetching details for a serviceAccount
   * and one of the requests fails.
   *
   * @param {Number} serviceAccountID
   */
  public invalidateServiceAccountWithDetailsFetch(serviceAccountID: string) {
    const serviceAccountsFetching = this.getServiceAccountsFetching();
    if (serviceAccountsFetching[serviceAccountID] == null) {
      return;
    }

    delete serviceAccountsFetching[serviceAccountID];
    getSDK().dispatch({
      type: EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_ERROR,
      serviceAccountsFetching,
    });
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_FETCHED_ERROR,
      serviceAccountID
    );
  }

  /**
   * Process a serviceAccount response
   *
   * Merges the payload into the cached detail and counts it toward the
   * composite detail fetch.
   *
   * @param {Object} serviceAccountData
   */
  public processServiceAccount(serviceAccountData: { uid: string }) {
    let serviceAccount = this.getServiceAccountRaw(serviceAccountData.uid);

    serviceAccount = { ...serviceAccount, ...serviceAccountData };
    this.setServiceAccount(serviceAccount.uid, serviceAccount);
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_SERVICE_ACCOUNT_CHANGE,
      serviceAccount.uid
    );

    this.validateServiceAccountWithDetailsFetch(
      serviceAccount.uid,
      "serviceAccount"
    );
  }

  public processServiceAccountError(serviceAccountID: string) {
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_SERVICE_ACCOUNT_ERROR,
      serviceAccountID
    );
    this.invalidateServiceAccountWithDetailsFetch(serviceAccountID);
  }

  /**
   * Process an array of serviceAccounts
   *
   * @param {Array} serviceAccounts All existing serviceAccounts
   */
  public processServiceAccounts(serviceAccounts: Array<{ uid: string }>) {
    getSDK().dispatch({
      type: EventTypes.ACL_SERVICE_ACCOUNTS_CHANGE,
      serviceAccounts,
    });
    this.emit(EventTypes.ACL_SERVICE_ACCOUNTS_CHANGE);
  }

  /**
   * Process a serviceAccount groups response
   *
   * @param {Object} groups Groups associated with serviceAccount
   * @param {Object} serviceAccountID
   */
  public processServiceAccountGroups(
    groups: object[],
    serviceAccountID: string
  ) {
    let serviceAccount = this.getServiceAccountRaw(serviceAccountID);

    serviceAccount = { ...serviceAccount, ...{ groups } };

    // Use serviceAccountID throughout as the
    // serviceAccount may not have been previously set
    this.setServiceAccount(serviceAccountID, serviceAccount);
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_GROUPS_CHANGE,
      serviceAccountID
    );

    this.validateServiceAccountWithDetailsFetch(serviceAccountID, "groups");
  }

  public processServiceAccountGroupsError(serviceAccountID: string) {
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_GROUPS_ERROR,
      serviceAccountID
    );
    this.invalidateServiceAccountWithDetailsFetch(serviceAccountID);
  }

  /**
   * Process a serviceAccount permissions response
   *
   * @param {Object} permissions Permissions associated with serviceAccount
   * @param {Object} serviceAccountID
   */
  public processServiceAccountPermissions(
    permissions: object,
    serviceAccountID: string
  ) {
    let serviceAccount = this.getServiceAccountRaw(serviceAccountID);

    serviceAccount = { ...serviceAccount, ...{ permissions } };

    // Use serviceAccountID throughout as the
    // serviceAccount may not have been previously set
    this.setServiceAccount(serviceAccountID, serviceAccount);
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_PERMISSIONS_CHANGE,
      serviceAccountID
    );

    this.validateServiceAccountWithDetailsFetch(
      serviceAccountID,
      "permissions"
    );
  }

  public processServiceAccountPermissionsError(serviceAccountID: string) {
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_DETAILS_PERMISSIONS_ERROR,
      serviceAccountID
    );
    this.invalidateServiceAccountWithDetailsFetch(serviceAccountID);
  }

  // Apply a successful PATCH locally, then refetch the list and notify.
  public processServiceAccountUpdateSuccess(
    serviceAccountID: string,
    patch: object
  ) {
    const serviceAccount = this.getServiceAccountRaw(serviceAccountID);
    this.setServiceAccount(serviceAccountID, { ...serviceAccount, ...patch });
    this.fetchAll();
    this.emit(
      EventTypes.ACL_SERVICE_ACCOUNT_UPDATE_SUCCESS,
      serviceAccountID,
      patch
    );
  }
}

// Lazily-constructed module singleton.
let store: ACLServiceAccountsStore;
export default () => store || (store = new ACLServiceAccountsStore());
the_stack
import classNames from "classnames";
import * as React from "react";
import { isDivinaFn, isPdfFn } from "readium-desktop/common/isManifestType";
import {
    I18nTyped, Translator,
} from "readium-desktop/common/services/translator";
import { TPublication } from "readium-desktop/common/type/publication.type";
import { formatTime } from "readium-desktop/common/utils/time";
import { IOpdsBaseLinkView } from "readium-desktop/common/views/opds";
import * as stylesBookDetailsDialog from "readium-desktop/renderer/assets/styles/bookDetailsDialog.css";
import * as stylesColumns from "readium-desktop/renderer/assets/styles/components/columns.css";
import * as stylesPublications from "readium-desktop/renderer/assets/styles/components/publications.css";
import * as stylesGlobal from "readium-desktop/renderer/assets/styles/global.css";

import { TaJsonDeserialize } from "@r2-lcp-js/serializable";
import { LocatorExtended } from "@r2-navigator-js/electron/renderer";
import { Publication as R2Publication } from "@r2-shared-js/models/publication";

import Cover from "../../Cover";
import { FormatContributorWithLink } from "./FormatContributorWithLink";
import { FormatPublicationLanguage } from "./formatPublicationLanguage";
import { FormatPublisherDate } from "./formatPublisherDate";
import LcpInfo from "./LcpInfo";
import PublicationInfoDescription from "./PublicationInfoDescription";

// Props for the publication-info dialog body.
// NOTE(review): `onClikLinkCb` is a typo for "onClickLinkCb", but it is part
// of the public interface — renaming would break every caller.
export interface IProps {
    publication: TPublication;
    r2Publication: R2Publication | null;
    manifestUrlR2Protocol: string | null;
    handleLinkUrl: ((url: string) => void) | undefined;
    toggleCoverZoomCb: (coverZoom: boolean) => void;
    ControlComponent?: React.ComponentType<any>;
    TagManagerComponent: React.ComponentType<any>;
    coverZoom: boolean;
    focusWhereAmI: boolean;
    pdfPlayerNumberOfPages: number | undefined; // super hacky :(
    divinaNumberOfPages: number | undefined; // super hacky :(
    divinaContinousEqualTrue: boolean;
    readerReadingLocation: LocatorExtended;
    translator: Translator;
    onClikLinkCb?: (tag: IOpdsBaseLinkView) => () => void | undefined;
    closeDialogCb: () => void;
}

// Renders the "duration: …" line, or nothing when duration is falsy
// (0/undefined) or formatTime yields an empty string.
const Duration = (props: {
    duration: number;
    __: I18nTyped;
}) => {
    const { duration, __ } = props;

    if (!duration) {
        return <></>;
    }

    const sentence = formatTime(duration);

    return (
        sentence
            ? <>
                <strong>{`${__("publication.duration.title")}: `}</strong>
                <i className={stylesBookDetailsDialog.allowUserSelect}>
                    {sentence}
                </i>
                <br />
            </>
            : <></>);
};

// "Where am I" section: computes a human-readable progression/pagination
// summary for audio books, Divina, PDF, fixed-layout and reflowable EPUB,
// plus a clickable heading breadcrumb for reflowable content.
const Progression = (props: {
    r2Publication: R2Publication | null,
    manifestUrlR2Protocol: string | null,
    handleLinkUrl: ((url: string) => void) | undefined;
    locatorExt: LocatorExtended,
    focusWhereAmI: boolean,
    pdfPlayerNumberOfPages: number | undefined, // super hacky :(
    divinaNumberOfPages: number | undefined, // super hacky :(
    divinaContinousEqualTrue: boolean,
    __: I18nTyped;
    closeDialogCb: () => void;
}) => {
    const { __, closeDialogCb, locatorExt, focusWhereAmI, pdfPlayerNumberOfPages, divinaNumberOfPages, divinaContinousEqualTrue, r2Publication, manifestUrlR2Protocol, handleLinkUrl } = props;

    // Move keyboard focus onto the section heading when the dialog was opened
    // via the "where am I?" action.
    const focusRef = React.useRef<HTMLHeadingElement>(null);
    React.useEffect(() => {
        if (focusWhereAmI && focusRef.current) {
            focusRef.current.focus();
        }
    }, [focusWhereAmI]);

    if (typeof locatorExt?.locator?.locations?.progression === "number") {
        // try/catch until the code is cleaned-up!
        // (Audiobooks, PDF, Divina, EPUB FXL and reflow ... page number vs. string types)
        try {
            const isAudio = locatorExt.audioPlaybackInfo
                // total duration can be undefined with badly-constructed publications,
                // for example we found some LibriVox W3C LPF audiobooks missing duration property on reading order resources
                && locatorExt.audioPlaybackInfo.globalDuration
                && typeof locatorExt.locator.locations.position === "number"; // .progression is local to audio item in reading order playlist

            const isDivina = r2Publication && isDivinaFn(r2Publication);
            const isPdf = r2Publication && isPdfFn(r2Publication); // locatorExt.docInfo.isFixedLayout
            const isFixedLayout = r2Publication &&
                // && !r2Publication.PageList
                r2Publication.Metadata?.Rendition?.Layout === "fixed";

            let txtProgression: string | undefined;
            let txtPagination: string | undefined;
            let txtHeadings: JSX.Element | undefined;

            if (isAudio) {
                // Audio: global percent plus elapsed/total time.
                const percent = Math.round(locatorExt.locator.locations.position * 100);
                // const p = Math.round(100 * (locatorExt.audioPlaybackInfo.globalTime / locatorExt.audioPlaybackInfo.globalDuration));
                txtProgression = `${percent}% [${formatTime(Math.round(locatorExt.audioPlaybackInfo.globalTime))} / ${formatTime(Math.round(locatorExt.audioPlaybackInfo.globalDuration))}]`;
            } else if (isDivina) {
                // console.log("----- ".repeat(100), divinaNumberOfPages, r2Publication?.Spine?.length);
                let totalPages = (divinaNumberOfPages && !divinaContinousEqualTrue) ? divinaNumberOfPages : (r2Publication?.Spine?.length ? r2Publication.Spine.length : undefined);
                // Defensive: upstream sometimes delivers page counts as strings.
                if (typeof totalPages === "string") {
                    try {
                        totalPages = parseInt(totalPages, 10);
                    } catch (_e) {
                        totalPages = 0;
                    }
                }
                let pageNum = !divinaContinousEqualTrue ? (locatorExt.locator.locations.position || 0) : (Math.floor(locatorExt.locator.locations.progression * r2Publication.Spine.length) - 1);
                if (typeof pageNum === "string") {
                    try {
                        pageNum = parseInt(pageNum, 10) + 1;
                    } catch (_e) {
                        pageNum = 0;
                    }
                } else if (typeof pageNum === "number") {
                    pageNum = pageNum + 1;
                }
                if (totalPages && typeof pageNum === "number") {
                    txtPagination = __("reader.navigation.currentPageTotal", { current: `${pageNum}`, total: `${totalPages}` });
                    // txtProgression = `${Math.round(100 * (pageNum / totalPages))}%`;
                    txtProgression = `${Math.round(100 * (locatorExt.locator.locations.progression || 0))}%`;
                } else {
                    // divinaContinousEqualTrue (relative to spine items)
                    if (typeof pageNum === "number") {
                        txtPagination = __("reader.navigation.currentPage", { current: `${pageNum}` });
                    }
                    // see (locations as any).totalProgression Divina HACK
                    if (typeof locatorExt.locator.locations.progression === "number") {
                        const percent = Math.round(locatorExt.locator.locations.progression * 100);
                        txtProgression = `${percent}%`;
                    }
                }
            } else if (isPdf) {
                // PDF: page count from the player when available, else metadata.
                let totalPages = pdfPlayerNumberOfPages ? pdfPlayerNumberOfPages : (r2Publication?.Metadata?.NumberOfPages ? r2Publication.Metadata.NumberOfPages : undefined);
                if (typeof totalPages === "string") {
                    try {
                        totalPages = parseInt(totalPages, 10);
                    } catch (_e) {
                        totalPages = 0;
                    }
                }
                // In the PDF player the locator href carries the page number.
                let pageNum = (locatorExt.locator?.href as unknown) as number;
                if (typeof pageNum === "string") {
                    try {
                        pageNum = parseInt(pageNum, 10);
                    } catch (_e) {
                        pageNum = 0;
                    }
                }
                if (totalPages) {
                    txtPagination = __("reader.navigation.currentPageTotal", { current: `${pageNum}`, total: `${totalPages}` });
                    txtProgression = `${Math.round(100 * (pageNum / totalPages))}%`;
                } else {
                    txtPagination = __("reader.navigation.currentPage", { current: `${pageNum}` });
                }
            } else if (r2Publication?.Spine && locatorExt.locator?.href) {
                // EPUB: locate the current resource in the spine.
                const spineIndex = r2Publication.Spine.findIndex((l) => {
                    return l.Href === locatorExt.locator.href;
                });
                if (spineIndex >= 0) {
                    if (isFixedLayout) {
                        const pageNum = spineIndex + 1;
                        const totalPages = r2Publication.Spine.length;
                        txtPagination = __("reader.navigation.currentPageTotal", { current: `${pageNum}`, total: `${totalPages}` });
                        txtProgression = `${Math.round(100 * (pageNum / totalPages))}%`;
                    } else {
                        // reflow: no totalPages, potentially just currentPage which is locatorExt.epubPage
                        if (locatorExt.epubPage) {
                            txtPagination = __("reader.navigation.currentPage", { current: `${locatorExt.epubPage}` });
                        }
                        // no virtual global .position in the current implementation,
                        // just local percentage .progression (current reading order item)
                        const percent = Math.round(locatorExt.locator.locations.progression * 100);
                        txtProgression = `${spineIndex + 1}/${r2Publication.Spine.length}${locatorExt.locator.title ? ` (${locatorExt.locator.title})` : ""} [${percent}%]`;

                        if (locatorExt.headings && manifestUrlR2Protocol) { // focusWhereAmI
                            // Breadcrumb summary: strictly-decreasing heading
                            // levels from the current position outward.
                            let rank = 999;
                            const hs = locatorExt.headings.filter((h, _i) => {
                                if (h.level < rank
                                    // && (h.id || i === locatorExt.headings.length - 1)
                                ) {
                                    rank = h.level;
                                    return true;
                                }
                                return false;
                            }).reverse();
                            // WARNING: .reverse() is in-place same-array mutation! (not a new array)
                            // ...but we're chaining with .filter() so that locatorExt.headings is not modified
                            let k = 0;
                            const summary = hs.reduce((arr, h, i) => {
                                return arr.concat(
                                    <span key={`_h${k++}`}>{i === 0 ? " " : " / "}</span>,
                                    <span key={`_h${k++}`} style={{fontWeight: "bold"}}>h{h.level} </span>,
                                    <span key={`_h${k++}`} style={{border: "1px solid grey", padding: "2px"}}>{h.txt ? `${h.txt}` : `${h.id ? `[${h.id}]` : "_"}`}</span>,
                                );
                            }, []);
                            // WARNING: .reverse() is in-place same-array mutation! (not a new array)
                            // ...which is why we use .slice() to create an instance copy
                            // (locatorExt.headings isn't modified)
                            // Note: instead of .slice(), Array.from() works too
                            const details = locatorExt.headings.slice().reverse().
                                // filter((h, i) => {
                                //     return h.id || i === 0;
                                // }).
                                reduce((arr, h, i) => {
                                    return arr.concat(<li key={`_li${i}`}>
                                        <span style={{fontWeight: "bold"}}>h{h.level} </span>
                                        {(h.id || i === 0) ? (
                                        <a href={(h.id || i === 0) ? "#" : undefined}
                                            data-id={h.id ? h.id : undefined}
                                            data-index={i}
                                            onClick={(e) => {
                                                e.preventDefault();
                                                const id = e.currentTarget?.getAttribute("data-id");
                                                const idx = e.currentTarget?.getAttribute("data-index");
                                                const index = idx ? parseInt(idx, 10) : -1;
                                                if (id || index === 0) {
                                                    closeDialogCb();
                                                    // Navigate to the heading's fragment inside the current resource.
                                                    const url = manifestUrlR2Protocol + "/../" + locatorExt.locator.href.replace(/#[^#]*$/, "") + `#${id ? id : ""}`;
                                                    handleLinkUrl(url);
                                                }
                                            }}>
                                            <span style={{padding: "2px"}}>{h.txt ? `${h.txt}` : `${h.id ? `[${h.id}]` : "_"}`}</span>
                                        </a>
                                        ) : (
                                        <span
                                            data-id={h.id ? h.id : undefined}
                                            data-index={i}
                                            style={{padding: "2px"}}>{h.txt ? `${h.txt}` : `${h.id ? `[${h.id}]` : "_"}`}</span>
                                        )}
                                    </li>);
                                }, []);
                            txtHeadings = <details><summary>{summary}</summary><ul style={{listStyleType: "none"}}>{details}</ul></details>;
                        }
                    }
                }
            }

            return (
                <section>
                    <div className={stylesGlobal.heading}>
                        <h3 ref={focusRef} tabIndex={focusWhereAmI ? -1 : 0}>{`${__("publication.progression.title")}: `}</h3>
                    </div>
                    <>
                    {(txtProgression ? (<p><i className={stylesBookDetailsDialog.allowUserSelect}> {txtProgression} </i></p>) : <></>)}
                    {(txtPagination ? (<p><i className={stylesBookDetailsDialog.allowUserSelect}> {txtPagination} </i></p>) : <></>)}
                    {(txtHeadings ? (<><div style={{lineHeight: "2em"}} className={stylesBookDetailsDialog.allowUserSelect}> {txtHeadings} </div></>) : <></>)}
                    </>
                </section>
            );
        } catch (_err) {
            // Swallow and render a placeholder — see try/catch note above.
            return (<>_</>);
        }
    }
    return (<></>);
};

// Main dialog body: cover + metadata columns, tags, description,
// "more info" facts, optional LCP section and the Progression section.
export const PublicationInfoContent: React.FC<IProps> = (props) => {
    // tslint:disable-next-line: max-line-length
    const { closeDialogCb, readerReadingLocation, pdfPlayerNumberOfPages, divinaNumberOfPages, divinaContinousEqualTrue, r2Publication: r2Publication_, manifestUrlR2Protocol, handleLinkUrl, publication, toggleCoverZoomCb, ControlComponent, TagManagerComponent, coverZoom, translator, onClikLinkCb, focusWhereAmI } = props;
    const __ = translator.translate;

    // Deserialize the R2 publication from JSON only when the caller did not
    // already provide one; memoized on the inputs.
    const r2Publication = React.useMemo(() => {
        if (!r2Publication_ && publication.r2PublicationJson) {
            // debug("!! r2Publication ".repeat(100));
            return TaJsonDeserialize(publication.r2PublicationJson, R2Publication);
        }
        // debug("__r2Publication".repeat(100));
        return r2Publication_;
    }, [publication, r2Publication_]);

    return (
        <>
            <div className={stylesColumns.row}>
                <div className={stylesColumns.col_book_img}>
                    <div className={stylesPublications.publication_image_wrapper}>
                        <Cover
                            publicationViewMaybeOpds={publication}
                            onClick={() => toggleCoverZoomCb(coverZoom)}
                            onKeyPress={
                                (e: React.KeyboardEvent<HTMLImageElement>) =>
                                    e.key === "Enter" && toggleCoverZoomCb(coverZoom)
                            }
                        ></Cover>
                    </div>
                    {ControlComponent && <ControlComponent />}
                </div>
                <div className={stylesColumns.col}>
                    <section>
                        <h2 className={classNames(stylesBookDetailsDialog.allowUserSelect, stylesGlobal.my_10)}>
                            {publication.title}
                        </h2>
                        <FormatContributorWithLink
                            contributors={publication.authors}
                            translator={translator}
                            onClickLinkCb={onClikLinkCb}
                        />
                    </section>
                    <section>
                        <div className={stylesGlobal.heading}>
                            <h3>{__("catalog.tags")}</h3>
                        </div>
                        <TagManagerComponent />
                    </section>
                    <section>
                        <PublicationInfoDescription publication={publication} __={__} translator={props.translator} />
                    </section>
                    <section>
                        <div className={stylesGlobal.heading}>
                            <h3>{__("catalog.moreInfo")}</h3>
                        </div>
                        <div>
                            <FormatPublisherDate publication={publication} __={__} />
                            {publication.publishers?.length ?
                                <>
                                    <strong>{`${__("catalog.publisher")}: `}</strong>
                                    <i className={stylesBookDetailsDialog.allowUserSelect}>
                                        <FormatContributorWithLink
                                            contributors={publication.publishers}
                                            translator={translator}
                                            onClickLinkCb={onClikLinkCb}
                                        />
                                    </i>
                                    <br />
                                </> : undefined}
                            {publication.languages?.length ?
                                <>
                                    <strong>{`${__("catalog.lang")}: `}</strong>
                                    <FormatPublicationLanguage publication={publication} __={__} />
                                    <br />
                                </> : undefined}
                            {publication.numberOfPages ?
                                <>
                                    <strong>{`${__("catalog.numberOfPages")}: `}</strong>
                                    <i className={stylesBookDetailsDialog.allowUserSelect}>
                                        {publication.numberOfPages}
                                    </i>
                                    <br />
                                </> : undefined}
                            <Duration __={__} duration={publication.duration} />
                            {publication.nbOfTracks ?
                                <>
                                    <strong>{`${__("publication.audio.tracks")}: `}</strong>
                                    <i className={stylesBookDetailsDialog.allowUserSelect}>
                                        {publication.nbOfTracks}
                                    </i>
                                    <br />
                                </> : undefined}
                        </div>
                    </section>
                    {(publication.lcp ?
                        <section>
                            <LcpInfo publicationLcp={publication} />
                        </section> : <></>)}
                    <Progression
                        __={__}
                        closeDialogCb={closeDialogCb}
                        r2Publication={r2Publication}
                        manifestUrlR2Protocol={manifestUrlR2Protocol}
                        handleLinkUrl={handleLinkUrl}
                        pdfPlayerNumberOfPages={pdfPlayerNumberOfPages}
                        divinaNumberOfPages={divinaNumberOfPages}
                        divinaContinousEqualTrue={divinaContinousEqualTrue}
                        focusWhereAmI={focusWhereAmI}
                        locatorExt={readerReadingLocation || publication.lastReadingLocation}
                    />
                </div>
            </div>
        </>
    );
};
the_stack
import { Injectable } from '@angular/core';
import * as d3 from 'd3';
import { LogService } from '../log.service';
import { FnService } from '../util/fn.service';
import { LionService } from './lion.service';
import { NavService } from '../nav/nav.service';

// Aggregate of all key-binding maps consulted by keyIn():
// global bindings, masked (reserved) keys, dialog bindings, per-view
// bindings, an optional catch-all view callback and gesture notes.
export interface KeyHandler {
    globalKeys: Object;
    maskedKeys: Object;
    dialogKeys: Object;
    viewKeys: any;
    viewFn: any;
    viewGestures: string[][];
}

export enum KeysToken {
    KEYEV = 'keyev'
}

/**
 * ONOS GUI -- Keys Service Module.
 *
 * Installs a keydown handler on a d3 selection and dispatches key events,
 * in priority order, to global bindings, then dialog bindings, then the
 * current view's bindings. Also supports "shift-shift"-prefixed key
 * sequences matched against a trie (see matchSeq).
 */
@Injectable({
    providedIn: 'root',
})
export class KeysService {
    enabled: boolean = true;          // master switch for all key handling
    globalEnabled: boolean = true;    // switch for the built-in global keys
    keyHandler: KeyHandler = <KeyHandler>{
        globalKeys: {},
        maskedKeys: {},
        dialogKeys: {},
        viewKeys: {},
        viewFn: null,
        viewGestures: [],
    };

    seq: any = {};                    // trie of registered key sequences
    matching: boolean = false;        // currently inside a sequence match?
    matched: string = '';             // keys accumulated so far in the match
    lookup: any;
    // Keys that should reach handlers even when a text field has focus.
    textFieldDoesNotBlock: any = {
        enter: 1,
        esc: 1,
    };
    quickHelpShown: boolean = false;

    constructor(
        protected log: LogService,
        protected fs: FnService,
        protected ls: LionService,
        protected ns: NavService
    ) {
        this.log.debug('KeyService constructed');
    }

    // Attach the keydown listener to a d3 selection and register the
    // built-in global bindings.
    installOn(elem) {
        this.log.debug('Installing keys handler');
        elem.on('keydown', () => { this.keyIn(); });
        this.setupGlobalKeys();
    }

    // Getter/setter combo: no argument reads the bindings, an argument
    // installs new view bindings.
    keyBindings(x) {
        if (x === undefined) {
            return this.getKeyBindings();
        } else {
            this.setKeyBindings(x);
        }
    }

    // Remove all view-specific bindings (called on view teardown).
    unbindKeys() {
        this.keyHandler.viewKeys = {};
        this.keyHandler.viewFn = null;
        this.keyHandler.viewGestures = [];
    }

    // Getter/setter combo for dialog key bindings.
    dialogKeys(x) {
        if (x === undefined) {
            this.unbindDialogKeys();
        } else {
            this.bindDialogKeys(x);
        }
    }

    // Register / unregister a key sequence in the trie.
    addSeq(word, data) {
        this.fs.addToTrie(this.seq, word, data);
    }

    remSeq(word) {
        this.fs.removeFromTrie(this.seq, word);
    }

    gestureNotes(g?) {
        if (g === undefined) {
            return this.keyHandler.viewGestures;
        } else {
            this.keyHandler.viewGestures = this.fs.isA(g) || [];
        }
    }

    enableKeys(b) {
        this.enabled = b;
    }

    enableGlobalKeys(b) {
        this.globalEnabled = b;
    }

    // Strip any keys from `o` that collide with reserved global bindings,
    // warning about each collision.
    // NOTE(review): plain objects have no .forEach — this looks like it
    // expects a d3.map / Map-like argument, or the iteration should be
    // Object.keys(o).forEach(...). Verify against callers before relying
    // on this method.
    checkNotGlobal(o) {
        const oops = [];
        if (this.fs.isO(o)) {
            o.forEach((val, key) => {
                if (this.keyHandler.globalKeys[key]) {
                    oops.push(key);
                }
            });
            if (oops.length) {
                this.log.warn('Ignoring reserved global key(s):', oops.join(','));
                oops.forEach((key) => {
                    delete o[key];
                });
            }
        }
    }

    // Sequence matching: "shift-shift" arms the matcher; subsequent keys are
    // accumulated and looked up in the trie. Returns true while the event is
    // being consumed by sequence matching.
    protected matchSeq(key) {
        if (!this.matching && key === 'shift-shift') {
            this.matching = true;
            return true;
        }
        if (this.matching) {
            this.matched += key;
            this.lookup = this.fs.trieLookup(this.seq, this.matched);
            if (this.lookup === -1) {
                // Partial match — keep accumulating.
                return true;
            }
            this.matching = false;
            this.matched = '';
            if (!this.lookup) {
                return;
            }
            // ee.cluck(lookup);
            return true;
        }
    }

    // Translate a legacy numeric keyCode into the symbolic name used by the
    // binding maps; null for unrecognized codes.
    protected whatKey(code: number): string {
        switch (code) {
            case 8: return 'delete';
            case 9: return 'tab';
            case 13: return 'enter';
            case 16: return 'shift';
            case 27: return 'esc';
            case 32: return 'space';
            case 37: return 'leftArrow';
            case 38: return 'upArrow';
            case 39: return 'rightArrow';
            case 40: return 'downArrow';
            case 186: return 'semicolon';
            case 187: return 'equals';
            case 188: return 'comma';
            case 189: return 'dash';
            case 190: return 'dot';
            case 191: return 'slash';
            case 192: return 'backQuote';
            case 219: return 'openBracket';
            case 220: return 'backSlash';
            case 221: return 'closeBracket';
            case 222: return 'quote';
            default:
                if ((code >= 48 && code <= 57) ||
                    (code >= 65 && code <= 90)) {
                    // Digits and letters map to their character.
                    return String.fromCharCode(code);
                } else if (code >= 112 && code <= 123) {
                    // Function keys F1..F12.
                    return 'F' + (code - 111);
                }
                return null;
        }
    }

    // True when the current d3 event target is a text input/textarea.
    protected textFieldInput() {
        const t = d3.event.target.tagName.toLowerCase();
        return t === 'input' || t === 'textarea';
    }

    // Central keydown dispatcher. Priority: key sequences, then global
    // bindings, then dialog bindings, then the view's bindings / catch-all.
    protected keyIn() {
        const event = d3.event;
        // d3.events can set the keyCode, but unit tests based on KeyboardEvent
        // cannot set keyCode since the attribute has been deprecated
        const code = event.keyCode ? event.keyCode : event.code;
        const codeNum: number = parseInt(code, 10);
        let key = this.whatKey(codeNum);
        this.log.debug('Key detected', event, key, event.code, event.keyCode);
        const textBlockable = !this.textFieldDoesNotBlock[key];
        const modifiers = [];

        if (event.metaKey) {
            modifiers.push('cmd');
        }

        if (event.altKey) {
            modifiers.push('alt');
        }

        if (event.shiftKey) {
            modifiers.push('shift');
        }

        if (!key) {
            return;
        }

        // Compose the binding name, e.g. "cmd-shift-T".
        modifiers.push(key);
        key = modifiers.join('-');

        // Suppress handling while typing in a text field, except for the
        // whitelisted keys (enter/esc).
        if (textBlockable && this.textFieldInput()) {
            return;
        }

        const kh: KeyHandler = this.keyHandler;
        const gk = kh.globalKeys[key];
        // A binding may be a bare function or a [fn, helpText] pair.
        const gcb = this.fs.isF(gk) || (this.fs.isA(gk) && this.fs.isF(gk[0]));
        const dk = kh.dialogKeys[key];
        const dcb = this.fs.isF(dk);
        const vk = kh.viewKeys[key];
        const kl = this.fs.isF(kh.viewKeys._keyListener);
        const vcb = this.fs.isF(vk) ||
            (this.fs.isA(vk) && this.fs.isF(vk[0])) ||
            this.fs.isF(kh.viewFn);
        const token: KeysToken = KeysToken.KEYEV; // indicate this was a key-pressed event

        event.stopPropagation();

        if (this.enabled) {
            if (this.matchSeq(key)) {
                return;
            }

            // global callback?
            if (gcb && gcb(token, key, code, event)) {
                // if the event was 'handled', we are done
                return;
            }
            // dialog callback?
            if (dcb) {
                dcb(token, key, code, event);
                // assume dialog handled the event
                return;
            }
            // otherwise, let the view callback have a shot
            if (vcb) {
                this.log.debug('Letting view callback have a shot',
                    vcb, token, key, code, event);
                vcb(token, key, code, event);
            }
            if (kl) {
                kl(key);
            }
        }
    }

    // functions to obtain localized strings deferred from the setup of the
    // global key data structures.
    protected qhlion() {
        return this.ls.bundle('core.fw.QuickHelp');
    }
    protected qhlionShowHide() {
        return this.qhlion()('qh_hint_show_hide_qh');
    }

    protected qhlionHintEsc() {
        return this.qhlion()('qh_hint_esc');
    }

    protected qhlionHintT() {
        return this.qhlion()('qh_hint_t');
    }

    // Install the built-in global bindings (quick-help toggle, ESC, theme
    // toggle) and the masked-key set that views may never override.
    protected setupGlobalKeys() {
        (<any>Object).assign(this.keyHandler, {
            globalKeys: {
                backSlash: [(view, key, code, ev) => this.quickHelp(view, key, code, ev), this.qhlionShowHide],
                slash: [(view, key, code, ev) => this.quickHelp(view, key, code, ev), this.qhlionShowHide],
                esc: [(view, key, code, ev) => this.escapeKey(view, key, code, ev), this.qhlionHintEsc],
                T: [(view, key, code, ev) => this.toggleTheme(view, key, code, ev), this.qhlionHintT],
            },
            globalFormat: ['backSlash', 'slash', 'esc', 'T'],
            // Masked keys are global key handlers that always return true.
            // That is, the view will never see the event for that key.
            maskedKeys: {
                slash: 1,
                backSlash: 1,
                T: 1,
            },
        });
    }

    // Toggle the quick-help overlay; returns true when handled.
    protected quickHelp(view, key, code, ev) {
        if (!this.globalEnabled) {
            return false;
        }
        this.quickHelpShown = !this.quickHelpShown;
        return true;
    }

    // returns true if we 'consumed' the ESC keypress, false otherwise
    protected escapeKey(view, key, code, ev) {
        this.quickHelpShown = false;
        return this.ns.hideNav();
    }

    protected toggleTheme(view, key, code, ev) {
        if (!this.globalEnabled) {
            return false;
        }
        // ts.toggleTheme();
        return true;
    }

    // Collect (and optionally delete) entries of `map` that collide with the
    // masked-key set; warns once per call with all collisions.
    protected filterMaskedKeys(map: any, caller: any, remove: boolean): any[] {
        const masked = [];
        const msgs = [];

        d3.map(map).keys().forEach((key) => {
            if (this.keyHandler.maskedKeys[key]) {
                masked.push(key);
                msgs.push(caller, ': Key "' + key + '" is reserved');
            }
        });

        if (msgs.length) {
            this.log.warn(msgs.join('\n'));
        }

        if (remove) {
            masked.forEach((k) => {
                delete map[k];
            });
        }
        return masked;
    }

    protected unexParam(fname, x) {
        this.log.warn(fname, ': unexpected parameter-- ', x);
    }

    // Install view bindings: either a single catch-all function or a map of
    // key -> handler (masked keys removed first).
    protected setKeyBindings(keyArg) {
        const fname = 'setKeyBindings()';
        const kFunc = this.fs.isF(keyArg);
        const kMap = this.fs.isO(keyArg);

        if (kFunc) {
            // set general key handler callback
            this.keyHandler.viewFn = kFunc;
        } else if (kMap) {
            this.filterMaskedKeys(kMap, fname, true);
            this.keyHandler.viewKeys = kMap;
        } else {
            this.unexParam(fname, keyArg);
        }
    }

    // Snapshot of all currently-registered bindings.
    getKeyBindings() {
        const gkeys = d3.map(this.keyHandler.globalKeys).keys();
        const masked = d3.map(this.keyHandler.maskedKeys).keys();
        const vkeys = d3.map(this.keyHandler.viewKeys).keys();
        const vfn = !!this.fs.isF(this.keyHandler.viewFn);

        return {
            globalKeys: gkeys,
            maskedKeys: masked,
            viewKeys: vkeys,
            viewFunction: vfn,
        };
    }

    // Install dialog bindings (masked keys removed first).
    protected bindDialogKeys(map) {
        const fname = 'bindDialogKeys()';
        const kMap = this.fs.isO(map);

        if (kMap) {
            this.filterMaskedKeys(map, fname, true);
            this.keyHandler.dialogKeys = kMap;
        } else {
            this.unexParam(fname, map);
        }
    }

    protected unbindDialogKeys() {
        this.keyHandler.dialogKeys = {};
    }
}
// --- concatenation boundary: the content below belongs to a different module ---
import { ValueMapV2 } from "../ValueMapV2";
import HtmlToOperationLogMapping from "../helperFunctions/HtmlToOperationLogMapping";
import { ExecContext } from "../helperFunctions/ExecContext";
import { consoleLog, consoleError, consoleWarn } from "../helperFunctions/logging";
import { regExpContainsNestedGroup, countGroupsInRegExp } from "../regExpHelpers";
import { mapPageHtml } from "../mapPageHtml";
import { safelyReadProperty } from "../util";
import addElOrigin, { addOriginInfoToCreatedElement, addElAttributeNameOrigin, addElAttributeValueOrigin, getElAttributeNameOrigin, getElAttributeValueOrigin, processClonedNode } from "./domHelpers/addElOrigin";
import mapInnerHTMLAssignment from "./domHelpers/mapInnerHTMLAssignment";
import * as cloneRegExp from "clone-regexp";
import { doOperation } from "../FunctionNames";
import * as jsonToAst from "json-to-ast";
import { getJSONPathOffset } from "../getJSONPathOffset";
import * as get from "lodash.get";
import { traverseObject } from "../traverseObject";
import { pathToFileURL } from "url";
import { getShortOperationName, getShortExtraArgName, getShortKnownValueName } from "../names";
import { url } from "inspector";

// Fetch the value of the index-th argument of the intercepted call;
// args[2] holds the raw argument values in the interception record.
function getFnArg(args, index) {
    return args[2][index];
}

// Resolve a possibly-relative URL against the current document by
// round-tripping it through an anchor element's href.
function getFullUrl(url) {
    const a = document.createElement("a");
    a.href = url;
    return a.href;
}

// Bag of state handed to every special-case handler.
export type SpecialCaseArgs = {
    ctx: ExecContext;
    object: any;
    fnArgTrackingValues: any[];
    logData: any;
    fnArgValues: any[];
    ret: any;
    extraTrackingValues: any;
    runtimeArgs: any;
    fn: any;
    retT: any;
    args: any[];
    context: any;
    extraState: any;
    fnKnownValue: string | null;
};

// Shared handler for fs.writeFileSync / fs.writeFile: records a fileWrite
// event (carrying the tracking value of the written data) before invoking
// the real implementation.
const writeFile = ({ fn, ctx, fnArgValues, fnArgTrackingValues, args, logData, context }) => {
    // NOTE(review): eval'd require -- presumably so browser bundlers don't
    // try to resolve the Node "path" module; confirm.
    const path = eval('require("path")');
    let absPath = fnArgValues[0];
    if (!absPath.startsWith("/")) {
        const cwd = eval("process.cwd()");
        absPath = path.resolve(cwd, absPath);
    }
    ctx.registerEvent({
        type: "fileWrite",
        logIndex: fnArgTrackingValues[1],
        path: fnArgValues[0],
        absPath
    });
    const ret = fn.apply(ctx, fnArgValues);
    return [ret, null];
};

// Attach jsonParseResult operation logs to every value (and key) of a
// freshly parsed JSON structure; returns the standard [value, trackingValue]
// pair. Primitives get a single log; objects/arrays are traversed and each
// property value/name is tracked individually.
function addJsonParseResultTrackingValues(
    parsed,
    jsonString,
    jsonStringValueAndTrackingValueArray,
    { ctx, logData }
) {
    const isPrimitive =
        typeof parsed === "string" ||
        typeof parsed === "number" ||
        typeof parsed === "boolean";
    if (isPrimitive) {
        return [
            parsed,
            ctx.createOperationLog({
                operation: ctx.operationTypes.jsonParseResult,
                args: { json: jsonStringValueAndTrackingValueArray },
                result: parsed,
                runtimeArgs: {
                    isPrimitive: true,
                    charIndexAdjustment: typeof parsed === "string" ? 1 /* account for quote sign */ : 0
                },
                loc: logData.loc
            })
        ];
    }
    traverseObject(parsed, (keyPath, value, key, obj) => {
        // one log for the property's value...
        const trackingValue = ctx.createOperationLog({
            operation: ctx.operationTypes.jsonParseResult,
            args: { json: jsonStringValueAndTrackingValueArray },
            result: value,
            runtimeArgs: { keyPath: keyPath, isKey: false },
            loc: logData.loc
        });
        // ...and one for the property's name.
        const nameTrackingValue = ctx.createOperationLog({
            operation: ctx.operationTypes.jsonParseResult,
            args: { json: jsonStringValueAndTrackingValueArray },
            result: key,
            runtimeArgs: { keyPath: keyPath, isKey: true },
            loc: logData.loc
        });
        ctx.trackObjectPropertyAssignment(obj, key, trackingValue, nameTrackingValue);
    });
    return [parsed, ctx.getEmptyTrackingInfo("JSON.parse result", logData.loc)];
}

// Handlers that completely replace the original call (the interceptor does
// NOT invoke the original function itself -- each handler here does).
export const specialCasesWhereWeDontCallTheOriginalFunction: {
    [knownValueName: string]: (args: SpecialCaseArgs) => any;
} = {
    [getShortKnownValueName("Promise.all")]: ({ extraState, ctx, object, fnArgValues, args, extraTrackingValues, logData, fn, context }) => {
        const ret = fn.apply(context[0], fnArgValues);
        return [
            ret.then(r => {
                // propagate each input promise's resolution tracking value
                // onto the corresponding slot of the result array
                for (let i = 0; i < r.length; i++) {
                    ctx.trackObjectPropertyAssignment(
                        r,
                        i,
                        ctx.getPromiseResolutionTrackingValue(fnArgValues[0][i])
                    );
                }
                return r;
            }),
            null
        ];
    },
    [getShortKnownValueName("Promise.prototype.then")]: ({ extraState, ctx, object, fnArgValues, args, extraTrackingValues, logData, fn, context }) => {
        const promise = context[0];
        const originalThenHandler = fnArgValues[0];
        let thenRet;
function handler() { const resTv = ctx.getPromiseResolutionTrackingValue(promise); if (resTv) { ctx.argTrackingInfo = [resTv]; } else if (ctx.lastReturnStatementResult) { // returned from async function ctx.argTrackingInfo = [ctx.lastReturnStatementResult[1]]; } //@ts-ignore thenRet = originalThenHandler.apply(this, arguments); const returnedThen = ctx.lastReturnStatementResult && ctx.lastReturnStatementResult[0]; if (returnedThen instanceof Promise) { returnedThen.then(() => { ctx.trackPromiseResolutionValue( ret, ctx.getPromiseResolutionTrackingValue(returnedThen) ); }); } return thenRet; } const ret = fn.apply(context[0], [handler]); return [ret, null]; }, [getShortKnownValueName("String.prototype.replace")]: ({ ctx, object, fnArgValues, args, extraTrackingValues, logData }) => { let index = 0; var ret = ctx.knownValues .getValue("String.prototype.replace") .call(object, fnArgValues[0], function() { var argumentsArray = Array.prototype.slice.apply(arguments, []); var match = argumentsArray[0]; var submatches = argumentsArray.slice(1, argumentsArray.length - 2); var offset = argumentsArray[argumentsArray.length - 2]; var string = argumentsArray[argumentsArray.length - 1]; var newArgsArray = [match, ...submatches, offset, string]; let replacement; let replacementParameter = fnArgValues[1]; if (["string", "number"].includes(typeof replacementParameter)) { let replacementValue = replacementParameter.toString(); replacementValue = replacementValue.replace( new RegExp( // I'm using fromCharCode because the string escaping for helperCode // doesn't work properly... 
if it's fixed we can just uses backtick directly "\\$([0-9]{1,2}|[$" + String.fromCharCode(96) /* backtick */ + "&'])", "g" ), function(dollarMatch, dollarSubmatch) { var submatchIndex = parseFloat(dollarSubmatch); if (!isNaN(submatchIndex)) { var submatch = submatches[submatchIndex - 1]; // $n is one-based, array is zero-based if (submatch === undefined) { var maxSubmatchIndex = countGroupsInRegExp(getFnArg(args, 0)); var submatchIsDefinedInRegExp = submatchIndex < maxSubmatchIndex; // handle cases like where part of the number isn't for the submatch // e.g. here the match is $1 and 234 should be kept // "".replace(/(a-z)/, "$1234") let submatchIndexStr = submatchIndex + ""; let firstDigit = parseFloat(submatchIndexStr[0]); if (submatchIndex >= 10) { return ( submatches[firstDigit - 1] + submatchIndexStr.slice(1) ); } if (submatchIsDefinedInRegExp) { submatch = ""; } else { submatch = "$" + dollarSubmatch; } } return submatch; } else if (dollarSubmatch === "&") { return match; } else { throw "not handled!!"; } } ); replacement = replacementValue; } else { throw Error("unhandled replacement param type"); } extraTrackingValues[getShortExtraArgName("replacement" + index)] = [ null, ctx.createOperationLog({ operation: ctx.operationTypes.stringReplacement, args: { value: getFnArg(args, 1) }, astArgs: {}, result: replacement, loc: logData.loc, runtimeArgs: { start: offset, end: offset + match.length } }) ]; index++; return replacement; }); var retT = null; return [ret, retT]; }, [getShortKnownValueName("JSON.parse")]: ({ fn, ctx, fnArgValues, args, logData }) => { const jsonString = fnArgValues[0]; const parsed = fn.call(JSON, jsonString); var [ret, retT] = addJsonParseResultTrackingValues( parsed, jsonString, getFnArg(args, 0), { ctx, logData } ); return [ret, retT]; }, [getShortKnownValueName("require")]: ({ fn, ctx, fnArgValues, args, logData, context }) => { let ret = fn.apply(context, fnArgValues); let retT = ctx.getEmptyTrackingInfo("Required value", logData.loc); 
let path = fnArgValues[0]; if (path.endsWith(".json")) { console.log("required json", path); // Need to use fn (i.e. require) to resolve path relative to // the file that contains the raw code let absPath = fn.resolve(path); const jsonString = require("fs").readFileSync(absPath, "utf-8"); [ret, retT] = addJsonParseResultTrackingValues( ret, jsonString, [ jsonString, ctx.createOperationLog({ operation: "fileContent", args: {}, loc: logData.loc, runtimeArgs: { path: path, readType: "requireJson" }, result: jsonString }) ], { ctx, logData } ); } return [ret, retT]; }, [getShortKnownValueName("fs.readFileSync")]: ({ fn, ctx, fnArgValues, args, logData, context }) => { const jsonString = fnArgValues[0]; const parsed = fn.call(JSON, jsonString); var ret, retT; const path = eval('require("path")'); let filePath = fnArgValues[0]; let encodingArg = fnArgValues[1]; const cwd = eval("process.cwd()"); if (typeof encodingArg === "string" && filePath.endsWith(".js")) { if (!filePath.startsWith("/")) { filePath = path.resolve(cwd, filePath); } filePath = filePath.replace( global["fromJSNodeOutPath"], global["fromJSNodeSourcePath"] ); console.log("Will read", filePath); } ret = fn.apply(context, [filePath, ...fnArgValues.slice(1)]); retT = ctx.createOperationLog({ operation: ctx.operationTypes.readFileSyncResult, args: { // json: getFnArg(args, 0) }, result: ret, runtimeArgs: { filePath, absPath: filePath.startsWith("/") ? 
filePath : path.resolve(cwd, filePath) }, loc: logData.loc }); return [ret, retT]; }, [getShortKnownValueName("fs.writeFileSync")]: writeFile, [getShortKnownValueName("fs.writeFile")]: writeFile }; // add tracking values to returned objects export const specialValuesForPostprocessing: { [knownValueName: string]: (args: SpecialCaseArgs) => any; } = { [getShortKnownValueName("HTMLElement.prototype.getClientRects")]: ({ object, ctx, logData, fnArgValues, ret, context, fnArgTrackingValues }) => { for (var i = 0; i < ret.length; i++) { const rect = ret[i]; const properties = [ "width", "height", "x", "y", "top", "bottom", "left", "right" ]; properties.forEach(prop => { ctx.trackObjectPropertyAssignment( rect, prop, ctx.createOperationLog({ operation: ctx.operationTypes.genericOperation, runtimeArgs: { name: "clientRect." + prop }, args: {}, loc: logData.loc, result: rect[prop] }) ); }); } }, [getShortKnownValueName("Promise.resolve")]: ({ object, ctx, logData, fnArgValues, ret, context, fnArgTrackingValues }) => { let promise = ret; ctx.trackPromiseResolutionValue(promise, fnArgTrackingValues[0]); }, [getShortKnownValueName("String.prototype.match")]: ({ object, ctx, logData, fnArgValues, ret, context }) => { ctx = <ExecContext>ctx; if (!Array.isArray(ret)) { return; } let regExp = fnArgValues[0]; if (!(regExp instanceof RegExp)) { consoleLog("non regexp match param, is this possible?"); return; } // this will break if inspected code depends on state regExp = cloneRegExp(regExp); let matches: any[] = []; var match; while ((match = regExp.exec(object)) != null) { matches.push(match); if (!regExp.global) { // break because otherwise exec will start over at beginning of the string break; } } if (!regExp.global) { // non global regexp has group match results: // /(a)(b)/.exec("abc") => ["ab", "a", "b"], index 0 let newMatches: any[] = []; let index = matches[0].index; let fullMatch = matches[0][0]; let fullMatchRemaining = fullMatch; newMatches.push({ index: index }); let 
charsRemovedFromFullMatch = 0; for (var i = 1; i < matches[0].length; i++) { let matchString = matches[0][i]; if (matchString === undefined) { newMatches.push(undefined); continue; } // This can be inaccurate but better than nothing let indexOffset = fullMatchRemaining.indexOf(matchString); if (indexOffset === -1) { debugger; } newMatches.push({ index: index + indexOffset + charsRemovedFromFullMatch }); // cut down match against which we do indexOf(), since we know // a single location can't get double matched // (maybe it could with nested regexp groups but let's not worry about that for now) let charsToRemove = 0; if (!regExpContainsNestedGroup(regExp)) { // nested groups means there can be repetition charsToRemove = indexOffset + matchString.length; } charsRemovedFromFullMatch += charsToRemove; fullMatchRemaining = fullMatchRemaining.slice(charsToRemove); } matches = newMatches; } if (matches.length < ret.length) { debugger; } ret.forEach((item, i) => { if (matches[i] === undefined) { return; } ctx.trackObjectPropertyAssignment( ret, i.toString(), ctx.createOperationLog({ operation: ctx.operationTypes.matchResult, args: { input: context }, result: item, astArgs: {}, runtimeArgs: { matchIndex: matches[i].index }, loc: logData.loc }), ctx.createArrayIndexOperationLog(i, logData.loc) ); }); }, [getShortKnownValueName("RegExp.prototype.exec")]: ({ object, ctx, logData, fnArgValues, ret, context, fnArgTrackingValues }) => { ctx = <ExecContext>ctx; const regExp = object; if (!ret) { return; } if (regExp.global) { ctx.trackObjectPropertyAssignment( ret, 0, ctx.createOperationLog({ operation: ctx.operationTypes.execResult, args: { string: [fnArgValues[0], fnArgTrackingValues[0]] }, result: ret, astArgs: {}, runtimeArgs: { matchIndex: ret.index }, loc: logData.loc }) ); } else { for (var i = 1; i < ret.length + 1; i++) { ctx.trackObjectPropertyAssignment( ret, i, ctx.createOperationLog({ operation: ctx.operationTypes.execResult, args: { string: [fnArgValues[0], 
fnArgTrackingValues[0]] }, result: ret, astArgs: {}, runtimeArgs: { // will give false results sometimes, but good enough matchIndex: fnArgValues[0].indexOf(ret[i]) }, loc: logData.loc }) ); } } }, [getShortKnownValueName("String.prototype.split")]: ({ object, fnArgTrackingValues, ctx, logData, fnArgValues, ret, context }) => { ctx = <ExecContext>ctx; const str = object; const strT = context[1]; const array = ret; if (!Array.isArray(ret)) { // can happen if separator is something like {[Symbol.split]: fn} return; } // TODO: properly track indices where string came from // I thought I could do that by just capturing the string // and the separator, but the separator can also be a regexp ret.forEach((item, i) => { ctx.trackObjectPropertyAssignment( array, i.toString(), ctx.createOperationLog({ operation: ctx.operationTypes.splitResult, args: { string: [str, strT], separator: [fnArgValues[0], fnArgTrackingValues[0]] }, runtimeArgs: { splitResultIndex: i }, result: item, astArgs: {}, loc: logData.loc }), ctx.createArrayIndexOperationLog(i, logData.loc) ); }); }, [getShortKnownValueName("Array.prototype.push")]: ({ object, fnArgTrackingValues, ctx, logData }) => { const arrayLengthBeforePush = object.length - fnArgTrackingValues.length; fnArgTrackingValues.forEach((arg, i) => { const arrayIndex = arrayLengthBeforePush + i; ctx.trackObjectPropertyAssignment( object, arrayIndex, arg, ctx.createArrayIndexOperationLog(arrayIndex, logData.loc) ); }); return fnArgTrackingValues[fnArgTrackingValues.length - 1]; }, [getShortKnownValueName("Array.prototype.pop")]: ({ extraState }) => { return extraState.poppedValueTrackingValue; }, [getShortKnownValueName("Object.keys")]: ({ ctx, logData, fnArgValues, ret, retT }) => { ret.forEach((key, i) => { const trackingValue = ctx.getObjectPropertyNameTrackingValue( fnArgValues[0], key ); const nameTrackingValue = ctx.createArrayIndexOperationLog( i, logData.loc ); ctx.trackObjectPropertyAssignment( ret, i, trackingValue, nameTrackingValue 
); }); return retT; }, [getShortKnownValueName("Object.entries")]: ({ ctx, logData, fnArgValues, ret, retT }) => { const obj = fnArgValues[0]; ret.forEach((entryArr, i) => { const [key, value] = entryArr; const valueTv = ctx.getObjectPropertyTrackingValue(obj, key); const keyTv = ctx.getObjectPropertyNameTrackingValue(obj, key); ctx.trackObjectPropertyAssignment(entryArr, 1, valueTv); ctx.trackObjectPropertyAssignment(entryArr, 0, keyTv); }); return retT; }, [getShortKnownValueName("Object.assign")]: ({ ctx, logData, fnArgValues, fnArgTrackingValues }) => { ctx = <ExecContext>ctx; const target = fnArgValues[0]; const sources = fnArgValues.slice(1); sources.forEach((source, sourceIndex) => { if (!source || typeof source !== "object") { return; } Object.keys(source).forEach(key => { const valueTrackingValue = ctx.createOperationLog({ operation: ctx.operationTypes.objectAssignResult, args: { sourceObject: [source, fnArgTrackingValues[sourceIndex + 1]], value: [null, ctx.getObjectPropertyTrackingValue(source, key)], call: [null, logData.index] }, result: source[key], astArgs: {}, loc: logData.loc }); const nameTrackingValue = ctx.createOperationLog({ operation: ctx.operationTypes.objectAssignResult, args: { value: [null, ctx.getObjectPropertyNameTrackingValue(source, key)], call: [null, logData.index] }, result: key, astArgs: {}, loc: logData.loc }); ctx.trackObjectPropertyAssignment( target, key, valueTrackingValue, nameTrackingValue ); }); }); }, [getShortKnownValueName("Array.prototype.shift")]: ({ object, extraState, ctx }) => { // Note: O(n) is not very efficient... 
const array = object; for (var i = 0; i < array.length; i++) { ctx.trackObjectPropertyAssignment( array, i.toString(), ctx.getObjectPropertyTrackingValue(array, i + 1), ctx.getObjectPropertyNameTrackingValue(array, i + 1) ); } return extraState.shiftedTrackingValue; }, [getShortKnownValueName("Array.prototype.unshift")]: ({ object, extraState, ctx, fnArgTrackingValues, fnArgValues }) => { // Note: O(n) is not very efficient... const array = object; const unshiftedItems = fnArgValues; for (let i = unshiftedItems.length; i < array.length; i++) { let iBeforeUnshift = i - unshiftedItems.length; ctx.trackObjectPropertyAssignment( array, i.toString(), ctx.getObjectPropertyTrackingValue(array, iBeforeUnshift), ctx.getObjectPropertyNameTrackingValue(array, iBeforeUnshift) ); } for (let i = 0; i < unshiftedItems.length; i++) { ctx.trackObjectPropertyAssignment(array, i, fnArgTrackingValues[i], null); } return extraState.shiftedTrackingValue; }, [getShortKnownValueName("Array.prototype.slice")]: ({ object, ctx, logData, fnArgValues, ret }) => { ctx = <ExecContext>ctx; const resultArray = ret; const inputArray = object; let startIndex, endIndex; if (fnArgValues.length === 0) { startIndex = 0; endIndex = resultArray.length; } else { startIndex = fnArgValues[0]; if (startIndex < 0) { startIndex = inputArray.length + startIndex; } endIndex = fnArgValues[0]; if (endIndex < 0) { endIndex = inputArray.length + endIndex; } } function makeTrackingValue(result, valueTv) { return ctx.createOperationLog({ operation: ctx.operationTypes.arraySlice, args: { value: [ null, valueTv || ctx.getEmptyTrackingInfo( "Unknown Array.prototype.slice value", logData.loc ) ], call: [null, logData.index] }, result: result, astArgs: {}, loc: logData.loc }); } resultArray.forEach((item, i) => { // todo: create slice call action const originalIndex = i + startIndex; ctx.trackObjectPropertyAssignment( resultArray, i.toString(), makeTrackingValue( item, ctx.getObjectPropertyTrackingValue( inputArray, 
originalIndex.toString() ) ), makeTrackingValue( i, ctx.getObjectPropertyNameTrackingValue( inputArray, originalIndex.toString() ) ) ); }); }, [getShortKnownValueName("Array.prototype.splice")]: ({ object, ctx, logData, fnArgValues, ret }) => { ctx = <ExecContext>ctx; const resultArray = ret; const inputArray = object; let startIndex, deleteCount; if (fnArgValues.length >= 2) { startIndex = fnArgValues[0]; deleteCount = fnArgValues[1]; } resultArray.forEach((value, i) => { const originalIndex = i + startIndex; const tv = ctx.getObjectPropertyTrackingValue( inputArray, originalIndex.toString() ); ctx.trackObjectPropertyAssignment( resultArray, i.toString(), ctx.createOperationLog({ operation: ctx.operationTypes.arraySplice, args: { value: [null, tv], call: [null, logData.index] }, result: value, astArgs: {}, loc: logData.loc }) ); }); // if (fnArgValues.length === 0) { // startIndex = 0; // endIndex = resultArray.length; // } else { // startIndex = fnArgValues[0]; // if (startIndex < 0) { // startIndex = inputArray.length + startIndex; // } // endIndex = fnArgValues[0]; // if (endIndex < 0) { // endIndex = inputArray.length + endIndex; // } // } }, [getShortKnownValueName("Array.prototype.join")]: ({ object, fnArgTrackingValues, ctx, logData, retT, extraTrackingValues }) => { for (var i = 0; i < object.length; i++) { let arrayValueTrackingValue = ctx.getObjectPropertyTrackingValue( object, i ); if (!arrayValueTrackingValue) { arrayValueTrackingValue = ctx.getEmptyTrackingInfo( "Unknown Array Join Value", logData.loc ); } extraTrackingValues["arrayValue" + i] = [ null, // not needed, avoid object[i] lookup which may have side effects arrayValueTrackingValue ]; } if (fnArgTrackingValues[0]) { extraTrackingValues["separator"] = [null, fnArgTrackingValues[0]]; } else { extraTrackingValues["separator"] = [ null, ctx.createOperationLog({ operation: ctx.operationTypes.defaultArrayJoinSeparator, args: {}, astArgs: {}, result: ",", loc: logData.loc }) ]; } return retT; }, 
[getShortKnownValueName("Array.prototype.concat")]: ({ object, fnArgTrackingValues, ctx, logData, fnArgValues, ret }) => { const concatValues = [object, ...fnArgValues]; let i = 0; concatValues.forEach((concatValue, valueIndex) => { function trackProp(i, value, trackingValue) { ctx.trackObjectPropertyAssignment( ret, i.toString(), ctx.createOperationLog({ operation: ctx.operationTypes.arrayConcat, args: { value: [null, trackingValue] }, result: value, loc: logData.loc }), ctx.createArrayIndexOperationLog(i, logData.loc) ); } if (Array.isArray(concatValue)) { concatValue.forEach((arrayValue, indexInOriginalArray) => { trackProp( i, arrayValue, ctx.getObjectPropertyTrackingValue( concatValue, indexInOriginalArray.toString() ) ); i++; }); } else { trackProp(i, concatValue, fnArgTrackingValues[valueIndex - 1]); i++; } }); }, [getShortKnownValueName("Array.prototype.map")]: ({ extraState, ret, ctx, logData }) => { const { mapResultTrackingValues } = extraState; mapResultTrackingValues.forEach((tv, i) => { ctx.trackObjectPropertyAssignment( ret, i.toString(), mapResultTrackingValues[i], ctx.createArrayIndexOperationLog(i, logData.loc) ); }); }, [getShortKnownValueName("Array.prototype.reduce")]: ({ extraState }) => { return extraState.reduceResultTrackingValue; }, [getShortKnownValueName("Array.prototype.filter")]: ({ extraState, ctx, ret, object, logData }) => { let resultArrayIndex = 0; object.forEach(function(originalArrayItem, originalArrayIndex) { if (extraState.filterResults[originalArrayIndex]) { ctx.trackObjectPropertyAssignment( ret, resultArrayIndex, ctx.getObjectPropertyTrackingValue(object, originalArrayIndex), ctx.createArrayIndexOperationLog(resultArrayIndex, logData.loc) ); resultArrayIndex++; } }); }, [getShortKnownValueName("document.createElement")]: ({ fnArgTrackingValues, ret }) => { addOriginInfoToCreatedElement( ret, fnArgTrackingValues[0], "document.createElement" ); }, [getShortKnownValueName("document.createTextNode")]: ({ fnArgTrackingValues, 
ret }) => { addElOrigin(ret, "textValue", { trackingValue: fnArgTrackingValues[0] }); }, [getShortKnownValueName("document.createComment")]: ({ fnArgTrackingValues, ret }) => { addElOrigin(ret, "textValue", { trackingValue: fnArgTrackingValues[0] }); }, [getShortKnownValueName("HTMLElement.prototype.cloneNode")]: ({ ret, object, fnArgTrackingValues, fnArgValues }) => { const isDeep = !!fnArgValues[0]; processClonedNode(ret, object, { isDeep }); }, [getShortKnownValueName("document.importNode")]: ({ ret, object, fnArgTrackingValues, fnArgValues }) => { const importedNode = fnArgValues[0]; const isDeep = !!fnArgValues[1]; processClonedNode(ret, importedNode, { isDeep }); }, [getShortKnownValueName("HTMLElement.prototype.setAttribute")]: ({ object, fnArgTrackingValues, fnArgValues }) => { const [attrNameArg, attrValueArg] = fnArgTrackingValues; let attrName = fnArgValues[0]; addElAttributeNameOrigin(object, attrName, { trackingValue: attrNameArg }); addElAttributeValueOrigin(object, attrName, { trackingValue: attrValueArg }); }, [getShortKnownValueName("HTMLElement.prototype.insertAdjacentHTML")]: ({ object, fnArgTrackingValues, fnArgValues }) => { const position = fnArgValues[0].toLowerCase(); if (position !== "afterbegin") { consoleLog("Not tracking insertAdjacentHTML at", position); return; } var el = object; const html = fnArgValues[1]; const helperDiv = document.createElement("div"); helperDiv.innerHTML = html; const nodeAddedCount = helperDiv.childNodes.length; var childNodesBefore = Array.from(el.childNodes).slice(nodeAddedCount); mapInnerHTMLAssignment( el, [html, fnArgTrackingValues[1]], "insertAdjacentHTML", undefined, undefined, childNodesBefore ); }, [getShortKnownValueName("DOMParser.prototype.parseFromString")]: ({ fnArgValues, fnArgTrackingValues, ret }) => { const html = fnArgValues[0]; const htmlArg = [html, fnArgTrackingValues[0]]; const doc = ret; mapPageHtml(doc, html, fnArgTrackingValues[0], "parseFromString"); }, 
[getShortKnownValueName("JSON.stringify")]: ({ fnArgTrackingValues, ctx, fnArgValues, ret, runtimeArgs }: SpecialCaseArgs) => { const stringifiedObject = fnArgValues[0]; const jsonIndexToTrackingValue = {}; runtimeArgs.jsonIndexToTrackingValue = jsonIndexToTrackingValue; const jsonString = ret; if (!jsonString) { // e.g. return value can be undefined when pass a class into JSON.stringify return; } const objectAfterParse = JSON.parse(jsonString); if (["boolean", "string", "number"].includes(typeof stringifiedObject)) { jsonIndexToTrackingValue[0] = fnArgTrackingValues[0]; } else { const ast = jsonToAst(jsonString); traverseObject( stringifiedObject, (keyPath, value, key, traversedObject) => { const keyExistsInJSON = get(objectAfterParse, keyPath) !== undefined; if (!keyExistsInJSON) { // this property won't be included in the JSON string return; } if (!Array.isArray(traversedObject)) { const jsonKeyIndex = getJSONPathOffset( jsonString, ast, keyPath, true ); jsonIndexToTrackingValue[ jsonKeyIndex ] = ctx.getObjectPropertyNameTrackingValue(traversedObject, key); } let jsonValueIndex = getJSONPathOffset( jsonString, ast, keyPath, false ); if (jsonString[jsonValueIndex] === '"') { jsonValueIndex++; } jsonIndexToTrackingValue[ jsonValueIndex ] = ctx.getObjectPropertyTrackingValue(traversedObject, key); } ); } } }; export function traverseKnownFunction({ operationLog, knownFunction, charIndex }) { switch (knownFunction) { case "String.prototype.toString": return { operationLog: operationLog.args.context, charIndex }; case "String.prototype.slice": return { operationLog: operationLog.args.context, charIndex: charIndex + operationLog.args.arg0.result.primitive }; case "String.prototype.substr": const { context, arg0: start, arg1: length } = operationLog.args; let startValue = parseFloat(start.result.primitive); if (startValue < 0) { startValue = context.result.length + startValue; } return { operationLog: context, charIndex: charIndex + startValue }; case 
"String.prototype.substring": const parentStr = operationLog.args.context; let startIndex = parseFloat(operationLog.args.arg0.result.primitive); let endIndex; if (operationLog.args.arg1) { endIndex = parseFloat(operationLog.args.arg1.result.primitive); } else { endIndex = parentStr.result.primitive.length; } if (startIndex > endIndex) { let tmp = endIndex; endIndex = startIndex; startIndex = tmp; } return { operationLog: parentStr, charIndex: charIndex + startIndex }; case "encodeURIComponent": var unencodedString: string = operationLog.args.arg0.result.primitive.toString(); var encodedString: string = operationLog.result.primitive!.toString(); const map = new ValueMapV2(unencodedString); for (var i = 0; i < unencodedString.length; i++) { var unencodedChar = unencodedString[i]; var encodedChar = encodeURIComponent(unencodedChar); map.push(i, i + 1, operationLog.args.arg0, encodedChar, true); } return map.getAtResultIndex(charIndex, true); case "decodeURIComponent": var encodedString: string = operationLog.args.arg0.result.primitive.toString(); var unencodedString: string = operationLog.result.primitive!.toString(); const m = new ValueMapV2(encodedString); let extraCharsTotal = 0; for (var i = 0; i < unencodedString.length; i++) { const unencodedChar = unencodedString[i]; const encodedChar = encodeURIComponent(unencodedChar); const extraCharsHere = encodedChar.length - 1; m.push( i + extraCharsTotal, i + extraCharsTotal + extraCharsHere, operationLog.args.arg0, unencodedChar, true ); extraCharsTotal += extraCharsHere; } return m.getAtResultIndex(charIndex, true); case "String.prototype.trim": let str = operationLog.args.context.result.primitive; let whitespaceAtStart = str.match(/^\s*/)[0].length; return { operationLog: operationLog.args.context, charIndex: charIndex + whitespaceAtStart }; case "Array.prototype.pop": return { operationLog: operationLog.extraArgs.returnValue, charIndex }; case "Array.prototype.shift": return { operationLog: 
operationLog.extraArgs.returnValue, charIndex }; case "Array.prototype.reduce": return { operationLog: operationLog.extraArgs.returnValue, charIndex: charIndex }; case "Array.prototype.join": const parts: any[] = []; let partIndex = 0; let arrayValue; while ( ((arrayValue = operationLog.extraArgs["arrayValue" + partIndex]), arrayValue !== undefined) ) { let joinParameter = arrayValue.result.primitive + ""; if ([null, undefined].includes(arrayValue.result.primitive)) { joinParameter = ""; } parts.push([joinParameter, arrayValue]); parts.push([ operationLog.extraArgs.separator.result.primitive + "", operationLog.extraArgs.separator ]); partIndex++; } parts.pop(); // take off last separator const mapping = new HtmlToOperationLogMapping(parts); const match = mapping.getOriginAtCharacterIndex(charIndex); return { charIndex: match.charIndex, operationLog: match.origin }; case "String.prototype.replace": // I'm not 100% confident about this code, but it works for now let matchingReplacement = null; let totalCharCountDeltaBeforeMatch = 0; const replacements: any[] = []; eachReplacement(operationLog.extraArgs, replacement => { replacements.push(replacement); }); const subjectOperationLog = operationLog.args.context; if (replacements.length === 0) { return { operationLog: subjectOperationLog, charIndex: charIndex }; } const valueMap = new ValueMapV2(subjectOperationLog.result.primitive); let currentIndexInSubjectString = 0; replacements.forEach(replacement => { const { start, end } = replacement.runtimeArgs; let from = currentIndexInSubjectString; let to = start; valueMap.push( from, to, subjectOperationLog, subjectOperationLog.result.primitive.slice(from, to), true ); valueMap.push(start, end, replacement, replacement.result.primitive); currentIndexInSubjectString = end; }); valueMap.push( currentIndexInSubjectString, subjectOperationLog.result.primitive.length, subjectOperationLog, subjectOperationLog.result.primitive.slice(currentIndexInSubjectString), true ); // 
valueMap.__debugPrint() return valueMap.getAtResultIndex(charIndex); function eachReplacement(extraArgs, callback) { var index = 0; while (extraArgs["replacement" + index]) { callback(extraArgs["replacement" + index]); index++; } } case "JSON.stringify": const { jsonIndexToTrackingValue } = operationLog.runtimeArgs; // not efficient, but it works let closestLoc: any = null; Object.entries(jsonIndexToTrackingValue).forEach(([index, tv]: any) => { index = parseFloat(index); if ( charIndex - index >= 0 && (!closestLoc || closestLoc.index - index < charIndex - index) ) { closestLoc = { index, tv }; } }); if (!closestLoc) { return null; } return { operationLog: closestLoc.tv, charIndex: charIndex - closestLoc.index }; case "Number.prototype.toString": case "Number.prototype.toFixed": return { operationLog: operationLog.args.context, charIndex: charIndex }; case "Number.prototype.toPrecision": return { operationLog: operationLog.args.context, charIndex: charIndex }; case "Number.prototype.constructor": return { operationLog: operationLog.args.arg0, charIndex }; case "String.prototype.constructor": return { operationLog: operationLog.args.arg0, charIndex }; case "String.prototype.charAt": return { operationLog: operationLog.args.context, charIndex: charIndex + operationLog.args.arg0.result.primitive }; case "Math.round": return { operationLog: operationLog.args.arg0, charIndex }; case "Math.floor": return { operationLog: operationLog.args.arg0, charIndex }; case "Math.min": let smallestValue = Number.POSITIVE_INFINITY; let smallestOperationLog = null; allArgs(operationLog, arg => { if (arg.result.primitive < smallestValue) { smallestValue = arg.result.primitive; smallestOperationLog = arg; } }); return { operationLog: smallestOperationLog, charIndex }; case "Math.max": let largestValue = Number.NEGATIVE_INFINITY; let largestOperationLog = null; allArgs(operationLog, arg => { if (arg.result.primitive > largestValue) { largestValue = arg.result.primitive; 
        largestOperationLog = arg;
      }
    });
    return { operationLog: largestOperationLog, charIndex };
  // Methods that preserve the receiver's characters 1:1 (case/length unchanged
  // for tracking purposes): attribute the char back to the receiver (`context`).
  case "Date.prototype.getTime":
  case "Date.prototype.valueOf":
  case "String.prototype.toLowerCase":
  case "String.prototype.toUpperCase":
    return { operationLog: operationLog.args.context, charIndex };
  // Single-argument conversions: attribute the char to the first argument.
  case "Date.prototype.constructor":
  case "Math.abs":
  case "parseFloat":
    return { operationLog: operationLog.args.arg0, charIndex };
  default:
    // Unknown call: fall back to the tracked return value, keeping the index.
    return { operationLog: operationLog.extraArgs.returnValue, charIndex: charIndex };
  }
}

/**
 * Bag of state and mutators handed to a known-function processor while a tracked
 * `callExpression` is being evaluated. Processors may swap out the function, its
 * arguments, `this` context, or stash data on `extraState` for the post-call phase.
 */
export interface FnProcessorArgs {
  // Scratch object shared between the pre-call processor and later result handling.
  extraState: any;
  setArgValuesForApply: (vals: any) => void;
  fnArgValues: any[];
  getFnArgForApply: (argIndex: any) => any;
  setFnArgForApply: (argIndex: any, argValue: any) => void;
  setFnArgTrackingValue: (index: any, val: any) => void;
  ctx: ExecContext;
  setContext: (c: any) => void;
  context: any;
  fnArgTrackingValues: any[];
  logData: any;
  // The receiver object of the call (e.g. the array for Array.prototype.* calls).
  object: any;
  setFunction: any;
  fnArgValuesAtInvocation: any[];
  fnArgTrackingValuesAtInvocation: any[];
}

/**
 * Pre-call hooks for known builtins, keyed by short known-value name. Each hook
 * rewrites the call (wrapping callbacks so per-item operations run through
 * `ctx.global[doOperation]`) or records tracking values needed to trace the result.
 * NOTE(review): these appear to run before the underlying builtin is applied,
 * since several read state (e.g. the array's last element) that the call would
 * destroy — confirm against the call-expression operation that invokes them.
 */
export const knownFnProcessors = {
  [getShortKnownValueName("EventEmitter.prototype.emit")]: ({
    extraState,
    setArgValuesForApply,
    fnArgValues,
    getFnArgForApply,
    setFnArgForApply,
    ctx,
    setContext,
    fnArgTrackingValues,
    logData,
    fnArgTrackingValuesAtInvocation,
    setFnArgTrackingValue
  }: FnProcessorArgs) => {
    // If you call `.emit("eventName", "data")` then the `.on("eventName")` callback
    // is called with "data" as it's first argument
    setFnArgTrackingValue(0, fnArgTrackingValuesAtInvocation[1]);
  },
  [getShortKnownValueName("Array.prototype.map")]: ({
    extraState,
    setArgValuesForApply,
    fnArgValues,
    getFnArgForApply,
    setFnArgForApply,
    ctx,
    setContext,
    fnArgTrackingValues,
    logData
  }: FnProcessorArgs) => {
    // Collect the tracking result of each mapped item so the produced array's
    // elements can be traced back through the mapping function.
    extraState.mapResultTrackingValues = [];
    setArgValuesForApply(fnArgValues.slice());
    const originalMappingFunction = getFnArgForApply(0);
    setFnArgForApply(0, function(this: any, item, index, array) {
      const itemTrackingInfo = ctx.getObjectPropertyTrackingValue(
        array,
        index.toString()
      );
      // map() takes an optional thisArg as its second argument.
      if (fnArgValues.length > 1) {
        setContext([fnArgValues[1], fnArgTrackingValues[1]]);
      } else {
        setContext([this, null]);
      }
      const ret = ctx.global[doOperation](
        "callExpression",
        [
          [originalMappingFunction, null],
          [this, null],
          [
            [item, itemTrackingInfo, null],
            [index, null],
            [array, null]
          ]
        ],
        {},
        logData.loc
      );
      extraState.mapResultTrackingValues.push(ctx.lastOpTrackingResult);
      return ret;
    });
  },
  [getShortKnownValueName("Array.prototype.reduce")]: ({
    extraState,
    getFnArgForApply,
    setFnArgForApply,
    ctx,
    fnArgTrackingValues,
    logData,
    object
  }: FnProcessorArgs) => {
    // Seed the accumulator's tracking value from the explicit initial value if given.
    if (fnArgTrackingValues.length > 1) {
      extraState.reduceResultTrackingValue = fnArgTrackingValues[1];
    } else {
      // "If no initial value is supplied, the first element in the array will be used."
      // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/reduce
      extraState.reduceResultTrackingValue = ctx.getObjectPropertyTrackingValue(
        object,
        0
      );
    }
    const originalReduceFunction = getFnArgForApply(0);
    setFnArgForApply(0, function(
      this: any,
      previousRet,
      param,
      currentIndex,
      array
    ) {
      let paramTrackingValue = ctx.getObjectPropertyTrackingValue(
        array,
        currentIndex.toString()
      );
      const ret = ctx.global[doOperation](
        "callExpression",
        [
          [originalReduceFunction, null],
          [this, null],
          [
            // Thread the running accumulator's tracking value into each step.
            [previousRet, extraState.reduceResultTrackingValue],
            [param, paramTrackingValue],
            [currentIndex, null],
            [array, null]
          ]
        ],
        {},
        logData.loc
      );
      extraState.reduceResultTrackingValue = ctx.lastOpTrackingResult;
      return ret;
    });
  },
  [getShortKnownValueName("Array.prototype.filter")]: ({
    extraState,
    getFnArgForApply,
    setFnArgForApply,
    ctx,
    logData
  }: FnProcessorArgs) => {
    // Record each predicate result so kept elements can be identified later.
    extraState.filterResults = [];
    const originalFilterFunction = getFnArgForApply(0);
    setFnArgForApply(0, function(this: any, element, index, array) {
      const ret = ctx.global[doOperation](
        "callExpression",
        [
          [originalFilterFunction, null],
          [this, null],
          [
            // NOTE(review): other processors pass index.toString() here; this one
            // passes the raw number — verify getObjectPropertyTrackingValue coerces.
            [element, ctx.getObjectPropertyTrackingValue(array, index)],
            [index, null],
            [array, null]
          ]
        ],
        {},
        logData.loc
      );
      extraState.filterResults.push(ret);
      return ret;
    });
  },
  [getShortKnownValueName("Array.prototype.pop")]: ({
    extraState,
    ctx,
    object
  }: FnProcessorArgs) => {
    // Capture the tracking value of the element that pop() will remove.
    extraState.poppedValueTrackingValue = null;
    if (object && object.length > 0) {
      extraState.poppedValueTrackingValue = ctx.getObjectPropertyTrackingValue(
        object,
        object.length - 1
      );
    }
  },
  [getShortKnownValueName("Array.prototype.shift")]: ({
    extraState,
    ctx,
    object
  }: FnProcessorArgs) => {
    // Capture the tracking value of the element that shift() will remove.
    extraState.shiftedTrackingValue = null;
    if (object && object.length > 0) {
      extraState.shiftedTrackingValue = ctx.getObjectPropertyTrackingValue(
        object,
        0
      );
    }
  },
  [getShortKnownValueName("Response.prototype.json")]: ({
    setFunction,
    ctx,
    logData
  }: FnProcessorArgs) => {
    // Replace response.json() with text() + tracked JSON.parse so the parsed
    // object's strings trace back to the fetch response body.
    setFunction(function(this: Response) {
      const response: Response = this;
      let then = ctx.knownValues.getValue("Promise.prototype.then");
      const p = ctx.knownValues
        .getValue("Response.prototype.text")
        .apply(response);
      return then.call(p, function(text) {
        // NOTE(review): this exact-body short-circuit skips tracking entirely —
        // presumably a special case for some internal call; confirm why.
        if (text === '{"ok":true}') {
          return Promise.resolve(JSON.parse(text));
        }
        console.log(response.url, ctx.global["__fetches"][response.url]);
        const t = ctx.createOperationLog({
          operation: ctx.operationTypes.fetchResponse,
          args: {
            value: [text],
            // Link back to the originating fetch() via the URL registry below.
            fetchCall: ["(FetchCall)", ctx.global["__fetches"][response.url]]
          },
          astArgs: {},
          result: text,
          runtimeArgs: {
            url: response.url
          },
          loc: logData.loc
        });
        const obj = ctx.global[doOperation](
          "callExpression",
          [[JSON.parse], [JSON], [[text, t]]],
          {}
        );
        return Promise.resolve(obj);
      });
    });
  },
  [getShortKnownValueName("fetch")]: ({
    ctx,
    logData,
    fnArgValues,
    fnArgValuesAtInvocation
  }: FnProcessorArgs) => {
    // not super accurate but until there's a proper solution
    // let's pretend we can match the fetch call
    // to the response value via the url
    ctx.global["__fetches"] = ctx.global["__fetches"] || {};
    let url =
      typeof fnArgValuesAtInvocation[0] === "string"
        ? fnArgValuesAtInvocation[0]
        : fnArgValuesAtInvocation[0].url;
    console.log({ url, full: getFullUrl(url) });
    url = getFullUrl(url);
    ctx.global["__fetches"][url] = logData.index;
  },
  [getShortKnownValueName("XMLHttpRequest.prototype.open")]: ({
    ctx,
    logData,
    fnArgValues
  }: FnProcessorArgs) => {
    // Same URL-keyed matching trick as fetch(), for XHR.
    ctx.global["__xmlHttpRequests"] = ctx.global["__xmlHttpRequests"] || {};
    let url = fnArgValues[1];
    url = getFullUrl(url);
    ctx.global["__xmlHttpRequests"][url] = logData.index;
  }
};

// Iterate the numbered `argN` entries of an operation log's args object,
// invoking fn with each tracked argument in order until the first gap.
function allArgs(operationLog, fn) {
  let i = 0;
  while ("arg" + i in operationLog.args) {
    fn(operationLog.args["arg" + i]);
    i++;
  }
}

export const newExpressionPostProcessors = {
  // Would maybe be better to store the constructor args in some way and then
  // link to that when the property is accessed?? Then during traversal
  // you could see that URL.proto.href is accessed, reference the constructor data
  // and then link to it?
  // But this works for now.
  [getShortKnownValueName("URL")]: ({
    ctx,
    ret,
    fnArgTrackingValues,
    logData
  }) => {
    // Attribute the constructed URL's string properties to the URL-string argument.
    let urlTv = fnArgTrackingValues[0];
    ctx.trackObjectPropertyAssignment(ret, "href", urlTv);
    ctx.trackObjectPropertyAssignment(
      ret,
      "pathname",
      ctx.createOperationLog({
        operation: ctx.operationTypes.genericOperation,
        runtimeArgs: {
          name: "URL.pathname",
          next: urlTv,
          // Offset into the original href where the pathname substring begins.
          adjustCharIndex: ret.href.indexOf(ret.pathname)
        },
        args: {},
        loc: logData.loc,
        result: ret.pathname
      })
    );
    ctx.trackObjectPropertyAssignment(ret, "origin", urlTv);
  }
};
the_stack
import Adapt, {
  AdaptMountedElement,
  AdaptMountedPrimitiveElement,
  ChangeType,
  Group,
  PluginOptions,
  PrimitiveComponent,
  Waiting,
} from "@adpt/core";
import should from "should";

import { createMockLogger, k8sutils, MockLogger } from "@adpt/testutils";
import { sleep } from "@adpt/utils";
import { ActionPlugin, createActionPlugin } from "../../src/action";
import {
  ClusterInfo,
  DaemonSet,
  daemonSetResourceInfo,
  K8sContainer,
  Kubeconfig,
  Pod,
  Resource,
  resourceElementToName,
} from "../../src/k8s";
import { labelKey } from "../../src/k8s/manifest_support";
import { mkInstance } from "../run_minikube";
import { act, checkNoActions, doBuild, randomName } from "../testlib";
import { forceK8sObserverSchemaLoad, K8sTestStatusType } from "./testlib";

const { deleteAll, getAll } = k8sutils;

// Empty ClusterInfo is sufficient for pure build tests (no cluster contact).
// tslint:disable-next-line: no-object-literal-type-assertion
const dummyConfig = {} as ClusterInfo;

// Non-Pod primitive used to verify DaemonSet rejects invalid children.
class DummyComponent extends PrimitiveComponent {}

// Build-only tests: exercise DaemonSet's build-time validation and the shape of
// the Resource DOM it produces. No Kubernetes cluster is required.
describe("k8s DaemonSet Component Tests", () => {
  it("Should Instantiate and build DaemonSet", async () => {
    const ds =
      <DaemonSet key="test" config={dummyConfig}>
        <Pod isTemplate>
          <K8sContainer name="onlyContainer" image="node:latest" />
        </Pod>
      </DaemonSet>;
    should(ds).not.Undefined();

    const result = await Adapt.build(ds, null);
    const dom = result.contents;
    if (dom == null) {
      throw should(dom).not.Null();
    }
    should(dom.componentType).equal(Resource);
    // DaemonSet must synthesize a selector and a labeled pod template.
    should(dom.props.spec.selector).not.Undefined();
    should(dom.props.spec.template).not.Undefined();
    should(dom.props.spec.template.metadata).not.Undefined();
    should(dom.props.spec.template.metadata.labels).not.Undefined();
  });

  it("Should respect user-set selector", async () => {
    const selector = { matchLabels: { foo: "bar" }};
    const ds =
      <DaemonSet key="test" config={dummyConfig} selector={selector}>
        <Pod isTemplate>
          <K8sContainer name="onlyContainer" image="node:latest" />
        </Pod>
      </DaemonSet>;
    should(ds).not.Undefined();

    const result = await Adapt.build(ds, null);
    const dom = result.contents;
    if (dom == null) {
      throw should(dom).not.Null();
    }
    should(dom.componentType).equal(Resource);
    // An explicitly provided selector must pass through unchanged.
    should(dom.props.spec.selector).eql(selector);
  });

  it("Should enforce single child Pod", async () => {
    const ds =
      <DaemonSet
        key="test"
        config={dummyConfig}
        selector={{ matchLabels: { test: "testDaemonSet" }}}
      >
        <Pod isTemplate metadata={{ labels: { test: "testDaemonSet" } }}>
          <K8sContainer name="onlyContainer" image="node:latest" />
        </Pod>
        <Pod isTemplate>
          <K8sContainer name="onlyContainer" image="node:latest" />
        </Pod>
      </DaemonSet>;
    const result = await Adapt.build(ds, null);
    should(result.messages).have.length(1);
    should(result.messages[0].content).match(/single Pod as a child/);
    should(result.messages[0].content).match(/found 2/);
  });

  it("Should enforce only Pod children", async () => {
    const ds =
      <DaemonSet
        key="test"
        config={dummyConfig}
        selector={{ matchLabels: { test: "testDaemonSet" }}}
      >
        <DummyComponent />
      </DaemonSet>;
    const result = await Adapt.build(ds, null);
    should(result.messages).have.length(1);
    should(result.messages[0].content).match(/must be a Pod/);
  });

  it("Should require a Pod", async () => {
    const ds = <DaemonSet config={dummyConfig} selector={{}} />;
    const result = await Adapt.build(ds, null);
    should(result.messages).have.length(1);
    should(result.messages[0].content).match(/single Pod as a child/);
    should(result.messages[0].content).match(/found 0/);
  });
});

/**
 * Polls the mounted element's status once per second until
 * daemonSetResourceInfo.deployedWhen reports it fully deployed, asserting the
 * status metadata on every poll. Returns the final status. Relies on the
 * enclosing mocha timeout to bound the loop.
 */
async function waitForDeployed(mountedOrig: AdaptMountedElement, dom: AdaptMountedElement, deployID: string) {
  let deployed: boolean | Waiting = false;
  do {
    const status = await mountedOrig.status<K8sTestStatusType>();
    should(status.kind).equal("DaemonSet");
    should(status.metadata.name).equal(resourceElementToName(dom, deployID));
    should(status.metadata.annotations).containEql({ [labelKey("name")]: dom.id });
    deployed = daemonSetResourceInfo.deployedWhen(status);
    if (deployed !== true) await sleep(1000);
    else return status;
  } while (1);
}

// Live-cluster tests: drive the ActionPlugin (observe/analyze/act) against the
// minikube instance from mkInstance to create, modify, and delete DaemonSets.
describe("k8s DaemonSet Operation Tests", function () {
  this.timeout(60 * 1000);

  let plugin: ActionPlugin;
  let logger: MockLogger;
  let options: PluginOptions;
  let kubeClusterInfo: ClusterInfo;
  let client: k8sutils.KubeClient;
  let deployID: string | undefined;

  before(async function () {
    this.timeout(mkInstance.setupTimeoutMs);
    this.slow(20 * 1000);
    kubeClusterInfo = { kubeconfig: await mkInstance.kubeconfig as Kubeconfig };
    client = await mkInstance.client;
    forceK8sObserverSchemaLoad();
  });

  beforeEach(async () => {
    plugin = createActionPlugin();
    logger = createMockLogger();
    // Fresh deployID per test so cluster objects don't collide across tests.
    deployID = randomName("cloud-daemonset-op");
    options = {
      dataDir: "/fake/datadir",
      deployID,
      logger,
      log: logger.info,
    };
  });

  afterEach(async function () {
    this.timeout(40 * 1000);
    const apiPrefix = "apis/apps/v1";
    if (client) {
      await deleteAll("daemonsets", { client, deployID, apiPrefix });
    }
  });

  // Builds and deploys a one-container DaemonSet named `name`, asserts the
  // create action plan and the resulting cluster state, and returns its DOM
  // for use as the "old" DOM in modify/delete tests.
  async function createDS(name: string): Promise<AdaptMountedPrimitiveElement | null> {
    const ds =
      <DaemonSet key={name} config={kubeClusterInfo}>
        <Pod isTemplate terminationGracePeriodSeconds={0}>
          <K8sContainer name="container" image="alpine:3.8" command={["sleep", "3s"]} />
        </Pod>
      </DaemonSet>;
    const { mountedOrig, dom } = await doBuild(ds, { deployID });

    await plugin.start(options);
    const obs = await plugin.observe(null, dom);
    const actions = plugin.analyze(null, dom, obs);
    should(actions).length(1);
    should(actions[0].type).equal(ChangeType.create);
    should(actions[0].detail).startWith("Creating DaemonSet");
    should(actions[0].changes).have.length(1);
    should(actions[0].changes[0].type).equal(ChangeType.create);
    should(actions[0].changes[0].detail).startWith("Creating DaemonSet");
    should(actions[0].changes[0].element.componentName).equal("Resource");
    should(actions[0].changes[0].element.props.key).equal(name);

    if (!deployID) throw new Error(`Missing deployID?`);
    await act(actions);

    const daemonsets = await getAll("daemonsets", { client, deployID, apiPrefix: "apis/apps/v1"});
    should(daemonsets).length(1);
    should(daemonsets[0].metadata.name).equal(resourceElementToName(dom, options.deployID));

    if (mountedOrig === null) throw should(mountedOrig).not.Null();
    const lastStatus = await waitForDeployed(mountedOrig, dom, deployID);
    const pods = await getAll("pods", { client, deployID });
    if (pods.length !== 1) {
      // tslint:disable-next-line: no-console
      console.error("Daemonset created, but no pods: ", JSON.stringify(lastStatus));
      should(pods).length(1);
    }
    should(pods[0].metadata.name)
      .startWith(resourceElementToName(dom, deployID));
    await plugin.finish();
    return dom;
  }

  it("Should create daemonset", async () => {
    await createDS("test");
  });

  it("Should modify daemonset", async () => {
    const oldDom = await createDS("test");

    //5s sleep diff to cause modify vs. 3s sleep in createPod
    const command = ["sleep", "5s"];
    const pod =
      <DaemonSet key="test" config={kubeClusterInfo}>
        <Pod isTemplate terminationGracePeriodSeconds={0}>
          <K8sContainer name="container" image="alpine:3.8" command={command} />
        </Pod>
      </DaemonSet>;
    const { mountedOrig, dom } = await doBuild(pod, { deployID });

    await plugin.start(options);
    const obs = await plugin.observe(oldDom, dom);
    const actions = plugin.analyze(oldDom, dom, obs);
    should(actions).length(1);
    should(actions[0].type).equal(ChangeType.modify);
    should(actions[0].detail).startWith("Updating DaemonSet");
    should(actions[0].changes).have.length(1);
    should(actions[0].changes[0].type).equal(ChangeType.modify);
    should(actions[0].changes[0].detail).startWith("Updating DaemonSet");
    should(actions[0].changes[0].element.componentName).equal("Resource");
    should(actions[0].changes[0].element.props.key).equal("test");

    if (!deployID) throw new Error(`Missing deployID?`);
    await act(actions);

    // FIX: was missing `throw`, inconsistent with the identical null checks
    // elsewhere in this file; `throw` aborts explicitly and narrows the type
    // before waitForDeployed requires a non-null element.
    if (mountedOrig == null) throw should(mountedOrig).not.Null();
    const lastStatus = await waitForDeployed(mountedOrig, dom, deployID);
    const pods = await getAll("pods", { client, deployID });
    if (pods.length !== 1) {
      // tslint:disable-next-line: no-console
      console.error("Daemonset modified, but no pods: ", JSON.stringify(lastStatus, undefined, 2));
      should(pods).length(1);
    }
    should(pods).length(1);
    should(pods[0].metadata.name)
      .startWith(resourceElementToName(dom, deployID));
    should(pods[0].spec.containers).length(1);
    should(pods[0].spec.containers[0].command).eql(command);

    await plugin.finish();
  });

  it("Should leave daemonset alone", async () => {
    const oldDom = await createDS("test");

    //No diff
    const command = ["sleep", "3s"];
    const pod =
      <DaemonSet key="test" config={kubeClusterInfo}>
        <Pod isTemplate terminationGracePeriodSeconds={0}>
          <K8sContainer name="container" image="alpine:3.8" command={command} />
        </Pod>
      </DaemonSet>;
    const { dom } = await doBuild(pod, { deployID });
    await plugin.start(options);
    const obs = await plugin.observe(oldDom, dom);
    const actions = plugin.analyze(oldDom, dom, obs);
    checkNoActions(actions, dom);
    await plugin.finish();
  });

  it("Should delete daemonset", async () => {
    const oldDom = await createDS("test");

    // Empty Group => everything previously deployed should be deleted.
    const { dom } = await doBuild(<Group />, { deployID });
    await plugin.start(options);
    const obs = await plugin.observe(oldDom, dom);
    const actions = plugin.analyze(oldDom, dom, obs);
    should(actions.length).equal(1);
    should(actions[0].type).equal(ChangeType.delete);
    should(actions[0].detail).startWith("Deleting DaemonSet");
    should(actions[0].changes).have.length(1);
    should(actions[0].changes[0].type).equal(ChangeType.delete);
    should(actions[0].changes[0].detail).startWith("Deleting DaemonSet");
    should(actions[0].changes[0].element.componentName).equal("Resource");
    should(actions[0].changes[0].element.props.key).equal("test");

    if (!deployID) throw new Error(`Missing deployID?`);
    await act(actions);

    let pods: any[];
    do {
      await sleep(1000); //Give pods time to terminate
      pods = await getAll("pods", { client, deployID });
      if (pods.length !== 0) {
        // While draining, at most the single original pod may remain.
        should(pods.length).equal(1);
      }
    } while (pods.length !== 0);

    await plugin.finish();
  });
});
the_stack
import type vscode from 'vscode'; import { Injectable, Optional, Autowired } from '@opensumi/di'; import { IRPCProtocol } from '@opensumi/ide-connection'; import { ILogger, Disposable, PreferenceService, IDisposable } from '@opensumi/ide-core-browser'; import { ITerminalApiService, ITerminalGroupViewService, ITerminalController, ITerminalInfo, ITerminalProcessExtHostProxy, IStartExtensionTerminalRequest, ITerminalDimensions, ITerminalDimensionsDto, ITerminalExternalLinkProvider, ITerminalClient, ITerminalLink, ITerminalProfileInternalService, } from '@opensumi/ide-terminal-next'; import { IEnvironmentVariableService, SerializableEnvironmentVariableCollection, EnvironmentVariableServiceToken, } from '@opensumi/ide-terminal-next/lib/common/environmentVariable'; import { deserializeEnvironmentVariableCollection } from '@opensumi/ide-terminal-next/lib/common/environmentVariable'; import { ITerminalProfileService } from '@opensumi/ide-terminal-next/lib/common/profile'; import { IMainThreadTerminal, IExtHostTerminal, ExtHostAPIIdentifier } from '../../../common/vscode'; import { IActivationEventService } from '../../types'; @Injectable({ multiple: true }) export class MainThreadTerminal implements IMainThreadTerminal { private readonly proxy: IExtHostTerminal; shortId2LongIdMap: Map<string, string> = new Map(); private readonly _terminalProcessProxies = new Map<string, ITerminalProcessExtHostProxy>(); private readonly _profileProviders = new Map<string, IDisposable>(); /** * A single shared terminal link provider for the exthost. When an ext registers a link * provider, this is registered with the terminal on the renderer side and all links are * provided through this, even from multiple ext link providers. Xterm should remove lower * priority intersecting links itself. 
*/ private _linkProvider: IDisposable | undefined; @Autowired(EnvironmentVariableServiceToken) private environmentVariableService: IEnvironmentVariableService; @Autowired(ITerminalApiService) private terminalApi: ITerminalApiService; @Autowired(ITerminalController) private controller: ITerminalController; @Autowired(ITerminalProfileService) private profileService: ITerminalProfileService; @Autowired(ITerminalProfileInternalService) private profileInternalSerivce: ITerminalProfileInternalService; @Autowired(ITerminalGroupViewService) private terminalGroupViewService: ITerminalGroupViewService; @Autowired(PreferenceService) protected readonly preference: PreferenceService; @Autowired(IActivationEventService) protected readonly activationEventService: IActivationEventService; @Autowired(ILogger) protected readonly logger: ILogger; private disposable = new Disposable(); constructor(@Optional(IRPCProtocol) private rpcProtocol: IRPCProtocol) { this.proxy = this.rpcProtocol.getProxy(ExtHostAPIIdentifier.ExtHostTerminal); this.initData(); this.bindEvent(); } public dispose() { this.disposable.dispose(); } private bindEvent() { this.disposable.addDispose( this.terminalApi.onDidChangeActiveTerminal((id) => { this.proxy.$onDidChangeActiveTerminal(id); }), ); this.disposable.addDispose( this.terminalApi.onDidCloseTerminal((e) => { this.proxy.$onDidCloseTerminal(e); }), ); this.disposable.addDispose( this.terminalApi.onDidTerminalTitleChange((e) => { this.proxy.$onDidTerminalTitleChange(e.id, e.name); }), ); this.disposable.addDispose( this.terminalApi.onDidOpenTerminal((info: ITerminalInfo) => { const client = this.controller.clients.get(info.id); if (client) { client.addDispose( (client as any).xterm.raw.onData(() => { this.proxy.$acceptTerminalInteraction(info.id); }), ); } this.proxy.$onDidOpenTerminal(info); }), ); this.disposable.addDispose( this.controller.onInstanceRequestStartExtensionTerminal((e) => this._onRequestStartExtensionTerminal(e)), ); 
this.disposable.addDispose( this.profileService.onDidChangeAvailableProfiles(() => { this._updateDefaultProfile(); }), ); this.disposable.addDispose( this.profileService.onTerminalProfileResolved(async (id: string) => { await this.activationEventService.fireEvent(`onTerminalProfile:${id}`); }), ); } private initData() { const terminals = this.terminalApi.terminals; const infoList: ITerminalInfo[] = []; terminals.forEach((term) => { infoList.push({ id: term.id, name: term.name, isActive: term.isActive, }); }); this.proxy.$setTerminals(infoList); this._updateDefaultProfile(); } transform<T>(id: string, cb: (sessionId: string) => T): T { const sessionId = this.shortId2LongIdMap.get(id); return cb(sessionId || id); } $sendText(id: string, text: string, addNewLine?: boolean) { return this.transform(id, (sessionId) => { this.proxy.$acceptTerminalInteraction(sessionId); return this.terminalApi.sendText(sessionId, text, addNewLine); }); } $show(id: string, preserveFocus?: boolean) { return this.transform(id, (sessionId) => this.terminalApi.showTerm(sessionId, preserveFocus)); } $hide(id: string) { return this.transform(id, (sessionId) => this.terminalApi.hideTerm(sessionId)); } $dispose(id: string) { return this.transform(id, (sessionId) => this.terminalApi.removeTerm(sessionId)); } $getProcessId(id: string) { return this.transform(id, (sessionId) => this.terminalApi.getProcessId(sessionId)); } async $createTerminal(options: vscode.TerminalOptions, shortId: string): Promise<void> { await this.controller.ready.promise; const terminal = await this.terminalApi.createTerminal(options, shortId); if (!terminal) { // 应该要 throw Error this.logger.error(`Create Terminal ${shortId} fail.`); return; } this.shortId2LongIdMap.set(shortId, terminal.id); } private _onRequestStartExtensionTerminal(request: IStartExtensionTerminalRequest): void { const proxy = request.proxy; this._terminalProcessProxies.set(proxy.terminalId, proxy); // Note that onReisze is not being listened to here as it 
needs to fire when max dimensions // change, excluding the dimension override const initialDimensions: ITerminalDimensionsDto | undefined = request.cols && request.rows ? { columns: request.cols, rows: request.rows, } : undefined; this.proxy.$startExtensionTerminal(proxy.terminalId, initialDimensions).then(request.callback); proxy.onInput((data) => this.proxy.$acceptProcessInput(proxy.terminalId, data)); proxy.onShutdown((immediate) => this.proxy.$acceptProcessShutdown(proxy.terminalId, immediate)); proxy.onRequestCwd(() => this.proxy.$acceptProcessRequestCwd(proxy.terminalId)); proxy.onRequestInitialCwd(() => this.proxy.$acceptProcessRequestInitialCwd(proxy.terminalId)); } private _getTerminalProcess(id: string): ITerminalProcessExtHostProxy { return this.transform(id, (terminalId) => { const terminal = this._terminalProcessProxies.get(terminalId); if (!terminal) { throw new Error(`Unknown terminal: ${terminalId}`); } return terminal; }); } public $sendProcessTitle(id: string, title: string): void { return this.transform(id, (terminalId) => { const terminalWidgetInstance = this.terminalGroupViewService.getWidget(terminalId); if (terminalWidgetInstance) { terminalWidgetInstance.rename(title); this.proxy.$acceptTerminalTitleChange(terminalId, title); } }); } public $sendProcessData(terminalId: string, data: string): void { this._getTerminalProcess(terminalId).emitData(data); } public $sendProcessReady(terminalId: string, pid: number, cwd: string): void { this._getTerminalProcess(terminalId).emitReady(pid, cwd); } public $sendProcessExit(terminalId: string, exitCode: number | undefined): void { this._getTerminalProcess(terminalId).emitExit(exitCode); this._terminalProcessProxies.delete(terminalId); } public $sendOverrideDimensions(terminalId: string, dimensions: ITerminalDimensions | undefined): void { this._getTerminalProcess(terminalId).emitOverrideDimensions(dimensions); } public $sendProcessInitialCwd(terminalId: string, initialCwd: string): void { 
this._getTerminalProcess(terminalId).emitInitialCwd(initialCwd); } public $sendProcessCwd(terminalId: string, cwd: string): void { this._getTerminalProcess(terminalId).emitCwd(cwd); } public $startLinkProvider() { this._linkProvider?.dispose(); this._linkProvider = this.controller.registerLinkProvider(new ExtensionTerminalLinkProvider(this.proxy)); } public $stopLinkProvider() { this._linkProvider?.dispose(); this._linkProvider = undefined; } public $registerProfileProvider(id: string, extensionIdentifier: string): void { // Proxy profile provider requests through the extension host this._profileProviders.set( id, this.profileService.registerTerminalProfileProvider(extensionIdentifier, id, { createContributedTerminalProfile: async (options) => { this.proxy.$createContributedProfileTerminal(id, options); }, }), ); } public $unregisterProfileProvider(id: string): void { this._profileProviders.get(id)?.dispose(); this._profileProviders.delete(id); } private async _updateDefaultProfile() { const defaultProfile = await this.profileInternalSerivce.resolveDefaultProfile(); if (defaultProfile) { this.proxy.$acceptDefaultProfile(defaultProfile); } } $setEnvironmentVariableCollection( extensionIdentifier: string, persistent: boolean, collection: SerializableEnvironmentVariableCollection | undefined, ): void { if (collection) { const translatedCollection = { persistent, map: deserializeEnvironmentVariableCollection(collection), }; this.environmentVariableService.set(extensionIdentifier, translatedCollection); } else { this.environmentVariableService.delete(extensionIdentifier); } } } class ExtensionTerminalLinkProvider implements ITerminalExternalLinkProvider { constructor(private readonly _proxy: IExtHostTerminal) {} async provideLinks(instance: ITerminalClient, line: string): Promise<ITerminalLink[] | undefined> { const proxy = this._proxy; const extHostLinks = await proxy.$provideLinks(instance.id, line); return extHostLinks.map((dto) => ({ id: dto.id, startIndex: 
dto.startIndex, length: dto.length, label: dto.label, activate: () => proxy.$activateLink(instance.id, dto.id), })); } }
the_stack
import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { AutoscaleSettings } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { MonitorClient } from "../monitorClient"; import { AutoscaleSettingResource, AutoscaleSettingsListByResourceGroupNextOptionalParams, AutoscaleSettingsListByResourceGroupOptionalParams, AutoscaleSettingsListBySubscriptionNextOptionalParams, AutoscaleSettingsListBySubscriptionOptionalParams, AutoscaleSettingsListByResourceGroupResponse, AutoscaleSettingsCreateOrUpdateOptionalParams, AutoscaleSettingsCreateOrUpdateResponse, AutoscaleSettingsDeleteOptionalParams, AutoscaleSettingsGetOptionalParams, AutoscaleSettingsGetResponse, AutoscaleSettingResourcePatch, AutoscaleSettingsUpdateOptionalParams, AutoscaleSettingsUpdateResponse, AutoscaleSettingsListBySubscriptionResponse, AutoscaleSettingsListByResourceGroupNextResponse, AutoscaleSettingsListBySubscriptionNextResponse } from "../models"; /// <reference lib="esnext.asynciterable" /> /** Class containing AutoscaleSettings operations. */ export class AutoscaleSettingsImpl implements AutoscaleSettings { private readonly client: MonitorClient; /** * Initialize a new instance of the class AutoscaleSettings class. * @param client Reference to the service client */ constructor(client: MonitorClient) { this.client = client; } /** * Lists the autoscale settings for a resource group * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param options The options parameters. 
*/ public listByResourceGroup( resourceGroupName: string, options?: AutoscaleSettingsListByResourceGroupOptionalParams ): PagedAsyncIterableIterator<AutoscaleSettingResource> { const iter = this.listByResourceGroupPagingAll(resourceGroupName, options); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: () => { return this.listByResourceGroupPagingPage(resourceGroupName, options); } }; } private async *listByResourceGroupPagingPage( resourceGroupName: string, options?: AutoscaleSettingsListByResourceGroupOptionalParams ): AsyncIterableIterator<AutoscaleSettingResource[]> { let result = await this._listByResourceGroup(resourceGroupName, options); yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { result = await this._listByResourceGroupNext( resourceGroupName, continuationToken, options ); continuationToken = result.nextLink; yield result.value || []; } } private async *listByResourceGroupPagingAll( resourceGroupName: string, options?: AutoscaleSettingsListByResourceGroupOptionalParams ): AsyncIterableIterator<AutoscaleSettingResource> { for await (const page of this.listByResourceGroupPagingPage( resourceGroupName, options )) { yield* page; } } /** * Lists the autoscale settings for a subscription * @param options The options parameters. 
*/ public listBySubscription( options?: AutoscaleSettingsListBySubscriptionOptionalParams ): PagedAsyncIterableIterator<AutoscaleSettingResource> { const iter = this.listBySubscriptionPagingAll(options); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: () => { return this.listBySubscriptionPagingPage(options); } }; } private async *listBySubscriptionPagingPage( options?: AutoscaleSettingsListBySubscriptionOptionalParams ): AsyncIterableIterator<AutoscaleSettingResource[]> { let result = await this._listBySubscription(options); yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { result = await this._listBySubscriptionNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } } private async *listBySubscriptionPagingAll( options?: AutoscaleSettingsListBySubscriptionOptionalParams ): AsyncIterableIterator<AutoscaleSettingResource> { for await (const page of this.listBySubscriptionPagingPage(options)) { yield* page; } } /** * Lists the autoscale settings for a resource group * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param options The options parameters. */ private _listByResourceGroup( resourceGroupName: string, options?: AutoscaleSettingsListByResourceGroupOptionalParams ): Promise<AutoscaleSettingsListByResourceGroupResponse> { return this.client.sendOperationRequest( { resourceGroupName, options }, listByResourceGroupOperationSpec ); } /** * Creates or updates an autoscale setting. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param autoscaleSettingName The autoscale setting name. * @param parameters Parameters supplied to the operation. * @param options The options parameters. 
*/ createOrUpdate( resourceGroupName: string, autoscaleSettingName: string, parameters: AutoscaleSettingResource, options?: AutoscaleSettingsCreateOrUpdateOptionalParams ): Promise<AutoscaleSettingsCreateOrUpdateResponse> { return this.client.sendOperationRequest( { resourceGroupName, autoscaleSettingName, parameters, options }, createOrUpdateOperationSpec ); } /** * Deletes and autoscale setting * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param autoscaleSettingName The autoscale setting name. * @param options The options parameters. */ delete( resourceGroupName: string, autoscaleSettingName: string, options?: AutoscaleSettingsDeleteOptionalParams ): Promise<void> { return this.client.sendOperationRequest( { resourceGroupName, autoscaleSettingName, options }, deleteOperationSpec ); } /** * Gets an autoscale setting * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param autoscaleSettingName The autoscale setting name. * @param options The options parameters. */ get( resourceGroupName: string, autoscaleSettingName: string, options?: AutoscaleSettingsGetOptionalParams ): Promise<AutoscaleSettingsGetResponse> { return this.client.sendOperationRequest( { resourceGroupName, autoscaleSettingName, options }, getOperationSpec ); } /** * Updates an existing AutoscaleSettingsResource. To update other fields use the CreateOrUpdate method. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param autoscaleSettingName The autoscale setting name. * @param autoscaleSettingResource Parameters supplied to the operation. * @param options The options parameters. 
*/ update( resourceGroupName: string, autoscaleSettingName: string, autoscaleSettingResource: AutoscaleSettingResourcePatch, options?: AutoscaleSettingsUpdateOptionalParams ): Promise<AutoscaleSettingsUpdateResponse> { return this.client.sendOperationRequest( { resourceGroupName, autoscaleSettingName, autoscaleSettingResource, options }, updateOperationSpec ); } /** * Lists the autoscale settings for a subscription * @param options The options parameters. */ private _listBySubscription( options?: AutoscaleSettingsListBySubscriptionOptionalParams ): Promise<AutoscaleSettingsListBySubscriptionResponse> { return this.client.sendOperationRequest( { options }, listBySubscriptionOperationSpec ); } /** * ListByResourceGroupNext * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param nextLink The nextLink from the previous successful call to the ListByResourceGroup method. * @param options The options parameters. */ private _listByResourceGroupNext( resourceGroupName: string, nextLink: string, options?: AutoscaleSettingsListByResourceGroupNextOptionalParams ): Promise<AutoscaleSettingsListByResourceGroupNextResponse> { return this.client.sendOperationRequest( { resourceGroupName, nextLink, options }, listByResourceGroupNextOperationSpec ); } /** * ListBySubscriptionNext * @param nextLink The nextLink from the previous successful call to the ListBySubscription method. * @param options The options parameters. 
*/ private _listBySubscriptionNext( nextLink: string, options?: AutoscaleSettingsListBySubscriptionNextOptionalParams ): Promise<AutoscaleSettingsListBySubscriptionNextResponse> { return this.client.sendOperationRequest( { nextLink, options }, listBySubscriptionNextOperationSpec ); } } // Operation Specifications const serializer = coreClient.createSerializer(Mappers, /* isXml */ false); const listByResourceGroupOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Insights/autoscalesettings", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AutoscaleSettingResourceCollection }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const createOrUpdateOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Insights/autoscalesettings/{autoscaleSettingName}", httpMethod: "PUT", responses: { 200: { bodyMapper: Mappers.AutoscaleSettingResource }, 201: { bodyMapper: Mappers.AutoscaleSettingResource }, default: { bodyMapper: Mappers.ErrorResponse } }, requestBody: Parameters.parameters, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId, Parameters.autoscaleSettingName ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Insights/autoscalesettings/{autoscaleSettingName}", httpMethod: "DELETE", responses: { 200: {}, 204: {}, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ 
Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId, Parameters.autoscaleSettingName ], headerParameters: [Parameters.accept], serializer }; const getOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Insights/autoscalesettings/{autoscaleSettingName}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AutoscaleSettingResource }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId, Parameters.autoscaleSettingName ], headerParameters: [Parameters.accept], serializer }; const updateOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Insights/autoscalesettings/{autoscaleSettingName}", httpMethod: "PATCH", responses: { 200: { bodyMapper: Mappers.AutoscaleSettingResource }, default: { bodyMapper: Mappers.ErrorResponse } }, requestBody: Parameters.autoscaleSettingResource, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId, Parameters.autoscaleSettingName ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer }; const listBySubscriptionOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/providers/Microsoft.Insights/autoscalesettings", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AutoscaleSettingResourceCollection }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.$host, Parameters.subscriptionId], headerParameters: [Parameters.accept], serializer }; const listByResourceGroupNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: 
Mappers.AutoscaleSettingResourceCollection }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId, Parameters.nextLink ], headerParameters: [Parameters.accept], serializer }; const listBySubscriptionNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AutoscaleSettingResourceCollection }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.nextLink ], headerParameters: [Parameters.accept], serializer };
the_stack
import { Component, Host, h } from '@stencil/core';
import { newSpecPage } from '@stencil/core/testing';

// Server/client hydration round-trip tests for shadow components.
// Each test (1) renders server-side (hydrateServerSide) and asserts the
// serialized annotation comments — r. = vdom root, o. = original (slotted)
// node, s. = slot location, t. = text node — then (2) feeds that HTML back
// through client-side hydration (hydrateClientSide) and asserts the
// reconstructed shadow DOM.
// NOTE(review): expected-HTML template literals are compared via toEqualHtml,
// which normalizes whitespace; they are preserved byte-for-byte here.
describe('hydrate, shadow parent', () => {
  // Default slot nested one <div> deep; slotted content is a bare text node.
  it('slot depth 1, text w/out vdom', async () => {
    @Component({
      tag: 'cmp-a',
      shadow: true,
    })
    class CmpA {
      render() {
        return (
          <Host>
            <div>
              <slot></slot>
            </div>
          </Host>
        );
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA],
      html: ` <cmp-a>middle</cmp-a> `,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <!--o.0.1.--> <div c-id="1.0.0.0"> <!--s.1.1.1.0.--> <!--t.0.1--> middle </div> </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root['s-id']).toBe('1');
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> <div> <slot></slot> </div> </mock:shadow-root> <!----> middle </cmp-a> `);
  });

  // Default slot at depth 0, sandwiched between two shadow text nodes.
  it('slot, text w/out vdom', async () => {
    @Component({
      tag: 'cmp-a',
      shadow: true,
    })
    class CmpA {
      render() {
        return (
          <Host>
            top
            <slot></slot>
            bottom
          </Host>
        );
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA],
      html: ` <cmp-a>middle</cmp-a> `,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <!--o.0.1.--> <!--t.1.0.0.0--> top <!--s.1.1.0.1.--> <!--t.0.1--> middle <!--t.1.2.0.2--> bottom </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root['s-id']).toBe('1');
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> top <slot></slot> bottom </mock:shadow-root> <!----> middle </cmp-a> `);
  });

  // No slot at all — only shadow-internal text survives the round trip.
  it('no slot, child shadow text', async () => {
    @Component({
      tag: 'cmp-a',
      shadow: true,
    })
    class CmpA {
      render() {
        return <Host>shadow-text</Host>;
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA],
      html: ` <cmp-a></cmp-a> `,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <!--t.1.0.0.0--> shadow-text </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root['s-id']).toBe('1');
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> shadow-text </mock:shadow-root> </cmp-a> `);
  });

  // Empty host: default slot inside a <div>, plus a named ("fixed") slot.
  it('named slot and slot depth 1', async () => {
    @Component({
      tag: 'cmp-a',
      shadow: true,
    })
    class CmpA {
      render() {
        return (
          <Host>
            <div>
              <slot></slot>
            </div>
            <slot name="fixed"></slot>
          </Host>
        );
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA],
      html: ` <cmp-a></cmp-a> `,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <div c-id="1.0.0.0"> <!--s.1.1.1.0.--> </div> <!--s.1.2.0.1.fixed--> </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root['s-id']).toBe('1');
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> <div> <slot></slot> </div> <slot name="fixed"></slot> </mock:shadow-root> </cmp-a> `);
  });

  // Shadow parent renders a non-shadow child (cmp-b) whose light-dom text is
  // supplied from the parent's own shadow template.
  it('nested cmp-b, parent text light-dom slot', async () => {
    @Component({ tag: 'cmp-a', shadow: true })
    class CmpA {
      render() {
        return (
          <Host>
            <cmp-b>cmp-a-light-dom</cmp-b>
          </Host>
        );
      }
    }
    @Component({ tag: 'cmp-b' })
    class CmpB {
      render() {
        return (
          <Host>
            <slot></slot>
          </Host>
        );
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA, CmpB],
      html: `<cmp-a></cmp-a>`,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <cmp-b class="hydrated" c-id="1.0.0.0" s-id="2"> <!--r.2--> <!--o.1.1--> <!--s.2.0.0.0.--> <!--t.1.1.1.0--> cmp-a-light-dom </cmp-b> </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA, CmpB],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> <cmp-b class="hydrated"> <!--r.2--> <!----> <!--s.2.0.0.0.--> cmp-a-light-dom </cmp-b> </mock:shadow-root> </cmp-a> `);
    // The child lives entirely inside the parent's shadow root, so the
    // light DOM of cmp-a itself is empty after hydration.
    expect(clientHydrated.root).toEqualLightHtml(` <cmp-a class="hydrated"></cmp-a> `);
  });

  // Five slots (named + default) in one <section>; slotted text falls into
  // the unnamed default slot.
  it('nested text, complicated slots', async () => {
    @Component({ tag: 'cmp-a', shadow: true })
    class CmpA {
      render() {
        return (
          <Host>
            <section>
              <slot name="start"></slot>
              <slot name="secondary"></slot>
              <div>
                <slot></slot>
              </div>
              <slot name="primary"></slot>
              <slot name="end"></slot>
            </section>
          </Host>
        );
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA],
      html: ` <cmp-a> Title </cmp-a> `,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <!--o.0.1.--> <section c-id="1.0.0.0"> <!--s.1.1.1.0.start--> <!--s.1.2.1.1.secondary--> <div c-id="1.3.1.2"> <!--s.1.4.2.0.--> <!--t.0.1--> Title </div> <!--s.1.5.1.3.primary--> <!--s.1.6.1.4.end--> </section> </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> <section> <slot name="start"></slot> <slot name="secondary"></slot> <div> <slot></slot> </div> <slot name="primary"></slot> <slot name="end"></slot> </section> </mock:shadow-root> <!----> Title </cmp-a> `);
    expect(clientHydrated.root).toEqualLightHtml(` <cmp-a class="hydrated"> <!----> Title </cmp-a> `);
  });

  // Three shadow components nested: tab-button hosts a slotted ion-badge
  // (itself shadow) and a shadow ion-ripple-effect inside its own template.
  it('root level component, nested shadow slot', async () => {
    @Component({
      tag: 'ion-tab-button',
      shadow: true
    })
    class TabButton {
      render() {
        return (
          <Host>
            <a>
              <slot></slot>
              <ion-ripple-effect></ion-ripple-effect>
            </a>
          </Host>
        );
      }
    }
    @Component({
      tag: 'ion-badge',
      shadow: true
    })
    class Badge {
      render() {
        return (
          <Host>
            <slot></slot>
          </Host>
        );
      }
    }
    @Component({
      tag: 'ion-ripple-effect',
      shadow: true
    })
    class RippleEffect {
      render() {
        return <Host></Host>;
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [Badge, RippleEffect, TabButton],
      html: ` <ion-tab-button> <ion-badge>root-text</ion-badge> </ion-tab-button> `,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <ion-tab-button class="hydrated" s-id="1"> <!--r.1--> <!--o.0.2.--> <a c-id="1.0.0.0"> <!--s.1.1.1.0.--> <ion-badge class="hydrated" c-id="0.2" s-id="2"> <!--r.2--> <!--o.0.4.--> <!--s.2.0.0.0.--> <!--t.0.4--> root-text </ion-badge> <ion-ripple-effect class="hydrated" c-id="1.2.1.1" s-id="3"> <!--r.3--> </ion-ripple-effect> </a> </ion-tab-button> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [Badge, RippleEffect, TabButton],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root).toEqualHtml(` <ion-tab-button class="hydrated"> <mock:shadow-root> <a> <slot></slot> <ion-ripple-effect class="hydrated"> <mock:shadow-root> </mock:shadow-root> </ion-ripple-effect> </a> </mock:shadow-root> <!----> <ion-badge class="hydrated"> <mock:shadow-root> <slot></slot> </mock:shadow-root> <!----> root-text </ion-badge> </ion-tab-button> `);
  });

  // Shadow parent with slotted non-shadow child; the child's own scoped-slot
  // annotations (<!--r.2--> etc.) remain in the light DOM after hydration.
  it('nested cmp-b, parent slot', async () => {
    @Component({ tag: 'cmp-a', shadow: true })
    class CmpA {
      render() {
        return <slot></slot>;
      }
    }
    @Component({ tag: 'cmp-b' })
    class CmpB {
      render() {
        return (
          <Host>
            <slot></slot>
          </Host>
        );
      }
    }
    // @ts-ignore
    const serverHydrated = await newSpecPage({
      components: [CmpA, CmpB],
      html: `<cmp-a><cmp-b>cmp-a-light-dom</cmp-b></cmp-a>`,
      hydrateServerSide: true,
    });
    expect(serverHydrated.root).toEqualHtml(` <cmp-a class="hydrated" s-id="1"> <!--r.1--> <!--o.0.1.--> <!--s.1.0.0.0.--> <cmp-b class="hydrated" c-id="0.1" s-id="2"> <!--r.2--> <!--o.0.2--> <!--s.2.0.0.0.--> <!--t.0.2--> cmp-a-light-dom </cmp-b> </cmp-a> `);

    // @ts-ignore
    const clientHydrated = await newSpecPage({
      components: [CmpA, CmpB],
      html: serverHydrated.root.outerHTML,
      hydrateClientSide: true,
    });
    expect(clientHydrated.root).toEqualHtml(` <cmp-a class="hydrated"> <mock:shadow-root> <slot></slot> </mock:shadow-root> <!----> <cmp-b class="hydrated"> <!--r.2--> <!----> <!--s.2.0.0.0.--> cmp-a-light-dom </cmp-b> </cmp-a> `);
    expect(clientHydrated.root).toEqualLightHtml(` <cmp-a class="hydrated"> <!----> <cmp-b class="hydrated"> <!--r.2--> <!----> <!--s.2.0.0.0.--> cmp-a-light-dom </cmp-b> </cmp-a> `);
  });
});
the_stack
import React, {
  useEffect,
  useCallback,
  useRef,
  useState,
  forwardRef,
  ReactNode,
  FC,
  ComponentProps,
  MutableRefObject,
} from 'react';
import { styled, css } from '@storybook/theming';
import { color, typography, spacing } from './shared/styles';
import { jiggle } from './shared/animation';
import { Icon } from './Icon';
import { Link } from './Link';
import WithTooltip from './tooltip/WithTooltip';
import { TooltipMessage } from './tooltip/TooltipMessage';

// prettier-ignore
const Label = styled.label<Pick<Props, 'appearance'>>`
  font-weight: ${props => props.appearance !== 'code' && typography.weight.bold};
  font-family: ${props => props.appearance === 'code' && typography.type.code };
  font-size: ${props => props.appearance === 'code' ? typography.size.s1 - 1 : typography.size.s2 }px;
  line-height: ${props => props.appearance === 'code' ? 16 : 20 }px;
`;

// Visually hides the label (screen-reader accessible) when hideLabel is set.
// prettier-ignore
const LabelWrapper = styled.div<Pick<Props, 'hideLabel'>>`
  margin-bottom: 8px;
  ${props => props.hideLabel && css`
    border: 0px !important;
    clip: rect(0 0 0 0) !important;
    -webkit-clip-path: inset(100%) !important;
    clip-path: inset(100%) !important;
    height: 1px !important;
    overflow: hidden !important;
    padding: 0px !important;
    position: absolute !important;
    white-space: nowrap !important;
    width: 1px !important;
  `}
`;

// prettier-ignore
const InputEl = styled.input`
  &::placeholder {
    color: ${color.mediumdark};
  }
  appearance: none;
  border:none;
  box-sizing: border-box;
  display: block;
  outline: none;
  width: 100%;
  margin: 0;

  &[disabled] {
    cursor: not-allowed;
    opacity: .5;
  }

  &:-webkit-autofill {
    -webkit-box-shadow: 0 0 0 3em ${color.lightest} inset;
  }
`;

// Border radii for inputs stacked directly on top of each other; negative
// margins collapse adjacent 1px borders, z-index keeps the focused/error
// input's ring on top.
const getStackLevelStyling = (props: Pick<Props, 'error' | 'stackLevel'>) => {
  const radius = 4;
  const stackLevelDefinedStyling = css`
    position: relative;
    ${props.error && `z-index: 1;`}

    &:focus {
      z-index: 2;
    }
  `;
  switch (props.stackLevel) {
    case 'top':
      return css`
        border-top-left-radius: ${radius}px;
        border-top-right-radius: ${radius}px;
        border-bottom-left-radius: 0;
        border-bottom-right-radius: 0;
        ${stackLevelDefinedStyling}
      `;
    case 'middle':
      return css`
        border-radius: 0px;
        margin-top: -1px;
        ${stackLevelDefinedStyling}
      `;
    case 'bottom':
      return css`
        border-bottom-left-radius: ${radius}px;
        border-bottom-right-radius: ${radius}px;
        border-top-left-radius: 0;
        border-top-right-radius: 0;
        margin-top: -1px;
        ${stackLevelDefinedStyling}
      `;
    default:
      return css`
        border-radius: ${radius}px;
      `;
  }
};

// prettier-ignore
const InputWrapper = styled.div<Pick<Props, 'error' | 'stackLevel' | 'appearance' | 'startingType' | 'icon'>>`
  display: inline-block;
  position: relative;
  vertical-align: top;
  width: 100%;

  ${InputEl} {
    position: relative;
    ${props => getStackLevelStyling(props)}

    background: ${color.lightest};
    color: ${color.darkest};
    font-size: ${typography.size.s2}px;
    line-height: 20px;
    padding: 10px 15px; //40px tall
    box-shadow: ${color.border} 0 0 0 1px inset;

    &:focus {
      box-shadow: ${color.secondary} 0 0 0 1px inset;
    }

    ${props => props.appearance === 'pill' && css`
      font-size: ${typography.size.s1}px;
      line-height: 16px;
      padding: 6px 12px; //28px tall
      border-radius: 3em;
      background: transparent;
    `}

    ${props => props.appearance === 'code' && css`
      font-size: ${typography.size.s1 - 1}px;
      line-height: 16px;
      font-family: ${typography.type.code};
      border-radius: ${spacing.borderRadius.small}px;
      background: ${color.lightest};
      padding: 8px 10px;
    `}
  }

  ${props => props.startingType === 'password' && css`
    ${InputEl} {
      padding-right: 52px;
    }
  `}

  ${props => props.icon && css`
    > svg {
      transition: all 150ms ease-out;
      position: absolute;
      top: 50%;
      ${props.appearance === 'pill' || props.appearance === 'code' ? css`
        font-size: ${typography.size.s1}px;
      ` : css`
        font-size: ${typography.size.s2}px;
      `}
      height: 1em;
      width: 1em;
      margin-top: -.525em;
      z-index: 3;
      ${props.appearance === 'pill' || props.appearance === 'code' ? css`
        left: 10px;
      ` : css`
        left: ${props.appearance === 'tertiary' ? 0 : `15px`};
      `}
      background: transparent;

      path {
        transition: all 150ms ease-out;
        fill: ${color.mediumdark};
      }
    }

    ${InputEl}:focus + svg path {
      fill: ${color.darker};
    }

    ${InputEl} {
      padding-left: 40px;
      ${(props.appearance === 'pill' || props.appearance === 'code') && css`
        padding-left: 30px;
      `};
    }
  `}

  ${props => props.error && css`
    ${InputEl} {
      box-shadow: ${color.red} 0 0 0 1px inset;

      &:focus {
        box-shadow: ${color.red} 0 0 0 1px inset !important;
      }
    }

    svg {
      animation: ${jiggle} 700ms ease-out;

      path {
        fill: ${color.red};
      }
    }
  `}
`;

// prettier-ignore
const InputContainer = styled.div<Pick<Props, 'orientation'>>`
  ${props => props.orientation === 'horizontal' && css`
    display: table-row;

    ${LabelWrapper}, ${InputWrapper} {
      display: table-cell;
    }

    ${LabelWrapper} {
      width: 1px;
      padding-right: 20px;
      vertical-align: middle;
    }

    ${InputWrapper} {
      width: auto;
    }
  `}
`;

const ErrorTooltip = styled(WithTooltip)`
  width: 100%;
`;

const ErrorTooltipMessage = styled(TooltipMessage)`
  width: 170px;
`;

// Show/Hide action for password inputs, pinned to the right edge.
const Action = styled.div`
  position: absolute;
  right: 0;
  min-width: 45px;
  top: 50%;
  transform: translateY(-50%);
  font-weight: bold;
  font-size: 11px;
  z-index: 2;
`;

// Resolves the error prop (value or callback) into a message; suppresses the
// message once the value has changed from the last value that errored.
const getErrorMessage = ({
  error,
  value,
  lastErrorValue,
}: Pick<Props, 'error' | 'value' | 'lastErrorValue'>) => {
  let errorMessage = typeof error === 'function' ? error(value) : error;
  if (lastErrorValue) {
    if (value !== lastErrorValue) {
      errorMessage = null;
    }
  }
  return errorMessage;
};

// FC<Props & ComponentProps<typeof InputEl>>
export const PureInput = forwardRef<HTMLInputElement, Props & ComponentProps<typeof InputEl>>(
  (
    {
      id,
      appearance = 'default',
      className = null,
      error = null,
      errorTooltipPlacement = 'right',
      hideLabel = false,
      icon = null,
      label,
      lastErrorValue = null,
      onActionClick = null,
      orientation = 'vertical',
      stackLevel = undefined,
      startingType = 'text',
      suppressErrorMessage = false,
      type = 'text',
      value = '',
      ...props
    },
    ref
  ) => {
    const [errorMessage, setErrorMessage] = useState(
      getErrorMessage({ error, value, lastErrorValue })
    );
    const errorId = `${id}-error`;

    // Re-resolve the error message whenever the value or the error inputs change.
    useEffect(() => {
      setErrorMessage(getErrorMessage({ error, value, lastErrorValue }));
    }, [value, error, lastErrorValue]);

    const inputEl = (
      <InputEl
        id={id}
        // Pass the ref to the actual input element so it can be controlled
        // externally.
        ref={ref}
        value={value}
        type={type}
        aria-describedby={errorId}
        aria-invalid={!!error}
        {...props}
      />
    );

    return (
      <InputContainer orientation={orientation} className={className}>
        <LabelWrapper hideLabel={hideLabel}>
          <Label htmlFor={id} appearance={appearance}>
            {label}
          </Label>
        </LabelWrapper>

        <InputWrapper
          error={errorMessage}
          data-error={errorMessage}
          icon={icon}
          appearance={appearance}
          stackLevel={stackLevel}
          startingType={startingType}
        >
          {icon && <Icon icon={icon} aria-hidden />}
          {/** The tooltip is rendered regardless of the presence of an error. This is done to preserve the focus state of the Input when it is used inside of a form that can choose when to show/hide error states based on various factors. */}
          <ErrorTooltip
            tabIndex={-1}
            placement={errorTooltipPlacement}
            trigger="none"
            startOpen
            tagName="div"
            hasChrome={!!errorMessage && !suppressErrorMessage}
            tooltip={
              errorMessage && !suppressErrorMessage && <ErrorTooltipMessage desc={errorMessage} />
            }
          >
            {inputEl}
          </ErrorTooltip>
          {startingType === 'password' && (
            <Action>
              {/* eslint-disable-next-line jsx-a11y/anchor-is-valid */}
              <Link isButton tertiary onClick={onActionClick} type="button">
                {type === 'password' ? 'Show' : 'Hide'}
              </Link>
            </Action>
          )}
        </InputWrapper>
      </InputContainer>
    );
  }
);

interface Props {
  id: string;
  value?: string;
  appearance?: 'default' | 'pill' | 'code' | 'tertiary';
  errorTooltipPlacement?: ComponentProps<typeof WithTooltip>['placement'];
  stackLevel?: 'top' | 'middle' | 'bottom';
  label: string;
  hideLabel?: boolean;
  orientation?: 'vertical' | 'horizontal';
  icon?: ComponentProps<typeof Icon>['icon'];
  error?: ReactNode | Function;
  suppressErrorMessage?: boolean;
  className?: string;
  lastErrorValue?: string;
  startingType?: string;
  type?: string;
  onActionClick?: (ev: React.MouseEvent<HTMLElement>) => void;
  startFocused?: boolean;
}

// Stateful wrapper: owns the password show/hide toggle and optional
// focus-on-mount behavior, delegating rendering to PureInput.
export const Input = forwardRef<HTMLInputElement, ComponentProps<typeof PureInput>>(
  ({ type: startingType, startFocused, ...rest }, ref) => {
    const [type, setType] = useState(startingType);
    // FIX(review): typed the event instead of an implicit `any`.
    const togglePasswordType = useCallback(
      (event: React.MouseEvent<HTMLElement>) => {
        // Make sure this does not submit a form
        event.preventDefault();
        event.stopPropagation();
        if (type === 'password') {
          setType('text');
          return;
        }
        setType('password');
      },
      [type, setType]
    );

    // Outside refs take precedence
    // FIX(review): selfRef was an untyped useRef() (MutableRefObject<undefined>);
    // typed as HTMLInputElement so .focus() below is well-typed.
    const selfRef = useRef<HTMLInputElement>(null);
    const inputRef = (ref as MutableRefObject<HTMLInputElement>) || selfRef;
    const didFocusOnStart = useRef(false);

    // FIX(review): `startFocused` was missing from the dependency array, so a
    // prop flip to true after mount never triggered the focus; the stable ref
    // container `didFocusOnStart` was listed instead (refs never re-fire
    // effects). The didFocusOnStart guard still ensures focus happens at most
    // once.
    useEffect(() => {
      if (inputRef.current && startFocused && !didFocusOnStart.current) {
        inputRef.current.focus();
        didFocusOnStart.current = true;
      }
    }, [inputRef, startFocused]);

    return (
      <PureInput
        ref={inputRef}
        startingType={startingType}
        type={type}
        onActionClick={togglePasswordType}
        {...rest}
      />
    );
  }
);
the_stack
import {Color} from '../../math/color'; import {Mat4} from '../../math/mat4' import {Vec3} from '../../math/vec3'; import {DirectionalLight} from '../directionallight'; import {Material} from '../material'; let MAX_BONES_PER_MESH = 60; export class AnimatedEntityVertex { constructor ( public pos: Vec3, public normal: Vec3, public boneWeights: [number, number, number, number], public boneIndices: [number, number, number, number] ) {} } export class ModelData { constructor ( public material: Material, public vertices: AnimatedEntityVertex[], public indices: Uint16Array, public boneNames: string[], public boneOffsets: Mat4[] ) { this.gl = null; this.vb = null; this.ib = null; } private gl: WebGLRenderingContext|null; public vb: WebGLBuffer|null; public ib: WebGLBuffer|null; public prepare(gl: WebGLRenderingContext) { if (this.gl != gl) { if (this.vb) gl.deleteBuffer(this.vb); if (this.ib) gl.deleteBuffer(this.ib); let vertStride = ( Float32Array.BYTES_PER_ELEMENT * 3 // pos + Float32Array.BYTES_PER_ELEMENT * 3 // normal + Float32Array.BYTES_PER_ELEMENT * 4 // bone weights + Float32Array.BYTES_PER_ELEMENT * 4 // bone indices ); let verts = new ArrayBuffer(this.vertices.length * vertStride); let floatView = new Float32Array(verts); for (let i = 0; i < this.vertices.length; i++) { let vert = this.vertices[i]; let posIdx = i * vertStride / Float32Array.BYTES_PER_ELEMENT; let normIdx = i * vertStride / Float32Array.BYTES_PER_ELEMENT + 3; let weightIdx = i * vertStride / Float32Array.BYTES_PER_ELEMENT + 6; let idxIdx = i * vertStride / Float32Array.BYTES_PER_ELEMENT + 10; floatView.set(vert.pos.data, posIdx); floatView.set(vert.normal.data, normIdx); floatView.set(vert.boneWeights, weightIdx); floatView.set(vert.boneIndices, idxIdx); } this.vb = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, this.vb); gl.bufferData(gl.ARRAY_BUFFER, verts, gl.STATIC_DRAW); this.ib = gl.createBuffer(); gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.ib); gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, 
new Uint16Array(this.indices), gl.STATIC_DRAW); this.gl = gl; } } } export class AnimatedEntityRenderCall { constructor ( public modelData: ModelData, public worldTransform: Mat4 ) {} } /** * GLSL vertex shader code */ let vsText = ` // Thank you for this excellent coverage of animation, toji! // https://github.com/toji/building-the-game/blob/part-3/public/js/skinned-model.js precision mediump float; uniform mat4 mModel; uniform mat4 mView; uniform mat4 mProj; uniform mat4 boneMat[${MAX_BONES_PER_MESH}]; attribute vec3 vPos; attribute vec3 vNorm; attribute vec4 vWeights; attribute vec4 vBones; varying vec3 fWorldPos; varying vec3 fWorldNormal; void main(void) { mat4 skinMat = vWeights.x * boneMat[int(vBones.x)]; skinMat += vWeights.y * boneMat[int(vBones.y)]; skinMat += vWeights.z * boneMat[int(vBones.z)]; skinMat += vWeights.w * boneMat[int(vBones.w)]; fWorldPos = (mModel * skinMat * vec4(vPos, 1.0)).xyz; fWorldNormal = (mModel * skinMat * vec4(vNorm, 0.0)).xyz; gl_Position = mProj * mView * vec4(fWorldPos, 1.0); } `; /** * GLSL fragment shader code */ let fsText = ` precision mediump float; struct Material { vec4 ambient; vec4 diffuse; }; struct Light { vec4 ambient; vec4 diffuse; vec3 direction; }; uniform Material objectMaterial; uniform Light sun; // uniform vec3 cameraPosition; varying vec3 fWorldPos; varying vec3 fWorldNormal; void main() { vec4 ambient = objectMaterial.ambient * sun.ambient; vec4 diffuse = vec4(0, 0, 0, 0); float diffuseFactor = clamp(-dot(sun.direction, fWorldNormal), 0.0, 1.0); diffuse = diffuseFactor * objectMaterial.diffuse * sun.diffuse; gl_FragColor = clamp(ambient + diffuse, 0.0, 1.0); } `; /** * GL program that can draw an object of a solid color with a bunch of animation data provided as well. * This draws the data produced by the animation system. 
*/ export class AnimatedEntityProgram { constructor() { this.gl = null; this.program = null; this.modelTransform = null; this.viewTransform = null; this.projTransform = null; this.bonesArray = null; this.lightAmbientColor = null; this.lightDiffuseColor = null; this.lightDiffuseColor = null; this.materialAmbientColor = null; this.materialDiffuseColor = null; // this.cameraPosition = null; } // // Uniform Locations // private gl: WebGLRenderingContext|null; private program: WebGLProgram|null; private modelTransform: WebGLUniformLocation|null; private viewTransform: WebGLUniformLocation|null; private projTransform: WebGLUniformLocation|null; private bonesArray: WebGLUniformLocation|null; private lightAmbientColor: WebGLUniformLocation|null; private lightDiffuseColor: WebGLUniformLocation|null; private lightDirection:WebGLUniformLocation|null; private materialAmbientColor: WebGLUniformLocation|null; private materialDiffuseColor: WebGLUniformLocation|null; // private cameraPosition: WebGLUniformLocation|null; // // Attributes // private positionAttrib: number; private normalAttrib: number; private weightsAttrib: number; private bonesAttrib: number; // // Public Interface // public prepare(gl: WebGLRenderingContext) { if (this.gl != gl) { // Compile Shaders let vs = gl.createShader(gl.VERTEX_SHADER); gl.shaderSource(vs, vsText); gl.compileShader(vs); if (!gl.getShaderParameter(vs, gl.COMPILE_STATUS)) { console.error('Failed to compile animated entity vertex shader: ', gl.getShaderInfoLog(vs)); return; } let fs = gl.createShader(gl.FRAGMENT_SHADER); gl.shaderSource(fs, fsText); gl.compileShader(fs); if (!gl.getShaderParameter(fs, gl.COMPILE_STATUS)) { console.error('Failed to compile animated entity fragment shader: ', gl.getShaderInfoLog(fs)); return; } this.program = gl.createProgram(); gl.attachShader(this.program, vs); gl.attachShader(this.program, fs); gl.linkProgram(this.program); if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) { console.error('Failed to 
link animated entity program: ', gl.getProgramInfoLog(this.program)); } // Get Uniform Handles this.modelTransform = gl.getUniformLocation(this.program, 'mModel'); this.viewTransform = gl.getUniformLocation(this.program, 'mView'); this.projTransform = gl.getUniformLocation(this.program, 'mProj'); this.bonesArray = gl.getUniformLocation(this.program, 'boneMat'); this.lightAmbientColor = gl.getUniformLocation(this.program, 'sun.ambient'); this.lightDiffuseColor = gl.getUniformLocation(this.program, 'sun.diffuse'); this.lightDirection = gl.getUniformLocation(this.program, 'sun.direction'); this.materialAmbientColor = gl.getUniformLocation(this.program, 'objectMaterial.ambient'); this.materialDiffuseColor = gl.getUniformLocation(this.program, 'objectMaterial.diffuse'); // this.cameraPosition = gl.getUniformLocation(this.program, 'cameraPosition'); // Get Attribute Handles this.positionAttrib = gl.getAttribLocation(this.program, 'vPos'); this.normalAttrib = gl.getAttribLocation(this.program, 'vNorm'); this.weightsAttrib = gl.getAttribLocation(this.program, 'vWeights'); this.bonesAttrib = gl.getAttribLocation(this.program, 'vBones'); // Set cached GL context (to be used until next context loss) this.gl = gl; } gl.useProgram(this.program); gl.enableVertexAttribArray(this.positionAttrib); gl.enableVertexAttribArray(this.normalAttrib); gl.enableVertexAttribArray(this.weightsAttrib); gl.enableVertexAttribArray(this.bonesAttrib); gl.enable(gl.DEPTH_TEST); gl.enable(gl.CULL_FACE); } public disengage(gl: WebGLRenderingContext) { if (this.gl != gl) return; gl.disableVertexAttribArray(this.bonesAttrib); gl.disableVertexAttribArray(this.weightsAttrib); gl.disableVertexAttribArray(this.normalAttrib); gl.disableVertexAttribArray(this.positionAttrib); gl.useProgram(null); } public setSceneData(gl: WebGLRenderingContext, projMatrix: Mat4, lightData: DirectionalLight) { if (this.gl != gl) return; if (!(this.projTransform && this.lightAmbientColor && this.lightDiffuseColor && 
this.lightDirection)) { return console.warn('Needed uniforms not set - aborting'); } gl.uniformMatrix4fv(this.projTransform, false, projMatrix.data); gl.uniform4fv(this.lightAmbientColor, lightData.ambientColor.data); gl.uniform4fv(this.lightDiffuseColor, lightData.diffuseColor.data); gl.uniform3fv(this.lightDirection, lightData.direction.data); } public setPerFrameData(gl: WebGLRenderingContext, viewMatrix: Mat4, cameraPos: Vec3) { if (this.gl != gl) return; // if (!(this.viewTransform && this.cameraPosition)) { if (!this.viewTransform) { return console.warn('Needed uniforms not set - aborting'); } gl.uniformMatrix4fv(this.viewTransform, false, viewMatrix.data); // gl.uniform3fv(this.cameraPosition, cameraPos.data); } public renderObject(gl: WebGLRenderingContext, call: AnimatedEntityRenderCall, bones: Float32Array) { if (this.gl != gl) return; if (!(this.modelTransform && this.materialAmbientColor && this.materialDiffuseColor && this.bonesArray)) { return console.warn('Needed uniforms not set - aborting'); } gl.uniformMatrix4fv(this.modelTransform, false, call.worldTransform.data); gl.uniform4fv(this.materialAmbientColor, call.modelData.material.ambient.data); gl.uniform4fv(this.materialDiffuseColor, call.modelData.material.diffuse.data); gl.uniformMatrix4fv(this.bonesArray, false, bones); call.modelData.prepare(gl); gl.bindBuffer(gl.ARRAY_BUFFER, call.modelData.vb); gl.vertexAttribPointer(this.positionAttrib, 3, gl.FLOAT, false, 14 * Float32Array.BYTES_PER_ELEMENT, 0); gl.vertexAttribPointer(this.normalAttrib, 3, gl.FLOAT, false, 14 * Float32Array.BYTES_PER_ELEMENT, 3 * Float32Array.BYTES_PER_ELEMENT); gl.vertexAttribPointer(this.weightsAttrib, 4, gl.FLOAT, false, 14 * Float32Array.BYTES_PER_ELEMENT, 6 * Float32Array.BYTES_PER_ELEMENT); gl.vertexAttribPointer(this.bonesAttrib, 4, gl.FLOAT, false, 14 * Float32Array.BYTES_PER_ELEMENT, 10 * Float32Array.BYTES_PER_ELEMENT); gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, call.modelData.ib); gl.drawElements(gl.TRIANGLES, 
call.modelData.indices.length, gl.UNSIGNED_SHORT, 0); } };
the_stack
import { mXparserConstants } from '../mXparserConstants';
import { MathFunctions } from './MathFunctions';
import { BinaryRelations } from './BinaryRelations';
import { javaemul } from 'j4ts/j4ts';

/**
 * BooleanAlgebra - class for boolean operators.
 *
 * Implements a three-valued logic (TRUE / FALSE / NULL, with NULL encoded as
 * NaN on the double side) via lookup tables indexed by double2IntBoolean().
 *
 * NOTE: this file is machine-transpiled from Java (j4ts); the *_$LI$()
 * methods are the transpiler's lazy static initializers and must be used
 * instead of reading the corresponding fields directly.
 *
 * @author <b>Mariusz Gromada</b><br>
 * <a href="mailto:mariuszgromada.org@gmail.com">mariuszgromada.org@gmail.com</a><br>
 * <a href="http://github.com/mariuszgromada/MathParser.org-mXparser" target="_blank">mXparser on GitHub</a><br>
 *
 * @version 4.3.0
 * @class
 */
export class BooleanAlgebra {
    /**
     * False as integer
     */
    public static FALSE: number = 0;

    /**
     * True as integer
     */
    public static TRUE: number = 1;

    /**
     * Null as integer
     */
    public static NULL: number = 2;

    /**
     * False as double
     */
    public static F: number = 0;

    /**
     * True as double
     */
    public static T: number = 1;

    /**
     * Null as double (NaN); initialized lazily via N_$LI$().
     */
    public static N: number;
    public static N_$LI$(): number {
        if (BooleanAlgebra.N == null) {
            BooleanAlgebra.N = javaemul.internal.DoubleHelper.NaN;
        }
        return BooleanAlgebra.N;
    }

    /**
     * AND truth table
     */
    public static AND_TRUTH_TABLE: number[][];
    public static AND_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.AND_TRUTH_TABLE == null) {
            BooleanAlgebra.AND_TRUTH_TABLE = [[BooleanAlgebra.F, BooleanAlgebra.F, BooleanAlgebra.F], [BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.F, BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.AND_TRUTH_TABLE;
    }

    /**
     * NAND truth table
     */
    public static NAND_TRUTH_TABLE: number[][];
    public static NAND_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.NAND_TRUTH_TABLE == null) {
            BooleanAlgebra.NAND_TRUTH_TABLE = [[BooleanAlgebra.T, BooleanAlgebra.T, BooleanAlgebra.T], [BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.T, BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.NAND_TRUTH_TABLE;
    }

    /**
     * OR truth table
     */
    public static OR_TRUTH_TABLE: number[][];
    public static OR_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.OR_TRUTH_TABLE == null) {
            BooleanAlgebra.OR_TRUTH_TABLE = [[BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.T, BooleanAlgebra.T, BooleanAlgebra.T], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.T, BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.OR_TRUTH_TABLE;
    }

    /**
     * NOR truth table
     */
    public static NOR_TRUTH_TABLE: number[][];
    public static NOR_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.NOR_TRUTH_TABLE == null) {
            BooleanAlgebra.NOR_TRUTH_TABLE = [[BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.F, BooleanAlgebra.F, BooleanAlgebra.F], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.F, BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.NOR_TRUTH_TABLE;
    }

    /**
     * XOR truth table
     */
    public static XOR_TRUTH_TABLE: number[][];
    public static XOR_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.XOR_TRUTH_TABLE == null) {
            BooleanAlgebra.XOR_TRUTH_TABLE = [[BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.XOR_TRUTH_TABLE;
    }

    /**
     * XNOR truth table
     */
    public static XNOR_TRUTH_TABLE: number[][];
    public static XNOR_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.XNOR_TRUTH_TABLE == null) {
            BooleanAlgebra.XNOR_TRUTH_TABLE = [[BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.XNOR_TRUTH_TABLE;
    }

    /**
     * IMP truth table
     */
    public static IMP_TRUTH_TABLE: number[][];
    public static IMP_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.IMP_TRUTH_TABLE == null) {
            BooleanAlgebra.IMP_TRUTH_TABLE = [[BooleanAlgebra.T, BooleanAlgebra.T, BooleanAlgebra.T], [BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.T, BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.IMP_TRUTH_TABLE;
    }

    /**
     * CIMP truth table
     */
    public static CIMP_TRUTH_TABLE: number[][];
    public static CIMP_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.CIMP_TRUTH_TABLE == null) {
            BooleanAlgebra.CIMP_TRUTH_TABLE = [[BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.T, BooleanAlgebra.T, BooleanAlgebra.T], [BooleanAlgebra.T, BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.CIMP_TRUTH_TABLE;
    }

    /**
     * EQV truth table
     */
    public static EQV_TRUTH_TABLE: number[][];
    public static EQV_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.EQV_TRUTH_TABLE == null) {
            BooleanAlgebra.EQV_TRUTH_TABLE = [[BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.EQV_TRUTH_TABLE;
    }

    /**
     * NIMP truth table
     */
    public static NIMP_TRUTH_TABLE: number[][];
    public static NIMP_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.NIMP_TRUTH_TABLE == null) {
            BooleanAlgebra.NIMP_TRUTH_TABLE = [[BooleanAlgebra.F, BooleanAlgebra.F, BooleanAlgebra.F], [BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.N_$LI$(), BooleanAlgebra.F, BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.NIMP_TRUTH_TABLE;
    }

    /**
     * CNIMP truth table
     */
    public static CNIMP_TRUTH_TABLE: number[][];
    public static CNIMP_TRUTH_TABLE_$LI$(): number[][] {
        if (BooleanAlgebra.CNIMP_TRUTH_TABLE == null) {
            BooleanAlgebra.CNIMP_TRUTH_TABLE = [[BooleanAlgebra.F, BooleanAlgebra.T, BooleanAlgebra.N_$LI$()], [BooleanAlgebra.F, BooleanAlgebra.F, BooleanAlgebra.F], [BooleanAlgebra.F, BooleanAlgebra.N_$LI$(), BooleanAlgebra.N_$LI$()]];
        }
        return BooleanAlgebra.CNIMP_TRUTH_TABLE;
    }

    /**
     * NOT truth table
     */
    public static NOT_TRUTH_TABLE: number[];
    public static NOT_TRUTH_TABLE_$LI$(): number[] {
        if (BooleanAlgebra.NOT_TRUTH_TABLE == null) {
            BooleanAlgebra.NOT_TRUTH_TABLE = [BooleanAlgebra.T, BooleanAlgebra.F, BooleanAlgebra.N_$LI$()];
        }
        return BooleanAlgebra.NOT_TRUTH_TABLE;
    }

    /**
     * Double to integer boolean translation
     *
     * @param {number} a the double number
     *
     * @return {number} If a = Double.NaN return NULL,
     *         else if a &lt;&gt; 0 return TRUE,
     *         else return FALSE.
     *         When epsilon comparison is enabled, |a| &gt; epsilon is used
     *         instead of a &lt;&gt; 0.
     */
    public static double2IntBoolean(a: number): number {
        if (/* isNaN */isNaN(a))return BooleanAlgebra.NULL;
        if (BinaryRelations.epsilonComparison){
            if (MathFunctions.abs(a) > BinaryRelations.epsilon_$LI$())return BooleanAlgebra.TRUE;
            else return BooleanAlgebra.FALSE;
        } else {
            if (a !== 0)return BooleanAlgebra.TRUE;
            else return BooleanAlgebra.FALSE;
        }
    }

    /**
     * Boolean AND
     *
     * @param {number} a the a number (a AND b)
     * @param {number} b the b number (a AND b)
     *
     * @return {number} Truth table element AND[A][B] where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static and(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.AND_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean OR
     *
     * @param {number} a the a number (a OR b)
     * @param {number} b the b number (a OR b)
     *
     * @return {number} Truth table element OR[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static or(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.OR_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean XOR
     *
     * @param {number} a the a number (a XOR b)
     * @param {number} b the b number (a XOR b)
     *
     * @return {number} Truth table element XOR[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static xor(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.XOR_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean NAND
     *
     * @param {number} a the a number (a NAND b)
     * @param {number} b the b number (a NAND b)
     *
     * @return {number} Truth table element NAND[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static nand(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.NAND_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean NOR
     *
     * @param {number} a the a number (a NOR b)
     * @param {number} b the b number (a NOR b)
     *
     * @return {number} Truth table element NOR[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static nor(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.NOR_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean XNOR
     *
     * @param {number} a the a number (a XNOR b)
     * @param {number} b the b number (a XNOR b)
     *
     * @return {number} Truth table element XNOR[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static xnor(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.XNOR_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean IMP
     *
     * @param {number} a the a number (a IMP b)
     * @param {number} b the b number (a IMP b)
     *
     * @return {number} Truth table element IMP[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static imp(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.IMP_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean EQV
     *
     * @param {number} a the a number (a EQV b)
     * @param {number} b the b number (a EQV b)
     *
     * @return {number} Truth table element EQV[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static eqv(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.EQV_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean NOT
     *
     * @param {number} a the a number (NOT a)
     *
     * @return {number} Truth table element NOT[A]
     *         where A = double2IntBoolean(a)
     */
    public static not(a: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        return BooleanAlgebra.NOT_TRUTH_TABLE_$LI$()[A];
    }

    /**
     * Boolean CIMP
     *
     * @param {number} a the a number (a CIMP b)
     * @param {number} b the b number (a CIMP b)
     *
     * @return {number} Truth table element CIMP[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static cimp(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.CIMP_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean NIMP
     *
     * @param {number} a the a number (a NIMP b)
     * @param {number} b the b number (a NIMP b)
     *
     * @return {number} Truth table element NIMP[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static nimp(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.NIMP_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean CNIMP
     *
     * @param {number} a the a number (a CNIMP b)
     * @param {number} b the b number (a CNIMP b)
     *
     * @return {number} Truth table element CNIMP[A][B]
     *         where A = double2IntBoolean(a), B = double2IntBoolean(b)
     */
    public static cnimp(a: number, b: number): number {
        const A: number = BooleanAlgebra.double2IntBoolean(a);
        const B: number = BooleanAlgebra.double2IntBoolean(b);
        return BooleanAlgebra.CNIMP_TRUTH_TABLE_$LI$()[A][B];
    }

    /**
     * Boolean AND variadic
     *
     * @param {double[]} values List of values
     * @return {number} Returns BooleanAlgebra.TRUE if all values on the list are BooleanAlgebra.TRUE,
     *         BooleanAlgebra.FALSE if any value is FALSE, and NaN otherwise
     *         (null/empty list, NULL values present, or calculation cancelled).
     */
    public static andVariadic(values: number[]): number {
        if (values == null)return javaemul.internal.DoubleHelper.NaN;
        if (values.length === 0)return javaemul.internal.DoubleHelper.NaN;
        let cntTrue: number = 0;
        let bv: number;
        for(let index137=0; index137 < values.length; index137++) {
            let v = values[index137];
            {
                bv = BooleanAlgebra.double2IntBoolean(v);
                if (bv === BooleanAlgebra.FALSE)return BooleanAlgebra.FALSE;
                if (bv === BooleanAlgebra.TRUE)cntTrue++;
                if (mXparserConstants.isCurrentCalculationCancelled())return javaemul.internal.DoubleHelper.NaN;
            }
        }
        if (cntTrue === values.length)return BooleanAlgebra.TRUE; else return javaemul.internal.DoubleHelper.NaN;
    }

    /**
     * Boolean OR variadic
     *
     * @param {double[]} values List of values
     * @return {number} Returns BooleanAlgebra.TRUE if at least one value on the list is BooleanAlgebra.TRUE,
     *         BooleanAlgebra.FALSE if all values are FALSE, and NaN otherwise
     *         (null/empty list, NULL values present, or calculation cancelled).
     */
    public static orVariadic(values: number[]): number {
        if (values == null)return javaemul.internal.DoubleHelper.NaN;
        if (values.length === 0)return javaemul.internal.DoubleHelper.NaN;
        let cntFalse: number = 0;
        let bv: number;
        for(let index138=0; index138 < values.length; index138++) {
            let v = values[index138];
            {
                bv = BooleanAlgebra.double2IntBoolean(v);
                if (bv === BooleanAlgebra.TRUE)return BooleanAlgebra.TRUE;
                if (bv === BooleanAlgebra.FALSE)cntFalse++;
                if (mXparserConstants.isCurrentCalculationCancelled())return javaemul.internal.DoubleHelper.NaN;
            }
        }
        if (cntFalse === values.length)return BooleanAlgebra.FALSE; else return javaemul.internal.DoubleHelper.NaN;
    }

    /**
     * Boolean XOR variadic
     *
     * @param {double[]} values List of values
     * @return {number} Returns BooleanAlgebra.TRUE if exactly one value on the list is BooleanAlgebra.TRUE,
     *         BooleanAlgebra.FALSE otherwise; NaN for a null/empty list,
     *         when a NULL value is encountered, or when the calculation is cancelled.
     */
    public static xorVariadic(values: number[]): number {
        if (values == null)return javaemul.internal.DoubleHelper.NaN;
        if (values.length === 0)return javaemul.internal.DoubleHelper.NaN;
        let cntTrue: number = 0;
        let bv: number;
        for(let index139=0; index139 < values.length; index139++) {
            let v = values[index139];
            {
                bv = BooleanAlgebra.double2IntBoolean(v);
                if (bv === BooleanAlgebra.TRUE){
                    cntTrue++;
                    if (cntTrue > 1)return BooleanAlgebra.FALSE;
                }
                if (bv === BooleanAlgebra.NULL)return javaemul.internal.DoubleHelper.NaN;
                if (mXparserConstants.isCurrentCalculationCancelled())return javaemul.internal.DoubleHelper.NaN;
            }
        }
        if (cntTrue === 1)return BooleanAlgebra.TRUE; else return BooleanAlgebra.FALSE;
    }
}
BooleanAlgebra["__class"] = "org.mariuszgromada.math.mxparser.mathcollection.BooleanAlgebra";
the_stack
import { LoggerManager, Math2D } from "@here/harp-utils";
import * as THREE from "three";

import { debugContext } from "./DebugContext";

declare const require: any;
const RBush = require("rbush");

// NOTE(review): logger channel name keeps the historical "Collissions"
// spelling; renaming it would change the log channel consumers filter on.
const logger = LoggerManager.instance.create("ScreenCollissions");

/** Axis-aligned bounds in the min/max form expected by the rbush index. */
export interface IBox {
    minX: number;
    minY: number;
    maxX: number;
    maxY: number;
}

/**
 * A Math2D.Box that also exposes the IBox min/max accessor interface, so the
 * same object can be used both for geometric math and as an rbush entry.
 */
export class CollisionBox extends Math2D.Box implements IBox {
    constructor(box?: Math2D.Box | THREE.Box2 | IBox) {
        super();
        if (box !== undefined) {
            this.copy(box);
        }
    }

    /** Copies the bounds from any of the three supported box representations. */
    copy(box: Math2D.Box | THREE.Box2 | IBox): CollisionBox {
        if (box instanceof Math2D.Box) {
            this.set(box.x, box.y, box.w, box.h);
        } else if (box instanceof THREE.Box2) {
            this.set(box.min.x, box.min.y, box.max.x - box.min.x, box.max.y - box.min.y);
        } else {
            this.set(box.minX, box.minY, box.maxX - box.minX, box.maxY - box.minY);
        }
        return this;
    }

    // IBox accessors mapping min/max form onto the underlying x/y/w/h fields.
    get minX(): number {
        return this.x;
    }
    set minX(minX: number) {
        this.x = minX;
    }
    get maxX(): number {
        return this.x + this.w;
    }
    set maxX(maxX: number) {
        this.w = maxX - this.x;
    }
    get minY(): number {
        return this.y;
    }
    set minY(minY: number) {
        this.y = minY;
    }
    get maxY(): number {
        return this.y + this.h;
    }
    set maxY(maxY: number) {
        this.h = maxY - this.y;
    }
}

/**
 * Collision box with additional boxes defining tighter bounds for the enclosed feature
 * (e.g.glyph bounds for text).
 */
export class DetailedCollisionBox extends CollisionBox {
    constructor(box: Math2D.Box | THREE.Box2 | IBox, readonly detailBoxes: CollisionBox[]) {
        super(box);
    }
}

/** A line segment plus the bounding box used to index it in the rtree. */
export interface LineWithBound extends IBox {
    line: THREE.Line3;
}

/** Type guard: an IBox is a LineWithBound when it carries a `line` member. */
export function isLineWithBound(box: IBox): box is LineWithBound {
    return (box as LineWithBound).line !== undefined;
}

// Scratch box reused by isAllocated() to avoid per-call allocations.
const tmpCollisionBox = new CollisionBox();

export class ScreenCollisions {
    /** The screen bounding box. */
    readonly screenBounds = new Math2D.Box();

    /** Tree of allocated bounds. */
    private readonly rtree = new RBush();

    /**
     * Constructs a new ScreenCollisions object.
     */
    constructor() {
        //
    }

    /**
     * Resets the list of allocated screen bounds.
     */
    reset() {
        this.rtree.clear();
    }

    /**
     * Updates the screen bounds that are used to check if bounding boxes are visible.
     *
     * @param width - The width of the container.
     * @param height - The height of the container.
     */
    update(width: number, height: number) {
        // Screen bounds are centered on the origin (NDC-scaled coordinates).
        this.screenBounds.set(width / -2, height / -2, width, height);
        this.reset();
    }

    /**
     * Marks the region of the screen intersecting with the given bounding box as allocated.
     *
     * @param bounds - The bounding box in NDC scaled coordinates (i.e. top left is -width/2,
     * -height/2)
     */
    allocate(bounds: Math2D.Box | CollisionBox | DetailedCollisionBox): void {
        const bbox = !(bounds instanceof CollisionBox) ? new CollisionBox(bounds) : bounds;
        this.rtree.insert(bbox);
    }

    /**
     * Inserts the given bounds into the rtree.
     *
     * @param bounds - The bounding boxes (the bounding boxes must be in the space returned from the
     * ScreenProjector.project method).
     */
    allocateIBoxes(bounds: IBox[]) {
        this.rtree.load(bounds);
    }

    /**
     * Search for all bounds in the tree intersecting with the given box.
     * @param box - The box used for the search.
     * @returns An array of all IBoxes intersecting with the given box.
     */
    search(box: CollisionBox): IBox[] {
        return this.rtree.search(box);
    }

    /**
     * Checks if the given bounding box is already allocated.
     *
     * @param bounds - The bounding box in world coordinates.
     */
    isAllocated(bounds: Math2D.Box | CollisionBox): boolean {
        const collisionBox = bounds instanceof CollisionBox ? bounds : tmpCollisionBox.copy(bounds);
        const results = this.search(collisionBox);
        return this.intersectsDetails(collisionBox, results);
    }

    /**
     * Checks if the given screen bounds intersects with the frustum of the active camera.
     *
     * @param bounds - The bounding box in world coordinates.
     */
    isVisible(bounds: Math2D.Box): boolean {
        return this.screenBounds.intersects(bounds);
    }

    /**
     * Checks if the given screen bounds is contained within the frustum of the active camera.
     *
     * @param bounds - The bounding box in world coordinates.
     */
    isFullyVisible(bounds: Math2D.Box): boolean {
        return this.screenBounds.containsBox(bounds);
    }

    /**
     * Test whether a given [[CollisionBox]] intersects with any of the details in the specified
     * [[IBox]]es.
     *
     * @param testBox - The box to test for intersection.
     * @param boxes - The candidate boxes the test box may intersect with. It's assumed that the
     * global bounds of these boxes intersect with the given test box.
     * @returns `true` if any intersection found.
     */
    intersectsDetails(testBox: CollisionBox, boxes: IBox[]): boolean {
        for (const box of boxes) {
            if (box instanceof DetailedCollisionBox) {
                // Only collide when one of the tighter detail boxes overlaps.
                for (const detailBox of box.detailBoxes) {
                    if (detailBox.intersects(testBox)) {
                        return true;
                    }
                }
            } else if (isLineWithBound(box)) {
                const boundedLine = box as LineWithBound;
                if (this.intersectsLine(testBox, boundedLine)) {
                    return true;
                }
            } else {
                // Plain boxes have no detail: the rtree hit itself is a collision.
                return true;
            }
        }
        return false;
    }

    /**
     * Computes the intersection between the supplied CollisionBox and the LineWithBound.
     * @note The [[CollisionBox]] is in Screen Bounds space, whereas the line must be
     * in Screen Coordinate space
     */
    private intersectsLine(bbox: CollisionBox, boundedLine: LineWithBound): boolean {
        const line = boundedLine.line;

        // Note, these aren't normalized, but it doesn't matter, we are just interested
        // in the sign.
        const lineXDiffTransformed = line.end.x - line.start.x;

        // Sign of bottom left, bottom right, top left and top right corners.
        let signBL: number;
        let signBR: number;
        let signTL: number;
        let signTR: number;
        if (lineXDiffTransformed !== 0) {
            const lineYDiffTransformed = line.end.y - line.start.y;
            const normalX = lineYDiffTransformed;
            const normalY = -lineXDiffTransformed;
            // D is the line's y-intercept; each sign test below evaluates on
            // which side of the (infinite) line a box corner lies.
            const D = line.start.y - (lineYDiffTransformed / lineXDiffTransformed) * line.start.x;
            signBL = Math.sign(bbox.minX * normalX + (bbox.minY - D) * normalY);
            signBR = Math.sign(bbox.maxX * normalX + (bbox.minY - D) * normalY);
            signTL = Math.sign(bbox.minX * normalX + (bbox.maxY - D) * normalY);
            signTR = Math.sign(bbox.maxX * normalX + (bbox.maxY - D) * normalY);
        } else {
            // Vertical line: compare corner x coordinates against the line's x.
            signBL = Math.sign(bbox.minX - line.start.x);
            signBR = Math.sign(bbox.maxX - line.start.x);
            signTL = Math.sign(bbox.minX - line.start.x);
            signTR = Math.sign(bbox.maxX - line.start.x);
        }
        // Corners on different sides of the line imply the segment's carrier
        // line crosses the box (the rtree pre-filter bounds the segment).
        return signBL !== signBR || signBL !== signTL || signBL !== signTR;
    }
}

/**
 * @hidden
 *
 * Shows requests for screen space during labelling in an HTML canvas, which should be sized like
 * the actual map canvas. It can be placed on top of the map canvas to show exactly which requests
 * for screen space were done.
 *
 * Also logs statistics.
 */
export class ScreenCollisionsDebug extends ScreenCollisions {
    /** 2D rendering context. */
    private m_renderContext: CanvasRenderingContext2D | null = null;
    private m_renderingEnabled = false;
    private m_numAllocations = 0;
    private m_numSuccessfulTests = 0;
    private m_numFailedTests = 0;
    private m_numSuccessfulVisibilityTests = 0;
    private m_numFailedVisibilityTests = 0;

    /**
     * Constructs a new ScreenCollisions object which renders its state to a 2D canvas.
     */
    constructor(debugCanvas: HTMLCanvasElement) {
        super();

        if (debugCanvas !== undefined && debugCanvas !== null) {
            this.m_renderContext = debugCanvas.getContext("2d");
        }
    }

    /**
     * Resets the list of allocated bounds and clears the debug canvas.
     * @override
     */
    reset() {
        super.reset();

        this.m_numAllocations = 0;
        this.m_numSuccessfulTests = 0;
        this.m_numFailedTests = 0;
        this.m_numSuccessfulVisibilityTests = 0;
        this.m_numFailedVisibilityTests = 0;
    }

    /**
     * Updates the screen bounds used to check if bounding boxes are visible.
     *
     * @param width - The width of the container.
     * @param height - The height of the container.
     * @override
     */
    update(width: number, height: number) {
        if (this.m_renderingEnabled) {
            logger.log(
                `Allocations: ${this.m_numAllocations} Successful Tests: ${this.m_numSuccessfulTests} Failed Tests: ${this.m_numFailedTests}  Successful Visibility Tests: ${this.m_numSuccessfulVisibilityTests}  Failed Visibility Tests: ${this.m_numFailedVisibilityTests} `
            );
        }

        super.update(width, height);

        if (this.m_renderContext !== null) {
            this.m_renderContext.canvas.width = width;
            this.m_renderContext.canvas.height = height;
        }

        // activate in the browser with:
        // window.__debugContext.setValue("DEBUG_SCREEN_COLLISIONS", true)
        this.m_renderingEnabled = debugContext.getValue("DEBUG_SCREEN_COLLISIONS");
    }

    /**
     * Marks the region of the screen intersecting with the given bounding box as allocated.
     *
     * @param bounds - the bounding box in world coordinates.
* @override */ allocate(bounds: Math2D.Box | CollisionBox): void { super.allocate(bounds); this.m_numAllocations++; if (this.m_renderingEnabled && this.m_renderContext !== null) { this.m_renderContext.strokeStyle = "#6666ff"; this.m_renderContext.strokeRect( bounds.x - this.screenBounds.x, this.screenBounds.y + this.screenBounds.h - bounds.y, bounds.w, -bounds.h ); } } /** @override */ allocateIBoxes(boundsArray: IBox[]) { for (const bounds of boundsArray) { this.m_numAllocations++; if (this.m_renderingEnabled && this.m_renderContext !== null) { this.m_renderContext.strokeStyle = "#aa2222"; this.m_renderContext.strokeRect( bounds.minX - this.screenBounds.x, this.screenBounds.y + this.screenBounds.h - bounds.minY, bounds.maxX - bounds.minX, -(bounds.maxY - bounds.minY) ); } } super.allocateIBoxes(boundsArray); } /** @override */ intersectsDetails(testBox: CollisionBox, boxes: IBox[]): boolean { const collisionFound = super.intersectsDetails(testBox, boxes); if (this.m_renderingEnabled && this.m_renderContext !== null) { const padding = collisionFound ? 2 : 1; this.m_renderContext.strokeStyle = collisionFound ? "#FF0000" : "#00ff00"; this.m_renderContext.strokeRect( testBox.x - this.screenBounds.x - padding, this.screenBounds.y + this.screenBounds.h - testBox.y + padding, testBox.w + 2 * padding, -testBox.h - 2 * padding ); } if (collisionFound) { this.m_numFailedTests++; } else { this.m_numSuccessfulTests++; } return collisionFound; } /** * Checks if the given screen bounds intersects with the frustum of the active camera. * * @param bounds - The bounding box in world coordinates. * @override */ isVisible(bounds: Math2D.Box): boolean { const visible = super.isVisible(bounds); if (visible) { this.m_numSuccessfulVisibilityTests++; } else { this.m_numFailedVisibilityTests++; } return visible; } }
the_stack
import React, {
    useContext,
    useRef,
    useEffect,
    useState,
    useCallback,
} from "react";
import {
    SharedInterpolationContext,
    TransitionItem,
    TransitionItemContextType,
    SharedInterpolationType,
    SharedInterpolationStatus,
    OnAnimationFunction,
    StateContextType,
    SharedInterpolationInfo,
    Style,
} from "../Types";
import { StyleSheet } from "react-native";
import { useForceUpdate } from "../../Hooks";
import { useLog } from "../../Hooks/useLog";
import {
    createSharedInterpolation,
    setupSharedInterpolation,
    getStates,
    getStateNameForLabel,
} from "../../Shared";
import { TransitionView } from "../index";
import { fluidException, fluidInternalException } from "../../Types";
import {
    ConfigAnimationType,
    createConfig,
    ChildAnimationDirection,
    SafeStateConfigType,
} from "../../Configuration";

/**
 * Hook that manages shared-element interpolations for a transition item.
 *
 * Tracks interpolations through a status lifecycle
 * (Created -> Preparing -> Active -> Removing -> Done), exposes registration
 * callbacks for child contexts, and returns a render helper that mounts an
 * overlay containing the animated clones.
 *
 * @param transitionItem - The owning transition item.
 * @param transitionItemContext - Accessor for items known to this context.
 * @param configuration - State configuration; mutated by decorateConfiguration.
 * @param stateContext - State container; transition states are appended to it.
 * @param currentDirection - Optional child animation direction for new interpolations.
 */
export const useSharedInterpolation = (
    transitionItem: TransitionItem,
    transitionItemContext: TransitionItemContextType,
    configuration: SafeStateConfigType,
    stateContext: StateContextType,
    currentDirection: ChildAnimationDirection | undefined,
) => {
    const sharedInterpolations = useRef<Array<SharedInterpolationType>>([]);
    const [sharedInterpolationInfos, setSharedInterpolationInfos] = useState<
        Array<SharedInterpolationInfo>
    >([]);
    const sharedInterpolatorContext = useContext(SharedInterpolationContext);
    const forceUpdate = useForceUpdate();
    const logger = useLog(transitionItem.label, "shared");

    // State name for this transition item's shared element transition
    const sharedTransitionStateName = getStateNameForLabel(transitionItem.label);
    decorateConfiguration(sharedTransitionStateName, configuration);

    // Registers that a from/to label pair could form a shared transition, either
    // in this context (when both items are known here) or in an ancestor context.
    const registerSharedInterpolationInfo = useCallback(
        (fromLabel: string, toLabel: string) => {
            const exinstingInfo = sharedInterpolationInfos.find(
                p => p.fromLabel === fromLabel && p.toLabel === toLabel,
            );
            if (exinstingInfo) {
                return;
            }
            // Check if we can get the source/target items from this context
            const toItem = transitionItemContext.getTransitionItemByLabel(toLabel);
            const fromItem = transitionItemContext.getTransitionItemByLabel(
                fromLabel,
            );
            // Check if we have found these two items
            if (fromItem && toItem) {
                setSharedInterpolationInfos(p => [
                    ...p,
                    {
                        fromLabel,
                        toLabel,
                    },
                ]);
            } else if (sharedInterpolatorContext) {
                // Walk up the tree
                sharedInterpolatorContext.registerSharedInterpolationInfo(
                    fromLabel,
                    toLabel,
                );
            }
        },
        [
            sharedInterpolationInfos,
            sharedInterpolatorContext,
            transitionItemContext,
        ],
    );

    // Creates and starts a shared interpolation between two items, delegating to
    // an ancestor context when this context does not know both items.
    const registerSharedInterpolation = useCallback(
        async (
            item: TransitionItem,
            fromLabel: string,
            toLabel: string,
            animation?: ConfigAnimationType,
            onBegin?: OnAnimationFunction,
            onEnd?: OnAnimationFunction,
        ) => {
            // Check if we can get the source item from this context
            const ownerItem = transitionItemContext.getOwner();
            const toItem = transitionItemContext.getTransitionItemByLabel(
                item.label || "unknown",
            );
            let overriddenFromStyle: Style | undefined;
            // Check if current context knows about both to/from items
            const fromItem = transitionItemContext.getTransitionItemByLabel(
                fromLabel,
            );
            if (fromItem && toItem) {
                if (__DEV__) {
                    logger(
                        () =>
                            "Starting Shared Transition from " +
                            fromItem.label +
                            " -> " +
                            toItem.label,
                    );
                }
                // Check if there is already an interpolation running here
                const existingInterpolation = sharedInterpolations.current.find(
                    p =>
                        (p.fromLabel === fromLabel && p.toLabel === toLabel) ||
                        (p.fromLabel === toLabel && p.toLabel === fromLabel),
                );
                if (existingInterpolation) {
                    // There is already a shared interpolation going on here. We
                    // should stop it and transfer the style values to the
                    // new interpolation
                    const fromItemClone = transitionItemContext.getTransitionItemByLabel(
                        existingInterpolation.fromCloneLabel,
                    );
                    if (!fromItemClone) {
                        throw fluidInternalException(
                            "Could not find clones in onging interpolation.",
                        );
                    }
                    // Overridden "from" style, carried over from the interrupted run
                    overriddenFromStyle = fromItemClone.getCalculatedStyles();
                }
                // Create interpolation
                const sharedInterpolation: SharedInterpolationType = createSharedInterpolation(
                    fromItem,
                    toItem,
                    currentDirection,
                    animation,
                    onBegin,
                    onEnd,
                );
                // Create onAnimationDone: Active -> Removing when the animation ends
                sharedInterpolation.onAnimationDone = () => {
                    const s = sharedInterpolations.current.find(
                        si => si.id === sharedInterpolation.id,
                    );
                    if (s && s.status === SharedInterpolationStatus.Active) {
                        s.status = SharedInterpolationStatus.Removing;
                        forceUpdate();
                        // TODO: Callback to user-land?
                    }
                };
                // Removing -> Done once the removal pass has finished
                sharedInterpolation.onAnimationFinished = () => {
                    const s = sharedInterpolations.current.find(
                        si => si.id === sharedInterpolation.id,
                    );
                    if (s && s.status === SharedInterpolationStatus.Removing) {
                        s.status = SharedInterpolationStatus.Done;
                        forceUpdate();
                        // TODO: Callback to user-land?
                    }
                };
                // Add to list of preparing shared interpolations
                sharedInterpolations.current.push(sharedInterpolation);
                // Start setting up the shared interpolation
                sharedInterpolation.setupPromise = setupSharedInterpolation(
                    sharedInterpolation,
                    ownerItem,
                    overriddenFromStyle,
                );
            } else if (sharedInterpolatorContext) {
                // Walk up the tree to find parent root and start shared
                // interpolation from there.
                sharedInterpolatorContext.registerSharedInterpolation(
                    item,
                    fromLabel,
                    toLabel,
                    animation,
                    onBegin,
                    onEnd,
                );
            } else {
                throw fluidException(
                    "No container found for shared element transition. " +
                        "Remember to wrap shared elements in a parent Fluid.View.",
                );
            }
        },
        [
            transitionItemContext,
            sharedInterpolatorContext,
            currentDirection,
            logger,
            forceUpdate,
        ],
    );

    // Moves all Created interpolations to Preparing, waits for their setup
    // promises, then marks them Active and re-renders.
    const setupPendingTransitions = useCallback(() => {
        // Handle pending shared transitions
        const pendingSharedTransitions = sharedInterpolations.current.filter(
            p => p.status === SharedInterpolationStatus.Created,
        );
        // Skip all logic if we don't have any pending transitions
        if (pendingSharedTransitions.length === 0) return;
        // Mark as preparing
        pendingSharedTransitions.forEach(
            si => (si.status = SharedInterpolationStatus.Preparing),
        );
        // Wait for all shared transitions to be set up
        const promises = pendingSharedTransitions.map(s => s.setupPromise);
        if (pendingSharedTransitions.length > 0) {
            Promise.all(promises).then(async () => {
                // Update status to active
                pendingSharedTransitions.forEach(
                    si => (si.status = SharedInterpolationStatus.Active),
                );
                forceUpdate();
            });
        }
    }, [forceUpdate]);

    // Marks Active interpolations as Done when a newer interpolation for the
    // same from/to pair has superseded them.
    const removeOverwrittenTransitions = () => {
        let shouldForceUpdate = false;
        sharedInterpolations.current.forEach(si => {
            if (
                si.status === SharedInterpolationStatus.Active &&
                hasOverwrittenTransition(si, sharedInterpolations.current)
            ) {
                si.status = SharedInterpolationStatus.Done;
                shouldForceUpdate = true;
            }
        });
        if (shouldForceUpdate) {
            forceUpdate();
        }
    };

    // NOTE(review): no dependency array — this effect intentionally runs after
    // every render to pick up newly Created interpolations.
    useEffect(() => {
        removeOverwrittenTransitions();
        // Let's skip out here to avoid setting up async contexts in functions below
        if (
            sharedInterpolations.current.filter(
                p => p.status === SharedInterpolationStatus.Created,
            ).length === 0
        ) {
            return;
        }
        // Otherwise let's start setting up shared transitions
        setupPendingTransitions();
    });

    // Find elements to render
    let sharedElementsToRender = sharedInterpolations.current.filter(
        p =>
            p.status === SharedInterpolationStatus.Active ||
            p.status === SharedInterpolationStatus.Removing,
    );

    // Remove elements that have status Done
    sharedInterpolations.current = removeDeadTransitions(
        sharedInterpolations.current,
    );

    // Add states
    const transitionStates = getStates(
        sharedInterpolationInfos,
        sharedInterpolations.current,
    );
    // NOTE(review): unconditional console.log on every render with active
    // transition states — looks like leftover debug output; confirm and remove.
    if (transitionStates.length > 0) {
        console.log(
            transitionStates.map(s => `${s.name} (${s.active ? 1 : 0})`).join(", "),
        );
    }
    stateContext.states = [...stateContext.states, ...transitionStates];

    // Wraps the given component and, when shared elements are animating, mounts
    // a non-interactive overlay containing the from/to clones.
    const renderSharedOverlay = (
        Component: any,
        props: any,
        hasChildren: boolean,
    ): React.ReactChild => {
        if (!hasChildren) return <Component {...props} />;
        const { children, ...rest } = props;
        return (
            <Component {...rest}>
                {children}
                {sharedElementsToRender.length > 0 && (
                    <TransitionView
                        pointerEvents={"none"}
                        label={"__sharedOverlay"}
                        staticStyle={styles.overlayContainer}
                        config={createConfig({ childAnimation: { type: "parallel" } })}>
                        {sharedElementsToRender.map(c => [c.toClone, c.fromClone])}
                    </TransitionView>
                )}
            </Component>
        );
    };

    return {
        renderSharedOverlay,
        sharedInterpolationContext: {
            registerSharedInterpolation,
            registerSharedInterpolationInfo,
        },
    };
};

const styles = StyleSheet.create({
    overlayContainer: {
        ...StyleSheet.absoluteFillObject,
        // overflow: "hidden"
        // borderWidth: StyleSheet.hairlineWidth,
        // borderColor: "#00FF0044",
        // backgroundColor: "#00FF0022",
    },
});

// Adds enter/exit opacity interpolations for the shared-transition state so the
// original items fade out while their clones animate, and fade back in after.
const decorateConfiguration = (
    sharedTransitionStateName: string,
    configuration: SafeStateConfigType,
) => {
    configuration.onEnter.push({
        state: sharedTransitionStateName,
        animation: {
            type: "timing",
            duration: 100,
        },
        interpolation: {
            styleKey: "opacity",
            inputRange: [0, 0.999, 1],
            outputRange: [1, 0, 0],
        },
    });
    configuration.onExit.push({
        state: sharedTransitionStateName,
        animation: {
            type: "timing",
            duration: 100,
        },
        interpolation: {
            styleKey: "opacity",
            inputRange: [0, 0.001, 1],
            outputRange: [0, 1, 1],
        },
    });
};

// True when another Active interpolation covers the same label pair (in either
// direction), meaning `si` has been superseded.
const hasOverwrittenTransition = (
    si: SharedInterpolationType,
    sharedInterpolations: SharedInterpolationType[],
): boolean => {
    return (
        sharedInterpolations.find(
            p =>
                p !== si &&
                p.status === SharedInterpolationStatus.Active &&
                ((p.fromLabel === si.fromLabel && p.toLabel === si.toLabel) ||
                    (p.fromLabel === si.toLabel && p.toLabel === si.fromLabel)),
        ) !== undefined
    );
};

// Drops interpolations that have reached the Done status.
const removeDeadTransitions = (sis: SharedInterpolationType[]) => {
    return sis.filter(p => p.status !== SharedInterpolationStatus.Done);
};
the_stack
import * as assert from 'assert';
import { IReportIssueDataCollectionResult } from 'microsoft.aspnetcore.razor.vscode/dist/Diagnostics/IReportIssueDataCollectionResult';
import { ReportIssueCreator } from 'microsoft.aspnetcore.razor.vscode/dist/Diagnostics/ReportIssueCreator';
import { IRazorDocument } from 'microsoft.aspnetcore.razor.vscode/dist/IRazorDocument';
import { IRazorDocumentManager } from 'microsoft.aspnetcore.razor.vscode/dist/IRazorDocumentManager';
import * as vscode from 'microsoft.aspnetcore.razor.vscode/dist/vscodeAdapter';
import { TestProjectedDocument } from './Mocks/TestProjectedDocument';
import { TestRazorDocument } from './Mocks/TestRazorDocument';
import { TestRazorDocumentManager } from './Mocks/TestRazorDocumentManager';
import { TestTextDocument } from './Mocks/TestTextDocument';
import { createTestVSCodeApi } from './Mocks/TestVSCodeApi';

// Tests for ReportIssueCreator, exercised through the TestReportIssueCreator
// subclass below, which widens the protected members to public.
describe('ReportIssueCreator', () => {
    // Builds an issue creator backed by a mock document manager and the given mock API.
    function getReportIssueCreator(api: vscode.api) {
        const documentManager = new TestRazorDocumentManager();
        const issueCreator = new TestReportIssueCreator(api, documentManager);
        return issueCreator;
    }

    it('sanitize replaces USERNAME with anonymous', () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);
        const user = 'JohnDoe';
        const content = `Hello ${user} World ${user}`;
        delete process.env.USER;
        process.env.USERNAME = user;

        // Act
        const sanitizedContent = issueCreator.sanitize(content);

        // Assert
        assert.equal('Hello anonymous World anonymous', sanitizedContent);
    });

    it('sanitize replaces USER with anonymous', () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);
        const user = 'JohnDoe';
        const content = `Hello ${user} World ${user}`;
        process.env.USER = user;
        delete process.env.USERNAME;

        // Act
        const sanitizedContent = issueCreator.sanitize(content);

        // Assert
        assert.equal('Hello anonymous World anonymous', sanitizedContent);
    });

    it('sanitize returns original content when no user', () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);
        const content = 'original content';
        delete process.env.USER;
        delete process.env.USERNAME;

        // Act
        const sanitizedContent = issueCreator.sanitize(content);

        // Assert
        assert.equal(sanitizedContent, content);
    });

    it('create can operate when no content is available', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);
        const collectionResult: IReportIssueDataCollectionResult = {
            document: undefined,
            logOutput: '',
        };

        // Act
        const issueContent = await issueCreator.create(collectionResult);

        // Assert
        assert.ok(issueContent.indexOf('Bug') > 0);
    });

    it('getRazor returns text documents contents', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);
        const expectedContent = 'TextDocument content';
        const textDocument = new TestTextDocument(expectedContent, api.Uri.parse('C:/path/to/file.cshtml'));

        // Act
        const razorContent = await issueCreator.getRazor(textDocument);

        // Assert
        assert.equal(razorContent, expectedContent);
    });

    it('getProjectedCSharp returns projected CSharp and vscodes text document CSharp', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const expectedVSCodeCSharpContent = 'VSCode seen CSharp content';
        const csharpDocumentUri = api.Uri.parse('C:/path/to/file.cshtml.__virtual.cs');
        const csharpTextDocument = new TestTextDocument(expectedVSCodeCSharpContent, csharpDocumentUri);
        api.setWorkspaceDocuments(csharpTextDocument);
        const hostDocumentUri = api.Uri.parse('C:/path/to/file.cshtml');
        const expectedProjectedCSharpContent = 'Projected CSharp content';
        const razorDocument = new TestRazorDocument(hostDocumentUri, hostDocumentUri.path);
        razorDocument.csharpDocument = new TestProjectedDocument(expectedProjectedCSharpContent, csharpDocumentUri);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const razorContent = await issueCreator.getProjectedCSharp(razorDocument);

        // Assert
        assert.ok(razorContent.indexOf(expectedVSCodeCSharpContent) > 0);
        assert.ok(razorContent.indexOf(expectedProjectedCSharpContent) > 0);
    });

    it('getProjectedCSharp returns only projected CSharp if cannot locate vscodes text document', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const csharpDocumentUri = api.Uri.parse('C:/path/to/file.cshtml.__virtual.cs');
        const hostDocumentUri = api.Uri.parse('C:/path/to/file.cshtml');
        const expectedProjectedCSharpContent = 'Projected CSharp content';
        const razorDocument = new TestRazorDocument(hostDocumentUri, hostDocumentUri.path);
        razorDocument.csharpDocument = new TestProjectedDocument(expectedProjectedCSharpContent, csharpDocumentUri);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const razorContent = await issueCreator.getProjectedCSharp(razorDocument);

        // Assert
        assert.ok(razorContent.indexOf(expectedProjectedCSharpContent) > 0);
    });

    it('getProjectedHtml returns projected Html and vscodes text document Html', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const expectedVSCodeHtmlContent = 'VSCode seen Html content';
        // NOTE(review): this Html test uses a `.__virtual.cs` URI (the CSharp
        // suffix) — looks like a copy-paste slip; the sibling test below uses
        // `.__virtual.html`. Confirm against ReportIssueCreator's expectations.
        const htmlDocumentUri = api.Uri.parse('C:/path/to/file.cshtml.__virtual.cs');
        const htmlTextDocument = new TestTextDocument(expectedVSCodeHtmlContent, htmlDocumentUri);
        api.setWorkspaceDocuments(htmlTextDocument);
        const hostDocumentUri = api.Uri.parse('C:/path/to/file.cshtml');
        const expectedProjectedHtmlContent = 'Projected Html content';
        const razorDocument = new TestRazorDocument(hostDocumentUri, hostDocumentUri.path);
        razorDocument.htmlDocument = new TestProjectedDocument(expectedProjectedHtmlContent, htmlDocumentUri);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const razorContent = await issueCreator.getProjectedHtml(razorDocument);

        // Assert
        assert.ok(razorContent.indexOf(expectedVSCodeHtmlContent) > 0);
        assert.ok(razorContent.indexOf(expectedProjectedHtmlContent) > 0);
    });

    it('getProjectedHtml returns only projected Html if cannot locate vscodes text document', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const htmlDocumentUri = api.Uri.parse('C:/path/to/file.cshtml.__virtual.html');
        const hostDocumentUri = api.Uri.parse('C:/path/to/file.cshtml');
        const expectedProjectedHtmlContent = 'Projected Html content';
        const razorDocument = new TestRazorDocument(hostDocumentUri, hostDocumentUri.path);
        razorDocument.htmlDocument = new TestProjectedDocument(expectedProjectedHtmlContent, htmlDocumentUri);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const razorContent = await issueCreator.getProjectedHtml(razorDocument);

        // Assert
        assert.ok(razorContent.indexOf(expectedProjectedHtmlContent) > 0);
    });

    // Shared extension fixtures used by the extension-related tests below.
    const omniSharpExtension: vscode.Extension<any> = {
        id: 'ms-vscode.csharp',
        packageJSON: {
            name: 'OmniSharp',
            version: '1234',
            isBuiltin: false,
        },
    };
    const razorClientExtension: vscode.Extension<any> = {
        id: 'ms-vscode.razor-vscode',
        packageJSON: {
            name: 'Razor',
            version: '5678',
            isBuiltin: false,
        },
    };

    it('getExtensionVersion returns OmniSharp extension version', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        api.setExtensions(omniSharpExtension, razorClientExtension);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const extensionVersion = issueCreator.getExtensionVersion();

        // Assert
        assert.equal(extensionVersion, '1234');
    });

    it('getExtensionVersion can not find extension', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);

        // Act & Assert
        // NOTE(review): an async callback never throws synchronously, so
        // doesNotThrow always passes here — verify this asserts what's intended.
        assert.doesNotThrow(async () => issueCreator.getExtensionVersion());
    });

    it('getInstalledExtensions returns non built-in extensions sorted by name', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const builtinExtension: vscode.Extension<any> = {
            id: 'something.builtin',
            packageJSON: {
                name: 'BuiltInThing',
                isBuiltin: true,
            },
        };
        api.setExtensions(razorClientExtension, builtinExtension, omniSharpExtension);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const extensions = issueCreator.getInstalledExtensions();

        // Assert
        assert.deepEqual(extensions, [omniSharpExtension, razorClientExtension]);
    });

    it('generateExtensionTable returns all non-builtin extensions in string format', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const builtinExtension: vscode.Extension<any> = {
            id: 'something.builtin',
            packageJSON: {
                name: 'BuiltInThing',
                version: 'ShouldNotShowUp',
                isBuiltin: true,
            },
        };
        api.setExtensions(razorClientExtension, builtinExtension, omniSharpExtension);
        const issueCreator = getReportIssueCreator(api);

        // Act
        const table = issueCreator.generateExtensionTable();

        // Assert
        assert.ok(table.indexOf(omniSharpExtension.packageJSON.version) > 0);
        assert.ok(table.indexOf(razorClientExtension.packageJSON.version) > 0);
        assert.ok(table.indexOf(builtinExtension.packageJSON.version) === -1);
    });

    it('generateExtensionTable can operate when 0 extensions', async () => {
        // Arrange
        const api = createTestVSCodeApi();
        const issueCreator = getReportIssueCreator(api);

        // Act & Assert
        assert.doesNotThrow(() => issueCreator.generateExtensionTable());
    });
});

// Test subclass that re-exposes ReportIssueCreator's protected members as
// public so the tests above can call them directly.
class TestReportIssueCreator extends ReportIssueCreator {
    constructor(vscodeApi: vscode.api, documentManager: IRazorDocumentManager) {
        super(vscodeApi, documentManager);
    }

    public getRazor(document: vscode.TextDocument) {
        return super.getRazor(document);
    }

    public getProjectedCSharp(razorDocument: IRazorDocument) {
        return super.getProjectedCSharp(razorDocument);
    }

    public getProjectedHtml(razorDocument: IRazorDocument) {
        return super.getProjectedHtml(razorDocument);
    }

    public getExtensionVersion() {
        return super.getExtensionVersion();
    }

    public getInstalledExtensions() {
        return super.getInstalledExtensions();
    }

    public generateExtensionTable() {
        return super.generateExtensionTable();
    }

    public sanitize(content: string) {
        return super.sanitize(content);
    }
}
the_stack
const testLogLevel = "info"; const sutLogLevel = "info"; const containerImageName = "ghcr.io/hyperledger/cactus-quorum-multi-party-all-in-one"; const containerImageVersion = "2022-04-06-fd10e27"; import "jest-extended"; import lodash from "lodash"; import { v4 as uuidv4 } from "uuid"; import Web3 from "web3"; import { AbiItem } from "web3-utils"; import { PluginRegistry } from "@hyperledger/cactus-core"; import { PluginLedgerConnectorQuorum, QuorumApiClient, WatchBlocksV1Progress, Web3BlockHeader, Web3SigningCredentialType, } from "@hyperledger/cactus-plugin-ledger-connector-quorum"; import { PluginKeychainMemory } from "@hyperledger/cactus-plugin-keychain-memory"; import { Logger, LoggerProvider } from "@hyperledger/cactus-common"; import { ICactusPlugin, IVerifierEventListener, LedgerEvent, } from "@hyperledger/cactus-core-api"; import { AddressInfo } from "net"; import { ApiServer, AuthorizationProtocol, ConfigService, } from "@hyperledger/cactus-cmd-api-server"; import { Verifier, VerifierFactory } from "@hyperledger/cactus-verifier-client"; import { pruneDockerAllIfGithubAction, QuorumMultiPartyTestLedger, } from "@hyperledger/cactus-test-tooling"; import HelloWorldContractJson from "../../../solidity/hello-world-contract/HelloWorld.json"; const log: Logger = LoggerProvider.getOrCreate({ label: "verifier-integration-with-quorum-connector.test", level: testLogLevel, }); log.info("Test started"); describe("Verifier integration with quorum connector tests", () => { let quorumTestLedger: QuorumMultiPartyTestLedger; let apiServer: ApiServer; let connector: PluginLedgerConnectorQuorum; let web3: Web3; let keychainPlugin: PluginKeychainMemory; let connectionProfile: ReturnType< typeof QuorumMultiPartyTestLedger.prototype.getKeys > extends Promise<infer T> ? 
T : never; const quorumValidatorId = "testQuorumId"; let globalVerifierFactory: VerifierFactory; ////////////////////////////////// // Environment Setup ////////////////////////////////// beforeAll(async () => { log.info("Prune Docker..."); await pruneDockerAllIfGithubAction({ logLevel: testLogLevel }); // Start Ledger log.info("Start QuorumMultiPartyTestLedger..."); log.debug("QuorumMultiParty image:", containerImageName); log.debug("QuorumMultiParty version:", containerImageVersion); quorumTestLedger = new QuorumMultiPartyTestLedger({ containerImageName, containerImageVersion, logLevel: sutLogLevel, emitContainerLogs: false, //useRunningLedger: true, }); await quorumTestLedger.start(); connectionProfile = await quorumTestLedger.getKeys(); log.debug("connectionProfile:", connectionProfile); // Setup ApiServer plugins const plugins: ICactusPlugin[] = []; const pluginRegistry = new PluginRegistry({ plugins }); log.info("Create PluginKeychainMemory..."); keychainPlugin = new PluginKeychainMemory({ instanceId: uuidv4(), keychainId: uuidv4(), logLevel: sutLogLevel, }); keychainPlugin.set( HelloWorldContractJson.contractName, JSON.stringify(HelloWorldContractJson), ); plugins.push(keychainPlugin); log.info("Create PluginLedgerConnectorQuorum..."); connector = new PluginLedgerConnectorQuorum({ rpcApiHttpHost: connectionProfile.quorum.member1.url, rpcApiWsHost: connectionProfile.quorum.member1.wsUrl, logLevel: sutLogLevel, instanceId: uuidv4(), pluginRegistry: new PluginRegistry({ plugins: [keychainPlugin] }), }); plugins.push(connector); // Create web3 provider for test web3 = new Web3(connectionProfile.quorum.member1.url); // Create Api Server log.info("Create ApiServer..."); const configService = new ConfigService(); const cactusApiServerOptions = await configService.newExampleConfig(); cactusApiServerOptions.authorizationProtocol = AuthorizationProtocol.NONE; cactusApiServerOptions.configFile = ""; cactusApiServerOptions.apiCorsDomainCsv = "*"; 
cactusApiServerOptions.apiTlsEnabled = false; cactusApiServerOptions.apiPort = 0; const config = await configService.newExampleConfigConvict( cactusApiServerOptions, ); apiServer = new ApiServer({ config: config.getProperties(), pluginRegistry, }); // Start ApiServer const apiServerStartOut = await apiServer.start(); log.debug(`apiServerStartOut:`, apiServerStartOut); const httpServer = apiServer.getHttpServerApi(); const addressInfo = httpServer?.address() as AddressInfo; const { address, port } = addressInfo; const apiHost = `http://${address}:${port}`; // Create VerifierFactory log.info("Create VerifierFactory with Quorum Validator..."); globalVerifierFactory = new VerifierFactory( [ { validatorID: quorumValidatorId, validatorType: "QUORUM_2X", basePath: apiHost, logLevel: sutLogLevel, }, ], sutLogLevel, ); }); afterAll(async () => { log.info("Shutdown the server..."); if (apiServer) { await apiServer.shutdown(); } log.info("Stop and destroy the test ledger..."); if (quorumTestLedger) { await quorumTestLedger.stop(); await quorumTestLedger.destroy(); } log.info("Prune docker..."); await pruneDockerAllIfGithubAction({ logLevel: testLogLevel }); }); ////////////////////////////////// // Helper Functions ////////////////////////////////// function monitorAndGetBlock( options: Record<string, unknown> = {}, ): Promise<LedgerEvent<WatchBlocksV1Progress>> { return new Promise<LedgerEvent<WatchBlocksV1Progress>>( (resolve, reject) => { const appId = "testMonitor"; const sut = globalVerifierFactory.getVerifier(quorumValidatorId); const monitor: IVerifierEventListener<WatchBlocksV1Progress> = { onEvent(ledgerEvent: LedgerEvent<WatchBlocksV1Progress>): void { try { log.info("Received event:", ledgerEvent); if (!ledgerEvent.data) { throw Error("No block data"); } log.info( "Listener received ledgerEvent, block number", ledgerEvent.data.blockHeader?.number, ); sut.stopMonitor(appId); resolve(ledgerEvent); } catch (err) { reject(err); } }, onError(err: any): void { 
log.error("Ledger monitoring error:", err); reject(err); }, }; sut.startMonitor(appId, options, monitor); }, ); } ////////////////////////////////// // Tests ////////////////////////////////// test("Verifier of QuorumApiClient is created by VerifierFactory", () => { const sut = globalVerifierFactory.getVerifier(quorumValidatorId); expect(sut.ledgerApi.className).toEqual("QuorumApiClient"); }); describe("web3EthContract tests", () => { let verifier: Verifier<QuorumApiClient>; let contractCommon: { abi: AbiItem[]; address: string; }; beforeAll(async () => { // Setup verifier verifier = globalVerifierFactory.getVerifier( quorumValidatorId, "QUORUM_2X", ); // Deploy contract to interact with const deployOut = await connector.deployContract({ contractName: HelloWorldContractJson.contractName, keychainId: keychainPlugin.getKeychainId(), web3SigningCredential: { ethAccount: connectionProfile.quorum.member2.accountAddress, secret: connectionProfile.quorum.member2.privateKey, type: Web3SigningCredentialType.PrivateKeyHex, }, gas: 1000000, }); expect(deployOut).toBeTruthy(); expect(deployOut.transactionReceipt).toBeTruthy(); expect(deployOut.transactionReceipt.contractAddress).toBeTruthy(); expect(deployOut.transactionReceipt.status).toBeTrue(); contractCommon = { abi: HelloWorldContractJson.abi as AbiItem[], address: deployOut.transactionReceipt.contractAddress as string, }; }); test("Invalid web3EthContract calls are rejected by QuorumApiClient", async () => { // Define correct input parameters const correctContract: Record<string, unknown> = lodash.clone( contractCommon, ); const correctMethod: Record<string, unknown> = { type: "web3EthContract", command: "call", function: "getName", params: [], }; const correctArgs: any = {}; // Sanity check if correct parameters work const resultCorrect = await verifier.sendSyncRequest( correctContract, correctMethod, correctArgs, ); expect(resultCorrect.status).toEqual(200); // Failing: Missing contract ABI const missingABIContract = 
lodash.clone(correctContract); delete missingABIContract.abi; expect( verifier.sendSyncRequest( missingABIContract, correctMethod, correctArgs, ), ).toReject(); // Failing: Missing contract address const missingAddressContract = lodash.clone(correctContract); delete missingAddressContract.address; expect( verifier.sendSyncRequest( missingAddressContract, correctMethod, correctArgs, ), ).toReject(); // Failing: Unknown invocation method const unknownMethod = lodash.clone(correctMethod); unknownMethod.command = "foo"; expect( verifier.sendSyncRequest(correctContract, unknownMethod, correctArgs), ).toReject(); // Failing: Empty invocation method const emptyMethod = lodash.clone(correctMethod); emptyMethod.command = ""; expect( verifier.sendSyncRequest(correctContract, emptyMethod, correctArgs), ).toReject(); // Failing: Empty contract method const emptyContractFunction = lodash.clone(correctMethod); emptyContractFunction.function = ""; expect( verifier.sendSyncRequest( correctContract, emptyContractFunction, correctArgs, ), ).toReject(); // Failing: Wrong method params format const numericParam = lodash.clone(correctMethod); numericParam.params = 42; expect( verifier.sendSyncRequest(correctContract, numericParam, correctArgs), ).toReject(); const objectParam = lodash.clone(correctMethod); objectParam.params = { arg1: 42 }; expect( verifier.sendSyncRequest(correctContract, objectParam, correctArgs), ).toReject(); }); test("Send unsigned transaction and use call to check results works", async () => { const newName = "QuorumCactus"; // 1. 
Set new value (send) // Will use signing key of the node we're connected to (member1) const methodSend = { type: "web3EthContract", command: "send", function: "setName", params: [newName], }; const argsSend = { args: { from: connectionProfile.quorum.member1.accountAddress, }, }; const resultsSend = await verifier.sendSyncRequest( contractCommon, methodSend, argsSend, ); expect(resultsSend.status).toEqual(200); expect(resultsSend.data.status).toBeTrue(); // 2. Get new, updated value (call) const methodCall = { type: "web3EthContract", command: "call", function: "getName", params: [], }; const argsCall = {}; const resultCall = await verifier.sendSyncRequest( contractCommon, methodCall, argsCall, ); expect(resultCall.status).toEqual(200); expect(resultCall.data).toEqual(newName); }); test("encodeABI of transactions gives same results as direct web3 call", async () => { // Send encodeABI request to connector const methodEncode = { type: "web3EthContract", command: "encodeABI", function: "setName", params: ["QuorumCactusEncode"], }; const argsEncode = { args: { from: connectionProfile.quorum.member1.accountAddress, }, }; const resultsEncode = await verifier.sendSyncRequest( contractCommon, methodEncode, argsEncode, ); expect(resultsEncode.status).toEqual(200); expect(resultsEncode.data.length).toBeGreaterThan(5); // Compare encoded data with direct web3 call const web3Contract = new web3.eth.Contract( contractCommon.abi, contractCommon.address, ); const web3Encode = await web3Contract.methods .setName(...methodEncode.params) .encodeABI(argsEncode); expect(resultsEncode.data).toEqual(web3Encode); }); test("estimateGas of transactions gives same results as direct web3 call", async () => { // Send estimateGas request to connector const methodEstimateGas = { type: "web3EthContract", command: "estimateGas", function: "setName", params: ["QuorumCactusGas"], }; const argsEstimateGas = {}; const resultsEstimateGas = await verifier.sendSyncRequest( contractCommon, 
methodEstimateGas, argsEstimateGas, ); expect(resultsEstimateGas.status).toEqual(200); expect(resultsEstimateGas.data).toBeGreaterThan(0); // Compare gas estimate with direct web3 call const web3Contract = new web3.eth.Contract( contractCommon.abi, contractCommon.address, ); const web3Encode = await web3Contract.methods .setName(...methodEstimateGas.params) .estimateGas(argsEstimateGas); expect(resultsEstimateGas.data).toEqual(web3Encode); }); test("Sending transaction with sendAsyncRequest works", async () => { const newName = "QuorumCactusAsync"; // 1. Set new value with async call (send) // Will use signing key of the node we're connected to (member1) const methodSendAsync = { type: "web3EthContract", command: "send", function: "setName", params: [newName], }; const argsSendAsync = { args: { from: connectionProfile.quorum.member1.accountAddress, }, }; await verifier.sendAsyncRequest( contractCommon, methodSendAsync, argsSendAsync, ); // 2. Wait for transaction commit // We assume transaction will be included in the next block await monitorAndGetBlock(); // 3. 
Get new, updated value (call) const methodCall = { type: "web3EthContract", command: "call", function: "getName", params: [], }; const argsCall = {}; const resultsCall = await verifier.sendSyncRequest( contractCommon, methodCall, argsCall, ); expect(resultsCall.status).toEqual(200); expect(resultsCall.data).toEqual(newName); }); }); test("Verifier of QuorumApiClient supports web3Eth function", async () => { // web3Eth.getBalance const contract = {}; const method = { type: "web3Eth", command: "getBalance" }; const args = { args: [connectionProfile.quorum.member2.accountAddress] }; const results = await globalVerifierFactory .getVerifier(quorumValidatorId) .sendSyncRequest(contract, method, args); expect(results.status).toEqual(200); expect(results.data.length).toBeGreaterThan(0); }); test("Invalid web3Eth calls are rejected by QuorumApiClient", async () => { // Define correct input parameters const correctContract = {}; const correctMethod: Record<string, unknown> = { type: "web3Eth", command: "getBalance", }; const correctArgs: any = { args: [connectionProfile.quorum.member2.accountAddress], }; const verifier = globalVerifierFactory.getVerifier(quorumValidatorId); // Sanity check if correct parameters work const resultCorrect = await verifier.sendSyncRequest( correctContract, correctMethod, correctArgs, ); expect(resultCorrect.status).toEqual(200); // Failing: Empty web3.eth method const emptyMethod = lodash.clone(correctMethod); emptyMethod.command = ""; expect( verifier.sendSyncRequest(correctContract, emptyMethod, correctArgs), ).toReject(); // Failing: Wrong args format const numericArgsFormat = lodash.clone(correctArgs); numericArgsFormat.args = 42; expect( verifier.sendSyncRequest(correctContract, numericArgsFormat, correctArgs), ).toReject(); const objectArgsFormat = lodash.clone(correctArgs); objectArgsFormat.args = { arg1: 42 }; expect( verifier.sendSyncRequest(correctContract, objectArgsFormat, correctArgs), ).toReject(); }); test("QuorumApiClient web3Eth 
throws error on unknown method", async () => { const contract = {}; const method = { type: "web3Eth", command: "foo" }; const args = {}; const results = await globalVerifierFactory .getVerifier(quorumValidatorId) .sendSyncRequest(contract, method, args); expect(results).toBeTruthy(); expect(results.status).toEqual(504); expect(results.errorDetail).toBeTruthy(); }); function assertBlockHeader(header: Web3BlockHeader) { // Check if defined and with expected type // Ignore nullable / undefine-able fields expect(typeof header.parentHash).toEqual("string"); expect(typeof header.sha3Uncles).toEqual("string"); expect(typeof header.miner).toEqual("string"); expect(typeof header.stateRoot).toEqual("string"); expect(typeof header.logsBloom).toEqual("string"); expect(typeof header.number).toEqual("number"); expect(typeof header.gasLimit).toEqual("number"); expect(typeof header.gasUsed).toEqual("number"); expect(typeof header.extraData).toEqual("string"); expect(typeof header.nonce).toEqual("string"); expect(typeof header.hash).toEqual("string"); expect(typeof header.difficulty).toEqual("string"); } test("Monitor new blocks headers on Quorum", async () => { const ledgerEvent = await monitorAndGetBlock(); // assert well-formed output expect(ledgerEvent.id).toEqual(""); expect(ledgerEvent.verifierId).toEqual(quorumValidatorId); expect(ledgerEvent.data).toBeTruthy(); // blockData should not be present if called with empty options expect(ledgerEvent.data?.blockData).toBeUndefined(); expect(ledgerEvent.data?.blockHeader).toBeTruthy(); // check some fields assertBlockHeader(ledgerEvent.data?.blockHeader as Web3BlockHeader); }); test("Monitor new blocks data on Quorum", async () => { const ledgerEvent = await monitorAndGetBlock({ getBlockData: true }); // assert well-formed output expect(ledgerEvent.id).toEqual(""); expect(ledgerEvent.verifierId).toEqual(quorumValidatorId); expect(ledgerEvent.data).toBeTruthy(); // blockHeader should not be present if called with getBlockData option 
expect(ledgerEvent.data?.blockHeader).toBeFalsy(); expect(ledgerEvent.data?.blockData).toBeTruthy(); // check some fields assertBlockHeader(ledgerEvent.data?.blockData as Web3BlockHeader); expect(typeof ledgerEvent.data?.blockData?.size).toEqual("number"); expect(typeof ledgerEvent.data?.blockData?.totalDifficulty).toEqual( "string", ); expect(typeof ledgerEvent.data?.blockData?.uncles).toEqual("object"); expect(typeof ledgerEvent.data?.blockData?.transactions).toEqual("object"); }); });
the_stack
import * as d3 from "d3"; import { MetaData, MetaDataType } from "./meta_data"; import { Node } from "./node"; import { LinkPosition } from "./position_cache"; import { classify } from "./util"; export type Constructor = (data: LinkDataType, id: number, metaKeys: string[], linkWidth: (object) => number) => void; export type LinkDataType = { source: string, target: string, meta: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any class: string, } export class LinkBase { private static groups: Record<string, any>; // eslint-disable-line @typescript-eslint/no-explicit-any protected readonly source: number | Node; protected readonly target: number | Node; private readonly meta: MetaDataType[]; private readonly sourceMeta: MetaDataType[]; private readonly targetMeta: MetaDataType[]; private readonly extraClass: string; private width: number; private readonly defaultMargin: number; private readonly labelXOffset: number; private readonly labelYOffset: number; private color: string; private _margin: number; constructor(data: LinkDataType, public id: number, metaKeys: string[], linkWidth: (object) => number) { this.source = Node.idByName(data.source); this.target = Node.idByName(data.target); this.meta = new MetaData(data.meta).get(metaKeys); this.sourceMeta = new MetaData(data.meta, "source").get(metaKeys); this.targetMeta = new MetaData(data.meta, "target").get(metaKeys); this.extraClass = data.class || ""; if (typeof linkWidth === "function") this.width = linkWidth(data.meta) || 3; else this.width = linkWidth || 3; this.defaultMargin = 15; this.labelXOffset = 20; this.labelYOffset = 1.5; // em this.color = "#7a4e4e"; this.register(id, this.source, this.target); } isNamedPath(): boolean { return this.meta.length > 0; } isReversePath(): boolean { return this.targetMeta.length > 0; } d(): string { return `M ${(this.source as Node).x} ${(this.source as Node).y} L ${(this.target as Node).x} ${(this.target as Node).y}`; } pathId(): string { return 
`path${this.id}`; } linkId(): string { return `link${this.id}`; } margin(): number { if (!this._margin) { const margin = window.getComputedStyle(document.getElementById(this.linkId())).margin; // NOTE: Assuming that window.getComputedStyle() returns some value link "10px" // or "0px" even when not defined in .css if (!margin || margin === "0px") { this._margin = this.defaultMargin; } else { this._margin = parseInt(margin); } } return this._margin; } // OPTIMIZE: Implement better right-alignment of the path, especially for multi tspans tspanXOffset(): number { if (this.isNamedPath()) return 0; else if (this.isReversePath()) return -this.labelXOffset; else return this.labelXOffset; } tspanYOffset(): string { if (this.isNamedPath()) return `${-this.labelYOffset + 0.7}em`; else return `${this.labelYOffset}em`; } rotate(bbox: SVGRect): string { if ((this.source as Node).x > (this.target as Node).x) return `rotate(180 ${bbox.x + bbox.width / 2} ${bbox.y + bbox.height / 2})`; else return "rotate(0)"; } split(): Record<string, any>[] { // eslint-disable-line @typescript-eslint/no-explicit-any if (!this.meta && !this.sourceMeta && !this.targetMeta) return [this]; const meta = []; ["meta", "sourceMeta", "targetMeta"].forEach((key, i, keys) => { if (this[key]) { const duped = Object.assign(Object.create(this), this); keys.filter((k) => k !== key).forEach((k) => duped[k] = []); meta.push(duped); } }); return meta; } hasMeta(): boolean { return this.meta.length > 0 || this.sourceMeta.length > 0 || this.targetMeta.length > 0; } class(): string { // eslint-disable-next-line max-len return `link ${classify((this.source as Node).name)} ${classify((this.target as Node).name)} ${classify((this.source as Node).name)}-${classify((this.target as Node).name)} ${this.extraClass}`; } // eslint-disable-next-line @typescript-eslint/no-explicit-any static render(linkLayer: d3.Selection<any>, labelLayer: d3.Selection<any>, links: Link[]): [d3.Selection<Link>, d3.Selection<Link>, 
d3.Selection<any>] { // Render lines const pathGroup = linkLayer.selectAll(".link") .data(links) .enter() .append("g") .attr("class", (d) => d.class()); const link = pathGroup.append("line") .attr("x1", (d) => (d.source as Node).x) .attr("y1", (d) => (d.source as Node).y) .attr("x2", (d) => (d.target as Node).x) .attr("y2", (d) => (d.target as Node).y) .attr("stroke", (d) => d.color) .attr("stroke-width", (d) => d.width) .attr("id", (d) => d.linkId()) .on("mouseover.line", (d) => textGroup.selectAll(`text.${d.pathId()}`).classed("hover", true)) .on("mouseout.line", (d) => textGroup.selectAll(`text.${d.pathId()}`).classed("hover", false)); const path = pathGroup.append("path") .attr("d", (d) => d.d()) .attr("id", (d) => d.pathId()); // Render texts const textGroup = labelLayer.selectAll(".link") .data(links) .enter() .append("g") .attr("class", (d) => d.class()); const text = textGroup.selectAll("text") .data((d: Link) => d.split().filter((l: Link) => l.hasMeta())) .enter() .append("text") .attr("class", (d: Link) => d.pathId()); // Bind text with pathId as class const textPath = text.append("textPath") .attr("xlink:href", (d: Link) => `#${d.pathId()}`); textPath.each(function(d: Link) { Link.appendTspans(this, d.meta); Link.appendTspans(this, d.sourceMeta); Link.appendTspans(this, d.targetMeta); if (d.isNamedPath()) Link.center(this); if (d.isReversePath()) Link.theOtherEnd(this); }); Link.zoom(); // Initialize return [link, path, text]; } private static theOtherEnd(container: SVGGElement): void { d3.select(container) .attr("class", "reverse") .attr("text-anchor", "end") .attr("startOffset", "100%"); } private static center(container: SVGGElement): void { d3.select(container) .attr("class", "center") .attr("text-anchor", "middle") .attr("startOffset", "50%"); } private static appendTspans(container: SVGGElement, meta: MetaDataType[]): void { meta.forEach((m) => { d3.select(container).append("tspan") .attr("x", (d: Link) => d.tspanXOffset()) .attr("dy", (d: Link) => 
d.tspanYOffset()) .attr("class", m.class) .text(m.value); }); } // eslint-disable-next-line @typescript-eslint/no-explicit-any static tick(link: d3.Selection<Link>, path: d3.Selection<Link>, label: d3.Selection<any>): void { link.attr("x1", (d) => (d.source as Node).x) .attr("y1", (d) => (d.source as Node).y) .attr("x2", (d) => (d.target as Node).x) .attr("y2", (d) => (d.target as Node).y); if (path) path.attr("d", (d) => d.d()); if (label) label.attr("transform", function(d: Link) { return d.rotate(this.getBBox()); }); } static zoom(scale?: number): void { let visibility = "hidden"; if (scale && scale > 1.5) visibility = "visible"; d3.selectAll(".link text") .style("visibility", visibility); } static setPosition(link: d3.Selection<Link>, position: LinkPosition[]): void { link.attr("x1", (d, i) => position[i].x1) .attr("y1", (d, i) => position[i].y1) .attr("x2", (d, i) => position[i].x2) .attr("y2", (d, i) => position[i].y2); } register(id: number, source: number, target: number): void { Link.groups = Link.groups || {}; const key = [source, target].sort().toString(); Link.groups[key] = Link.groups[key] || []; Link.groups[key].push(id); } private static shiftMultiplier(link: Link): number { const members = Link.groups[[(link.source as Node).id, (link.target as Node).id].sort().toString()] || []; return members.indexOf(link.id) - (members.length - 1) / 2; } // eslint-disable-next-line @typescript-eslint/no-explicit-any static shiftBundle(link: d3.Selection<Link>, path: d3.Selection<Link>, label: d3.Selection<any>): void { const transform = (d) => d.shiftBundle(Link.shiftMultiplier(d)); link.attr("transform", transform); path.attr("transform", transform); label.attr("transform", transform); } shiftBundle(multiplier: number): string { const gap = this.margin() * multiplier; const width = Math.abs((this.target as Node).x - (this.source as Node).x); const height = Math.abs((this.source as Node).y - (this.target as Node).y); const length = Math.sqrt(Math.pow(width, 2) + 
Math.pow(height, 2)); return `translate(${gap * height / length}, ${gap * width / length})`; } static reset(): void { Link.groups = null; } } const Eventable = (Base: typeof LinkBase) => { class EventableLink extends Base { private dispatch: d3.Dispatch; constructor(data: LinkDataType, id: number, metaKeys: string[], linkWidth: (object) => number) { super(data, id, metaKeys, linkWidth); this.dispatch = d3.dispatch("rendered"); } // eslint-disable-next-line @typescript-eslint/no-explicit-any static render(linkLayer, labelLayer, links): [d3.Selection<Link>, d3.Selection<Link>, d3.Selection<any>] { const [link, path, text] = super.render(linkLayer, labelLayer, links); link.each(function(this: SVGLineElement, d: Link & EventableLink) { d.dispatch.rendered(this); }); return [link, path, text]; } // eslint-disable-next-line @typescript-eslint/no-explicit-any on(name: string, callback: (element: SVGGElement) => any): void { this.dispatch.on(name, callback); } } return EventableLink; }; const Pluggable = (Base: typeof LinkBase) => { class Link extends Base { private static pluginConstructors: Constructor[] = []; constructor(data: LinkDataType, id: number, metaKeys: string[], linkWidth: (object) => number) { super(data, id, metaKeys, linkWidth); for (const constructor of Link.pluginConstructors) { // Call Pluggable at last as constructor may call methods defined in other classes constructor.bind(this)(data, id, metaKeys, linkWidth); } } static registerConstructor(func: Constructor): void { Link.pluginConstructors.push(func); } } return Link; }; class EventableLink extends Eventable(LinkBase) { } // Call Pluggable at last as constructor may call methods defined in other classes class Link extends Pluggable(EventableLink) { } export { Link };
the_stack
import { createEventDispatcher } from 'svelte' import type { Readable, Writable } from 'svelte/store' import type { AudioListener, Camera, Clock, Color, ColorRepresentation, Euler, Event, Intersection, Loader, Material, Matrix4, Mesh, Object3D, Raycaster, Scene, Vector2, Vector3, WebGLRenderer } from 'three' import type { EffectComposer, Pass } from 'three/examples/jsm/postprocessing/EffectComposer' export type ThreltePointerEventMap = { click: ThreltePointerEvent contextmenu: ThreltePointerEvent pointerup: ThreltePointerEvent pointerdown: ThreltePointerEvent pointerenter: ThreltePointerEvent pointerleave: ThreltePointerEvent pointermove: ThreltePointerEvent } const createEventDispatcherType = () => createEventDispatcher<ThreltePointerEventMap>() export type InteractiveObjectEventDispatcher = ReturnType<typeof createEventDispatcherType> export type ThrelteInstance = { matrix: Matrix4 color: null | Color pointerEventDispatcher?: InteractiveObjectEventDispatcher } export type ThrelteRootContext = { setCamera: (camera: Camera) => void linear: Writable<boolean> flat: Writable<boolean> dpr: Writable<number> addPass: (pass: Pass) => void removePass: (pass: Pass) => void addRaycastableObject: (obj: Object3D) => void removeRaycastableObject: (obj: Object3D) => void addInteractiveObject: (obj: Object3D) => void removeInteractiveObject: (obj: Object3D) => void interactiveObjects: Set<Object3D> raycastableObjects: Set<Object3D> raycaster: Raycaster lastIntersection: Intersection<Object3D<Event>> | null } export type ThrelteContext = { size: Readable<Size> pointer: Writable<Vector2> pointerOverCanvas: Writable<boolean> clock: Clock camera: Writable<Camera> scene: Scene renderer?: WebGLRenderer composer?: EffectComposer invalidate: (debugFrameloopMessage?: string) => void } export type ThrelteRenderContext = { frameloop: 'always' | 'demand' debugFrameloop: boolean pointerInvalidated: boolean frameInvalidated: boolean frame: number invalidations: Record<string, number> 
frameHandlers: Set<ThrelteFrameHandler> } export type ThrelteAudioContext = { audioListeners: Map<string, AudioListener> getAudioListener: (id?: string) => AudioListener | undefined addAudioListener: (listener: AudioListener, id?: string) => void removeAudioListener: (id?: string) => void } export type ThrelteUseFrame = { stop: () => void start: () => void started: Readable<boolean> } export type ThrelteUseFrameOptions = { autostart?: boolean order?: number /** * Optionally provide a message to use with the property * `debugFrameloop` of the `<Canvas>` component. */ debugFrameloopMessage?: string } export type ThrelteFrameHandler = { fn: (ctx: ThrelteContext, delta: number) => void order?: number debugFrameloopMessage?: string } export type ThrelteParentContext = Writable<Object3D> export type Position = | Vector3 | { x?: number y?: number z?: number } export type Scale = | Vector3 | number | { x?: number y?: number z?: number } export type Rotation = | Euler | { x?: number y?: number z?: number order?: Euler['order'] } export type LookAt = Position | Object3D export type ThrelteLayers = | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | ( | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 )[] | 'all' | 'none' export type ThrelteLayersContext = Writable<ThrelteLayers> | undefined export interface Text extends Mesh { /** * The string of text to be rendered. */ text: string /** * Defines the horizontal position in the text block that should line up with the local origin. * Can be specified as a numeric x position in local units, a string percentage of the total * text block width e.g. `'25%'`, or one of the following keyword strings: 'left', 'center', * or 'right'. 
*/ anchorX: number | 'left' | 'center' | 'right' | string /** * Defines the vertical position in the text block that should line up with the local origin. * Can be specified as a numeric y position in local units (note: down is negative y), a string * percentage of the total text block height e.g. `'25%'`, or one of the following keyword strings: * 'top', 'top-baseline', 'middle', 'bottom-baseline', or 'bottom'. */ anchorY: number | 'top' | 'top-baseline' | 'middle' | 'bottom-baseline' | 'bottom' | string /** * Defines a cylindrical radius along which the text's plane will be curved. Positive numbers put * the cylinder's centerline (oriented vertically) that distance in front of the text, for a concave * curvature, while negative numbers put it behind the text for a convex curvature. The centerline * will be aligned with the text's local origin; you can use `anchorX` to offset it. * * Since each glyph is by default rendered with a simple quad, each glyph remains a flat plane * internally. You can use `glyphGeometryDetail` to add more vertices for curvature inside glyphs. */ curveRadius: number /** * Sets the base direction for the text. The default value of "auto" will choose a direction based * on the text's content according to the bidi spec. A value of "ltr" or "rtl" will force the direction. */ /** * URL of a custom font to be used. Font files can be any of the formats supported by * OpenType (see https://github.com/opentypejs/opentype.js). * Defaults to the Roboto font loaded from Google Fonts. */ font: null | string /** * MIDDLE */ /** * The size at which to render the font in local units; corresponds to the em-box height * of the chosen `font`. */ fontSize: number /** * Sets a uniform adjustment to spacing between letters after kerning is applied. Positive * numbers increase spacing and negative numbers decrease it. */ letterSpacing: number /** * Sets the height of each line of text, as a multiple of the `fontSize`. 
Defaults to 'normal' * which chooses a debugFrameloopMessageable height based on the chosen font's ascender/descender metrics. */ lineHeight: number | string /** * The maximum width of the text block, above which text may start wrapping according to the * `whiteSpace` and `overflowWrap` properties. */ maxWidth: number /** * Defines how text wraps if the `whiteSpace` property is `normal`. Can be either `'normal'` * to break at whitespace characters, or `'break-word'` to allow breaking within words. * Defaults to `'normal'`. */ overflowWrap: 'normal' | 'break-word' | 'normal' /** * The horizontal alignment of each line of text within the overall text bounding box. */ textAlign: 'left' | 'right' | 'center' | 'justify' /** * Indentation for the first character of a line; see CSS `text-indent`. */ textIndent: number /** * Defines whether text should wrap when a line reaches the `maxWidth`. Can * be either `'normal'` (the default), to allow wrapping according to the `overflowWrap` property, * or `'nowrap'` to prevent wrapping. Note that `'normal'` here honors newline characters to * manually break lines, making it behave more like `'pre-wrap'` does in CSS. */ whiteSpace: 'normal' | 'nowrap' | 'pre-wrap' /** * Defines a _base_ material to be used when rendering the text. This material will be * automatically replaced with a material derived from it, that adds shader code to * decrease the alpha for each fragment (pixel) outside the text glyphs, with antialiasing. * By default it will derive from a simple white MeshBasicMaterial, but you can use any * of the other mesh materials to gain other features like lighting, texture maps, etc. * * Also see the `color` shortcut property. */ material: Material | Material[] /** * This is a shortcut for setting the `color` of the text's material. You can use this * if you don't want to specify a whole custom `material`. 
Also, if you do use a custom * `material`, this color will only be used for this particuar Text instance, even if * that same material instance is shared across multiple Text objects. */ color: string | number | Color | null /** * This is a shortcut for setting the material's `polygonOffset` and related properties, * which can be useful in preventing z-fighting when this text is laid on top of another * plane in the scene. Positive numbers are further from the camera, negatives closer. */ depthOffset: number /** * If specified, defines a `[minX, minY, maxX, maxY]` of a rectangle outside of which all * pixels will be discarded. This can be used for example to clip overflowing text when * `whiteSpace='nowrap'`. */ clipRect: [number, number, number, number] | null /** * Controls number of vertical/horizontal segments that make up each glyph's rectangular * plane. Defaults to 1. This can be increased to provide more geometrical detail for custom * vertex shader effects, for example. */ glyphGeometryDetail: number /** * The size of each glyph's SDF (signed distance field) used for rendering. This must be a * power-of-two number. Defaults to 64 which is generally a good balance of size and quality * for most fonts. Larger sizes can improve the quality of glyph rendering by increasing * the sharpness of corners and preventing loss of very thin lines, at the expense of * increased memory footprint and longer SDF generation time. */ sdfGlyphSize: number | null /** * WARNING: This API is experimental and may change. * The width of an outline/halo to be drawn around each text glyph using the `outlineColor` and `outlineOpacity`. * Can be specified as either an absolute number in local units, or as a percentage string e.g. * `"12%"` which is treated as a percentage of the `fontSize`. Defaults to `0`, which means * no outline will be drawn unless an `outlineOffsetX/Y` or `outlineBlur` is set. 
*/ outlineWidth: number | string /** * WARNING: This API is experimental and may change. * The color of the text outline, if `outlineWidth`/`outlineBlur`/`outlineOffsetX/Y` are set. * Defaults to black. */ outlineColor: ColorRepresentation /** * WARNING: This API is experimental and may change. * The opacity of the outline, if `outlineWidth`/`outlineBlur`/`outlineOffsetX/Y` are set. * Defaults to `1`. */ outlineOpacity: number /** * WARNING: This API is experimental and may change. * A blur radius applied to the outer edge of the text's outline. If the `outlineWidth` is * zero, the blur will be applied at the glyph edge, like CSS's `text-shadow` blur radius. * Can be specified as either an absolute number in local units, or as a percentage string e.g. * `"12%"` which is treated as a percentage of the `fontSize`. Defaults to `0`. */ outlineBlur: number | string /** * WARNING: This API is experimental and may change. * A horizontal offset for the text outline. * Can be specified as either an absolute number in local units, or as a percentage string e.g. `"12%"` * which is treated as a percentage of the `fontSize`. Defaults to `0`. */ outlineOffsetX: number | string /** * WARNING: This API is experimental and may change. * A vertical offset for the text outline. * Can be specified as either an absolute number in local units, or as a percentage string e.g. `"12%"` * which is treated as a percentage of the `fontSize`. Defaults to `0`. */ outlineOffsetY: number | string /** * WARNING: This API is experimental and may change. * The width of an inner stroke drawn inside each text glyph using the `strokeColor` and `strokeOpacity`. * Can be specified as either an absolute number in local units, or as a percentage string e.g. `"12%"` * which is treated as a percentage of the `fontSize`. Defaults to `0`. */ strokeWidth: number | string /** * WARNING: This API is experimental and may change. * The color of the text stroke, if `strokeWidth` is greater than zero. 
Defaults to gray. */ strokeColor: ColorRepresentation | Color /** * WARNING: This API is experimental and may change. * The opacity of the stroke, if `strokeWidth` is greater than zero. Defaults to `1`. */ strokeOpacity: number /** * WARNING: This API is experimental and may change. * The opacity of the glyph's fill from 0 to 1. This behaves like the material's `opacity` but allows * giving the fill a different opacity than the `strokeOpacity`. A fillOpacity of `0` makes the * interior of the glyph invisible, leaving just the `strokeWidth`. Defaults to `1`. */ fillOpacity: number sync: (cb: () => void) => void dispose: () => void } export type ThrelteUseLoader = <T extends typeof Loader>( loader: T, memoizeFn: () => InstanceType<T> ) => InstanceType<T> export type Size = { width: number height: number } export type ThreltePointerEvent = Intersection<Object3D<Event>> & { event?: MouseEvent | PointerEvent }
the_stack
* Migrations */

/* Upgrade event handler */
// Reads the "character_sheet" marker attribute and decides which migration
// path to run: the legacy v1.6.2 upgrade when the marker is absent or from a
// different sheet, or the incremental versioned upgrade otherwise.
// NOTE(review): slice(32) presumably strips the sheet-name prefix from the
// marker to leave the bare version string — confirm sheetName is 32 chars.
const handleUpgrade = () => {
  getAttrs(["character_sheet"], v => {
    if (!v.character_sheet || v.character_sheet.indexOf(sheetName) !== 0) upgradeFrom162();
    else if (v.character_sheet.slice(32) !== sheetVersion) upgradeSheet(v.character_sheet.slice(32), true);
  });
};

/* Versioned upgrade */
// Brings a sheet stored at `version` up to date. Each migration step below
// finishes by calling upgradeSheet() again with the version it migrated TO,
// chaining migrations until the sheet is current.
const upgradeSheet = (version: string, firstTime = false, finalTime = false) => {
  // Any version upgrade code should go here
  const performUpgrade = (version: string) => {
    const [major, minor, patch] = version.split(".").map(x => parseInt(x));
    console.log(`Upgrading from version ${version}.`);

    /** v2.1.0
     * convert old format for burst settings for weapons and attacks
     * set ammo and shock checkboxes to reasonable values
     * convert old format for gear readied/stowed
     **/
    if (major === 2 && minor < 1) {
      // Fires only after all 4 repeating-section scans below have completed
      // (getAttrs/setAttrs are asynchronous).
      const upgradeFunction = _.after(4, () => {
        // recalculate these things just to be sure, in case the v1.6.2 update
        // missed them.
        buildShipWeaponsMenu();
        buildAttacksMenu();
        buildMagicMenu();
        generateWeaponDisplay();
        attributes.forEach(calculateMod);
        upgradeSheet("2.1.0");
      });

      // Player weapons: convert burst flags, and enable shock/ammo
      // roll-template fragments where applicable.
      getSectionIDs("repeating_weapons", idArray => {
        const sourceAttrs = [
          ...idArray.map(id => `repeating_weapons_${id}_weapon_burst`),
          ...idArray.map(id => `repeating_weapons_${id}_weapon_shock_damage`),
          ...idArray.map(id => `repeating_weapons_${id}_weapon_ammo`)
        ];
        getAttrs(sourceAttrs, v => {
          const setting = idArray.reduce((m: {[k: string]: string}, id) => {
            // Old numeric burst flag -> new roll-formula fragment.
            if (v[`repeating_weapons_${id}_weapon_burst`] === "0") m[`repeating_weapons_${id}_weapon_burst`] = "";
            else if (v[`repeating_weapons_${id}_weapon_burst`] === "2") m[`repeating_weapons_${id}_weapon_burst`] = "+ 2[Burst]";
            // Non-zero shock damage: enable the shock line in the roll template.
            if (v[`repeating_weapons_${id}_weapon_shock_damage`] !== "0") m[`repeating_weapons_${id}_weapon_shock`] = "{{shock=[[@{weapon_shock_damage} + @{weapon_attribute_mod}[Attribute] + @{weapon_skill_to_damage}[Skill]]] ^{SHOCK_DAMAGE_AGAINST_AC_LEQ} @{weapon_shock_ac}!}}";
            // Non-zero ammo: enable the ammo-tracking line.
            if (v[`repeating_weapons_${id}_weapon_ammo`] && v[`repeating_weapons_${id}_weapon_ammo`] !== "0") {
              m[`repeating_weapons_${id}_weapon_use_ammo`] = "{{ammo=[[0@{weapon_ammo} - (1 @{weapon_burst})]] / @{weapon_ammo|max}}}";
            }
            return m;
          }, {});
          setAttrs(setting, {}, () => upgradeFunction());
        });
      });

      // Ship weapons: enable ammo tracking where a max ammo value exists.
      getSectionIDs("repeating_ship-weapons", idArray => {
        getAttrs(idArray.map(id => `repeating_ship-weapons_${id}_weapon_ammo_max`), v => {
          const setting = idArray.reduce((m: {[key: string]: string}, id) => {
            if (v[`repeating_ship-weapons_${id}_weapon_ammo_max`] && v[`repeating_ship-weapons_${id}_weapon_ammo_max`] !== "0") {
              m[`repeating_ship-weapons_${id}_weapon_use_ammo`] = "{{ammo=[[@{weapon_ammo} - 1]] / @{weapon_ammo_max}}}";
            }
            return m;
          }, {});
          setAttrs(setting, {}, () => upgradeFunction());
        });
      });

      // NPC attacks: same burst-flag conversion as player weapons.
      getSectionIDs("repeating_npc-attacks", idArray => {
        getAttrs(idArray.map(id => `repeating_npc-attacks_${id}_attack_burst`), v => {
          const setting = idArray.reduce((m: {[key: string]: string}, id) => {
            if (v[`repeating_npc-attacks_${id}_attack_burst`] === "0") m[`repeating_npc-attacks_${id}_attack_burst`] = "";
            else if (v[`repeating_npc-attacks_${id}_attack_burst`] === "2") m[`repeating_npc-attacks_${id}_attack_burst`] = "+ 2[Burst]";
            return m;
          }, {});
          setAttrs(setting, {}, () => upgradeFunction());
        });
      });

      // Gear: normalize readied/stowed status to upper case.
      getSectionIDs("repeating_gear", idArray => {
        getAttrs(idArray.map(id => `repeating_gear_${id}_gear_status`), v => {
          const setting = idArray.reduce((m: {[key: string]: string}, id) => {
            m[`repeating_gear_${id}_gear_status`] = (v[`repeating_gear_${id}_gear_status`] || "").toUpperCase();
            return m;
          }, {});
          mySetAttrs(setting, v, null, () => upgradeFunction());
        });
      });
    }

    /** v2.2.0
     * convert single armor line to repeating armor
     * Change @{attribute_query_none} to @{attribute_query}
     **/
    else if (major === 2 && minor < 2) {
      // Fires after both migration branches below complete.
      const upgradeFunction = _.after(2, () => {
        calculateStrDexMod();
        calculateEffort();
        buildPsionicsMenu();
        buildSkillMenu();
        upgradeSheet("2.2.0");
      });

      // Move the single legacy armor line into the repeating armor section.
      getAttrs(["armor_name", "armor_ac", "armor_encumbrance", "armor_type", "setting_ship_tab_name"], v => {
        if (v.armor_ac) {
          const data = {
            armor_active: "1",
            armor_ac: v.armor_ac,
            armor_encumbrance: v.armor_encumbrance || "0",
            armor_name: v.armor_name || "",
            armor_status: "READIED",
            armor_type: (v.armor_type || "").toUpperCase()
          };
          fillRepeatingSectionFromData("armor", data, () => upgradeFunction());
        }
        else upgradeFunction();
        // Piggy-backed fix: migrate the old MECH tab setting.
        if (v.setting_ship_tab_name === "MECH") setAttrs({ ship_vehicle_type: "MECH" });
      });

      // Replace the retired @{attribute_query_none} query with
      // @{attribute_query} across all static and repeating skill rows.
      getSectionIDs("repeating_skills", skillIDs => getSectionIDs("repeating_magic-skills", magicIDs => {
        getSectionIDs("repeating_psychic-skills", psychicIDs => {
          const sourceAttrs = [
            ...skillIDs.map(id => `repeating_skills_${id}_skill_query`),
            ...magicIDs.map(id => `repeating_magic-skills_${id}_skill_query`),
            ...psychicIDs.map(id => `repeating_psychic-skills_${id}_skill_query`),
            ...skills.revised.map(skill => `skill_${skill}_query`),
            ...skills.first.map(skill => `skill_${skill}_query`),
            ...skills.psionic.map(skill => `skill_${skill}_query`),
            "skill_magic_query",
            "skill_magic2_query"
          ];
          getAttrs(sourceAttrs, v => {
            const setting = sourceAttrs.reduce((m: {[key: string]: string}, attrName) => {
              if (v[attrName] === "@{attribute_query_none}") m[attrName] = "@{attribute_query}";
              return m;
            }, {});
            mySetAttrs(setting, v, null, () => upgradeFunction());
          });
        });
      }));
    }

    /** v2.3.1
     * Regenerate drone and weapon ABs
     **/
    else if (major === 2 && (minor < 3 || (minor === 3 && patch === 0))) {
      const upgradeFunction = _.after(1, () => {
        upgradeSheet("2.3.1");
      });
      generateWeaponDisplay();
      getSectionIDs("repeating_drones", idArray => {
        calculateDroneAttack(idArray.map(id => `repeating_drones_${id}`), () => upgradeFunction());
      });
    }

    /** v2.4.3
     * Regenerate Cyberware strain because it was bugged
     **/
    else if (major === 2 && (minor < 4 || (minor === 4 && patch < 3))) {
      calculateCyberwareStrain();
      upgradeSheet("2.4.3");
    }

    /** v2.4.7
     * Move attr to attr_base, and recalculate attr
     **/
    else
if (major === 2 && minor === 4 && patch < 7) { attributes.forEach(attr => { getAttrs([attr, `${attr}_base`], v => { mySetAttrs({[`${attr}_base`]: parseInt(v[`${attr}`]) || 10}, v, null, () => { calculateAttr(attr); }); }); }); upgradeSheet("2.4.7"); } /** v2.4.11 * Recalculate drone attack to remove incorrect double Int bonus */ else if (major === 2 && minor === 4 && patch < 12) { getSectionIDs("repeating_drones", idArray => { calculateDroneAttack(idArray.map(id => `repeating_drones_${id}`)); }) upgradeSheet("2.4.12"); } /** v2.5.3 * Rebuild magic "Quick Menu" to fix incorrect skills being listed. */ else if (major === 2 && minor < 5) { buildMagicMenu(); upgradeSheet("2.5.3") } /** v2.6.0 * Move extra strain into strain_extra * Move NPC AC and HD into npc_ac and npc_hd */ else if (major === 2 && minor < 6) { getAttrs(["strain", "strain_permanent", "strain_extra", "AC", "HP", "HP_max"], v => { const strain_extra = (parseInt(v.strain) || 0) - (parseInt(v.strain_permanent) || 0) mySetAttrs({ strain_extra: strain_extra, npc_ac: v.AC, npc_hd: v.HD, npc_hd_max: v.HP_max }, v, null, () => { upgradeSheet("2.6.0") }) }) } /** v2.6.3 * Regenerate weapon displays (to fix incorrect bonus displayed with Sunblade) */ else if (major === 2 && minor < 6) { generateWeaponDisplay() upgradeSheet("2.6.3") } /** Final upgrade clause, always leave this around */ else upgradeSheet(sheetVersion, false, true); }; if (firstTime) performUpgrade(version); else setAttrs({ character_sheet: `${sheetName} v${version}`, }, {}, () => { if (!finalTime) performUpgrade(version); }); }; /* Main upgrade from pre-worker versioning */ const upgradeFrom162 = () => { console.log("Upgrading from versionless sheet (assumed to be fresh or v1.6.2)."); const upgradeFunction = _.after(13, () => { upgradeSheet("2.0.1"); }); // Legacy migration getAttrs([1, 2, 3, 4, 5, 6, 7, 8].map(i => `psionics_mastered_${i}`), v => { const setting: {[key: string]: string} = {}; for (let i = 1; i < 9; i++) { const technique = 
v[`psionics_mastered_${i}`]; if (technique) { const newRowId = generateRowID(); setting[`repeating_techniques_${newRowId}_technique_name`] = technique; } } setAttrs(setting); }); getAttrs([1, 2, 3, 4].map(i => `cyberware_${i}`), v => { const setting: {[key: string]: string} = {}; for (let i = 1; i < 5; i++) { const cyberware = v[`cyberware_${i}`]; if (cyberware) { const newRowId = generateRowID(); setting[`repeating_cyberware_${newRowId}_cyberware_name`] = cyberware; } } setAttrs(setting); }); getAttrs(["languages"], v => { if (v.languages) { const setting: {[key: string]: string} = {}; v.languages.split(/\r?\n/).filter(l => !!l).forEach(language => { const newRowId = generateRowID(); setting[`repeating_languages_${newRowId}_language`] = language; }); setAttrs(setting); } }); const attrConversionData = { armor_enc: "armor_encumbrance", cha: "charisma", cha_misc: "charisma_bonus", con: "constitution", con_misc: "constitution_bonus", dex: "dexterity", dex_misc: "dexterity_bonus", gender: "species_gender", hd: "npc_hd", int: "intelligence", int_misc: "intelligence_bonus", morale: "npc_morale", move: "npc_move", name: "npc_name", notes: "npc_notes", npc_ac: "AC", saves: "npc_saves", ship_hp_min: "ship_hp", ship_current_crew: "ship_crew", ship_last_maintenance_cost: "ship_last_maintenance", skills: "npc_skills", skill_biopsion: "skill_biopsionics", skill_metapsion: "skill_metapsionics", skill_points: "unspent_skill_points", str: "strength", strain_perm: "strain_permanent", str_misc: "strength_bonus", wis: "wisdom", wis_misc: "wisdom_bonus", }; const attrsToConvertFromOnTo1 = [ "homebrew_luck_save", "homebrew_extra_skills", "setting_heroic_enable", ]; const customConversionAttrs = [ "damage", "homebrew_psionics_disable", "npc_attacks", "npc_attack_bonus", "setting_space_magic_enable", "ship_other_notes", "ship_free_hardpoints", "ship_free_mass", "ship_free_power", "skill_culture_alien_type", "skill_culture_one_value", "skill_culture_two_value", 
"skill_culture_three_value", "profession_type", "tab", ...[1, 2, 3, 4].map(n => `homebrew_custom_counter_${n}_name`), ...[1, 2, 3, 4].map(n => `homebrew_custom_counter_${n}_counter`), ]; // convert non-repeating attributes getAttrs([ ...Object.keys(attrConversionData), ...Object.values(attrConversionData), ...attrsToConvertFromOnTo1, ...customConversionAttrs, ], v => { const setting = Object.entries(attrConversionData).reduce((m: {[key: string]: string}, [oldName, newName]) => { if (v[oldName] && v[oldName] !== "" && `${v[newName]}` !== `${v[oldName]}`) m[newName] = v[oldName]; return m; }, {}); attrsToConvertFromOnTo1.forEach(name => { if (v[name] === "on") setting[name] = "1"; }); // convert skill name format ["one", "two", "three"].forEach(num => { if (v[`skill_culture_${num}_value`]) setting[`skill_culture_${num}_name`] = `Culture/${v[`skill_culture_${num}_value`]}`; }); if (v.profession_type) setting.skill_profession_name = `Profession/${v.profession_type}`; if (v.skill_culture_alien_type) setting.skill_culture_alien_name = `Culture/Alien/${v.skill_culture_alien_type}`; // Write legacy ship data if (v.ship_free_hardpoints || v.ship_free_mass || v.ship_free_power) { setting.ship_other_notes = `\nLegacy attributes${ v.ship_free_power ? `\nFree Power: ${v.ship_free_power}` : ""}${ v.ship_free_mass ? `\nFree Mass: ${v.ship_free_mass}` : ""}${ v.ship_free_hardpoints ? 
`\nFree Hardpoints: ${v.ship_free_hardpoints}` : ""} ${v.ship_other_notes || ""}`; } // convert homebrew custom counter stuff const customCounterData = [1, 2, 3, 4].reduce((m, num) => { if (v[`homebrew_custom_counter_${num}_name`]) m.push({ resource_name: v[`homebrew_custom_counter_${num}_name`], resource_count: v[`homebrew_custom_counter_${num}_counter`] || 0 }); return m; }, []); fillRepeatingSectionFromData("resources", customCounterData, () => upgradeFunction()); // Tab if (`${v.tab}` === "1" || `${v.tab}` === "4") setting.tab = "character"; if (`${v.tab}` === "2") setting.tab = "ship"; if (`${v.tab}` === "3") { setting.tab = "character"; setting.npc = "1"; } // NPC attack if (v.damage) { const newAttack = { attack_damage: v.damage, attack_name: translate("ATTACK"), attack_number: v.npc_attacks || "1" }; fillRepeatingSectionFromData("npc-attacks", newAttack, () => upgradeFunction()); } else upgradeFunction(); // Psionics/Space Magic toggle conversion if (v.setting_space_magic_enable === "on" && v.homebrew_psionics_disable !== "on") setting.setting_super_type = "both"; else if (v.setting_space_magic_enable === "on") setting.setting_super_type = "magic"; else if (v.homebrew_psionics_disable === "on") setting.setting_super_type = "neither"; setAttrs(setting, {}, () => upgradeFunction()); }); // convert weapon attributes, and extract ship weapons getSectionIDs("repeating_weapons", idArray => { const oldAttrs = [ ...idArray.map(id => `repeating_weapons_${id}_attribute_mod`), ...idArray.map(id => `repeating_weapons_${id}_add_skill`), ...idArray.map(id => `repeating_weapons_${id}_weapon_shock`), ...idArray.map(id => `repeating_weapons_${id}_ship_weapon_name`), ...idArray.map(id => `repeating_weapons_${id}_ship_weapon_power`), ...idArray.map(id => `repeating_weapons_${id}_ship_weapon_ab`), ...idArray.map(id => `repeating_weapons_${id}_ship_weapon_damage`), ...idArray.map(id => `repeating_weapons_${id}_ship_weapon_ammo`), ...idArray.map(id => 
`repeating_weapons_${id}_ship_weapon_special_effects`), ...idArray.map(id => `repeating_weapons_${id}_ship_weapon_broken`), ]; getAttrs(oldAttrs, v => { const setting = idArray.reduce((m: {[key: string]: string}, id) => { if (v[`repeating_weapons_${id}_add_skill`] === "@{weapon_skill_bonus}") m[`repeating_weapons_${id}_weapon_skill_to_damage`] = "@{weapon_skill_bonus}"; if (v[`repeating_weapons_${id}_weapon_shock`]) m[`repeating_weapons_${id}_weapon_shock_damage`] = v[`repeating_weapons_${id}_weapon_shock`]; const modValue = v[`repeating_weapons_${id}_attribute_mod`]; switch (modValue) { case "@{dex_bonus}": m[`repeating_weapons_${id}_weapon_attribute_mod`] = "@{dexterity_mod}"; break; case "@{con_bonus}": m[`repeating_weapons_${id}_weapon_attribute_mod`] = "@{constitution_mod}"; break; case "@{int_bonus}": m[`repeating_weapons_${id}_weapon_attribute_mod`] = "@{intelligence_mod}"; break; case "@{wis_bonus}": m[`repeating_weapons_${id}_weapon_attribute_mod`] = "@{wisdom_mod}"; break; case "@{cha_bonus}": m[`repeating_weapons_${id}_weapon_attribute_mod`] = "@{charisma_mod}"; break; default: m[`repeating_weapons_${id}_weapon_attribute_mod`] = "@{strength_mod}"; } return m; }, {}); const data = idArray.filter(id => v[`repeating_weapons_${id}_ship_weapon_name`]) .map(id => { const row: {[key: string]: string} = {}; row.weapon_name = v[`repeating_weapons_${id}_ship_weapon_name`]; if (v[`repeating_weapons_${id}_ship_weapon_power`]) row.weapon_power = v[`repeating_weapons_${id}_ship_weapon_power`]; if (v[`repeating_weapons_${id}_ship_weapon_ab`]) row.weapon_attack_bonus = v[`repeating_weapons_${id}_ship_weapon_ab`]; if (v[`repeating_weapons_${id}_ship_weapon_damage`]) row.weapon_damage = v[`repeating_weapons_${id}_ship_weapon_damage`]; if (v[`repeating_weapons_${id}_ship_weapon_ammo`]) row.weapon_ammo = v[`repeating_weapons_${id}_ship_weapon_ammo`]; if (v[`repeating_weapons_${id}_ship_weapon_special_effects`]) row.weapon_qualities = 
v[`repeating_weapons_${id}_ship_weapon_special_effects`]; if (v[`repeating_weapons_${id}_ship_weapon_broken`] === "on") row.weapon_broken = "1"; return row; }); fillRepeatingSectionFromData("ship-weapons", data, () => upgradeFunction()); setAttrs(setting, null, () => upgradeFunction()); }); }); // convert skills getSectionIDs("repeating_skills", idArray => { const oldAttrs = [ ...idArray.map(id => `repeating_skills_${id}_custom_skill_1_name`), ...idArray.map(id => `repeating_skills_${id}_custom_skill_2_name`), ...idArray.map(id => `repeating_skills_${id}_custom_skill_1_level`), ...idArray.map(id => `repeating_skills_${id}_custom_skill_2_level`), ...idArray.map(id => `repeating_skills_${id}_custom_skill_1_specialist`), ...idArray.map(id => `repeating_skills_${id}_custom_skill_2_specialist`), ]; getAttrs(oldAttrs, v => { const data = idArray.reduce((m, id) => { [1, 2].forEach(i => { if (v[`repeating_skills_${id}_custom_skill_${i}_name`]) { const skillLevel = (typeof v[`repeating_skills_${id}_custom_skill_${i}_level`] === "undefined") ? 
"-1" : v[`repeating_skills_${id}_custom_skill_${i}_level`]; m.push({ skill_name: v[`repeating_skills_${id}_custom_skill_${i}_name`], skill: skillLevel, skill_specialist: v[`repeating_skills_${id}_custom_skill_${i}_specialist`] || "2d6" }); } }); return m; }, []); idArray.forEach(id => removeRepeatingRow(`repeating_skills_${id}`)); fillRepeatingSectionFromData("skills", data, () => upgradeFunction()); }); }); // convert techniques getSectionIDs("repeating_technique", idArray => { const oldAttrs = [ ...idArray.map(id => `repeating_technique_${id}_technique`), ...idArray.map(id => `repeating_technique_${id}_technique_description`), ]; getAttrs(oldAttrs, v => { const data = idArray.reduce((m, id) => { if (v[`repeating_technique_${id}_technique`]) m.push({ technique_name: v[`repeating_technique_${id}_technique`], technique_description: v[`repeating_technique_${id}_technique_description`] || "" }); return m; }, []); fillRepeatingSectionFromData("techniques", data, () => upgradeFunction()); idArray.forEach(id => removeRepeatingRow(`repeating_technique_${id}`)); }); }); // convert cyberware name getSectionIDs("repeating_cyberware", idArray => { getAttrs(idArray.map(id => `repeating_cyberware_${id}_cyberware`), v => { const setting = idArray.reduce((m: {[key: string]: string}, id) => { if (v[`repeating_cyberware_${id}_cyberware`]) m[`repeating_cyberware_${id}_cyberware_name`] = v[`repeating_cyberware_${id}_cyberware`]; return m; }, {}); setAttrs(setting, null, () => upgradeFunction()); }); }); // convert goals getSectionIDs("repeating_goals", idArray => { const oldAttrs = [ ...idArray.map(id => `repeating_goals_${id}_misc_goal`), ...idArray.map(id => `repeating_goals_${id}_misc_goal_xp`), ]; getAttrs(oldAttrs, v => { const setting = idArray.reduce((m: {[key: string]: string}, id) => { if (v[`repeating_goals_${id}_misc_goal`]) m[`repeating_goals_${id}_goal_name`] = v[`repeating_goals_${id}_misc_goal`]; if (v[`repeating_goals_${id}_misc_goal_xp`]) 
m[`repeating_goals_${id}_goal_xp`] = v[`repeating_goals_${id}_misc_goal_xp`]; return m; }, {}); setAttrs(setting, null, () => upgradeFunction()); }); }); // convert languages getSectionIDs("repeating_languages", idArray => { getAttrs(idArray.map(id => `repeating_languages_${id}_languages`), v => { const setting = idArray.reduce((m: {[key: string]: string}, id) => { if (v[`repeating_languages_${id}_languages`]) m[`repeating_languages_${id}_language`] = v[`repeating_languages_${id}_languages`]; return m; }, {}); setAttrs(setting, null, () => upgradeFunction()); }); }); // convert gear status getSectionIDs("repeating_gear", idArray => { getAttrs(idArray.map(id => `repeating_gear_${id}_gear_readied`), v => { const setting = idArray.reduce((m: {[key: string]: string}, id) => { if (`${v[`repeating_gear_${id}_gear_readied`]}` === "1") m[`repeating_gear_${id}_gear_status`] = "readied"; else if (`${v[`repeating_gear_${id}_gear_readied`]}` === "2") m[`repeating_gear_${id}_gear_status`] = "stowed"; return m; }, {}); setAttrs(setting, null, () => upgradeFunction()); }); }); // convert defenses getSectionIDs("repeating_defenses", idArray => { const oldAttrs = [ ...idArray.map(id => `repeating_defenses_${id}_ship_defense_name`), ...idArray.map(id => `repeating_defenses_${id}_ship_defense_special_effects`), ...idArray.map(id => `repeating_defenses_${id}_ship_defense_broken`), ]; getAttrs(oldAttrs, v => { const data = idArray.map(id => { const row: {[key: string]: string} = {}; if (v[`repeating_defenses_${id}_ship_defense_name`]) row.defense_name = v[`repeating_defenses_${id}_ship_defense_name`]; if (v[`repeating_defenses_${id}_ship_defense_special_effects`]) row.defense_effect = v[`repeating_defenses_${id}_ship_defense_special_effects`]; if (v[`repeating_defenses_${id}_ship_defense_broken`]) row.defense_broken = "1"; return row; }); fillRepeatingSectionFromData("ship-defenses", data, () => upgradeFunction()); }); }); // convert fittings getSectionIDs("repeating_fittings", idArray 
=> { const oldAttrs = [ ...idArray.map(id => `repeating_fittings_${id}_ship_fitting_name`), ...idArray.map(id => `repeating_fittings_${id}_ship_fitting_special_effects`), ...idArray.map(id => `repeating_fittings_${id}_ship_fitting_broken`), ]; getAttrs(oldAttrs, v => { const data = idArray.map(id => { const row: {[key: string]: string} = {}; if (v[`repeating_fittings_${id}_ship_fitting_name`]) row.fitting_name = v[`repeating_fittings_${id}_ship_fitting_name`]; if (v[`repeating_fittings_${id}_ship_fitting_special_effects`]) row.fitting_effect = v[`repeating_fittings_${id}_ship_fitting_special_effects`]; if (v[`repeating_fittings_${id}_ship_fitting_broken`]) row.fitting_broken = "1"; return row; }); fillRepeatingSectionFromData("ship-fittings", data, () => upgradeFunction()); }); }); };
the_stack
namespace MakerJs.model { /** * @private */ function getNonZeroSegments(pathToSegment: IPath, breakPoint: IPoint): IPath[] { var segment1 = cloneObject(pathToSegment); if (!segment1) return null; var segment2 = path.breakAtPoint(segment1, breakPoint); if (segment2) { var segments: IPath[] = [segment1, segment2]; for (var i = 2; i--;) { if (round(measure.pathLength(segments[i]), .0001) == 0) { return null; } } return segments; } else if (pathToSegment.type == pathType.Circle) { return [segment1]; } return null; } /** * @private */ function getPointsOnPath(points: IPoint[], onPath: IPath, popOptions: IIsPointOnPathOptions): IPoint[] { const endpointsOnPath: IPoint[] = []; points.forEach(p => { if (measure.isPointOnPath(p, onPath, .00001, null, popOptions)) { endpointsOnPath.push(p); } }); return endpointsOnPath; } /** * @private */ function breakAlongForeignPath(crossedPath: ICrossedPath, overlappedSegments: ICrossedPathSegment[], foreignWalkedPath: IWalkPath) { var foreignPath = foreignWalkedPath.pathContext; var segments = crossedPath.segments; if (measure.isPathEqual(segments[0].absolutePath, foreignPath, .0001, null, foreignWalkedPath.offset)) { segments[0].overlapped = true; segments[0].duplicate = true; overlappedSegments.push(segments[0]); return; } //this will cache the slope, to keep from being recalculated for each segment var popOptions: IIsPointOnPathOptions = {}; var options: IPathIntersectionOptions = { path1Offset: crossedPath.offset, path2Offset: foreignWalkedPath.offset }; var foreignIntersection = path.intersection(crossedPath.pathContext, foreignPath, options); var intersectionPoints = foreignIntersection ? foreignIntersection.intersectionPoints : null; var foreignPathEndPoints = point.fromPathEnds(foreignPath, foreignWalkedPath.offset) || []; for (var i = 0; i < segments.length; i++) { var pointsOfInterest = intersectionPoints ? 
foreignPathEndPoints.concat(intersectionPoints) : foreignPathEndPoints; var pointsToCheck = getPointsOnPath(pointsOfInterest, segments[i].absolutePath, popOptions); if (options.out_AreOverlapped) { segments[i].overlapped = true; overlappedSegments.push(segments[i]); } if (pointsToCheck.length > 0) { //break the path which intersected, and add the shard to the end of the array so it can also be checked in this loop for further sharding. var subSegments: IPath[] = null; var p = 0; while (!subSegments && p < pointsToCheck.length) { subSegments = getNonZeroSegments(segments[i].absolutePath, pointsToCheck[p]); p++; } if (subSegments) { crossedPath.broken = true; segments[i].absolutePath = subSegments[0]; if (subSegments[1]) { var newSegment: ICrossedPathSegment = { absolutePath: subSegments[1], pathId: segments[0].pathId, overlapped: segments[i].overlapped, uniqueForeignIntersectionPoints: [] }; if (segments[i].overlapped) { overlappedSegments.push(newSegment); } segments.push(newSegment); } //re-check this segment for another deep intersection i--; } } } } /** * DEPRECATED - use measure.isPointInsideModel instead. * Check to see if a path is inside of a model. * * @param pathContext The path to check. * @param modelContext The model to check against. * @param farPoint Optional point of reference which is outside the bounds of the modelContext. * @returns Boolean true if the path is inside of the modelContext. 
*/ export function isPathInsideModel(pathContext: IPath, modelContext: IModel, pathOffset?: IPoint, farPoint?: IPoint, measureAtlas?: measure.Atlas): boolean { var options: IMeasurePointInsideOptions = { farPoint: farPoint, measureAtlas: measureAtlas }; var p = point.add(point.middle(pathContext), pathOffset); return measure.isPointInsideModel(p, modelContext, options); } /** * @private */ interface ICrossedPathSegment { isInside?: boolean; uniqueForeignIntersectionPoints: IPoint[]; absolutePath: IPath; addedPath?: IPath; pathId: string; overlapped: boolean; duplicate?: boolean; } /** * @private */ interface ICrossedPath extends IWalkPath { broken: boolean; segments: ICrossedPathSegment[]; } /** * @private */ interface ICombinedModel { crossedPaths: ICrossedPath[]; overlappedSegments: ICrossedPathSegment[]; } /** * DEPRECATED * Break a model's paths everywhere they intersect with another path. * * @param modelToBreak The model containing paths to be broken. * @param modelToIntersect Optional model containing paths to look for intersection, or else the modelToBreak will be used. * @returns The original model (for cascading). 
*/ export function breakPathsAtIntersections(modelToBreak: IModel, modelToIntersect?: IModel) { var modelToBreakAtlas = new measure.Atlas(modelToBreak); modelToBreakAtlas.measureModels(); var modelToIntersectAtlas: measure.Atlas; if (!modelToIntersect) { modelToIntersect = modelToBreak; modelToIntersectAtlas = modelToBreakAtlas; } else { modelToIntersectAtlas = new measure.Atlas(modelToIntersect); modelToIntersectAtlas.measureModels(); }; breakAllPathsAtIntersections(modelToBreak, modelToIntersect || modelToBreak, false, modelToBreakAtlas, modelToIntersectAtlas); return modelToBreak; } /** * @private */ function breakAllPathsAtIntersections(modelToBreak: IModel, modelToIntersect: IModel, checkIsInside: boolean, modelToBreakAtlas: measure.Atlas, modelToIntersectAtlas: measure.Atlas, farPoint?: IPoint): ICombinedModel { var crossedPaths: ICrossedPath[] = []; var overlappedSegments: ICrossedPathSegment[] = []; var walkModelToBreakOptions: IWalkOptions = { onPath: function (outerWalkedPath: IWalkPath) { //clone this path and make it the first segment var segment: ICrossedPathSegment = { absolutePath: path.clone(outerWalkedPath.pathContext, outerWalkedPath.offset), pathId: outerWalkedPath.pathId, overlapped: false, uniqueForeignIntersectionPoints: [] }; var thisPath: ICrossedPath = <ICrossedPath>outerWalkedPath; thisPath.broken = false; thisPath.segments = [segment]; var walkModelToIntersectOptions: IWalkOptions = { onPath: function (innerWalkedPath: IWalkPath) { if (outerWalkedPath.pathContext !== innerWalkedPath.pathContext && measure.isMeasurementOverlapping(modelToBreakAtlas.pathMap[outerWalkedPath.routeKey], modelToIntersectAtlas.pathMap[innerWalkedPath.routeKey])) { breakAlongForeignPath(thisPath, overlappedSegments, innerWalkedPath); } }, beforeChildWalk: function (innerWalkedModel: IWalkModel): boolean { //see if there is a model measurement. if not, it is because the model does not contain paths. 
var innerModelMeasurement = modelToIntersectAtlas.modelMap[innerWalkedModel.routeKey]; return innerModelMeasurement && measure.isMeasurementOverlapping(modelToBreakAtlas.pathMap[outerWalkedPath.routeKey], innerModelMeasurement); } }; //keep breaking the segments anywhere they intersect with paths of the other model walk(modelToIntersect, walkModelToIntersectOptions); if (checkIsInside) { //check each segment whether it is inside or outside for (var i = 0; i < thisPath.segments.length; i++) { var p = point.middle(thisPath.segments[i].absolutePath); var pointInsideOptions: IMeasurePointInsideOptions = { measureAtlas: modelToIntersectAtlas, farPoint: farPoint }; thisPath.segments[i].isInside = measure.isPointInsideModel(p, modelToIntersect, pointInsideOptions); thisPath.segments[i].uniqueForeignIntersectionPoints = pointInsideOptions.out_intersectionPoints; } } crossedPaths.push(thisPath); } }; walk(modelToBreak, walkModelToBreakOptions); return { crossedPaths: crossedPaths, overlappedSegments: overlappedSegments }; } /** * @private */ function checkForEqualOverlaps(crossedPathsA: ICrossedPathSegment[], crossedPathsB: ICrossedPathSegment[], pointMatchingDistance: number) { function compareSegments(segment1: ICrossedPathSegment, segment2: ICrossedPathSegment) { if (measure.isPathEqual(segment1.absolutePath, segment2.absolutePath, pointMatchingDistance)) { segment1.duplicate = segment2.duplicate = true; } } function compareAll(segment: ICrossedPathSegment) { for (var i = 0; i < crossedPathsB.length; i++) { compareSegments(crossedPathsB[i], segment); } } for (var i = 0; i < crossedPathsA.length; i++) { compareAll(crossedPathsA[i]); } } /** * @private */ interface ITrackDeleted { (pathToDelete: IPath, routeKey: string, reason: string): void; } /** * @private */ function addOrDeleteSegments(crossedPath: ICrossedPath, includeInside: boolean, includeOutside: boolean, keepDuplicates: boolean, atlas: measure.Atlas, trackDeleted: ITrackDeleted) { function 
addSegment(modelContext: IModel, pathIdBase: string, segment: ICrossedPathSegment) { var id = getSimilarPathId(modelContext, pathIdBase); var newRouteKey = (id == pathIdBase) ? crossedPath.routeKey : createRouteKey(crossedPath.route.slice(0, -1).concat([id])); segment.addedPath = cloneObject(crossedPath.pathContext); //circles may have become arcs segment.addedPath.type = segment.absolutePath.type; path.copyProps(segment.absolutePath, segment.addedPath); path.moveRelative(segment.addedPath, crossedPath.offset, true); modelContext.paths[id] = segment.addedPath; if (crossedPath.broken) { //save the new segment's measurement var measurement = measure.pathExtents(segment.absolutePath); atlas.pathMap[newRouteKey] = measurement; atlas.modelsMeasured = false; } else { //keep the original measurement atlas.pathMap[newRouteKey] = savedMeasurement; } } function checkAddSegment(modelContext: IModel, pathIdBase: string, segment: ICrossedPathSegment) { if (segment.isInside && includeInside || !segment.isInside && includeOutside) { addSegment(modelContext, pathIdBase, segment); } else { atlas.modelsMeasured = false; trackDeleted(segment.absolutePath, crossedPath.routeKey, 'segment is ' + (segment.isInside ? 
'inside' : 'outside') + ' intersectionPoints=' + JSON.stringify(segment.uniqueForeignIntersectionPoints)); } } //save the original measurement var savedMeasurement = atlas.pathMap[crossedPath.routeKey]; //delete the original, its segments will be added delete crossedPath.modelContext.paths[crossedPath.pathId]; delete atlas.pathMap[crossedPath.routeKey]; for (var i = 0; i < crossedPath.segments.length; i++) { if (crossedPath.segments[i].duplicate) { if (keepDuplicates) { addSegment(crossedPath.modelContext, crossedPath.pathId, crossedPath.segments[i]); } else { trackDeleted(crossedPath.segments[i].absolutePath, crossedPath.routeKey, 'segment is duplicate'); } } else { checkAddSegment(crossedPath.modelContext, crossedPath.pathId, crossedPath.segments[i]); } } } /** * Combine 2 models. Each model will be modified accordingly. * * @param modelA First model to combine. * @param modelB Second model to combine. * @param includeAInsideB Flag to include paths from modelA which are inside of modelB. * @param includeAOutsideB Flag to include paths from modelA which are outside of modelB. * @param includeBInsideA Flag to include paths from modelB which are inside of modelA. * @param includeBOutsideA Flag to include paths from modelB which are outside of modelA. * @param options Optional ICombineOptions object. * @returns A new model containing both of the input models as "a" and "b". 
*/ export function combine(modelA: IModel, modelB: IModel, includeAInsideB: boolean = false, includeAOutsideB: boolean = true, includeBInsideA: boolean = false, includeBOutsideA: boolean = true, options?: ICombineOptions) { var opts: ICombineOptions = { trimDeadEnds: true, pointMatchingDistance: .005, out_deleted: [{ paths: {} }, { paths: {} }] }; extendObject(opts, options); opts.measureA = opts.measureA || new measure.Atlas(modelA); opts.measureB = opts.measureB || new measure.Atlas(modelB); //make sure model measurements capture all paths opts.measureA.measureModels(); opts.measureB.measureModels(); if (!opts.farPoint) { var measureBoth = measure.increase(measure.increase({ high: [null, null], low: [null, null] }, opts.measureA.modelMap['']), opts.measureB.modelMap['']); opts.farPoint = point.add(measureBoth.high, [1, 1]); } var pathsA = breakAllPathsAtIntersections(modelA, modelB, true, opts.measureA, opts.measureB, opts.farPoint); var pathsB = breakAllPathsAtIntersections(modelB, modelA, true, opts.measureB, opts.measureA, opts.farPoint); checkForEqualOverlaps(pathsA.overlappedSegments, pathsB.overlappedSegments, opts.pointMatchingDistance); function trackDeleted(which: number, deletedPath: IPath, routeKey: string, reason: string) { addPath(opts.out_deleted[which], deletedPath, 'deleted'); var p = deletedPath as IPathRemoved; p.reason = reason; p.routeKey = routeKey; } for (var i = 0; i < pathsA.crossedPaths.length; i++) { addOrDeleteSegments(pathsA.crossedPaths[i], includeAInsideB, includeAOutsideB, true, opts.measureA, (p, id, reason) => trackDeleted(0, p, id, reason)); } for (var i = 0; i < pathsB.crossedPaths.length; i++) { addOrDeleteSegments(pathsB.crossedPaths[i], includeBInsideA, includeBOutsideA, false, opts.measureB, (p, id, reason) => trackDeleted(1, p, id, reason)); } var result: IModel = { models: { a: modelA, b: modelB } }; if (opts.trimDeadEnds) { var shouldKeep: IWalkPathBooleanCallback; //union if (!includeAInsideB && !includeBInsideA) { 
shouldKeep = function (walkedPath: IWalkPath): boolean {

    //When A and B share an outer contour, the segments marked as duplicate will not pass the "inside" test on either A or B.
    //Duplicates were discarded from B but kept in A
    for (var i = 0; i < pathsA.overlappedSegments.length; i++) {
        if (pathsA.overlappedSegments[i].duplicate && walkedPath.pathContext === pathsA.overlappedSegments[i].addedPath) {
            return false;
        }
    }

    //default - keep the path
    return true;
}
            }

            //prune dangling segments left over after the combine; shouldKeep (when set) protects
            //paths that only look like dead ends, and every pruned path is recorded per-model
            removeDeadEnds(result, null, shouldKeep, (wp, reason) => {
                //route[1] is 'a' or 'b' — map back to the out_deleted slot for that model
                var which = wp.route[1] === 'a' ? 0 : 1;
                trackDeleted(which, wp.pathContext, wp.routeKey, reason)
            });
        }

        //pass options back to caller
        //NOTE(review): if the caller passed no options object this hands undefined to
        //extendObject — presumably extendObject tolerates a null/undefined target; confirm.
        extendObject(options, opts);

        return result;
    }

    /**
     * Combine 2 models, resulting in a intersection. Each model will be modified accordingly.
     *
     * @param modelA First model to combine.
     * @param modelB Second model to combine.
     * @returns A new model containing both of the input models as "a" and "b".
     */
    export function combineIntersection(modelA: IModel, modelB: IModel) {
        //intersection = keep only the parts of each model that lie inside the other
        return combine(modelA, modelB, true, false, true, false);
    }

    /**
     * Combine 2 models, resulting in a subtraction of B from A. Each model will be modified accordingly.
     *
     * @param modelA First model to combine.
     * @param modelB Second model to combine.
     * @returns A new model containing both of the input models as "a" and "b".
     */
    export function combineSubtraction(modelA: IModel, modelB: IModel) {
        //A minus B = A's paths outside B, plus B's paths inside A (forming the cut edge)
        return combine(modelA, modelB, false, true, true, false);
    }

    /**
     * Combine 2 models, resulting in a union. Each model will be modified accordingly.
     *
     * @param modelA First model to combine.
     * @param modelB Second model to combine.
     * @returns A new model containing both of the input models as "a" and "b".
     */
    export function combineUnion(modelA: IModel, modelB: IModel) {
        //union = keep only the parts of each model that lie outside the other
        return combine(modelA, modelB, false, true, false, true);
    }
}
the_stack
"use strict"

import Rect = require("./rect");
import Css = require("./css");
import BlokUserSettings = require("./blok-user-settings");
import BlokContainer = require("./blok-container");
import BlokAdapter = require("./blok-adapter");
import Utils = require("./utils");

/**
 * Wraps an Illustrator pageItem to add the capabilities needed for layout.
 * Ensures that all layout settings are persisted to disk.
 */
class Blok {
    /**
     * Normalize art object bounds
     *
     * @param pageItem - an Illustrator pageItem
     * @returns a 4 digit array representing the geometric bounds of art in SCREEN coordinates
     */
    protected static getPageItemBounds(pageItem: any): number[] {
        var bounds = pageItem.geometricBounds;

        // Convert from cartesian to screen coordinates
        // (flip the two Y components of [x1, y1, x2, y2])
        bounds[1] *= -1;
        bounds[3] *= -1;

        return bounds;
    }

    // The wrapped Illustrator pageItem; all persistent state lives in its tags
    // (see getSavedProperty/setSavedProperty).
    protected _pageItem: any;

    /** DO NOT CALL, use a static method to instantiate! */
    constructor(pageItem: any) {
        this._pageItem = pageItem;
    }

    /** The visible position and size, relative to the parent BlokContainer */
    public getRect(): Rect {
        let actualArtboard = new Rect(Blok.getPageItemBounds(this._pageItem));
        let actualContainerArtboard = new Rect(Blok.getPageItemBounds(this.getContainer()._pageItem));

        // Translate to coordinates relative to container
        let relativeX = actualArtboard.getX() - actualContainerArtboard.getX();
        let relativeY = actualArtboard.getY() - actualContainerArtboard.getY();

        actualArtboard.setX(relativeX);
        actualArtboard.setY(relativeY);

        return actualArtboard;
    }

    /** Provide a new visible position and size (relative to the parent BlokContainer), the art will update to comply */
    public setRect(value: Rect): void {
        this.layout(value, undefined);
    }

    /** Optional >= 0 number for CSS flex-grow */
    public getFlex(): number {
        return this.getSavedProperty<number>("flex");
    }

    /**
     * Optional >= 0 number for CSS flex-grow.
     *
     * @throws RangeError if value is negative (undefined is allowed and clears the setting)
     */
    public setFlex(value: number): void {
        if (value !== undefined && value < 0) {
            throw new RangeError("Cannot set a negative flex!");
        }
this.setSavedProperty<number>("flex", value); }

    /** Optional value for CSS align-self */
    public getAlignSelf(): Css.Alignments {
        return this.getSavedProperty<Css.Alignments>("alignSelf");
    }

    /** Optional value for CSS align-self */
    public setAlignSelf(value: Css.Alignments): void {
        this.setSavedProperty<Css.Alignments>("alignSelf", value);
    }

    /** Create a settings object reflecting our current state */
    public getUserSettings(): BlokUserSettings {
        let settings = new BlokUserSettings();

        settings.flex = this.getFlex();
        settings.alignSelf = this.getAlignSelf();

        return settings;
    }

    /** Make our current state match the given user settings */
    public setUserSettings(value: BlokUserSettings): void {
        // Entering STRETCH: remember the current dims so they can be restored later
        if (value.alignSelf === Css.Alignments.STRETCH && this.getAlignSelf() !== Css.Alignments.STRETCH) {
            let rect = this.getRect();
            this.setCachedPrestretchWidth(rect.getWidth());
            this.setCachedPrestretchHeight(rect.getHeight());
        }
        // Leaving STRETCH: the next layout should use the remembered pre-stretch dims
        else if (this.getAlignSelf() === Css.Alignments.STRETCH && value.alignSelf !== Css.Alignments.STRETCH) {
            this.setUseCachedPrestretch(true);
        }

        this.setFlex(value.flex);
        this.setAlignSelf(value.alignSelf);
    }

    /** Return a css-layout node */
    public computeCssNode(): any {
        let w, h;

        if (this.getUseCachedPrestretch()) {
            w = this.getCachedPrestretchWidth();
            h = this.getCachedPrestretchHeight();
            // one-shot flag: consume it so the next layout uses real dims again
            this.setUseCachedPrestretch(false);
        }
        else {
            let r = this.getRect();
            w = r.getWidth();
            h = r.getHeight();
        }

        let cssNode: any = {
            style: {
                width: w,
                height: h,
                flex: this.getFlex()
            }
        };

        // Have to set it here, css-layout gets confused if it's set and not needed
        if (this.getAlignSelf() !== undefined) {
            cssNode.style.alignSelf = Css.enumStringToCssString(Css.Alignments[this.getAlignSelf()]);
        }

        return cssNode;
    }

    /** Trigger a layout of our container */
    public invalidate(): void {
        this.getContainer().invalidate();
    }

    /**
     * Check the art's actual dimensions against cachedWidth and cachedHeight. If
     * they don't match, then ask the parent BlokContainer to re-layout.
     *
     * @param lastSelection - the last known selection
     *     NOTE(review): lastSelection is accepted but not read in this
     *     implementation — confirm whether callers still need to pass it.
     */
    public checkForRelayout(lastSelection: any): void {
        let rect = this.getRect();
        let z = this.getZIndex();
        let cachedZ = this.getCachedZIndex();

        let isWidthInvalid = !Utils.nearlyEqual(this.getCachedWidth(), rect.getWidth());
        let isHeightInvalid = !Utils.nearlyEqual(this.getCachedHeight(), rect.getHeight());
        // cachedZ === undefined means we have never recorded a z yet — don't invalidate for that
        let isZIndexInvalid = cachedZ !== undefined && cachedZ !== z;

        if (isWidthInvalid || isHeightInvalid || isZIndexInvalid) {
            this.getContainer().invalidate();
        }

        // Update z index cache
        this.setCachedZIndex(z);
    }

    /**
     * A reference to the parent BlokContainer.
     *
     * @throws Error if the pageItem's parent has no BlokContainer attached
     */
    public getContainer(): BlokContainer {
        let container = undefined;

        if (BlokAdapter.isBlokContainerAttached(this._pageItem.parent)) {
            container = BlokAdapter.getBlokContainer(this._pageItem.parent);
        }
        else {
            throw new Error("Our parent isn't a BlokContainer!");
        }

        return container;
    }

    /** A reference to the BlokContainer at the root of this tree */
    public getRootContainer(): BlokContainer {
        let par: Blok = this.getContainer();
        let blok: Blok = par;

        // Walk parent links until there is no further container.
        // NOTE(review): this relies on getContainer() eventually returning a
        // falsy value at the root (presumably overridden in BlokContainer) —
        // the implementation visible here throws instead; confirm.
        while (par) {
            blok = par;
            par = blok.getContainer();
        }

        return <BlokContainer>blok;
    }

    /**
     * PROTECTED: Resize and position the art, relative to the parent BlokContainer.
     *
     * @param desired - a rectangle for the new location and size
     * @param rootNode - a matching CSS node with full style and layout information, possibly undefined
     *     NOTE(review): rootNode is accepted but not read in this implementation.
     * @param skipScaleTransform - if true, don't perform any scale
     * @param skipCache - if true, don't cache any data
     */
    public layout(desired: Rect, rootNode: any, skipScaleTransform = false, skipCache = false): void {
        let actual = this.getRect();

        let isScaleRequested = false;
        let transformMatrix = app.getIdentityMatrix();
        let aiDeltaX, aiDeltaY;

        if (!skipScaleTransform) {
            // Scale — guard against divide-by-zero on degenerate art
            if (actual.getWidth() === 0) {
                actual.setWidth(1);
            }

            if (actual.getHeight() === 0) {
                actual.setHeight(1);
            }

            // Illustrator scale factors are percentages (100 == unchanged)
            aiDeltaX = (desired.getWidth() / actual.getWidth()) * 100;
            aiDeltaY = (desired.getHeight() / actual.getHeight()) * 100;

            isScaleRequested = !Utils.nearlyEqual(aiDeltaX, 100) || !Utils.nearlyEqual(aiDeltaY, 100);

            transformMatrix = app.concatenateScaleMatrix(transformMatrix, aiDeltaX, aiDeltaY);
        }

        // Translate
        aiDeltaX = desired.getX() - actual.getX();
        aiDeltaY = desired.getY() - actual.getY();

        // Invert Y coordinate to go from screen to cartesian in Q4
        aiDeltaY *= -1;

        transformMatrix = app.concatenateTranslationMatrix(transformMatrix, aiDeltaX, aiDeltaY);

        // Apply
        if (this._pageItem.typename === "TextFrame" && this._pageItem.kind === TextType.AREATEXT) {
            // If you transform area type, it'll scale instead of just resizing the bounding box.
            // You have to manipulate the TextPath
            this._pageItem.textPath.left += aiDeltaX;
            this._pageItem.textPath.top += aiDeltaY;
            this._pageItem.textPath.width = desired.getWidth();
            this._pageItem.textPath.height = desired.getHeight();

            if (!skipCache) {
                // Cache dims
                let curR = this.getRect();
                this.setCachedWidth(curR.getWidth());
                this.setCachedHeight(curR.getHeight());
            }
        }
        else {
            // Skip the (expensive) transform entirely when it would be a no-op
            if (isScaleRequested || !Utils.nearlyEqual(aiDeltaX, 0) || !Utils.nearlyEqual(aiDeltaY, 0)) {
                this._pageItem.transform(
                    transformMatrix,
                    true /*changePositions*/,
                    false /*changeFillPatterns*/,
                    false /*changeFillGradients*/,
                    false /*changeStrokePattern*/,
                    0.0 /*changeLineWidth - this one is weird. If you say 5, it multiplies the current stroke width by 5... */,
                    Transformation.TOPLEFT /*transformAbout*/
                );
            }

            if (!skipCache) {
                // Cache dims
                let curR = this.getRect();
                this.setCachedWidth(curR.getWidth());
                this.setCachedHeight(curR.getHeight());
            }
        }
    }

    /**
     * Compare this Blok to another for equality.
     *
     * @param value - another Blok
     *
     * @returns true if the Bloks refer to the same art object
     */
    public equals(value: Blok): boolean {
        return this._pageItem === value._pageItem;
    }

    /** Index of our pageItem among the parent's pageItems, or -1 if not found */
    public getZIndex(): number {
        let index = -1;

        for (let i = 0; i < this._pageItem.parent.pageItems.length; i++) {
            if (this._pageItem === this._pageItem.parent.pageItems[i]) {
                index = i;
                break;
            }
        }

        return index;
    }

    /** Positive number for this art's width before someone asked it to stretch. Use as a cache, not used in layout */
    public getCachedPrestretchWidth(): number {
        return this.getSavedProperty<number>("cachedPrestretchWidth");
    }

    /** Positive number for this art's width before someone asked it to stretch.
Use as a cache, not used in layout */
    public setCachedPrestretchWidth(value: number): void {
        if (value !== undefined && value < 0) {
            throw new RangeError("Cannot set a negative cached prestretch width!");
        }

        this.setSavedProperty<number>("cachedPrestretchWidth", value);
    }

    /** Positive number for this art's height before someone asked it to stretch. Use as a cache, not used in layout */
    public getCachedPrestretchHeight(): number {
        return this.getSavedProperty<number>("cachedPrestretchHeight");
    }

    /** Positive number for this art's height before someone asked it to stretch. Use as a cache, not used in layout */
    public setCachedPrestretchHeight(value: number): void {
        if (value !== undefined && value < 0) {
            throw new RangeError("Cannot set a negative cached prestretch height!");
        }

        this.setSavedProperty<number>("cachedPrestretchHeight", value);
    }

    /** Flag for whether cached prestretch width/height should be used in layout instead of real dims */
    public getUseCachedPrestretch(): boolean {
        return this.getSavedProperty<boolean>("useCachedPrestretch");
    }

    /** Flag for whether cached prestretch width/height should be used in layout instead of real dims */
    public setUseCachedPrestretch(value: boolean): void {
        this.setSavedProperty<boolean>("useCachedPrestretch", value);
    }

    /** Optional positive number for width. Use as a cache, not used in layout */
    protected getCachedWidth(): number {
        return this.getSavedProperty<number>("cachedWidth");
    }

    /** Optional positive number for width. Use as a cache, not used in layout */
    protected setCachedWidth(value: number): void {
        if (value !== undefined && value < 0) {
            throw new RangeError("Cannot set a negative cached width!");
        }

        this.setSavedProperty<number>("cachedWidth", value);
    }

    /** Optional positive number for height. Use as a cache, not used in layout */
    protected getCachedHeight(): number {
        return this.getSavedProperty<number>("cachedHeight");
    }

    /** Optional positive number for height. Use as a cache, not used in layout */
    protected setCachedHeight(value: number): void {
        if (value !== undefined && value < 0) {
            throw new RangeError("Cannot set a negative cached height!");
        }

        this.setSavedProperty<number>("cachedHeight", value);
    }

    /** Optional cache of layer position */
    public getCachedZIndex(): number {
        return this.getSavedProperty<number>("cachedZIndex");
    }

    /** Optional cache of layer position */
    public setCachedZIndex(value: number): void {
        if (value !== undefined && value < 0) {
            throw new RangeError("Cannot set a negative cached z index!");
        }

        this.setSavedProperty<number>("cachedZIndex", value);
    }

    /** Retrieve a property from the pageItem's tags. */
    protected getSavedProperty<T>(name: string): T {
        return BlokAdapter.getSavedProperty<T>(this._pageItem, name);
    }

    /** Set a property in the pageItem's tags so it will be saved with the file. */
    protected setSavedProperty<T>(name: string, value: T): void {
        BlokAdapter.setSavedProperty<T>(this._pageItem, name, value);
    }
}

export = Blok;
the_stack
import { RocketCrab, Party, Player, ServerGame, ClientParty, FinderState, FINDER_ACTIVE_MS, MAX_CHATS_OVERALL, MAX_CHATS_FROM_SINGLE_PLAYER, } from "../types/types"; import { PartyStatus, GameStatus, SocketEvent, RocketcrabMode, } from "../types/enums"; import { getServerGameLibrary } from "../config"; import { v4 as uuidv4 } from "uuid"; import { CronJob } from "cron"; import { getUnixTime } from "date-fns"; import type { Socket } from "socket.io"; import { isChatMsgValid } from "../utils/utils"; const SERVER_GAME_LIST: Array<ServerGame> = getServerGameLibrary().gameList; const PARTY_EXPIRATION_SEC = 60; export const initRocketCrab = (isDevMode?: boolean): RocketCrab => { const partyList: Array<Party> = []; const rocketcrab = { partyList, isFinderActive: false, finderSubscribers: [], }; if (isDevMode) newParty({ rocketcrab, forceGameCode: "ffff" }); return rocketcrab; }; export const initCron = (rocketcrab: RocketCrab): void => { const setDates = () => { rocketcrab.finderActiveDates = { lastStart: getUnixTime(activateFinderJob.lastDate()) * 1000, nextStart: activateFinderJob.nextDate().valueOf(), nextWeekOfStarts: activateFinderJob .nextDates(24) // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore .map((momentDate) => momentDate.valueOf()), }; }; const activateFinderJob = new CronJob( "0 13 * * 6", // https://crontab.guru/#0_13_*_*_6 () => { setDates(); rocketcrab.isFinderActive = true; sendFinderStateToAll(rocketcrab); setTimeout(() => { rocketcrab.isFinderActive = false; // ensure that a long-lived party is not shown on a subsequent // activation of the finder rocketcrab.partyList.forEach((party) => { if (!party.isPublic) return; party.isPublic = false; sendStateToAll(party, rocketcrab, { enableFinderCheck: false, }); }); sendFinderStateToAll(rocketcrab); }, FINDER_ACTIVE_MS); }, null, true, "America/Chicago" ); setDates(); }; export const newParty = ({ rocketcrab: { partyList, finderActiveDates }, forceGameCode, forceUuid, isPublic = 
false, mode = RocketcrabMode.MAIN, }: { rocketcrab: RocketCrab; forceGameCode?: string; forceUuid?: string; isPublic?: boolean; mode?: RocketcrabMode; }): Party => { const newParty: Party = { status: PartyStatus.party, playerList: [], code: forceGameCode || getUniqueGameCode(partyList), uuid: forceUuid || uuidv4(), selectedGameId: "", gameState: { status: GameStatus.loading, }, nextPlayerId: 0, idealHostId: 0, isPublic, createdAsPublic: isPublic, chat: [], bannedIPs: [], mode, }; if (isPublic) { newParty.publicEndDate = finderActiveDates.lastStart + FINDER_ACTIVE_MS; } partyList.push(newParty); setTimeout( () => deletePartyIfEmpty(newParty, partyList), PARTY_EXPIRATION_SEC * 1000 ); return newParty; }; export const getPartyByCode = ( newCode: string, partyList: Array<Party> ): Party => partyList.find(({ code }) => code === newCode); export const getPartyByUuid = (newUuid: string, partyList: Party[]): Party => partyList.find(({ uuid }) => uuid === newUuid); export const reconnectToParty = ( lastPartyState: ClientParty, rocketcrab: RocketCrab ): Party => { const { partyList } = rocketcrab; if (!lastPartyState || !lastPartyState.uuid) return; const { code, uuid, status, selectedGameId, gameState, idealHostId, mode } = lastPartyState || {}; const partyAlreadyExists = getPartyByUuid(uuid, partyList); if (partyAlreadyExists) return partyAlreadyExists; const isValidCode = code?.length === 4 && code?.match(/[a-z]/i) && !getPartyByCode(code, partyList); const party = newParty({ rocketcrab, forceUuid: uuid, ...(isValidCode ? 
{ forceGameCode: code } : {}), }); if (Object.values(PartyStatus).includes(status)) { party.status = status; } if ([RocketcrabMode.MAIN, RocketcrabMode.KIDS].includes(mode)) { party.mode = mode; } if (findGameById(selectedGameId)) { party.selectedGameId = selectedGameId; } party.gameState = gameState; party.idealHostId = idealHostId; return party; }; export const addPlayer = ( name: string, socket: Socket, party: Party, previousId?: number ): Player => { const { playerList } = party; const idNotInUse = !isIDinUse(previousId, party.playerList); const usePreviousId = Number.isInteger(previousId) && previousId >= 0 && idNotInUse; const id = usePreviousId ? previousId : party.nextPlayerId++; const isFirstPlayer = playerList.length === 0; // this is mostly only important for the ffff dev party // in which ids are previousIds that were not created // in this instance of the party are being used. // also important for reconnectToParty. if (id >= party.nextPlayerId) { party.nextPlayerId = id + 1; } const player: Player = { id, name: "", socket, isHost: false, }; playerList.push(player); setName(name, player, playerList); if (isFirstPlayer) { party.idealHostId = id; } setHost(party.idealHostId, playerList); return player; }; export const sendStateToAll = ( party: Party, rocketcrab: RocketCrab, { enableFinderCheck, forceFinderUpdate, }: { enableFinderCheck?: boolean; forceFinderUpdate?: boolean } = {} ): void => { party.playerList.forEach(({ socket, ...player }) => { const clientParty: ClientParty = { me: player, ...getJsonParty(party), isFinderActive: rocketcrab.isFinderActive, }; socket.emit(SocketEvent.UPDATE, clientParty); }); if ( (enableFinderCheck && shouldSendFinderStateUpdate(party, rocketcrab)) || forceFinderUpdate ) { sendFinderStateToAll(rocketcrab); } }; export const sendFinderStateToAll = (rocketcrab: RocketCrab): void => { rocketcrab.finderSubscribers.forEach((socket) => socket.emit(SocketEvent.FINDER_UPDATE, getFinderState(rocketcrab)) ); }; export const 
shouldSendFinderStateUpdate = ( { status, isPublic }: Party, { isFinderActive }: RocketCrab ): boolean => { const thisPartyIsShownOnFinder = status === PartyStatus.party && isPublic; return isFinderActive && thisPartyIsShownOnFinder; }; export const removePlayer = (player: Player, party: Party): void => { const { playerList, idealHostId } = party; const { socket } = player; if (socket && socket.disconnect) { socket.disconnect(true); } deleteFromArray(player, playerList); if (player.isHost) { setHost(idealHostId, playerList); } }; export const deletePartyIfEmpty = ( party: Party, partyList: Array<Party> ): void => { const { playerList, code } = party; if (playerList.length === 0 && code !== "ffff") { // the only players that could possibly // be left are unnamed players disconnectAllPlayers(playerList); deleteFromArray(party, partyList); } }; export const setName = ( name: string, playerToName: Player, playerList: Array<Player> ): void => { const validLength = typeof name === "string" && name.length <= 24; if (!findPlayerByName(name, playerList) && validLength) { playerToName.name = name; } else { playerToName.name = ""; // prevents error if no cookie was set if (name) { playerToName.socket.emit(SocketEvent.INVALID_NAME); } } }; export const setGame = (gameId: string, party: Party): void => { if (findGameById(gameId)) { party.selectedGameId = gameId; } }; export const startGame = async ( party: Party, rocketcrab: RocketCrab ): Promise<void> => { const { gameState, selectedGameId, playerList } = party; const willSendFinderUpdate = shouldSendFinderStateUpdate(party, rocketcrab); party.status === PartyStatus.party && party.isPublic; const game: ServerGame = findGameById(selectedGameId); if (!game) return; if (game.minPlayers && playerList.length < game.minPlayers) return; if (game.maxPlayers && playerList.length > game.maxPlayers) return; party.status = PartyStatus.ingame; gameState.status = GameStatus.loading; sendStateToAll(party, rocketcrab, { forceFinderUpdate: 
willSendFinderUpdate, });

    try {
        gameState.connectedGame = await game.connectToGame();
    } catch (e) {
        // surface connection failure to all players instead of hanging on "loading"
        console.error(e);
        gameState.status = GameStatus.error;
        gameState.error = "❌ Can't connect to " + game.name;
        sendStateToAll(party, rocketcrab);
        return;
    }

    gameState.status = GameStatus.waitingforhost;

    const host = getHost(playerList);

    const onHostGameLoaded = () => {
        gameState.status = GameStatus.inprogress;
        sendStateToAll(party, rocketcrab);
    };

    // if, for some unknown reason, the host doesn't send this event, we'll
    // just assume they're good after 10 seconds.
    // added this because people were getting stuck on "Waiting for host..."
    const hostGameLoadedBackup = setTimeout(onHostGameLoaded, 10 * 1000);

    host.socket.once(SocketEvent.HOST_GAME_LOADED, () => {
        clearTimeout(hostGameLoadedBackup);
        onHostGameLoaded();
    });

    sendStateToAll(party, rocketcrab);
};

/** Return the party to the lobby, discarding any game connection state. */
export const exitGame = (party: Party): void => {
    party.status = PartyStatus.party;

    const { gameState } = party;
    gameState.status = GameStatus.loading;
    delete gameState.connectedGame;
    delete gameState.error;
};

/** Snapshot of the public-party finder, safe to serialize to clients. */
export const getFinderState = ({
    isFinderActive,
    partyList,
    finderActiveDates,
    finderSubscribers,
}: RocketCrab): FinderState => ({
    isActive: isFinderActive,
    // only lobbies that are public AND have already picked a game are listed
    publicPartyList: partyList
        .filter(
            ({ isPublic, status, selectedGameId }) =>
                isPublic && status === PartyStatus.party && selectedGameId
        )
        .map((party) => getJsonParty(party)),
    finderActiveDates,
    subscriberCount: finderSubscribers.length - 1, // not counting the person it's being sent to
});

/** Append a validated chat message; returns false if the message was rejected. */
export const addChatMessage = (
    message: string,
    player: Player,
    party: Party
): boolean => {
    if (!isChatMsgValid(message, player, party.chat)) return false;

    party.chat.push({
        playerId: player.id,
        playerName: player.name,
        message,
        date: Date.now().valueOf(),
    });

    // keep per-player and global chat history within limits
    purgeOverflowMsgs(player, party);

    return true;
};

/** Remove a player by id; when isBan is set, also remember their IP so they can't rejoin. */
export const kickPlayer = (
    playerId: number,
    isBan: boolean,
    party: Party
): void => {
    const playerToKick = party.playerList.find(({ id }) => id === playerId);
    if
(!playerToKick) return;

    if (isBan) {
        // remember the IP so a banned player can't simply rejoin
        party.bannedIPs.push(playerToKick.socket.handshake.address);
    }

    removePlayer(playerToKick, party);
};

/** Enforce the per-player and global chat history limits, dropping oldest messages first. */
const purgeOverflowMsgs = (player: Player, party: Party): void => {
    // count this player's messages currently in the chat log
    const numberOfMsgsFromThisPlayer = party.chat.reduce(
        (prev, cur) => prev + (cur.playerId === player.id ? 1 : 0),
        0
    );

    let numberOfMsgsToRemove =
        numberOfMsgsFromThisPlayer - MAX_CHATS_FROM_SINGLE_PLAYER;

    if (numberOfMsgsToRemove > 0) {
        // removes from the beginning, which will be the oldest msgs
        party.chat = party.chat.filter(({ playerId }) => {
            if (playerId === player.id && numberOfMsgsToRemove > 0) {
                numberOfMsgsToRemove--;
                return false;
            }
            return true;
        });
    }

    // remove overflow from the beginning (oldest)
    if (party.chat.length > MAX_CHATS_OVERALL) {
        party.chat.splice(0, party.chat.length - MAX_CHATS_OVERALL);
    }
};

// undefined when no player has that exact name
const findPlayerByName = (
    nameToFind: string,
    playerList: Array<Player>
): Player => playerList.find(({ name }) => name === nameToFind);

// undefined when the id is not in the server's game library
const findGameById = (gameId: string): ServerGame =>
    SERVER_GAME_LIST.find(({ id }) => id === gameId);

const disconnectAllPlayers = (playerList: Array<Player>): void =>
    playerList.forEach(({ socket }) => socket.disconnect(true));

/** Strip server-only fields (sockets) so a Party can be serialized for clients. */
export const getJsonParty = ({ playerList, ...party }: Party): ClientParty => ({
    playerList: playerList.map(({ id, name, isHost }) => ({
        id,
        name,
        isHost,
    })),
    ...party,
});

const getUniqueGameCode = (ll: Array<Party>): string => {
    let newCode;

    // NOTE(review): as written, the loop also stops when newCode === "ffff",
    // even if an "ffff" (dev) party already exists in ll — which would hand
    // out a duplicate code. This looks like it was meant to RE-roll on "ffff"
    // (i.e. `|| newCode === "ffff"`); confirm intent before changing.
    do {
        newCode = getRandomFourLetters();
    } while (ll.find(({ code }) => code === newCode) && newCode !== "ffff");

    return newCode;
};

// four random lowercase a–z letters
const getRandomFourLetters = (): string => {
    const len = 4;
    const possible = "abcdefghijklmnopqrstuvwxyz";

    let code = "";
    for (let i = 0; i < len; i++) {
        code += possible.charAt(Math.floor(Math.random() * possible.length));
    }

    return code;
};

// remove item by identity; no-op when the item isn't present
const deleteFromArray = (item: any, array: Array<any>): void => {
    const index = array.indexOf(item);
    if (index > -1) {
        array.splice(index, 1);
    }
};

const isIDinUse = (previousId: number, playerList: Array<Player>): boolean =>
    !!playerList.find(({ id }) => id === previousId);

/** Make exactly one player host: the ideal host if present, else the player with the lowest id. */
const setHost = (idealHostId: number, playerList: Array<Player>): void => {
    playerList.forEach((player) => (player.isHost = false));

    const idealHost = playerList.find(({ id }) => id === idealHostId);

    if (idealHost) {
        idealHost.isHost = true;
        return;
    }

    // make the player with the lowest id the host
    // (the MAX_SAFE_INTEGER seed object is only "chosen" when the list is empty)
    playerList.reduce((acc, cur) => (acc.id < cur.id ? acc : cur), {
        id: Number.MAX_SAFE_INTEGER,
        isHost: null,
    }).isHost = true;
};

const getHost = (playerList: Array<Player>): Player =>
    playerList.find(({ isHost }) => isHost);
the_stack
module TDev.RT {
    // Result of sending a request through the cloud proxy: the translated
    // response plus the proxy's own HTTP status code.
    export interface IProxyResponse {
        response: WebResponse;
        statusCode: number;
    }

    // NOTE: the "//?" and "//@" comments below are TouchDevelop API metadata
    // (help text and annotations parsed by the tooling) — keep them verbatim.

    //? An HTTP web request
    //@ stem("request") ctx(general,gckey)
    export class WebRequest extends RTValue {
        private _method: string = undefined;
        private _url: string = undefined;
        // when true, failures surface user-visible notifications
        private _showNotifications: boolean = true;
        // non-undefined enables routing through the cloud proxy on CORS failure
        private _proxyResponseType: string = undefined;

        constructor () {
            super()
        }

        private _headers: StringMap = new StringMap();
        private _content: any = undefined;
        private _credentialsName: string = undefined;
        private _credentialsPassword: string = undefined;
        private _responseReceived: RT.Event_ = new RT.Event_();

        /** Factory: build a GET request for the given url. */
        static mk(url: string, proxyResponseType: string): WebRequest {
            var wr = new WebRequest();
            wr._url = url;
            wr._method = "GET";
            wr._proxyResponseType = proxyResponseType;
            return wr;
        }

        //? Indicates if program notifications should be shown to the user. Default is true.
        public show_notifications(visible: boolean) {
            this._showNotifications = visible;
        }

        public proxyResponseType(): string {
            return this._proxyResponseType;
        }

        //? Gets whether it was a 'get' or a 'post'.
        public method(): string { return this._method; }

        //? Sets the method. Default value is 'get'.
        //@ [method].deflStrings("post", "put", "get", "delete")
        public set_method(method: string): void { this._method = method; }

        //? Gets the url of the request
        public url(): string { return this._url; }

        //? Sets the url of the request. Must be a valid internet address.
        public set_url(url: string): void { this._url = url; }

        //? Gets the value of a given header
        //@ readsMutable
        public header(name: string): string {
            return this._headers.at(name);
        }

        //? Sets an HTML header value. Empty string clears the value
        public set_header(name: string, value: string): void {
            if (!value)
                this._headers.remove(name);
            else
                this._headers.set_at(name, value);
        }

        //? Indicates if both requests are the same instance.
public equals(other: WebRequest): boolean { return this == other; } public toString(): string { return this.method() + " " + this.url(); } //? Sets the Accept header type ('text/xml' for xml, 'application/json' for json). //@ [type].deflStrings('application/json', 'text/xml') public set_accept(type: string) { this._headers.set_at("Accept", type); } //? Displays the request to the wall public post_to_wall(s: IStackFrame): void { var rt = s.rt; if (this._content && this._content.length) rt.postBoxedText("Content-Length: " + this._content.length, s.pc); var keys = this._headers.keys(); for (var i = 0; i < keys.count(); ++i) { var key = keys.at(i); rt.postBoxedText(keys.at(i) + ": " + this._headers.at(key), s.pc); } if (this._credentialsName || this._credentialsPassword) rt.postBoxedText("credentials: " + this._credentialsName, s.pc); rt.postBoxedText(this.toString(), s.pc); } public serializeForProxy() { var credentials = undefined; if (this._credentialsName || this._credentialsPassword) { credentials = { name: this._credentialsName || "", password: this._credentialsPassword || "" }; } var headers = this._headers.keys().a.map(k => { return { name: k, value: this._headers.at(k) }; }); var isString = typeof this._content == "string" return { url: this._url, method: this._method, contentText: isString ? this._content : undefined, content: this._content && !isString ? 
Util.base64EncodeBytes(this._content) : undefined, responseType: this._proxyResponseType, headers: headers, credentials: credentials } } public set_content_type(contentType: string) { this._headers.set_at("Content-Type", contentType); } public debuggerChildren(): any { var r = { 'method': this._method, 'url': this._url, 'headers': this._headers, 'content': this._content, 'user name': this._credentialsName, 'password': this._credentialsPassword, 'notifications': this._showNotifications }; return r; } private mkProxyCrash(proxyResponse: WebResponse): IProxyResponse { return { statusCode: proxyResponse.status_code(), response : WebResponse.mkCrash(this) }; } private sendViaProxyAsync(): Promise { // ProxyResponse if (!Util.check(!!this._proxyResponseType)) return Promise.as(<IProxyResponse>{ statusCode: 0, response: WebResponse.mkCrash(this) }); var proxy = WebRequest.mk(Cloud.getPrivateApiUrl("runtime/web/request"), undefined); proxy.set_method("POST"); proxy.set_content(JSON.stringify(this.serializeForProxy())) return proxy.sendAsync().then(proxyResponse => { switch (proxyResponse.status_code()) { case 502: if (this._showNotifications) HTML.showProxyNotification("Proxy Error: Could not perform web request. " + Cloud.onlineInfo(), this._url); return this.mkProxyCrash(proxyResponse); case 429: case 503: if (this._showNotifications) HTML.showProxyNotification("Proxy Error: Could not perform web request. Did you transfer a lot of data recently? (code 503)", this._url); return this.mkProxyCrash(proxyResponse); case 403: Cloud.accessTokenExpired(); if (this._showNotifications) HTML.showProxyNotification("Proxy Error: Could not perform web request; access denied; your access token might have expired.", this._url); return this.mkProxyCrash(proxyResponse); case 504: if (this._showNotifications) HTML.showProxyNotification("Proxy Error: Could not perform web request. Response too big. 
(code 504)", this._url); return this.mkProxyCrash(proxyResponse); case 400: if (this._showNotifications) HTML.showProxyNotification("Proxy Error: Malformed inputs: " + Util.decodeErrorMessage(proxyResponse.header("ErrorMessage")), this._url); return this.mkProxyCrash(proxyResponse); default: return <IProxyResponse>{ statusCode: proxyResponse.status_code(), response: WebResponse.mkProxy(this, JSON.parse(proxyResponse.content())) }; } }); } private prepareAndSend(client: XMLHttpRequest) { if (this._credentialsName || this._credentialsPassword) { client.open(this.method().toUpperCase(), this.url(), true, this._credentialsName || "", this._credentialsPassword || ""); client.withCredentials = true; } else client.open(this.method().toUpperCase(), this.url(), true); // for some reason WebWorkers don't have FormData var isForms = !isWebWorker && !!this._content && this._content instanceof FormData; var keys = this._headers.keys(); for (var i = 0; i < keys.count(); ++i) { var header = keys.at(i); if (isForms && /^content-type$/i.test(header)) continue; // content-type set by browser when sending form var headerValue = this._headers.at(header); client.setRequestHeader(header, headerValue); if (/^accept$/i.test(header) && /^image\/|^audio\//i.test(headerValue)) { client.responseType = 'arraybuffer'; } } Time.log(this.toString()); client.send(this._content); } public sendAsync(): Promise { return this.sendCoreAsync(); } public sendCoreAsync(): Promise { var request = this; return new Promise((onSuccess: (v: any) => any, onError: (v: any) => any, onProgress: (v: any) => any) => { // quick check for connectivity if (Cloud.isOffline()) { if (request._showNotifications) HTML.showNotificationText('Web request failed, please connect to Internet.'); onSuccess(WebResponse.mkCrash(request)); return; } var onCORS = function () { if (request._proxyResponseType) // CORS exception happened, and we are allowed to proxy { Cloud.authenticateAsync(lf("web request proxying")) 
.done((authenticated) => { if (!authenticated) onSuccess(WebResponse.mkCrash(request)); else request.sendViaProxyAsync().then((r: IProxyResponse) => { // expired token? if (r.statusCode == 403) { // try to regresh token... Cloud.authenticateAsync(lf("web request proxying")) .done((authenticated) => { if (!authenticated) onSuccess(WebResponse.mkCrash(request)); else request.sendViaProxyAsync().then((r: IProxyResponse) => onSuccess(r.response), e => onError(e)); }); } else onSuccess(r.response) }, e => onError(e)); }); } else { if (request._showNotifications) HTML.showCorsNotification(request.url()); onSuccess(WebResponse.mkCrash(request)); } } // calls from HTTPS to HTTP never work and don't call "onerror" in Chrome 38+ if (Web.proxy(this.url()) != this.url() && /^https:\//i.test(document.URL) && /^http:\//.test(this.url())) { onCORS(); return } try { var client: XMLHttpRequest = new XMLHttpRequest(); client.onerror = (e: ErrorEvent) => { Time.log('error with ' + this.toString()); } client.onreadystatechange = () => { if (client.readyState == (XMLHttpRequest.DONE || 4)) { if (client.status == 0) onCORS(); else { var r = WebResponse.mk(request, client); onSuccess(<any>r); } } }; if (HttpLog.enabled) HttpLog.log(request.serializeForProxy()) request.prepareAndSend(client); } catch (e) { onCORS(); } }); } public testCORSAsync(): Promise { if (Web.proxy(this._url) == this._url) return Promise.as(false); var request = this; return new Promise((onSuccess: (v: any) => any, onError: (v: any) => any, onProgress: (v: any) => any) => { try { var client: XMLHttpRequest = new XMLHttpRequest(); client.onerror = (e: ErrorEvent) => Time.log('error with ' + this.toString()); client.onreadystatechange = () => { if (client.readyState == (XMLHttpRequest.DONE || 4)) { onSuccess(client.status == 0); } }; request.prepareAndSend(client); } catch (e) { onSuccess(true); } }); } //? 
User ``send`` instead //@ cap(network) flow(SinkWeb) hidden public send_async(s: IStackFrame) { this.sendAsync().then((response: WebResponse) => { if (this._responseReceived && this._responseReceived.handlers) s.rt.queueLocalEvent(this._responseReceived, [response]); }, (e) => { var r = WebResponse.mkCrash(this); if (this._responseReceived && this._responseReceived.handlers) s.rt.queueLocalEvent(this._responseReceived, [r]); }); } //? Use ``send`` instead //@ ignoreReturnValue hidden public on_response_received(handler:WebResponseAction) : EventBinding { return this._responseReceived.addHandler(handler); } //? Performs the request synchronously //@ async cap(network) flow(SinkWeb) returns(WebResponse) public send(r: ResumeCtx) { this.sendAsync().then((response: WebResponse) => { r.resumeVal(response); }, e => { r.resumeVal(WebResponse.mkCrash(this)); }); } //? Sets the content of a 'post' request public set_content(content: string): void { this._content = content; this.set_content_type("text/plain; charset=utf-8"); } //? Sets the content of a 'post' request as the JSON tree public set_content_as_json(json: JsonObject): void { this.set_content(json.toString()); this.set_content_type("application/json; charset=utf-8"); } //? Sets the content of a 'post' request as a binary buffer public set_content_as_buffer(bytes: Buffer): void { this._content = bytes.buffer; this.set_content_type("application/octet-stream"); } //? Sets the content as multipart/form-data. public set_content_as_form(form: FormBuilder): void { this._content = form.data(); // set by browser // this.set_content_type("multipart/form-data"); if (/^get$/i.test(this.method())) this.set_method("post"); } //? Sets the content of a 'post' request as a JPEG encoded image. Quality from 0 (worse) to 1 (best). 
//@ [quality].defl(0.85) picAsync public set_content_as_picture(pic: Picture, quality: number, r:ResumeCtx): void { pic.loadFirst(r, () => { this.setContentAsPictureInternal(pic, quality); }) } public setContentAsSoundInternal(snd : Sound) : void { var url = snd.getDataUri(); if (url) { var mimeType = Sound.dataUriMimeType(url); var bytes = Util.decodeDataURL(url, mimeType); if (bytes) { this._content = bytes; this.set_content_type(mimeType); } } } public setContentAsPictureInternal(pic: Picture, quality: number, forceJpeg = false): void { quality = Math_.normalize(quality); var mimeType = (quality >= 1 && !forceJpeg) ? "image/png" : "image/jpeg"; var jpegUrl = pic.getDataUri(quality, -1, forceJpeg); var bytes = Util.decodeDataURL(jpegUrl, mimeType); if (bytes) { this._content = bytes; this.set_content_type(mimeType); } } //? Sets the content of a 'post' request as the XML tree //@ import("npm", "xmldom", "0.1.*") public set_content_as_xml(xml: XmlObject): void { this.set_content(xml.toString()); this.set_content_type("text/xml; charset=utf-8"); } //? Sets the name and password for basic authentication. Requires an HTTPS URL, empty string clears. public set_credentials(name: string, password: string): void { if (!this.url().match(/^https:\/\//i)) Util.userError(lf("Web Request->set credentials requires a secure HTTP url (https)")); this._credentialsName = name; this._credentialsPassword = password; } //? Gets the names of the headers //@ readsMutable public header_names(): Collection<string> { return this._headers.keys(); } //? Compresses the request content with gzip and sets the Content-Encoding header //@ [value].defl(true) //@ obsolete public set_compress(value: boolean): void { } } }
the_stack
import { TestBed, waitForAsync } from "@angular/core/testing"; import { By } from "@angular/platform-browser"; import { Component, OnInit } from "@angular/core"; import { MdlRadioComponent, MdlRadioGroupRegisty } from "./mdl-radio.component"; import { FormBuilder, FormControl, FormGroup, FormsModule, ReactiveFormsModule, } from "@angular/forms"; import { MdlRadioModule } from "./mdl-radio.module"; @Component({ // eslint-disable-next-line selector: 'test-radio', template: ` <mdl-radio name="r" [(ngModel)]="radioValue" value="1" mdl-ripple (change)="onChange($event)" >radio label 1 </mdl-radio> <mdl-radio name="r" [(ngModel)]="radioValue" value="2" mdl-ripple (change)="onChange($event)" >radio label 2 </mdl-radio> `, }) class MdlTestRadioComponent implements OnInit { radioValue = "2"; radioVisible = true; form: FormGroup; test = new FormControl(""); constructor(private fb: FormBuilder) {} public ngOnInit() { this.form = this.fb.group({ test: this.test, }); } // eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars public onChange(v: string) {} } @Component({ // eslint-disable-next-line selector: 'test-radio', template: ` <form [formGroup]="testForm"> <div formGroupName="group1" mdl-radio-group> <mdl-radio formControlName="type" value="type1" id="g1t1"></mdl-radio> <mdl-radio formControlName="type" value="type2" id="g1t2"></mdl-radio> </div> <div formGroupName="group2"> <mdl-radio formControlName="type" value="type1" id="g2t1"></mdl-radio> <mdl-radio formControlName="type" value="type2" id="g2t2"></mdl-radio> </div> </form> `, }) class MdlTestUseSameRadioInGroupsComponent implements OnInit { public testForm: FormGroup; public ngOnInit() { this.testForm = new FormGroup({ group1: new FormGroup({ type: new FormControl(""), }), group2: new FormGroup({ type: new FormControl(""), }), }); } } describe("Component: MdlRadio", () => { beforeEach( waitForAsync(() => { TestBed.configureTestingModule({ imports: [MdlRadioModule.forRoot(), 
FormsModule, ReactiveFormsModule], declarations: [ MdlTestRadioComponent, MdlTestUseSameRadioInGroupsComponent, ], }); }) ); it("should add the css class mdl-radio to the host element", () => { const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const checkboxEl: HTMLElement = fixture.nativeElement.children.item(0); expect(checkboxEl.classList.contains("mdl-radio")).toBe(true); }); it( "should support ngModel", waitForAsync(() => { const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const instance = fixture.componentInstance; const component = fixture.debugElement.queryAll( By.directive(MdlRadioComponent) )[0]; instance.radioValue = "1"; fixture.detectChanges(); fixture.whenStable().then(() => { expect(component.componentInstance.optionValue).toEqual("1"); const component2 = fixture.debugElement.queryAll( By.directive(MdlRadioComponent) )[1]; component2.nativeElement.click(); fixture.detectChanges(); fixture.whenStable().then(() => { expect(component.componentInstance.optionValue).toEqual("2"); }); }); }) ); it("should mark the component as focused and blured", () => { const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const inputEl: HTMLInputElement = fixture.debugElement.queryAll( By.css("input") )[0].nativeElement; inputEl.dispatchEvent(new Event("focus")); fixture.detectChanges(); const radioEl: HTMLElement = fixture.debugElement.queryAll( By.directive(MdlRadioComponent) )[0].nativeElement; expect(radioEl.classList.contains("is-focused")).toBe(true); inputEl.dispatchEvent(new Event("blur")); fixture.detectChanges(); expect(radioEl.classList.contains("is-focused")).toBe(false); }); it( "should throw if name and formcontrolname are different", waitForAsync(() => { TestBed.overrideComponent(MdlTestRadioComponent, { set: { template: ` <mdl-radio name="r" formControlName="test" value="1" mdl-ripple>radio label 1</mdl-radio> <mdl-radio name="r" 
formControlName="test" value="2" mdl-ripple>radio label 2</mdl-radio> `, }, }); const fixture = TestBed.createComponent(MdlTestRadioComponent); expect(() => { fixture.detectChanges(); }).toThrow(); }) ); it( "should take the name from formcontrolname if no name os provided", waitForAsync(() => { TestBed.overrideComponent(MdlTestRadioComponent, { set: { template: ` <form [formGroup]="form"> <mdl-radio formControlName="test" value="1" mdl-ripple>radio label 1</mdl-radio> </form> `, }, }); const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const radioComponent = fixture.debugElement.query( By.directive(MdlRadioComponent) ).componentInstance; expect(radioComponent.name).toEqual("test"); }) ); it( "should remove mdl-radio if the component is destroyed", waitForAsync(() => { TestBed.overrideComponent(MdlTestRadioComponent, { set: { template: ` <form [formGroup]="form"> <mdl-radio formControlName="test" value="1" mdl-ripple>radio label 1</mdl-radio> <mdl-radio *ngIf="radioVisible" formControlName="test" value="2" mdl-ripple>radio label 3</mdl-radio> </form> `, }, }); const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const registry = TestBed.inject(MdlRadioGroupRegisty); spyOn(registry, "remove").and.callThrough(); fixture.componentInstance.radioVisible = false; fixture.detectChanges(); expect(registry.remove).toHaveBeenCalled(); }) ); it( "should fire a change event if the state changed", waitForAsync(() => { const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const instance = fixture.componentInstance; spyOn(instance, "onChange"); const component2 = fixture.debugElement.queryAll( By.directive(MdlRadioComponent) )[1]; component2.nativeElement.click(); expect(instance.onChange).toHaveBeenCalledWith("2"); }) ); it( "should be possible to disable the radio input", waitForAsync(() => { const fixture = TestBed.createComponent(MdlTestRadioComponent); 
fixture.detectChanges(); const instance = fixture.componentInstance; const cbDebugElem = fixture.debugElement.queryAll( By.directive(MdlRadioComponent) )[0]; cbDebugElem.componentInstance.setDisabledState(true); fixture.detectChanges(); const checkboxEl: HTMLElement = cbDebugElem.nativeElement; expect(checkboxEl.classList.contains("is-disabled")).toBe( true, "should have css is-disabled" ); const value = instance.radioValue; // should not change on click cbDebugElem.nativeElement.click(); expect(instance.radioValue).toEqual(value); }) ); it( "should not change its current state if it is already checked", waitForAsync(() => { const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const cbDebugElem1 = fixture.debugElement.queryAll( By.directive(MdlRadioComponent) )[0]; const cbInputEl = cbDebugElem1.query(By.css("input")); expect(cbDebugElem1.componentInstance.checked).toBe(false); cbInputEl.triggerEventHandler("keyup.space", {}); fixture.detectChanges(); expect(cbDebugElem1.componentInstance.checked).toBe(false); }) ); it("should be possible to use the same radio buttons in different groups", () => { const fixture = TestBed.createComponent( MdlTestUseSameRadioInGroupsComponent ); fixture.detectChanges(); const g1t1Elem = fixture.debugElement.query(By.css("#g1t1")).nativeElement; const g1t2Elem = fixture.debugElement.query(By.css("#g1t2")).nativeElement; const g2t1Elem = fixture.debugElement.query(By.css("#g2t1")).nativeElement; g1t1Elem.click(); fixture.detectChanges(); expect(g1t1Elem.classList.contains("is-checked")).toBe( true, "the clicked one should be selected" ); expect(g2t1Elem.classList.contains("is-checked")).toBe( false, "the not clicked one should not be selected" ); g1t2Elem.click(); fixture.detectChanges(); expect(g1t1Elem.classList.contains("is-checked")).toBe( false, "the not clicked one should not be selected" ); expect(g2t1Elem.classList.contains("is-checked")).toBe( false, "the not clicked one should not be 
selected" ); }); it("should be possible to set a tabindex", () => { TestBed.overrideComponent(MdlTestRadioComponent, { set: { template: '<mdl-radio tabindex="2"></mdl-radio>', }, }); const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const btnEl: HTMLInputElement = fixture.debugElement.query(By.css("input")) .nativeElement; expect(btnEl.tabIndex).toBe(2); }); it("should not set a default tabindex", () => { TestBed.overrideComponent(MdlTestRadioComponent, { set: { template: "<mdl-radio></mdl-radio>", }, }); const fixture = TestBed.createComponent(MdlTestRadioComponent); fixture.detectChanges(); const el: HTMLInputElement = fixture.debugElement.query(By.css("input")) .nativeElement; expect(el.getAttribute("tabindex")).toEqual(null); }); });
the_stack
import { Tone } from "../Tone"; import { isDefined } from "./TypeCheck"; import { assert } from "./Debug"; /** * An IntervalTimeline event must have a time and duration */ export interface IntervalTimelineEvent { time: number; duration: number; [propName: string]: any; } type IteratorCallback = (event: IntervalTimelineEvent) => void; /** * Similar to Tone.Timeline, but all events represent * intervals with both "time" and "duration" times. The * events are placed in a tree structure optimized * for querying an intersection point with the timeline * events. Internally uses an [Interval Tree](https://en.wikipedia.org/wiki/Interval_tree) * to represent the data. */ export class IntervalTimeline extends Tone { readonly name: string = "IntervalTimeline"; /** * The root node of the inteval tree */ private _root: IntervalNode | null = null; /** * Keep track of the length of the timeline. */ private _length = 0; /** * The event to add to the timeline. All events must * have a time and duration value * @param event The event to add to the timeline */ add(event: IntervalTimelineEvent): this { assert(isDefined(event.time), "Events must have a time property"); assert(isDefined(event.duration), "Events must have a duration parameter"); event.time = event.time.valueOf(); let node: IntervalNode | null = new IntervalNode(event.time, event.time + event.duration, event); if (this._root === null) { this._root = node; } else { this._root.insert(node); } this._length++; // Restructure tree to be balanced while (node !== null) { node.updateHeight(); node.updateMax(); this._rebalance(node); node = node.parent; } return this; } /** * Remove an event from the timeline. 
* @param event The event to remove from the timeline */ remove(event: IntervalTimelineEvent): this { if (this._root !== null) { const results: IntervalNode[] = []; this._root.search(event.time, results); for (const node of results) { if (node.event === event) { this._removeNode(node); this._length--; break; } } } return this; } /** * The number of items in the timeline. * @readOnly */ get length(): number { return this._length; } /** * Remove events whose time time is after the given time * @param after The time to query. */ cancel(after: number): this { this.forEachFrom(after, event => this.remove(event)); return this; } /** * Set the root node as the given node */ private _setRoot(node: IntervalNode | null): void { this._root = node; if (this._root !== null) { this._root.parent = null; } } /** * Replace the references to the node in the node's parent * with the replacement node. */ private _replaceNodeInParent(node: IntervalNode, replacement: IntervalNode | null): void { if (node.parent !== null) { if (node.isLeftChild()) { node.parent.left = replacement; } else { node.parent.right = replacement; } this._rebalance(node.parent); } else { this._setRoot(replacement); } } /** * Remove the node from the tree and replace it with * a successor which follows the schema. 
*/ private _removeNode(node: IntervalNode): void { if (node.left === null && node.right === null) { this._replaceNodeInParent(node, null); } else if (node.right === null) { this._replaceNodeInParent(node, node.left); } else if (node.left === null) { this._replaceNodeInParent(node, node.right); } else { const balance = node.getBalance(); let replacement: IntervalNode; let temp: IntervalNode | null = null; if (balance > 0) { if (node.left.right === null) { replacement = node.left; replacement.right = node.right; temp = replacement; } else { replacement = node.left.right; while (replacement.right !== null) { replacement = replacement.right; } if (replacement.parent) { replacement.parent.right = replacement.left; temp = replacement.parent; replacement.left = node.left; replacement.right = node.right; } } } else if (node.right.left === null) { replacement = node.right; replacement.left = node.left; temp = replacement; } else { replacement = node.right.left; while (replacement.left !== null) { replacement = replacement.left; } if (replacement.parent) { replacement.parent.left = replacement.right; temp = replacement.parent; replacement.left = node.left; replacement.right = node.right; } } if (node.parent !== null) { if (node.isLeftChild()) { node.parent.left = replacement; } else { node.parent.right = replacement; } } else { this._setRoot(replacement); } if (temp) { this._rebalance(temp); } } node.dispose(); } /** * Rotate the tree to the left */ private _rotateLeft(node: IntervalNode): void { const parent = node.parent; const isLeftChild = node.isLeftChild(); // Make node.right the new root of this sub tree (instead of node) const pivotNode = node.right; if (pivotNode) { node.right = pivotNode.left; pivotNode.left = node; } if (parent !== null) { if (isLeftChild) { parent.left = pivotNode; } else { parent.right = pivotNode; } } else { this._setRoot(pivotNode); } } /** * Rotate the tree to the right */ private _rotateRight(node: IntervalNode): void { const parent = 
node.parent; const isLeftChild = node.isLeftChild(); // Make node.left the new root of this sub tree (instead of node) const pivotNode = node.left; if (pivotNode) { node.left = pivotNode.right; pivotNode.right = node; } if (parent !== null) { if (isLeftChild) { parent.left = pivotNode; } else { parent.right = pivotNode; } } else { this._setRoot(pivotNode); } } /** * Balance the BST */ private _rebalance(node: IntervalNode): void { const balance = node.getBalance(); if (balance > 1 && node.left) { if (node.left.getBalance() < 0) { this._rotateLeft(node.left); } else { this._rotateRight(node); } } else if (balance < -1 && node.right) { if (node.right.getBalance() > 0) { this._rotateRight(node.right); } else { this._rotateLeft(node); } } } /** * Get an event whose time and duration span the give time. Will * return the match whose "time" value is closest to the given time. * @return The event which spans the desired time */ get(time: number): IntervalTimelineEvent | null { if (this._root !== null) { const results: IntervalNode[] = []; this._root.search(time, results); if (results.length > 0) { let max = results[0]; for (let i = 1; i < results.length; i++) { if (results[i].low > max.low) { max = results[i]; } } return max.event; } } return null; } /** * Iterate over everything in the timeline. * @param callback The callback to invoke with every item */ forEach(callback: IteratorCallback): this { if (this._root !== null) { const allNodes: IntervalNode[] = []; this._root.traverse(node => allNodes.push(node)); allNodes.forEach(node => { if (node.event) { callback(node.event); } }); } return this; } /** * Iterate over everything in the array in which the given time * overlaps with the time and duration time of the event. 
* @param time The time to check if items are overlapping * @param callback The callback to invoke with every item */ forEachAtTime(time: number, callback: IteratorCallback): this { if (this._root !== null) { const results: IntervalNode[] = []; this._root.search(time, results); results.forEach(node => { if (node.event) { callback(node.event); } }); } return this; } /** * Iterate over everything in the array in which the time is greater * than or equal to the given time. * @param time The time to check if items are before * @param callback The callback to invoke with every item */ forEachFrom(time: number, callback: IteratorCallback): this { if (this._root !== null) { const results: IntervalNode[] = []; this._root.searchAfter(time, results); results.forEach(node => { if (node.event) { callback(node.event); } }); } return this; } /** * Clean up */ dispose(): this { super.dispose(); if (this._root !== null) { this._root.traverse(node => node.dispose()); } this._root = null; return this; } } //------------------------------------- // INTERVAL NODE HELPER //------------------------------------- /** * Represents a node in the binary search tree, with the addition * of a "high" value which keeps track of the highest value of * its children. 
* References: * https://brooknovak.wordpress.com/2013/12/07/augmented-interval-tree-in-c/ * http://www.mif.vu.lt/~valdas/ALGORITMAI/LITERATURA/Cormen/Cormen.pdf * @param low * @param high */ class IntervalNode { // the event container event: IntervalTimelineEvent | null; // the low value low: number; // the high value high: number; // the high value for this and all child nodes max: number; // the nodes to the left private _left: IntervalNode | null = null; // the nodes to the right private _right: IntervalNode | null = null; // the parent node parent: IntervalNode | null = null; // the number of child nodes height = 0; constructor(low: number, high: number, event: IntervalTimelineEvent) { this.event = event; // the low value this.low = low; // the high value this.high = high; // the high value for this and all child nodes this.max = this.high; } /** * Insert a node into the correct spot in the tree */ insert(node: IntervalNode): void { if (node.low <= this.low) { if (this.left === null) { this.left = node; } else { this.left.insert(node); } } else if (this.right === null) { this.right = node; } else { this.right.insert(node); } } /** * Search the tree for nodes which overlap * with the given point * @param point The point to query * @param results The array to put the results */ search(point: number, results: IntervalNode[]): void { // If p is to the right of the rightmost point of any interval // in this node and all children, there won't be any matches. if (point > this.max) { return; } // Search left children if (this.left !== null) { this.left.search(point, results); } // Check this node if (this.low <= point && this.high > point) { results.push(this); } // If p is to the left of the time of this interval, // then it can't be in any child to the right. 
if (this.low > point) { return; } // Search right children if (this.right !== null) { this.right.search(point, results); } } /** * Search the tree for nodes which are less * than the given point * @param point The point to query * @param results The array to put the results */ searchAfter(point: number, results: IntervalNode[]): void { // Check this node if (this.low >= point) { results.push(this); if (this.left !== null) { this.left.searchAfter(point, results); } } // search the right side if (this.right !== null) { this.right.searchAfter(point, results); } } /** * Invoke the callback on this element and both it's branches * @param {Function} callback */ traverse(callback: (self: IntervalNode) => void): void { callback(this); if (this.left !== null) { this.left.traverse(callback); } if (this.right !== null) { this.right.traverse(callback); } } /** * Update the height of the node */ updateHeight(): void { if (this.left !== null && this.right !== null) { this.height = Math.max(this.left.height, this.right.height) + 1; } else if (this.right !== null) { this.height = this.right.height + 1; } else if (this.left !== null) { this.height = this.left.height + 1; } else { this.height = 0; } } /** * Update the height of the node */ updateMax(): void { this.max = this.high; if (this.left !== null) { this.max = Math.max(this.max, this.left.max); } if (this.right !== null) { this.max = Math.max(this.max, this.right.max); } } /** * The balance is how the leafs are distributed on the node * @return Negative numbers are balanced to the right */ getBalance(): number { let balance = 0; if (this.left !== null && this.right !== null) { balance = this.left.height - this.right.height; } else if (this.left !== null) { balance = this.left.height + 1; } else if (this.right !== null) { balance = -(this.right.height + 1); } return balance; } /** * @returns true if this node is the left child of its parent */ isLeftChild(): boolean { return this.parent !== null && this.parent.left === this; } 
/** * get/set the left node */ get left(): IntervalNode | null { return this._left; } set left(node: IntervalNode | null) { this._left = node; if (node !== null) { node.parent = this; } this.updateHeight(); this.updateMax(); } /** * get/set the right node */ get right(): IntervalNode | null { return this._right; } set right(node: IntervalNode | null) { this._right = node; if (node !== null) { node.parent = this; } this.updateHeight(); this.updateMax(); } /** * null out references. */ dispose(): void { this.parent = null; this._left = null; this._right = null; this.event = null; } }
the_stack
declare module "gl-matrix" { interface glMatrix { EPSILON: number; ARRAY_TYPE: number[]; RANDOM: () => number; setMatrixArrayType<T>(type: T): void; toRadian(a: number): number; } export var glMatrix: glMatrix; export interface vec2 { create(): number[]; clone(a: number[]): number[]; fromValues(x: number, y: number): number[]; copy(out: number[], a: number[]): number[]; set(out: number[], x: number, y: number): number[]; add(out: number[], a: number[], b: number[]): number[]; subtract(out: number[], a: number[], b: number[]): number[]; sub(out: number[], a: number[], b: number[]): number[]; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; divide(out: number[], a: number[], b: number[]): number[]; div(out: number[], a: number[], b: number[]): number[]; min(out: number[], a: number[], b: number[]): number[]; max(out: number[], a: number[], b: number[]): number[]; scale(out: number[], a: number[], b: number): number[]; scaleAndAdd(out: number[], a: number[], b: number[], scale: number): number[]; distance(a: number[], b: number[]): number; dist(a: number[], b: number[]): number; squaredDistance(a: number[], b: number[]): number; sqrDist(a: number[], b: number[]): number; length(a: number[]): number; len(a: number[]): number; squaredLength(a: number[]): number; sqrLen(a: number[]): number; negate(out: number[], a: number[]): number[]; inverse(out: number[], a: number[]): number[]; normalize(out: number[], a: number[]): number[]; dot(a: number[], b: number[]): number; cross(out: number[], a: number[], b: number[]): number[]; lerp(out: number[], a: number[], b: number[], t: number): number[]; random(out: number[], scale: number): number[]; transformMat2(out: number[], a: number[], m: number[]): number[]; transformMat2d(out: number[], a: number[], m: number[]): number[]; transformMat3(out: number[], a: number[], m: number[]): number[]; transformMat4(out: number[], a: number[], m: number[]): number[]; 
forEach<T>(a: number[][], stride: number, offset: number, count: number, fn: (a: number[], b: number[], arg: T) => void, arg: T): number[][]; str(a: number[]): string; } export var vec2: vec2; interface vec3 { create(): number[]; clone(a: number[]): number[]; fromValues(x: number, y: number, z: number): number[]; copy(out: number[], a: number[]): number[]; set(out: number[], x: number, y: number, z: number): number[]; add(out: number[], a: number[], b: number[]): number[]; subtract(out: number[], a: number[], b: number[]): number[]; sub(out: number[], a: number[], b: number[]): number[]; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; divide(out: number[], a: number[], b: number[]): number[]; div(out: number[], a: number[], b: number[]): number[]; min(out: number[], a: number[], b: number[]): number[]; max(out: number[], a: number[], b: number[]): number[]; scale(out: number[], a: number[], b: number): number[]; scaleAndAdd(out: number[], a: number[], b: number[], scale: number): number[]; distance(a: number[], b: number[]): number; dist(a: number[], b: number[]): number; squaredDistance(a: number[], b: number[]): number; sqrDist(a: number[], b: number[]): number; length(a: number[]): number; len(a: number[]): number; squaredLength(a: number[]): number; sqrLen(a: number[]): number; negate(out: number[], a: number[]): number[]; inverse(out: number[], a: number[]): number[]; normalize(out: number[], a: number[]): number[]; dot(a: number[], b: number[]): number; cross(out: number[], a: number[], b: number[]): number[]; lerp(out: number[], a: number[], b: number[], t: number): number[]; hermite(out: number[], a: number[], b: number[], c: number[], d: number[], t: number): number[]; bezier(out: number[], a: number[], b: number[], c: number[], d: number[], t: number): number[]; random(out: number[], scale: number): number[]; transformMat4(out: number[], a: number[], m: number[]): number[]; 
transformMat3(out: number[], a: number[], m: number[]): number[]; transformQuat(out: number[], a: number[], q: number[]): number[]; rotateX(out: number[], a: number[], b: number[], c: number): number[]; rotateY(out: number[], a: number[], b: number[], c: number): number[]; rotateZ(out: number[], a: number[], b: number[], c: number): number[]; forEach<T>(a: number[][], stride: number, offset: number, count: number, fn: (a: number[], b: number[], arg: T) => void, arg: T): number[][]; angle(a: number[], b: number[]): number; str(a: number[]): string; } export var vec3: vec3; interface vec4 { create(): number[]; clone(a: number[]): number[]; fromValues(x: number, y: number, z: number, w: number): number[]; copy(out: number[], a: number[]): number[]; set(out: number[], x: number, y: number, z: number, w: number): number[]; add(out: number[], a: number[], b: number[]): number[]; subtract(out: number[], a: number[], b: number[]): number[]; sub(out: number[], a: number[], b: number[]): number[]; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; divide(out: number[], a: number[], b: number[]): number[]; div(out: number[], a: number[], b: number[]): number[]; min(out: number[], a: number[], b: number[]): number[]; max(out: number[], a: number[], b: number[]): number[]; scale(out: number[], a: number[], b: number): number[]; scaleAndAdd(out: number[], a: number[], b: number[], scale: number): number[]; distance(a: number[], b: number[]): number; dist(a: number[], b: number[]): number; squaredDistance(a: number[], b: number[]): number; sqrDist(a: number[], b: number[]): number; length(a: number[]): number; len(a: number[]): number; squaredLength(a: number[]): number; sqrLen(a: number[]): number; negate(out: number[], a: number[]): number[]; inverse(out: number[], a: number[]): number[]; normalize(out: number[], a: number[]): number[]; dot(a: number[], b: number[]): number; lerp(out: number[], a: number[], b: 
number[], t: number): number[]; random(out: number[], scale: number): number[]; transformMat4(out: number[], a: number[], m: number[]): number[]; transformQuat(out: number[], a: number[], q: number[]): number[]; forEach<T>(a: number[][], stride: number, offset: number, count: number, fn: (a: number[], b: number[], arg: T) => void, arg: T): number[][]; str(a: number[]): string; } export var vec4: vec4; interface mat2 { create(): number[]; clone(a: number[]): number[]; copy(out: number[], a: number[]): number[]; identity(out: number[]): number[]; transpose(out: number[], a: number[]): number[]; invert(out: number[], a: number[]): number[]; adjoint(out: number[], a: number[]): number[]; determinant(a: number[]): number; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; rotate(out: number[], a: number[], rad: number): number[]; scale(out: number[], a: number[], v: number[]): number[]; fromRotation(out: number[], rad: number): number[]; fromScaling(out: number[], v: number[]): number[]; str(a: number[]): string; frob(a: number[]): number; LDU(L: number[], D: number[], U: number[], a: number[]): number[][]; } export var mat2: mat2; interface mat2d { create(): number[]; clone(a: number[]): number[]; copy(out: number[], a: number[]): number[]; identity(out: number[]): number[]; invert(out: number[], a: number[]): number[]; determinant(a: number[]): number; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; rotate(out: number[], a: number[], rad: number): number[]; scale(out: number[], a: number[], v: number[]): number[]; translate(out: number[], a: number[], v: number[]): number[]; fromRotation(out: number[], rad: number): number[]; fromScaling(out: number[], v: number[]): number[]; fromTranslation(out: number[], v: number[]): number[]; str(a: number[]): string; frob(a: number[]): number; } export var mat2d: mat2d; interface mat3 { create(): 
number[]; fromMat4(out: number[], a: number[]): number[]; clone(a: number[]): number[]; copy(out: number[], a: number[]): number[]; identity(out: number[]): number[]; transpose(out: number[], a: number[]): number[]; invert(out: number[], a: number[]): number[]; adjoint(out: number[], a: number[]): number[]; determinant(a: number[]): number; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; translate(out: number[], a: number[], v: number[]): number[]; rotate(out: number[], a: number[], rad: number): number[]; scale(out: number[], a: number[], v: number[]): number[]; fromTranslation(out: number[], v: number[]): number[]; fromRotation(out: number[], rad: number): number[]; fromScaling(out: number[], v: number[]): number[]; fromMat2d(out: number[], a: number[]): number[]; fromQuat(out: number[], q: number[]): number[]; normalFromMat4(out: number[], a: number[]): number[]; str(a: number[]): string; frob(a: number[]): number; } export var mat3: mat3; interface mat4 { create(): number[]; clone(a: number[]): number[]; copy(out: number[], a: number[]): number[]; identity(out: number[]): number[]; transpose(out: number[], a: number[]): number[]; invert(out: number[], a: number[]): number[]; adjoint(out: number[], a: number[]): number[]; determinant(a: number[]): number; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; translate(out: number[], a: number[], v: number[]): number[]; scale(out: number[], a: number[], v: number[]): number[]; rotate(out: number[], a: number[], rad: number, axis: number[]): number[]; rotateX(out: number[], a: number[], rad: number): number[]; rotateY(out: number[], a: number[], rad: number): number[]; rotateZ(out: number[], a: number[], rad: number): number[]; fromTranslation(out: number[], v: number[]): number[]; fromScaling(out: number[], v: number[]): number[]; fromRotation(out: number[], rad: number, axis: 
number[]): number[]; fromXRotation(out: number[], rad: number): number[]; fromYRotation(out: number[], rad: number): number[]; fromZRotation(out: number[], rad: number): number[]; fromRotationTranslation(out: number[], q: number[], v: number[]): number[]; fromRotationTranslationScale(out: number[], q: number[], v: number[], s: number[]): number[]; fromRotationTranslationScaleOrigin(out: number[], q: number[], v: number[], s: number[], o: number[]): number[]; fromQuat(out: number[], q: number[]): number[]; frustum(out: number[], left: number, right: number, bottom: number, top: number, near: number, far: number): number[]; perspective(out: number[], fovy: number, aspect: number, near: number, far: number): number[]; perspectiveFromFieldOfView(out: number[], fov: number, near: number, far: number): number[]; ortho(out: number[], left: number, right: number, bottom: number, top: number, near: number, far: number): number[]; lookAt(out: number[], eye: number[], center: number[], up: number[]): number[]; str(a: number[]): string; frob(a: number[]): number; } export var mat4: mat4; interface quat { create(): number[]; rotationTo(out: number[], a: number[], b: number[]): number[]; setAxes(out: number[], view: number[], right: number[], up: number[]): number[]; clone(a: number[]): number[]; fromValues(x: number, y: number, z: number, w: number): number[]; copy(out: number[], a: number[]): number[]; set(out: number[], x: number, y: number, z: number, w: number): number[]; identity(out: number[]): number[]; setAxisAngle(out: number[], axis: number[], rad: number): number[]; add(out: number[], a: number[], b: number[]): number[]; multiply(out: number[], a: number[], b: number[]): number[]; mul(out: number[], a: number[], b: number[]): number[]; scale(out: number[], a: number[], b: number): number[]; rotateX(out: number[], a: number[], rad: number): number[]; rotateY(out: number[], a: number[], rad: number): number[]; rotateZ(out: number[], a: number[], rad: number): number[]; 
calculateW(out: number[], a: number[]): number[]; dot(a: number[], b: number[]): number; lerp(out: number[], a: number[], b: number[], t: number): number[]; slerp(out: number[], a: number[], b: number[], t: number): number[]; sqlerp(out: number[], a: number[], b: number[], c: number[], d: number[], t: number): number[]; invert(out: number[], a: number[]): number[]; conjugate(out: number[], a: number[]): number[]; length(a: number[]): number; len(a: number[]): number; squaredLength(a: number[]): number; sqrLen(a: number[]): number; normalize(out: number[], a: number[]): number[]; fromMat3(out: number[], m: number[]): number[]; str(a: number[]): string; } export var quat: quat; }
the_stack
import { AvNodeTransform, EndpointAddr, InitialInterfaceLock, MinimalPose, minimalPoseFromTransform } from '@aardvarkxr/aardvark-shared';
import bind from 'bind-decorator';
import * as React from 'react';
import { EntityComponent } from './aardvark_composed_entity';
import { AvEntityChild } from './aardvark_entity_child';
import { AvGadget } from './aardvark_gadget';
import { ActiveInterface } from './aardvark_interface_entity';
import { AvTransform } from './aardvark_transform';
import { k_remoteGrabbableInterface, RemoteGadgetComponent, RemoteGadgetEvent, RemoteGadgetEventType, RGESendEvent } from './component_remote_gadget';
import { NetworkedGadgetInfo, NetworkedItemInfo, NetworkUniverseEvent, NetworkUniverseEventType, UniverseInitInfo } from './network_universe';

interface RemoteUniverseProps
{
	/** This callback is called when the room should convey the supplied event to the network
	 * universe and call remoteEvent() on that object.
	 *
	 * @param event The opaque event object to send
	 * @param reliable If this is true, the event must be delivered or the gadgets may
	 * 			get out of synch with each other. If this is false, the room should
	 * 			make its best effort to deliver the event, but the system will recover
	 * 			if the event is discarded.
	 */
	onRemoteEvent: ( event: object, reliable: boolean ) => void;

	/** The initialization info packet provided by the NetworkUniverse that this remote
	 * universe is connected to.
	 */
	initInfo: object;
}

/** Per-item bookkeeping for a single item of a remote gadget. */
interface RemoteItemInfo
{
	itemId: string;
	// Connection back to the remote gadget's item entity; null until it connects.
	iface: ActiveInterface;
	// Connection on the remote-grabbable interface; null when not connected.
	remoteGrabbableIface: ActiveInterface;
	universeFromItem: MinimalPose;
	// Whether the item is currently grabbed on this (remote) side.
	grabbed: boolean;
	// Whether a StartItemGrab has been sent and not yet matched by an EndItemGrab.
	grabSent: boolean;
}

/** Per-gadget bookkeeping for a gadget mirrored from the network universe. */
interface RemoteGadgetInfo
{
	remoteGadgetId: number;
	url: string;
	remoteLocks: InitialInterfaceLock[];
	// Connection from the spawned remote gadget; null until it connects.
	iface: ActiveInterface;
	universeFromGadget: MinimalPose;
	items: Map<string, RemoteItemInfo>;
}

/** Interface params supplied by a connecting remote gadget or item. */
interface RemoteUniverseParams
{
	remoteGadgetId: number;
	// Present when the connection is for a specific item rather than the gadget itself.
	itemId?: string;
}

/**
 * Entity component that mirrors gadgets from a NetworkUniverse on a remote peer:
 * it spawns the gadgets described in the init info, keeps their transforms in
 * sync via networkEvent(), and relays grab/user events back through the
 * onRemoteEvent callback supplied at construction.
 */
export class RemoteUniverseComponent implements EntityComponent
{
	private remoteGadgets = new Map< number, RemoteGadgetInfo >();
	private remoteEventCallback: ( event: object, reliable: boolean ) => void;
	// Invoked whenever our rendered output may have changed.
	private entityCallback: () => void = null;
	// Endpoint address of the entity hosting this component; null until provided.
	private entityEpa: EndpointAddr = null;
	private initInfo: UniverseInitInfo;

	/** @param initInfo The initialization info packet provided by the NetworkUniverse that this remote
	 * universe is connected to.
	 *
	 * @param onRemoteEvent This callback is called when the room should convey the supplied event to the network
	 * universe and call remoteEvent() on that object.
	 *
	 *		event		The opaque event object to send
	 *		reliable	If this is true, the event must be delivered or the gadgets may
	 * 					get out of synch with each other. If this is false, the room should
	 * 					make its best effort to deliver the event, but the system will recover
	 * 					if the event is discarded.
	 */
	constructor( initInfo: object, onRemoteEvent: ( event: object, reliable: boolean ) => void )
	{
		this.initInfo = initInfo as UniverseInitInfo;
		this.remoteEventCallback = onRemoteEvent;
	}

	/** Registers the callback fired when this component's children change. */
	public onUpdate( callback: () => void )
	{
		this.entityCallback = callback;
	}

	/** Receives the hosting entity's endpoint address. A non-null address triggers
	 * spawning of every gadget listed in the init info; a null address means the
	 * entity is going away. */
	public setEntityEpa( epa: EndpointAddr )
	{
		this.entityEpa = epa;
		if( epa )
		{
			for( let gadgetInfo of this.initInfo.gadgets )
			{
				this.createGadget( gadgetInfo );
			}
		}
		else
		{
			console.log( "Discarding entity for remote universe. Remote gadgets will be destroyed.")
		}
	}

	/** Returns the RemoteItemInfo for itemId on the given gadget, creating an
	 * empty record if it does not exist yet. */
	public findOrAddItemInfo( gadgetInfo: RemoteGadgetInfo, itemId: string )
	{
		let itemInfo = gadgetInfo.items.get( itemId );
		if( !itemInfo )
		{
			itemInfo =
			{
				itemId,
				iface: null,
				remoteGrabbableIface: null,
				universeFromItem: null,
				grabbed: false,
				grabSent: false,
			};
			gadgetInfo.items.set( itemId, itemInfo );
		}
		return itemInfo;
	}

	/** Handles a connection from a spawned remote gadget (or one of its items)
	 * on the RemoteGadgetComponent interface. */
	@bind private onRemoteInterface( activeRemoteGadget: ActiveInterface )
	{
		let remoteParams = activeRemoteGadget.params as RemoteUniverseParams;
		// NOTE(review): Map.get can return undefined for an unknown remoteGadgetId;
		// the code below dereferences gadgetInfo without a guard — presumably the
		// connecting gadget is always one we spawned. TODO confirm.
		let gadgetInfo = this.remoteGadgets.get( remoteParams.remoteGadgetId );
		let itemInfo: RemoteItemInfo;
		if( remoteParams.itemId )
		{
			itemInfo = this.findOrAddItemInfo( gadgetInfo, remoteParams.itemId );
			itemInfo.iface = activeRemoteGadget;
		}
		else
		{
			gadgetInfo.iface = activeRemoteGadget;
		}
		this.entityCallback?.();
		console.log( `Connection from remote gadget id ${ gadgetInfo.remoteGadgetId }` );

		activeRemoteGadget.onEvent( ( event: RemoteGadgetEvent ) =>
		{
			switch( event.type )
			{
				case RemoteGadgetEventType.SendEventToMaster:
				{
					// Relay an opaque gadget event to the master universe.
					let sendEvent = event as RGESendEvent;
					this.remoteEventCallback(
						{
							type: NetworkUniverseEventType.SendMasterGadgetEvent,
							remoteGadgetId: gadgetInfo.remoteGadgetId,
							itemId: remoteParams.itemId,
							event: sendEvent.event
						} as NetworkUniverseEvent, sendEvent.reliable );
				}
				break;

				// NOTE(review): StartGrab/EndGrab use itemInfo, which is only set when
				// remoteParams.itemId was present — assumes grab events arrive only on
				// item connections. TODO confirm.
				case RemoteGadgetEventType.StartGrab:
				{
					itemInfo.grabbed = true;
					this.updateGrabState( gadgetInfo, itemInfo );
				}
				break;

				case RemoteGadgetEventType.EndGrab:
				{
					itemInfo.grabbed = false;
					this.updateGrabState( gadgetInfo, itemInfo );
				}
				break;
			}
		} );

		activeRemoteGadget.onEnded( () =>
		{
			// Drop the whole gadget record for a gadget connection, or just the
			// single item record for an item connection.
			if( gadgetInfo )
			{
				if( !remoteParams.itemId )
				{
					this.remoteGadgets.delete( gadgetInfo.remoteGadgetId );
				}
				else
				{
					gadgetInfo.items.delete( remoteParams.itemId );
				}
			}
		} );
	}

	/** Edge-detects the combined "grabbed and grabbable connected" state and
	 * reliably notifies the master universe exactly once per transition. */
	private updateGrabState( gadgetInfo: RemoteGadgetInfo, itemInfo: RemoteItemInfo )
	{
		let shouldBeGrabbing = itemInfo.grabbed && itemInfo.remoteGrabbableIface != null;
		if( shouldBeGrabbing != itemInfo.grabSent )
		{
			if( shouldBeGrabbing )
			{
				let evt: NetworkUniverseEvent =
				{
					type: NetworkUniverseEventType.StartItemGrab,
					remoteGadgetId: gadgetInfo.remoteGadgetId,
					itemId: itemInfo.itemId,
				};
				this.remoteEventCallback( evt, true );
			}
			else
			{
				let evt: NetworkUniverseEvent =
				{
					type: NetworkUniverseEventType.EndItemGrab,
					remoteGadgetId: gadgetInfo.remoteGadgetId,
					itemId: itemInfo.itemId,
				};
				this.remoteEventCallback( evt, true );
			}
			itemInfo.grabSent = shouldBeGrabbing;
		}
	}

	/** Sends an (unreliable) transform update for a grabbed item to the master
	 * universe. No-ops unless the gadget and item are known and the item is
	 * currently grabbed. */
	private updateRemoteGrabbable( params: RemoteUniverseParams, universeFromItem: AvNodeTransform )
	{
		let gadgetInfo = this.remoteGadgets.get( params.remoteGadgetId );
		if( !gadgetInfo )
			return;

		let itemInfo = gadgetInfo.items.get( params.itemId );
		if( !itemInfo )
			return;

		if( !itemInfo.grabbed )
			return;

		let m: NetworkUniverseEvent =
		{
			type: NetworkUniverseEventType.UpdateNetworkItemTransform,
			remoteGadgetId: params.remoteGadgetId,
			itemId: params.itemId,
			universeFromGadget: minimalPoseFromTransform( universeFromItem ),
		}
		this.remoteEventCallback?.( m, false );
	}

	/** Handles a connection on the remote-grabbable interface for an item. */
	@bind private onRemoteGrabbable( activeRemoteGrabbable: ActiveInterface )
	{
		let remoteParams = activeRemoteGrabbable.params as RemoteUniverseParams;
		let gadgetInfo = this.remoteGadgets.get( remoteParams.remoteGadgetId );
		if( !gadgetInfo )
			return;

		let itemInfo = gadgetInfo.items.get( remoteParams.itemId );
		if( !itemInfo )
			return;

		// Push the current transform immediately, then track the connection.
		this.updateRemoteGrabbable( remoteParams, activeRemoteGrabbable.selfFromPeer );
		itemInfo.remoteGrabbableIface = activeRemoteGrabbable;
		this.updateGrabState( gadgetInfo, itemInfo );

		activeRemoteGrabbable.onTransformUpdated( ( universeFromItem: AvNodeTransform ) =>
		{
			this.updateRemoteGrabbable( remoteParams, universeFromItem)
		} );

		activeRemoteGrabbable.onEnded( () =>
		{
			itemInfo.remoteGrabbableIface = null;
			this.updateGrabState( gadgetInfo, itemInfo );
		} )
	}

	/** Interfaces this entity component receives, with their processors. */
	public get receives()
	{
		return (
			[
				{ iface: RemoteGadgetComponent.interfaceName, processor: this.onRemoteInterface },
				{ iface: k_remoteGrabbableInterface, processor: this.onRemoteGrabbable },
			] );
	}

	public get wantsTransforms() { return true; }

	/** Dispatches an event received from the network universe. */
	public networkEvent( event: object )
	{
		let e = event as NetworkUniverseEvent;
		switch( e.type )
		{
			case NetworkUniverseEventType.CreateRemoteGadget:
				this.createGadget( e.gadgetInfo );
				break;

			case NetworkUniverseEventType.DestroyRemoteGadget:
				this.destroyGadget( e.remoteGadgetId );
				break;

			case NetworkUniverseEventType.UpdateRemoteGadgetTransform:
				this.updateGadgetTransform( e.remoteGadgetId, e.itemId, e.universeFromGadget );
				break;

			case NetworkUniverseEventType.BroadcastRemoteGadgetEvent:
				this.masterEvent( e.remoteGadgetId, e.event );
				break;
		}
	}

	/** Records bookkeeping for a networked gadget and starts its local instance,
	 * appending an interface lock that points the new gadget back at this entity.
	 * @throws Error on a duplicate remoteGadgetId or if called before setEntityEpa. */
	private createGadget( gadgetInfo: NetworkedGadgetInfo )
	{
		if( this.remoteGadgets.has( gadgetInfo.remoteGadgetId ) )
		{
			throw new Error( `duplicate createGadget for remote gadget id ${ gadgetInfo.remoteGadgetId } with ${ gadgetInfo.url }` );
		}

		if( !this.entityEpa )
		{
			throw new Error( `createGadget before the entity gave us their Endpoint Address` );
		}

		let newGadget: RemoteGadgetInfo =
		{
			remoteGadgetId: gadgetInfo.remoteGadgetId,
			url: gadgetInfo.url,
			remoteLocks: gadgetInfo.remoteLocks,
			iface: null,
			universeFromGadget: gadgetInfo.universeFromGadget,
			items: new Map<string, RemoteItemInfo>(),
		};

		for( let item of ( gadgetInfo.items ?? [] ) )
		{
			newGadget.items.set( item.itemId,
				{
					itemId: item.itemId,
					iface: null,
					remoteGrabbableIface: null,
					universeFromItem: item.remoteUniverseFromItem,
					grabbed: false,
					grabSent: false,
				} );
		}
		this.remoteGadgets.set( gadgetInfo.remoteGadgetId, newGadget );

		let fullLockList =
		[
			...gadgetInfo.remoteLocks,
			{
				iface: RemoteGadgetComponent.interfaceName,
				receiver: this.entityEpa,
				params:
				{
					remoteGadgetId: gadgetInfo.remoteGadgetId
				},
			}
		];

		AvGadget.instance().startGadget( gadgetInfo.url, fullLockList );
	}

	/** Asks the remote gadget to destroy itself. The map entry is removed later,
	 * when the gadget's interface ends (see onRemoteInterface's onEnded). */
	private destroyGadget( remoteGadgetId: number )
	{
		let gadgetInfo = this.remoteGadgets.get( remoteGadgetId );
		let event: RemoteGadgetEvent =
		{
			type: RemoteGadgetEventType.DestroyGadget,
		};
		gadgetInfo?.iface?.sendEvent( event );
	}

	/** Applies a transform update from the master universe to a gadget, or to
	 * one of its items when itemId is provided, then re-renders. */
	private updateGadgetTransform( remoteGadgetId: number, itemId: string, universeFromGadget: MinimalPose )
	{
		let gadgetInfo = this.remoteGadgets.get( remoteGadgetId );
		if( gadgetInfo )
		{
			if( itemId )
			{
				let itemInfo = this.findOrAddItemInfo( gadgetInfo, itemId );
				itemInfo.universeFromItem = universeFromGadget;
			}
			else
			{
				gadgetInfo.universeFromGadget = universeFromGadget;
			}
			this.entityCallback?.();
		}
	}

	/** Forwards a broadcast event from the master gadget to the local remote gadget. */
	private masterEvent( remoteGadgetId: number, event: object )
	{
		let gadgetInfo = this.remoteGadgets.get( remoteGadgetId );
		let sendEvent: RGESendEvent =
		{
			type: RemoteGadgetEventType.ReceiveEventFromMaster,
			event,
		};
		gadgetInfo?.iface?.sendEvent( sendEvent );
	}

	/** Renders a transform + entity-child pair per connected gadget and per
	 * connected, un-grabbed item with a known transform. */
	render()
	{
		let children: JSX.Element[] = [];
		for( let gadget of this.remoteGadgets.values() )
		{
			// Skip gadgets that have not connected or have no transform yet.
			if( !gadget.iface || !gadget.universeFromGadget )
				continue;

			children.push( <AvTransform transform={ gadget.universeFromGadget } key={ gadget.remoteGadgetId }> <AvEntityChild child={ gadget.iface.peer } key={ gadget.remoteGadgetId }/> </AvTransform> );

			for( let item of gadget.items.values() )
			{
				// Grabbed items follow the grab locally rather than the networked transform.
				if( !item.iface || !item.universeFromItem || item.grabbed )
				{
					continue;
				}

				let key = `${ gadget.remoteGadgetId }/${ item.itemId }`;
				children.push( <AvTransform transform={ item.universeFromItem } key={ key }> <AvEntityChild child={ item.iface.peer } key={ key }/> </AvTransform> );
			}
		}

		return <div key="remote_universe"> { children } </div>;
	}
}
the_stack
import _ from 'lodash'
import config from 'config'
import got, { Got } from 'got'
import { nanoid } from 'nanoid'
import { EntityManager, getManager, getRepository, In } from 'typeorm'
import { User } from '../core/user'
import { UserEntity } from '../entities/user'
import { InvalidArgumentError, UnauthorizedError } from '../error/error'
import { PermissionWorkspaceRole } from '../types/permission'
import { AccountStatus, UserInfoDTO } from '../types/user'
import { getSecretKey } from '../utils/common'
import { decrypt, encrypt } from '../utils/crypto'
import { isSaaS, isAnonymous } from '../utils/env'
import { md5 } from '../utils/helper'
import { beauty, getUpstreamHook } from '../utils/http'
import emailService from './email'
import workspaceService from './workspace'

// Payload encrypted into the auth token cookie.
type TokenPayload = {
  userId: string
  // Hash derived from the stored password; lets a password change invalidate old tokens.
  passHash: string
  // Epoch milliseconds after which the token is rejected.
  expiresAt: number
}

interface IUserService {
  /**
   * in the scene of SaaS deployment, following methods are delegated to its own user service
   * - login
   * - generateUserVerification
   * - inviteMembersToWorkspace
   * - createUserByEmails
   * - confirmUser
   * - updateUser
   * - generateToken
   * Thus those routes will be hidden and workspaceService.addMembers will be exposed
   */
  refreshToken(currentToken: string): Promise<string>

  verifyToken(token: string): Promise<{
    userId: string
    expiresAt: number
  }>

  getInfos(ids: string[]): Promise<UserInfoDTO[]>
}

/**
 * Self-hosted user service: accounts live in the local database and tokens are
 * symmetric-encrypted JSON payloads keyed by the deployment secret.
 */
export class UserService implements IUserService {
  private secretKey: string

  private compatible: boolean

  constructor() {
    // ignore this, this is used for earlier version
    this.compatible = !!process.env.COMPATIBLE
    this.secretKey = getSecretKey()
  }

  /**
   * Authenticates by email + password.
   * Note: the same 'incorrect password' error is thrown for an unknown email —
   * presumably deliberate, to avoid leaking which accounts exist.
   * @throws InvalidArgumentError when the email is unknown or the password is wrong
   */
  async login(email: string, password: string): Promise<User> {
    const model = await getRepository(UserEntity).findOne({ email })
    if (!model) {
      throw InvalidArgumentError.new('incorrect password')
    }
    const convertedPassword = this.getConvertedPassword(password, model.id)
    // NOTE(review): plain string comparison of password hashes is not timing-safe — confirm acceptable.
    if (convertedPassword !== model.password) {
      throw InvalidArgumentError.new('incorrect password')
    }
    return User.fromEntity(model)
  }

  /**
   * Creates the user for the email if needed and, while it is still VERIFYING,
   * sends a confirmation email.
   * @returns code
   */
  async generateUserVerification(email: string): Promise<{ user: User; code?: string }> {
    return getManager().transaction(async (t) => {
      const user = (await this.createUserByEmailsIfNotExist([email], t))[email]
      if (user.status === AccountStatus.VERIFYING) {
        return emailService.sendConfirmationEmail(user.id, email).then((code) => ({ user, code }))
      }
      // Already past verification — no email, no code.
      return { user }
    })
  }

  /**
   * Invites users (creating accounts in CREATING status if needed) into a
   * workspace and emails them invitation links, all in one transaction.
   */
  async inviteMembersToWorkspace(
    operatorId: string,
    workspaceId: string,
    users: { email: string; role: PermissionWorkspaceRole }[],
  ): Promise<{ email: string; inviteLink: string }[]> {
    const emails = _(users).map('email').value()
    const { name } = await workspaceService.mustFindOneWithMembers(workspaceId)
    return getManager().transaction(async (t) => {
      const userMap = await this.createUserByEmailsIfNotExist(emails, t, AccountStatus.CREATING)
      const userWithRole = _(users)
        .map(({ email, role }) => ({ userId: userMap[email].id, role }))
        .value()
      await workspaceService.addMembers(operatorId, workspaceId, userWithRole, t)
      return emailService.sendInvitationEmails(
        operatorId,
        _(userMap)
          .values()
          .map((u) => ({ userId: u.id, email: u.email }))
          .value(),
        name,
      )
    })
  }

  /**
   * Loads users by email, inserting any that are missing (username defaults to
   * the local part of the email, empty password, given init status).
   * @returns key: email
   */
  async createUserByEmailsIfNotExist(
    emails: string[],
    t?: EntityManager,
    // user init status
    status?: AccountStatus,
  ): Promise<{ [email: string]: User }> {
    const r = (t ?? getManager()).getRepository(UserEntity)
    const users = await r.find({
      email: In(emails),
    })
    // Fast path: every requested email already has an account.
    if (users.length === emails.length) {
      return _(users)
        .map((u) => User.fromEntity(u))
        .keyBy('email')
        .value()
    }
    const missing = _.difference(emails, _(users).map('email').value())
    const inserts = _(missing)
      .map((email) =>
        r.create({
          username: email.split('@')[0] || email,
          email,
          password: '',
          status,
        }),
      )
      .value()
    const insertedUsers = await r.save(inserts)
    return _([...users, ...insertedUsers])
      .map((u) => User.fromEntity(u))
      .keyBy('email')
      .value()
  }

  /**
   * Confirms an account from an emailed code. Idempotent: a user already past
   * VERIFYING/CREATING is returned unchanged.
   * @throws InvalidArgumentError when the code is expired or not a confirm code
   */
  async confirmUser(code: string): Promise<{ id: string; status: AccountStatus }> {
    const payload = emailService.parseCode(code)
    if (_.now() > payload.expiresAt || payload.type !== 'confirm') {
      throw InvalidArgumentError.new('confirm link is expired')
    }
    const { userId } = payload
    const user = await getRepository(UserEntity).findOneOrFail(userId)
    if (![AccountStatus.VERIFYING, AccountStatus.CREATING].includes(user.status)) {
      return { id: user.id, status: user.status }
    }
    user.status = AccountStatus.CONFIRMED
    return user.save().then((u) => ({ id: u.id, status: u.status }))
  }

  /**
   * Updates profile fields and/or password, then marks the account ACTIVE.
   * A password change requires the current password unless the account is
   * still in CONFIRMED status (i.e. no password set yet).
   * @throws InvalidArgumentError when the current password check fails
   */
  async updateUser(
    userId: string,
    params: {
      username?: string
      avatar?: string
      newPassword?: string
      currentPassword?: string
    } = {},
  ): Promise<User> {
    const { username, avatar, newPassword, currentPassword } = params
    const user = await getRepository(UserEntity).findOneOrFail(userId)
    if (username) {
      user.username = username
    }
    if (avatar) {
      user.avatar = avatar
    }
    if (newPassword) {
      if (
        user.status === AccountStatus.CONFIRMED ||
        this.getConvertedPassword(currentPassword ?? '', userId) === user.password
      ) {
        user.password = this.getConvertedPassword(newPassword, userId)
      } else {
        throw InvalidArgumentError.new('current password is not correct')
      }
    }
    // NOTE(review): status is forced to ACTIVE on every update, even profile-only ones — confirm intended.
    user.status = AccountStatus.ACTIVE
    const newUser = await user.save()
    return User.fromEntity(newUser)
  }

  /**
   * visible for testing
   */
  getConvertedPassword(password: string, userId: string): string {
    // Salted with the deployment secret plus (unless in compatible mode) the first 8 chars of the user id.
    return md5(this.secretKey + password + (this.compatible ? '' : userId.substring(0, 8)))
  }

  /** Issues an encrypted token for the user, valid for 30 days. */
  async generateToken(user: string): Promise<string> {
    const model = await getRepository(UserEntity).findOneOrFail(user)
    return encrypt(
      JSON.stringify({
        userId: user,
        expiresAt: _.now() + 30 * 24 * 3600 * 1000, // 30 days
        // Hash of the stored password hash, so changing the password revokes tokens.
        passHash: this.getConvertedPassword(model.password, user),
      } as TokenPayload),
      this.secretKey,
    )
  }

  // Decrypts a token back into its payload; throws on malformed/forged input.
  private decryptToken(token: string): TokenPayload {
    return JSON.parse(decrypt(token, this.secretKey))
  }

  /**
   * @return { userId: string, expiresAt: number}
   */
  async verifyToken(token: string): Promise<{ userId: string; expiresAt: number }> {
    const payload = this.decryptToken(token)
    if (payload.expiresAt < _.now()) {
      throw UnauthorizedError.notLogin()
    }
    let model: UserEntity
    try {
      model = await getRepository(UserEntity).findOneOrFail(payload.userId)
    } catch (_err) {
      throw UnauthorizedError.notExist()
    }
    // Reject tokens issued before the most recent password change.
    const passHash = this.getConvertedPassword(model.password, model.id)
    if (passHash !== payload.passHash) {
      throw UnauthorizedError.notLogin()
    }
    return payload
  }

  /** Re-issues a fresh token for the token's user (does not re-verify expiry here). */
  async refreshToken(token: string): Promise<string> {
    const { userId } = this.decryptToken(token)
    return this.generateToken(userId)
  }

  /** Batch-loads public user info DTOs by id. */
  async getInfos(ids: string[]): Promise<UserInfoDTO[]> {
    const models = await getRepository(UserEntity).find({ id: In(ids) })
    return _.map(models, (u) => User.fromEntity(u).toDTO())
  }
}

export class AnonymousUserService extends UserService {
  /**
   * if the token validation fails, it will create a new user and return a mock payload of token
   * its expiresAt is in line with the requirements of regenerating new token
   * so the user middleware will generate a new token cookie for users
   * @returns generated: the user is created by this function
   */
  async verifyToken(token: string): Promise<{ userId: string; expiresAt: number }> {
    console.debug('anonymous ....................')
    return super.verifyToken(token).catch(async (err) => {
      console.debug(err)
      // Token invalid/expired: mint a throwaway account instead of failing.
      const email = this.randomEmail()
      const user = (
        await this.createUserByEmailsIfNotExist([email], undefined, AccountStatus.ACTIVE)
      )[email]
      await this.updateUser(user.id, { username: `Anonymous-${nanoid(4)}` })
      return {
        userId: user.id,
        // Short expiry so the middleware issues a real token cookie promptly.
        expiresAt: _.now() + 3600 * 1000,
        generated: true,
        email: user.email,
      }
    })
  }

  // Email verification makes no sense for anonymous accounts.
  async generateUserVerification(): Promise<never> {
    throw InvalidArgumentError.notSupport('generating verification')
  }

  private randomEmail(): string {
    return `${nanoid()}@tellery.demo`
  }
}

/**
 * SaaS deployment: token handling and user lookup are delegated to the external
 * cloud user service over HTTP.
 */
export class SaaSExternalUserService implements IUserService {
  private got: Got = got.extend({
    hooks: {
      beforeError: [getUpstreamHook('CloudUserService')],
    },
    prefixUrl: config.get<string>('deploy.userServiceEndpoint'),
    timeout: 5000,
    responseType: 'json',
  })

  // NOTE(review): the response generic says `expireAt` while the interface promises
  // `expiresAt` — likely the upstream field name; verify against the cloud service schema.
  async verifyToken(token: string): Promise<{ userId: string; expiresAt: number }> {
    return beauty(() =>
      this.got
        .post<{ userId: string; expireAt: number }>('/v1/internal/user/jwt/verify', {
          json: {
            token,
          },
        })
        .json(),
    )
  }

  async refreshToken(currentToken: string): Promise<string> {
    const { data } = await beauty<{ data: string }>(() =>
      this.got
        .post('/v1/internal/user/jwt/refresh', {
          json: {
            token: currentToken,
          },
        })
        .json(),
    )
    return data
  }

  // NOTE(review): GET with a JSON request body is unusual and some proxies drop
  // GET bodies — confirm the upstream service expects this.
  async getInfos(ids: string[]): Promise<UserInfoDTO[]> {
    const { data } = await beauty<{ data: UserInfoDTO[] }>(() =>
      this.got
        .get('/v1/internal/user/batchGet', {
          json: ids,
        })
        .json(),
    )
    return data
  }
}

// Pick the implementation for this deployment flavor at module load time.
const service: IUserService = isSaaS()
  ? new SaaSExternalUserService()
  : isAnonymous()
  ? new AnonymousUserService()
  : new UserService()

export default service

// Cast for callers that need the full self-hosted API surface; only valid when not SaaS.
export const defaultUserService = service as UserService
the_stack
import { cleanup, screen } from "solid-testing-library"; import { IconCheckCircleSolid } from "../../icons/IconCheckCircleSolid"; import { renderWithHopeProvider } from "../../test-utils"; import { inputElementStyles, InputElementVariants } from "../input.styles"; import { InputElement, InputLeftElement, InputRightElement } from "../input-element"; import * as inputGroupModule from "../input-group"; const InputGroup = inputGroupModule.InputGroup; const inputGroupContextMock: inputGroupModule.InputGroupContextValue = { state: { variant: "outline", size: "md", hasLeftElement: false, hasRightElement: false, hasLeftAddon: false, hasRightAddon: false, }, setHasLeftElement: jest.fn(), setHasRightElement: jest.fn(), setHasLeftAddon: jest.fn(), setHasRightAddon: jest.fn(), }; describe("InputElement", () => { afterEach(() => { jest.clearAllMocks(); cleanup(); }); it("should render", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputElement data-testid="input-element"> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInTheDocument(); }); it("should render <div> tag by default", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputElement data-testid="input-element"> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInstanceOf(HTMLDivElement); }); it("should render tag provided by the 'as' prop", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputElement data-testid="input-element" as="span"> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInstanceOf(HTMLSpanElement); }); it("should have class from class prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> <InputElement 
data-testid="input-element" class={stubClass}> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have class from className prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> {/* eslint-disable-next-line solid/no-react-specific-props */} <InputElement data-testid="input-element" className={stubClass}> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have class from classList prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> <InputElement data-testid="input-element" classList={{ [stubClass]: true }}> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have stitches generated class from variant props", () => { // arrange const variantProps: InputElementVariants = { placement: "left", size: "lg", }; const inputElementClass = inputElementStyles(variantProps); // act renderWithHopeProvider(() => ( <InputGroup> <InputElement data-testid="input-element" {...variantProps}> <IconCheckCircleSolid /> </InputElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(inputElementClass.className); }); }); describe("InputLeftElement", () => { afterEach(() => { jest.clearAllMocks(); cleanup(); }); it("should render", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element"> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInTheDocument(); }); it("should 
render <div> tag by default", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element"> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInstanceOf(HTMLDivElement); }); it("should render tag provided by the 'as' prop", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element" as="span"> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInstanceOf(HTMLSpanElement); }); it("should call inputGroupContext 'setHasLeftElement' on mount", () => { // arrange jest.spyOn(inputGroupModule, "useInputGroupContext").mockReturnValue(inputGroupContextMock); const stubContext = inputGroupModule.useInputGroupContext(); // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); // assert expect(stubContext?.setHasLeftElement).toHaveBeenCalledWith(true); }); it("should have semantic hope class", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element"> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass("hope-input-left-element"); }); it("should return semantic hope class as css selector when calling toString()", () => { expect(InputLeftElement.toString()).toBe(".hope-input-left-element"); }); it("should have class from class prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element" class={stubClass}> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert 
expect(inputElement).toHaveClass(stubClass); }); it("should have class from className prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> {/* eslint-disable-next-line solid/no-react-specific-props */} <InputLeftElement data-testid="input-element" className={stubClass}> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have class from classList prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element" classList={{ [stubClass]: true }}> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have stitches generated class from inputElementStyles with 'placement=left'", () => { // arrange const inputElementClass = inputElementStyles({ placement: "left", }); // act renderWithHopeProvider(() => ( <InputGroup> <InputLeftElement data-testid="input-element"> <IconCheckCircleSolid /> </InputLeftElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(inputElementClass.className); }); }); describe("InputRightElement", () => { afterEach(() => { jest.clearAllMocks(); cleanup(); }); it("should render", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element"> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInTheDocument(); }); it("should render <div> tag by default", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element"> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const 
inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInstanceOf(HTMLDivElement); }); it("should render tag provided by the 'as' prop", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element" as="span"> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toBeInstanceOf(HTMLSpanElement); }); it("should call inputGroupContext 'setHasRightElement' on mount", () => { // arrange jest.spyOn(inputGroupModule, "useInputGroupContext").mockReturnValue(inputGroupContextMock); const stubContext = inputGroupModule.useInputGroupContext(); // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); // assert expect(stubContext?.setHasRightElement).toHaveBeenCalledWith(true); }); it("should have semantic hope class", () => { // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element"> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass("hope-input-right-element"); }); it("should return semantic hope class as css selector when calling toString()", () => { expect(InputRightElement.toString()).toBe(".hope-input-right-element"); }); it("should have class from class prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element" class={stubClass}> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have class from className prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> {/* eslint-disable-next-line 
solid/no-react-specific-props */} <InputRightElement data-testid="input-element" className={stubClass}> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have class from classList prop", () => { // arrange const stubClass = "stub"; // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element" classList={{ [stubClass]: true }}> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(stubClass); }); it("should have stitches generated class from inputElementStyles with 'placement=right'", () => { // arrange const inputElementClass = inputElementStyles({ placement: "right", }); // act renderWithHopeProvider(() => ( <InputGroup> <InputRightElement data-testid="input-element"> <IconCheckCircleSolid /> </InputRightElement> </InputGroup> )); const inputElement = screen.getByTestId("input-element"); // assert expect(inputElement).toHaveClass(inputElementClass.className); }); });
the_stack
// KeyframeGraph: SVG editor for one parameter's keyframes. Supports dragging
// keyframes horizontally (re-timing) and dragging bezier easing handles for
// NUMBER/FLOAT parameters; COLOR_* and STRING parameters render as flat rows.
import * as Delir from '@delirvfx/core'
import classnames from 'classnames'
import _ from 'lodash'
import React from 'react'
import { SpreadType } from '../../utils/Spread'
import TimePixelConversion from '../../utils/TimePixelConversion'

import * as EditorOps from '../../domain/Editor/operations'

import { ContextProp, withFleurContext } from '@fleur/react'
import s from './KeyframeGraph.sass'

interface OwnProps {
  width: number
  height: number
  viewBox: string
  scrollLeft: number
  composition: SpreadType<Delir.Entity.Composition>
  parentClip: SpreadType<Delir.Entity.Clip>
  // Keyframe owner: either the clip itself or one of its effects.
  entity: SpreadType<Delir.Entity.Clip> | SpreadType<Delir.Entity.Effect> | null
  paramName: string
  descriptor: Delir.AnyParameterTypeDescriptor
  keyframes: ReadonlyArray<Delir.Entity.Keyframe>
  pxPerSec: number
  zoomScale: number
  onKeyframeRemove: (parentClipId: string, keyframeId: string) => void
  onModified: (parentClipId: string, paramName: string, frameOnClip: number, patch: KeyframePatch) => void
}

type Props = OwnProps & ContextProp

interface State {
  activeKeyframeId: string | null
  // Horizontal drag offset (px) of the keyframe currently being dragged, or null.
  keyframeMovement: { x: number } | null
  // Drag offset (px) of the easing handle currently being dragged, or null.
  easingHandleMovement: { x: number; y: number } | null
}

export interface KeyframePatch {
  easeInParam?: [number, number]
  easeOutParam?: [number, number]
  frameOnClip?: number
}

const EASING_HANDLER_SIZE = 2.5

export default withFleurContext(
  class KeyframeGraph extends React.Component<Props, State> {
    public state: State = {
      activeKeyframeId: null,
      keyframeMovement: null,
      easingHandleMovement: null,
    }

    // Drag bookkeeping lives on the instance (not state) because it must be
    // readable synchronously inside event handlers during a drag gesture.
    private draggedKeyframeId: string | null = null
    private _initialKeyframePosition: { x: number; y: number } | null = null
    private _keyframeDragged: boolean = false

    private draggedEasingHandler: {
      type: 'ease-in' | 'ease-out'
      keyframeId: string
      element: SVGCircleElement
      container: SVGGElement
      initialPosition: { x: number; y: number }
    } | null = null

    public render() {
      const { width, height, viewBox, descriptor, keyframes } = this.props

      return (
        <svg
          className={s.keyframeGraph}
          viewBox={viewBox}
          width={width}
          height={height}
          onMouseMove={this.mouseMoveOnSvg}
          onMouseUp={this.mouseUpOnSvg}
          onKeyDown={this.keydownOnKeyframeGraph}
          tabIndex={-1}
        >
          {descriptor.animatable &&
            (() => {
              switch (descriptor.type) {
                case 'COLOR_RGB':
                case 'COLOR_RGBA':
                  return this._renderColorKeyframes(keyframes)

                case 'FLOAT':
                case 'NUMBER':
                  return this._renderNumberKeyframes(keyframes)

                case 'STRING':
                  return this._renderStringKeyframes(keyframes)
              }
            })()}
        </svg>
      )
    }

    // Tracks the mouse while a keyframe or an easing handle is being dragged,
    // storing the offset from the gesture's starting screen position.
    private mouseMoveOnSvg = (e: React.MouseEvent<SVGElement>) => {
      if (this.draggedKeyframeId) {
        this._keyframeDragged = true
        this.setState({
          keyframeMovement: {
            x: e.screenX - this._initialKeyframePosition!.x,
          },
        })
      } else if (this.draggedEasingHandler) {
        this.setState({
          easingHandleMovement: {
            x: e.screenX - this.draggedEasingHandler.initialPosition!.x,
            y: e.screenY - this.draggedEasingHandler.initialPosition!.y,
          },
        })
      }
    }

    // Commits the drag gesture: either re-times the dragged keyframe or writes
    // the new ease-in/ease-out parameters, then clears all drag state.
    private mouseUpOnSvg = (e: React.MouseEvent<SVGElement>) => {
      e.preventDefault()
      e.stopPropagation()

      const { parentClip, entity, paramName, keyframes, onModified } = this.props
      const { keyframeMovement } = this.state
      if (!parentClip || !paramName || !entity) return

      process: {
        if (this.draggedKeyframeId) {
          // Process for keyframe dragged
          if (!this._keyframeDragged) {
            // Plain click (no movement): just select the keyframe.
            this.setState({
              activeKeyframeId: this.draggedKeyframeId,
              keyframeMovement: null,
            })
            break process
          }

          if (!keyframeMovement) break process

          const keyframe = keyframes.find(kf => kf.id === this.draggedKeyframeId)!
          const movedFrame = this.pxToFrame(keyframeMovement.x)

          onModified(parentClip.id, paramName, keyframe.frameOnClip, {
            frameOnClip: keyframe.frameOnClip + movedFrame,
          })
        } else if (this.draggedEasingHandler) {
          // Process for easing handle dragged
          const data = this.draggedEasingHandler
          // The transition <path> carries the segment's begin/end coordinates
          // in data-* attributes (set in _renderNumberKeyframes).
          const transitionPath = this.draggedEasingHandler.container.querySelector(
            '[data-transition-path]',
          )! as HTMLElement

          const keyframes = entity.keyframes[paramName].slice(0).sort((a, b) => a.frameOnClip - b.frameOnClip)
          // NOTE(review): the trailing `!` on findIndex is a no-op (findIndex
          // returns a number, never null) — the real guard is the `=== -1` check below.
          const keyframeIdx = keyframes.findIndex(kf => kf.id === this.draggedEasingHandler!.keyframeId)!
          if (keyframeIdx === -1) break process

          const { beginX, beginY, endX, endY } = _.mapValues(transitionPath.dataset, val => parseFloat(val!))
          const rect = {
            width: endX - beginX,
            height: endY - beginY,
          }
          const handlePosition = {
            x: data.element.cx.baseVal.value,
            y: data.element.cy.baseVal.value,
          }
          // Normalize the handle position into the 0..1 bezier parameter space
          // of the segment rectangle.
          const easeParam: [number, number] = [
            (handlePosition.x - beginX) / rect.width,
            // guard from division by 0
            rect.height === 0 ? 0 : (handlePosition.y - beginY) / rect.height,
          ]

          if (data.type === 'ease-in') {
            // ease-in belongs to the NEXT keyframe of the segment.
            const keyframe = keyframes[keyframeIdx + 1]
            onModified(parentClip.id, paramName, keyframe.frameOnClip, {
              easeInParam: easeParam,
            })
          } else if (data.type === 'ease-out') {
            const keyframe = keyframes[keyframeIdx]
            onModified(parentClip.id, paramName, keyframe.frameOnClip, {
              easeOutParam: easeParam,
            })
          }
        }
      }

      // Clear dragging state
      this.draggedKeyframeId = null
      this._keyframeDragged = false
      this.draggedEasingHandler = null
      this.setState({
        keyframeMovement: null,
        easingHandleMovement: null,
      })
    }

    // Delete/Backspace removes the currently selected keyframe.
    private keydownOnKeyframeGraph = (e: React.KeyboardEvent<SVGElement>) => {
      const { parentClip, onKeyframeRemove } = this.props
      const { activeKeyframeId } = this.state

      if ((e.key === 'Delete' || e.key === 'Backspace') && activeKeyframeId) {
        onKeyframeRemove(parentClip.id, activeKeyframeId)
        this.draggedKeyframeId = null
      }
    }

    // Begins an easing-handle drag; handle identity comes from data-* attributes
    // set on the <circle> in _renderNumberKeyframes.
    private mouseDownOnEasingHandle = (e: React.MouseEvent<SVGCircleElement>) => {
      const { dataset } = e.currentTarget as any
      this.draggedEasingHandler = {
        type: dataset.isEaseIn ? 'ease-in' : 'ease-out',
        keyframeId: dataset.keyframeId,
        element: e.currentTarget,
        container: (e.currentTarget.parentElement! as any) as SVGGElement,
        initialPosition: { x: e.screenX, y: e.screenY },
      }
    }

    // Begins a keyframe drag and marks the keyframe as selected.
    private mouseDownOnKeyframe = (e: React.MouseEvent<SVGGElement>) => {
      this.draggedKeyframeId = (e.currentTarget as any).dataset.keyframeId
      this.setState({ activeKeyframeId: this.draggedKeyframeId })
      this._keyframeDragged = false
      this._initialKeyframePosition = { x: e.screenX, y: e.screenY }
    }

    // Double-click seeks the preview to the keyframe's absolute frame
    // (clip placement + frame offset within the clip).
    private doubleClickOnKeyframe = ({ currentTarget }: React.MouseEvent<SVGGElement>) => {
      const { parentClip } = this.props
      if (!parentClip) return

      this.props.executeOperation(EditorOps.seekPreviewFrame, {
        frame: parentClip.placedFrame + parseInt(currentTarget.dataset!.frame!, 10),
      })
    }

    // Renders NUMBER/FLOAT keyframes as a bezier curve with draggable easing handles.
    private _renderNumberKeyframes(keyframes: ReadonlyArray<Delir.Entity.Keyframe>) {
      const { keyframeMovement, easingHandleMovement } = this.state
      const points = this.buildKeyframePoints(keyframes)
      const NO_TRANSFORM = { x: 0, y: 0 }
      const selectedEasingHandler = this.draggedEasingHandler

      return points
        .map((point, idx) => {
          const keyframeDragMovement =
            keyframeMovement && point.keyframeId === this.draggedKeyframeId ? keyframeMovement : NO_TRANSFORM
          // NOTE(review): `this.draggedEasingHandler!` relies on the invariant
          // that easingHandleMovement is non-null only while a handle drag is
          // active (both are cleared together in mouseUpOnSvg) — confirm.
          const easingHandleDragMovement =
            easingHandleMovement && point.keyframeId === this.draggedEasingHandler!.keyframeId
              ? easingHandleMovement
              : NO_TRANSFORM
          const easeOutHandleDragMovement =
            selectedEasingHandler && selectedEasingHandler!.type === 'ease-out'
              ? easingHandleDragMovement
              : NO_TRANSFORM
          const easeIntHandleDragMovement =
            selectedEasingHandler && selectedEasingHandler!.type === 'ease-in'
              ? easingHandleDragMovement
              : NO_TRANSFORM

          return (
            <g key={point.keyframeId} data-index={idx}>
              {point.transitionPath && (
                <path
                  stroke="#fff"
                  fill="none"
                  strokeWidth="1"
                  d={` M ${point.transitionPath.begin.x} ${point.transitionPath.begin.y} C ${point.transitionPath.begin.handleX + easeOutHandleDragMovement.x} ${point.transitionPath.begin.handleY + easeOutHandleDragMovement.y} ${point.transitionPath.end.handleX + easeIntHandleDragMovement.x} ${point.transitionPath.end.handleY + easeIntHandleDragMovement.y} ${point.transitionPath.end.x} ${point.transitionPath.end.y} `}
                  data-begin-x={point.transitionPath.begin.x}
                  data-begin-y={point.transitionPath.begin.y}
                  data-end-x={point.transitionPath.end.x}
                  data-end-y={point.transitionPath.end.y}
                  data-transition-path
                />
              )}
              {point.easeOutLine && (
                <path
                  className={s.keyframeLineToHandle}
                  strokeWidth="1"
                  d={` M ${point.easeOutLine.x} ${point.easeOutLine.y} L ${point.easeOutLine.endX + easeOutHandleDragMovement.x} ${point.easeOutLine.endY + easeOutHandleDragMovement.y} `}
                  data-ease-out-handle-path
                />
              )}
              {point.nextEaseInLine && (
                <path
                  className={s.keyframeLineToHandle}
                  strokeWidth="1"
                  d={` M ${point.nextEaseInLine.x} ${point.nextEaseInLine.y} L ${point.nextEaseInLine.endX + easeIntHandleDragMovement.x} ${point.nextEaseInLine.endY + easeIntHandleDragMovement.y} `}
                  data-ease-in-handle-path
                />
              )}
              <g
                transform={`translate(${point.point.x + keyframeDragMovement.x - 4} ${point.point.y - 4})`}
                onDoubleClick={this.doubleClickOnKeyframe}
                onMouseDown={this.mouseDownOnKeyframe}
                onMouseUp={this.mouseUpOnSvg}
                data-keyframe-id={point.keyframeId}
                data-frame={point.frame}
              >
                <rect
                  className={classnames(s.keyframeInner, {
                    [s['keyframeInner--selected']]: point.keyframeId === this.state.activeKeyframeId,
                  })}
                  width="8"
                  height="8"
                />
              </g>
              {point.nextEaseInHandle && (
                <circle
                  cx={point.nextEaseInHandle.x + easeIntHandleDragMovement.x}
                  cy={point.nextEaseInHandle.y + easeIntHandleDragMovement.y}
                  fill="#7100bf"
                  r={EASING_HANDLER_SIZE}
                  onMouseDown={this.mouseDownOnEasingHandle}
                  onMouseUp={this.mouseUpOnSvg}
                  data-keyframe-id={point.keyframeId}
                  data-is-ease-in
                />
              )}
              {point.easeOutHandle && (
                <circle
                  cx={point.easeOutHandle.x + easeOutHandleDragMovement.x}
                  cy={point.easeOutHandle.y + easeOutHandleDragMovement.y}
                  fill="#7100bf"
                  r={EASING_HANDLER_SIZE}
                  onMouseDown={this.mouseDownOnEasingHandle}
                  onMouseUp={this.mouseUpOnSvg}
                  data-keyframe-id={point.keyframeId}
                  data-is-ease-out
                />
              )}
            </g>
          )
        })
        .reverse()
    }

    // Renders COLOR_RGB(A) keyframes as a flat row of color swatches.
    private _renderColorKeyframes(keyframes: ReadonlyArray<Delir.Entity.Keyframe>) {
      const { parentClip, height: graphHeight } = this.props
      const { scrollLeft } = this.props
      const halfHeight = graphHeight / 2

      if (!parentClip) return []

      const clipPlacedPositionX = this.frameToPx(parentClip.placedFrame) - scrollLeft
      const orderedKeyframes = keyframes.slice(0).sort((a, b) => a.frameOnClip - b.frameOnClip)

      return orderedKeyframes.map((kf, idx) => {
        const x = clipPlacedPositionX + this.frameToPx(kf.frameOnClip)
        const nextX = orderedKeyframes[idx + 1]
          ? clipPlacedPositionX + this.frameToPx(orderedKeyframes[idx + 1].frameOnClip)
          : null
        const transform =
          this.state.keyframeMovement && kf.id === this.draggedKeyframeId ? this.state.keyframeMovement : { x: 0 }

        return (
          // NOTE(review): string refs (ref={kf.id}) are a deprecated React API;
          // left as-is since nothing in this block reads the ref.
          <g ref={kf.id}>
            {nextX != null && (
              <path
                stroke="#fff"
                fill="none"
                strokeWidth="1"
                d={`M ${x + 4} ${halfHeight + 4} L ${nextX - 4} ${halfHeight + 4}`}
              />
            )}
            <g
              className={classnames(s.keyframe, s['keyframe--color'])}
              transform={`translate(${x + transform.x - 4} ${halfHeight})`}
              onDoubleClick={this.doubleClickOnKeyframe}
              onMouseDown={this.mouseDownOnKeyframe}
              onMouseUp={this.mouseUpOnSvg}
              data-keyframe-id={kf.id}
              data-frame={kf.frameOnClip}
            >
              <rect
                className={classnames(s.keyframeInner, {
                  [s['keyframeInner--selected']]: kf.id === this.state.activeKeyframeId,
                })}
                width="8"
                height="8"
                stroke="#fff"
                strokeWidth="1"
                style={{
                  fill: (kf.value as Delir.Values.ColorRGBA).toString(),
                }}
              />
            </g>
          </g>
        )
      })
    }

    // Renders STRING keyframes as a flat row of plain markers.
    private _renderStringKeyframes(keyframes: ReadonlyArray<Delir.Entity.Keyframe>) {
      const { parentClip, scrollLeft, height } = this.props
      const halfHeight = height / 2

      if (!parentClip) return []

      const clipPlacedPositionX = this.frameToPx(parentClip.placedFrame) - scrollLeft
      const orderedKeyframes = keyframes.slice(0).sort((a, b) => a.frameOnClip - b.frameOnClip)

      return orderedKeyframes.map((kf, idx) => {
        const x = clipPlacedPositionX + this.frameToPx(kf.frameOnClip)
        const nextX = orderedKeyframes[idx + 1]
          ? clipPlacedPositionX + this.frameToPx(orderedKeyframes[idx + 1].frameOnClip)
          : null
        const transform =
          this.state.keyframeMovement && kf.id === this.draggedKeyframeId ? this.state.keyframeMovement : { x: 0 }

        return (
          // NOTE(review): string ref, see _renderColorKeyframes.
          <g ref={kf.id}>
            {nextX != null && (
              <path
                stroke="#fff"
                fill="none"
                strokeWidth="1"
                d={`M ${x + 4} ${halfHeight + 4} L ${nextX - 4} ${halfHeight + 4}`}
              />
            )}
            <g
              transform={`translate(${x + transform.x - 4} ${halfHeight})`}
              onDoubleClick={this.doubleClickOnKeyframe}
              onMouseDown={this.mouseDownOnKeyframe}
              onMouseUp={this.mouseUpOnSvg}
              data-keyframe-id={kf.id}
              data-frame={kf.frameOnClip}
            >
              <rect
                className={classnames(s.keyframeInner, {
                  [s['keyframeInner--selected']]: kf.id === this.state.activeKeyframeId,
                })}
                width="8"
                height="8"
                fill="#fff"
              />
            </g>
          </g>
        )
      })
    }

    // Converts a frame count to pixels at the current zoom/framerate.
    private frameToPx(frame: number): number {
      const { pxPerSec, zoomScale, composition } = this.props

      return TimePixelConversion.framesToPixel({
        pxPerSec,
        framerate: composition!.framerate,
        durationFrames: frame,
        scale: zoomScale,
      })
    }

    // Converts a pixel distance to frames at the current zoom/framerate.
    private pxToFrame(x: number): number {
      // const {props: {pxPerSec, scale, editor: {activeComp}}} = this
      const { pxPerSec, zoomScale, composition } = this.props

      return TimePixelConversion.pixelToFrames({
        framerate: composition!.framerate,
        pixel: x,
        pxPerSec,
        scale: zoomScale,
      })
    }

    /**
     * Calculate keyframe place points
     *
     * For NUMBER/FLOAT parameters, maps each keyframe's value into graph-space
     * coordinates (value range padded by ±10 so extremes don't touch the edges)
     * and precomputes bezier handle positions for the segment to the next
     * keyframe. Returns [] for non-numeric or non-animatable descriptors.
     */
    private buildKeyframePoints = (keyframes: ReadonlyArray<Delir.Entity.Keyframe>) => {
      const { parentClip, descriptor, height, scrollLeft } = this.props

      if (!descriptor || descriptor.animatable === false) return []

      const orderedKeyframes = keyframes.slice(0).sort((a, b) => a.frameOnClip - b.frameOnClip)
      const clipPlacedPositionX = this.frameToPx(parentClip.placedFrame)

      if (descriptor.type === 'NUMBER' || descriptor.type === 'FLOAT') {
        const maxValue =
          orderedKeyframes.reduce((memo, kf, idx, list) => {
            return Math.max(memo, kf.value as number)
            // , prevValue, nextValue)
          }, -Infinity) + 10
        const minValue =
          orderedKeyframes.reduce((memo, kf, idx, list) => {
            return Math.min(memo, kf.value as number)
            // , prevValue, nextValue)
          }, +Infinity) + -10
        // NOTE(review): absMinValue is computed but never used in this block.
        const absMinValue = Math.abs(minValue)
        const minMaxRange = maxValue - minValue

        // Calc keyframe and handle points
        return orderedKeyframes.map((keyframe, idx) => {
          const nextKeyframe: Delir.Entity.Keyframe | undefined = orderedKeyframes[idx + 1]

          let nextX = 0
          let nextY = 0
          let easeOutHandleX = 0
          let easeOutHandleY = 0
          let nextKeyframeEaseInX = 0
          let nextKeyframeEaseInY = 0

          const beginX = clipPlacedPositionX + this.frameToPx(keyframe.frameOnClip) - scrollLeft
          const beginY = height - height * (((keyframe.value as number) - minValue) / minMaxRange)

          if (nextKeyframe) {
            // Next keyframe position
            nextX = clipPlacedPositionX + this.frameToPx(nextKeyframe.frameOnClip) - scrollLeft
            nextY = height - height * (((nextKeyframe.value as number) - minValue) / minMaxRange)

            // Handle of control transition to next keyframe
            easeOutHandleX = (nextX - beginX) * keyframe.easeOutParam[0] + beginX
            easeOutHandleY = (nextY - beginY) * keyframe.easeOutParam[1] + beginY
            // ((endPointY - beginY) * nextKeyframe.easeOutParam[1]) + beginY

            nextKeyframeEaseInX = (nextX - beginX) * nextKeyframe.easeInParam[0] + beginX
            nextKeyframeEaseInY = (nextY - beginY) * nextKeyframe.easeInParam[1] + beginY
          }

          return {
            keyframeId: keyframe.id,
            frame: keyframe.frameOnClip,
            point: { x: beginX, y: beginY },
            transitionPath: nextKeyframe
              ? {
                  begin: {
                    x: beginX,
                    y: beginY,
                    handleX: easeOutHandleX,
                    handleY: easeOutHandleY,
                  },
                  end: {
                    x: nextX,
                    y: nextY,
                    handleX: nextKeyframeEaseInX,
                    handleY: nextKeyframeEaseInY,
                  },
                }
              : null,
            easeOutLine: nextKeyframe
              ? {
                  x: beginX,
                  y: beginY,
                  endX: easeOutHandleX,
                  endY: easeOutHandleY,
                }
              : null,
            nextEaseInLine: nextKeyframe
              ? {
                  x: nextX,
                  y: nextY,
                  endX: nextKeyframeEaseInX,
                  endY: nextKeyframeEaseInY,
                }
              : null,
            easeOutHandle: nextKeyframe ? { x: easeOutHandleX, y: easeOutHandleY } : null,
            nextEaseInHandle: nextKeyframe ? { x: nextKeyframeEaseInX, y: nextKeyframeEaseInY } : null,
          }
        })
      }

      return []
    }
  },
)
the_stack
import Base from '~/src/command/fetch/base' import TypeTaskConfig from '~/src/type/namespace/task_config' import PathConfig from '~/src/config/path' import fs from 'fs' import _ from 'lodash' import json5 from 'json5' import moment from 'moment' import ApiWeibo from '~/src/api/weibo' import MMblog from '~/src/model/mblog' import MMblogUser from '~/src/model/mblog_user' import CommonUtil from '~/src/library/util/common' import * as TypeWeibo from '~/src/type/namespace/weibo' import Util from '~/src/library/util/common' import querystring from 'query-string' /** * weibo.com的新Api对应的创建时间解析格式字符串 */ const Const_Moment_Parse_Format_4_WeiboComApi = "ddd MMM DD HH:mm:ss Z YYYY" /** * 重试时的等待时间 */ const Const_Retry_Wait_Seconds = 30 /** * 正常执行抓取流程的等待时间 */ const Const_Fetch_Wati_Seconds = 20 /** * 解析微博文章id,方便构造api, 抓取文章内容 * @param rawUrl * 原始 * rawurl格式 => https://m.weibo.cn/feature/applink?scheme=sinaweibo%3A%2F%2Farticlebrowser%3Fobject_id%3D1022%253A2309404446645566701785%26url%3Dhttps%253A%252F%252Fcard.weibo.com%252Farticle%252Fm%252Fshow%252Fid%252F2309404446645566701785%253F_wb_client_%253D1%26extparam%3Dlmid--4446645569803228&luicode=10000011&lfid=2304131913094142_-_WEIBO_SECOND_PROFILE_WEIBO * 解码后=> https://m.weibo.cn/feature/applink?scheme=sinaweibo://articlebrowser?object_id=1022:2309404446645566701785&url=https://card.weibo.com/article/m/show/id/2309404446645566701785?_wb_client_=1&extparam=lmid--4446645569803228&luicode=10000011&lfid=2304131913094142_-_WEIBO_SECOND_PROFILE_WEIBO * 2021年3月28日新增 * rawurl格式 => https://weibo.com/ttarticle/p/show?id=2309404619352241471539&luicode=10000011&lfid=2304131221171697_-_WEIBO_SECOND_PROFILE_WEIBO */ function getArticleId(rawUrl = '') { if (!rawUrl) { return '' } // 需要多次解析,才能将url完全解码成正常文本 let decodeUrl = unescape(unescape(unescape(rawUrl))) if (!decodeUrl) { return '' } if (decodeUrl.includes("id=") && decodeUrl.includes("/ttarticle/p/show")) { // 说明是新格式 
https://weibo.com/ttarticle/p/show?id=2309404619352241471539&luicode=10000011&lfid=2304131221171697_-_WEIBO_SECOND_PROFILE_WEIBO let rawQuery = querystring.parseUrl(decodeUrl).query let articleId = rawQuery?.id || '' return articleId } let rawArticleUrl = decodeUrl.split('url=')[1] if (!rawArticleUrl) { return '' } let baseArticleUrl = rawArticleUrl.split('?')[0] // url => 'https://card.weibo.com/article/m/show/id/2309404446645566701785' if (!baseArticleUrl) { return '' } let articleId = baseArticleUrl.split('show/id/')[1] if (!articleId) { return '' } return articleId } class FetchCustomer extends Base { fetchStartAtPageNo = 0 fetchEndAtPageNo = 10000 requestConfig = { st: '', } static get signature() { return ` Fetch:Customer ` } static get description() { return `从${PathConfig.customerTaskConfigUri}中读取自定义抓取任务并执行` } async execute(args: any, options: any): Promise<any> { this.log(`从${PathConfig.customerTaskConfigUri}中读取配置文件`) let fetchConfigJSON = fs.readFileSync(PathConfig.customerTaskConfigUri).toString() this.log('content =>', fetchConfigJSON) let customerTaskConfig: TypeTaskConfig.Customer = json5.parse(fetchConfigJSON) this.fetchStartAtPageNo = customerTaskConfig.fetchStartAtPageNo || this.fetchStartAtPageNo this.fetchEndAtPageNo = customerTaskConfig.fetchEndAtPageNo || this.fetchEndAtPageNo if (customerTaskConfig.isSkipFetch) { this.log(`检测到isSkipFetch配置为${!!customerTaskConfig.isSkipFetch}, 自动跳过抓取流程`) return } this.log(`开始进行自定义抓取`) type TypeTaskPackage = { [key: string]: Array<string> } let taskConfigList: Array<TypeTaskConfig.Record> = customerTaskConfig.configList for (let taskConfig of taskConfigList) { let { uid, comment } = taskConfig this.log(`待抓取用户uid => ${uid}`) this.log(`备注信息 => ${comment}`) // 开工 this.log(`抓取用户${uid}信息`) let response = await ApiWeibo.asyncGetUserInfoResponseData(uid) if (_.isEmpty(response)) { this.log(`用户信息获取失败, 请检查登录状态`) continue } let userInfo = response.userInfo 
this.log(`用户信息获取完毕,待抓取用户为:${userInfo.screen_name},个人简介:${userInfo.description}`) // 拿到containerId let containerId: string = '' for (let tab of response.tabsInfo.tabs) { if (tab.tabKey === 'weibo') { containerId = tab.containerid } } if (containerId === '') { this.log(`未能获取到用户${userInfo.screen_name}对应的containerId,自动跳过`) continue } this.log(`开始抓取用户${userInfo.screen_name}微博记录`) let mblogCardList = await ApiWeibo.asyncGetWeiboList(uid).catch(e => { // 避免crash导致整个进程退出 return [] }) if (_.isEmpty(mblogCardList)) { this.log(`用户${userInfo.screen_name}微博记录为空,跳过抓取流程`) continue } let mblogCard = mblogCardList[0] let mblog = mblogCard.mblog let mblogUserInfo = mblog.user // 保存用户信息 await MMblogUser.replaceInto({ author_uid: `${mblogUserInfo.id}`, raw_json: JSON.stringify(mblogUserInfo), }) // 用户总微博数 let totalMblogCount = await ApiWeibo.asyncGetWeiboCount(uid) let totalPageCount = Math.ceil(totalMblogCount / 10) this.log(`用户${userInfo.screen_name}共发布了${totalMblogCount}条微博, 正式开始抓取`) let maxFetchPageNo = this.fetchEndAtPageNo <= totalPageCount ? 
this.fetchEndAtPageNo : totalPageCount this.log(`本次抓取的页码范围为:${this.fetchStartAtPageNo}~${maxFetchPageNo}`) // 为抓取微博自定义一套流程 // 获取st this.requestConfig.st = await ApiWeibo.asyncStep1FetchPageConfigSt() // 拿着st, 获取api config中的st this.requestConfig.st = await ApiWeibo.asyncStep2FetchApiConfig(this.requestConfig.st) for (let page = 1; page <= totalPageCount; page++) { if (page < this.fetchStartAtPageNo) { page = this.fetchStartAtPageNo this.log(`从第${this.fetchStartAtPageNo}页数据开始抓取`) } if (page > this.fetchEndAtPageNo) { this.log(`已抓取至设定的第${page}/${this.fetchEndAtPageNo}页数据, 自动跳过抓取`) } else { await this.fetchMblogListAndSaveToDb(uid, page, totalPageCount) // 微博的反爬虫措施太强, 只能用每20s抓一次的方式拿数据🤦‍♂️ this.log(`已抓取${page}/${totalPageCount}页记录, 休眠${Const_Fetch_Wati_Seconds}s, 避免被封`) await Util.asyncSleep(Const_Fetch_Wati_Seconds * 1000) } } this.log(`用户${userInfo.screen_name}的微博数据抓取完毕`) } this.log(`所有任务抓取完毕`) } /** * * @param author_uid * @param page * @param totalPage * @param newFormatRecordMap */ async fetchMblogListAndSaveToDb(author_uid: string, page: number, totalPage: number) { let target = `第${page}/${totalPage}页微博记录` this.log(`准备抓取${target}`) let rawMblogList = await ApiWeibo.asyncStep3GetWeiboList(this.requestConfig.st, author_uid, page).catch(e => { // 避免crash导致整个进程退出 return [] }) if (rawMblogList.length === 0) { // 说明抓取失败, 等待30s后重试一次 this.log(`经ApiV1接口抓取第${page}/${totalPage}页数据失败(1/3), 等待${Const_Retry_Wait_Seconds}s后重试`) await Util.asyncSleep(1000 * Const_Retry_Wait_Seconds) // 更新st let newSt = await ApiWeibo.asyncStep2FetchApiConfig(this.requestConfig.st) this.requestConfig.st = newSt // 带着新st重新抓取一次 rawMblogList = await ApiWeibo.asyncStep3GetWeiboList(this.requestConfig.st, author_uid, page) } if (rawMblogList.length === 0) { this.log(`经ApiV1接口抓取第${page}/${totalPage}页数据失败(2/3), 等待${Const_Retry_Wait_Seconds}s后重试`) await Util.asyncSleep(1000 * Const_Retry_Wait_Seconds) rawMblogList = await ApiWeibo.asyncStep3GetWeiboList(this.requestConfig.st, author_uid, page) } if 
(rawMblogList.length === 0) { this.log(`经ApiV1接口抓取第${page}/${totalPage}页数据失败(3/3), 跳过对本页的抓取`) await Util.asyncSleep(1000 * Const_Retry_Wait_Seconds) return } let mblogList: Array<TypeWeibo.TypeMblog> = [] // 此处要根据微博类型进行具体定制 for (let rawMblog of rawMblogList) { let mblog = rawMblog.mblog if (_.isEmpty(mblog) || _.isEmpty(mblog.user)) { // 数据为空自动跳过 continue } // 检查是否是长微博 if (rawMblog.mblog.isLongText === true) { // 长微博需要调取api重新获得微博内容 let bid = rawMblog.mblog.bid let realMblog = <TypeWeibo.TypeMblog>await ApiWeibo.asyncGetLongTextWeibo(bid).catch(e => { // 避免crash导致整个进程退出 return {} }) if (_.isEmpty(realMblog)) { continue } // @ts-ignore mblog = realMblog } if (_.isEmpty(rawMblog.mblog.retweeted_status) == false && rawMblog.mblog.retweeted_status !== undefined) { if (rawMblog.mblog.retweeted_status.isLongText === true) { // 转发微博属于长微博 let bid = rawMblog.mblog.retweeted_status.bid let realRetweetMblog = <TypeWeibo.TypeMblog>await ApiWeibo.asyncGetLongTextWeibo(bid) mblog.retweeted_status = realRetweetMblog } if ( rawMblog.mblog.retweeted_status !== undefined && rawMblog.mblog.retweeted_status.page_info !== undefined && rawMblog.mblog.retweeted_status.page_info.type === 'article' ) { // 转发的是微博文章 let pageInfo = rawMblog.mblog.retweeted_status.page_info let articleId = getArticleId(pageInfo.page_url) let articleRecord = await ApiWeibo.asyncGetWeiboArticle(articleId).catch(e => { // 避免crash导致整个进程退出 return {} }) if (_.isEmpty(articleRecord)) { // 文章详情获取失败, 不储存该记录 continue } mblog.retweeted_status.article = articleRecord } } if (rawMblog?.mblog?.page_info?.type === 'article') { // 文章类型为微博文章 let pageInfo = rawMblog.mblog.page_info let articleId = getArticleId(pageInfo.page_url) let articleRecord = await ApiWeibo.asyncGetWeiboArticle(articleId).catch(e => { // 避免crash导致整个进程退出 return {} }) if (_.isEmpty(articleRecord)) { // 文章详情获取失败, 不储存该记录 continue } mblog.article = articleRecord } mblogList.push(mblog) } this.log(`${target}抓取成功, 准备存入数据库`) for (let mblog of mblogList) { // 处理完毕, 
将数据存入数据库中 let id = mblog.id let author_uid = `${mblog.user.id}` let createAt = 0 // 目前微博的created_at字段均为标准时区字符串格式 createAt = this.parseMblogCreateTimestamp(mblog) mblog.created_timestamp_at = createAt let raw_json = JSON.stringify(mblog) let is_retweet = mblog.retweeted_status ? 1 : 0 let is_article = mblog.article ? 1 : 0 // 这里可能会出报SQLITE_BUSY: database is locked await MMblog.replaceInto({ id, author_uid, is_retweet, is_article, raw_json, post_publish_at: mblog.created_timestamp_at, }).catch((e: Error) => { this.log("数据库插入出错 => ", { name: e?.name, message: e?.message, stack: e?.stack }) return }) } this.log(`${target}成功存入数据库`) } /** * 简单将微博发布时间解析为 * @param mlog */ parseMblogCreateTimestamp(mlog: TypeWeibo.TypeMblog) { let rawCreateAtStr = `${mlog.created_at}` if (rawCreateAtStr.includes('-') === false) { // Mon Sep 16 01:13:45 +0800 2019 if (rawCreateAtStr.includes('+0800')) { // 'Sun Sep 15 00:35:14 +0800 2019' 时区模式 return moment(rawCreateAtStr, Const_Moment_Parse_Format_4_WeiboComApi).unix() } // '12小时前' | '4分钟前' | '刚刚' | '1小时前' 模式 // 不含-符号, 表示是最近一天内, 直接认为是当前时间, 不进行细分 return moment().unix() } if (rawCreateAtStr.length === '08-07'.length) { // 月日模式, 表示当前年份,手工补上年份 return moment(`${moment().format('YYYY')}-${rawCreateAtStr}`).unix() } // 否则, 为'2012-01-02' 模式, 直接解析即可 return moment(rawCreateAtStr).unix() } } export default FetchCustomer
the_stack
'use strict';

import { ResultWithHttpResponse } from './results';

/**
 * Defines type describing callback with two results.
 *
 * @template TResult1 - Type of the first result value, possibly Error.
 * @template TResult2 - Type of the second result value, possibly Error.
 */
export type DoubleValueCallback<TResult1, TResult2> = (result1?: TResult1, result2?: TResult2) => void;

/**
 * Defines type describing callback with one non-error result value.
 *
 * @template TResult - Type of the result value.
 */
export type NoErrorCallback<TResult> = (result?: TResult) => void;

/**
 * Defines type describing callback with only Error result value.
 */
export type ErrorCallback = (error?: Error) => void;

/**
 * Defines type describing regular callback with two results - one is the Error, the other one is the result value.
 *
 * @template TResult - Type of the result value.
 */
export type Callback<TResult> = DoubleValueCallback<Error, TResult>;

/**
 * Defines type describing regular callback with three results - the first one is Error, the other two are the result values.
 *
 * @template TResult1 - Type of the first result value.
 * @template TResult2 - Type of the second result value.
 */
export type TripleValueCallback<TResult1, TResult2> = (error?: Error, result1?: TResult1, result2?: TResult2) => void;

/**
 * Defines type describing callback with three results - response, raw HTTP response and an Error.
 *
 * @template TResult - Type of the result value.
 */
export type HttpResponseCallback<TResult> = TripleValueCallback<TResult, any>;

/**
 * @private
 *
 * Converts method taking regular callback as a parameter to method returning a Promise if userCallback is not specified.
 * Otherwise, it executes the method with userCallback as the callback.
 *
 * @param {(callback: Callback<TResult>) => void} callBackOperation - Function taking regular callback as a parameter.
 * @param {Callback<TResult>} [userCallback] - Optional caller-provided callback. The method will not return a Promise if specified.
 * @returns {Promise<TResult> | void} Promise with result of TResult type or void if user's callback provided
 * @template TResult - Type of the result value.
 * @example
 * // When method takes only callback as the parameter like example:
 * function foo(callback: Function) {[...]}
 * // we call
 * callbackToPromise((_callback) => foo(_callback));
 * // We need to create a lambda expression or an anonymous function because this method has to inject its own callback.
 *
 * // If converted method takes more than callback as its parameter, we need to create a closure. For method defined like
 * function foo(param: any, callback: Function) {[...]}
 * // we call
 * const param = 42;
 * callbackToPromise((_callback) => foo(param, _callback)).then(result => { console.log(result); }, error => { console.error(error); });
 */
export function callbackToPromise<TResult>(callBackOperation: (callback: Callback<TResult>) => void, userCallback?: Callback<TResult>): Promise<TResult> | void {
  if (userCallback) {
    if (!(typeof userCallback === 'function')) {
      throw new TypeError('Callback has to be a Function');
    }
    // Caller wants callback semantics: run the operation directly.
    return callBackOperation(userCallback);
  }

  return new Promise<TResult>((resolve, reject) => {
    try {
      callBackOperation((error, result) => {
        if (error) {
          return reject(error);
        }
        return resolve(result);
      });
    } catch (error) {
      // The operation threw synchronously before invoking the callback.
      reject(error);
    }
  });
}

/**
 * @private
 *
 * Converts method taking callback returning only error as a parameter to method returning a void Promise if userCallback is not specified.
 * Otherwise, it executes the method with userCallback as the callback.
 *
 * @param {(callback: ErrorCallback) => void} callBackOperation - Function taking error-only returning callback as a parameter.
 * @param {ErrorCallback} [userCallback] - Optional caller-provided callback. The method will not return a Promise if specified.
 * @returns {Promise<void> | void} Promise with empty result or a rejection or void if user's callback provided
 * @example
 * // When method takes only callback as the parameter like example:
 * function foo(callback: Function) {[...]}
 * // we call
 * errorCallbackToPromise((_callback) => foo(_callback));
 * // We need to create a lambda expression or an anonymous function because this method has to inject its own callback.
 *
 * // If converted method takes more than callback as its parameter, we need to create a closure. For method defined like
 * function foo(param: any, callback: Function) {[...]}
 * // we call
 * const param = 42;
 * errorCallbackToPromise((_callback) => foo(param, _callback)).then(_ => { }, err => { console.log(err); });
 */
export function errorCallbackToPromise(callBackOperation: (callback: ErrorCallback) => void, userCallback: ErrorCallback): void;
export function errorCallbackToPromise(callBackOperation: (callback: ErrorCallback) => void): Promise<void>;
export function errorCallbackToPromise(callBackOperation: (callback: ErrorCallback) => void, userCallback?: ErrorCallback): Promise<void> | void {
  // An ErrorCallback is a Callback<void> whose result is always undefined.
  return callbackToPromise(callBackOperation, userCallback);
}

/**
 * @private
 *
 * Converts method taking callback returning only result as a parameter to method returning a Promise with the result if userCallback is not specified.
 * Otherwise, it executes the method with userCallback as the callback.
 *
 * @param {(callback: NoErrorCallback<TResult>) => void} callBackOperation - Function taking result-only returning callback as a parameter.
 * @param {NoErrorCallback<TResult>} [userCallback] - Optional caller-provided callback. The method will not return a Promise if specified.
 * @returns {Promise<TResult> | void} Promise with the result, it never rejects or void if user's callback provided
 * @template TResult - Type of the result value.
 * @example
 * // When method takes only callback as the parameter like example:
 * function foo(callback: Function) {[...]}
 * // we call
 * noErrorCallbackToPromise((_callback) => foo(_callback));
 * // We need to create a lambda expression or an anonymous function because this method has to inject its own callback.
 *
 * // If converted method takes more than callback as its parameter, we need to create a closure. For method defined like
 * function foo(param: any, callback: Function) {[...]}
 * // we call
 * const param = 42;
 * noErrorCallbackToPromise((_callback) => foo(param, _callback)).then(result => { console.log(result); }, err => { console.log("it never rejects"); });
 */
export function noErrorCallbackToPromise<TResult>(callBackOperation: (callback: NoErrorCallback<TResult>) => void, userCallback: NoErrorCallback<TResult>): void;
export function noErrorCallbackToPromise<TResult>(callBackOperation: (callback: NoErrorCallback<TResult>) => void): Promise<TResult>;
export function noErrorCallbackToPromise<TResult>(callBackOperation: (callback: NoErrorCallback<TResult>) => void, userCallback?: NoErrorCallback<TResult>): Promise<TResult> | void {
  if (userCallback) {
    if (!(typeof userCallback === 'function')) {
      throw new TypeError('Callback has to be a Function');
    }
    return callBackOperation(userCallback);
  }

  // The callback carries no error channel, so this Promise never rejects.
  return new Promise<TResult>((resolve, _reject) => {
    callBackOperation((result) => {
      return resolve(result);
    });
  });
}

/**
 * @private
 *
 * Converts method taking callback with two result values (one can be an Error) as a parameter to method returning a Promise if userCallback is not specified.
 * Otherwise, it executes the method with userCallback as the callback.
 * Promise cannot return multiple objects so the return values have to be packed into a single object.
 *
 * @param {(callback: DoubleValueCallback<TResult1, TResult2>) => void} callBackOperation - Function taking callback with two return values and an error as a parameter.
 * @param {(result1: TResult1, result2: TResult2) => TPromiseResult} packResults - Function converting two return values from the callback to a single object of {TPromiseResult} type.
 * @param {DoubleValueCallback<TResult1, TResult2>} [userCallback] - Optional caller-provided callback. The method will not return a Promise if specified.
 * @returns {Promise<TPromiseResult> | void} Promise with result of TPromiseResult type or void if user's callback provided
 * @template TResult1 - Type of the first result value.
 * @template TResult2 - Type of the second result value.
 * @template TPromiseResult - Type of the Promise result value.
 * @example
 * // When method takes only callback as the parameter like example:
 * function foo(callback: Function) {[...]}
 * // we call
 * const pack = (result1, result2) => { return { res1: result1, res2: result2 }; };
 * doubleValueCallbackToPromise((_callback) => foo(_callback), pack);
 * // We need to create a lambda expression or an anonymous function because this method has to inject its own callback
 * // and we need to provide a method packing two results into one object which is returned in the Promise.
 *
 * // If converted method takes more than callback as its parameter, we need to create a closure. For method defined like
 * function foo(param: any, callback: Function) {[...]}
 * // we call
 * const pack = (result1, result2) => { return { res1: result1, res2: result2 }; };
 * const param = 42;
 * doubleValueCallbackToPromise((_callback) => foo(param, _callback), pack).then(result => { console.log(result); }, err => { console.error(err); });
 */
export function doubleValueCallbackToPromise<TResult1, TResult2, TPromiseResult>(
  callBackOperation: (callback: DoubleValueCallback<TResult1, TResult2>) => void,
  packResults: (result1: TResult1, result2: TResult2) => TPromiseResult,
  userCallback: DoubleValueCallback<TResult1, TResult2>): void;
export function doubleValueCallbackToPromise<TResult1, TResult2, TPromiseResult>(
  callBackOperation: (callback: DoubleValueCallback<TResult1, TResult2>) => void,
  packResults: (result1: TResult1, result2: TResult2) => TPromiseResult): Promise<TPromiseResult>;
export function doubleValueCallbackToPromise<TResult1, TResult2, TPromiseResult>(
  callBackOperation: (callback: DoubleValueCallback<TResult1, TResult2>) => void,
  packResults: (result1: TResult1, result2: TResult2) => TPromiseResult,
  userCallback?: DoubleValueCallback<TResult1, TResult2>): Promise<TPromiseResult> | void {
  if (userCallback) {
    if (!(typeof userCallback === 'function')) {
      throw new TypeError('Callback has to be a Function');
    }
    return callBackOperation(userCallback);
  }

  return new Promise<TPromiseResult>((resolve, reject) => {
    try {
      callBackOperation((result1, result2) => {
        // FIX: return after reject — previously control fell through and
        // packResults/resolve were still invoked with an Error result.
        if (result1 instanceof Error) {
          return reject(result1);
        }
        if (result2 instanceof Error) {
          return reject(result2);
        }
        return resolve(packResults(result1, result2));
      });
    } catch (error) {
      reject(error);
    }
  });
}

/**
 * @private
 *
 * Converts method taking callback with two result values and an error as a parameter to method returning a Promise if userCallback is not specified.
 * Otherwise, it executes the method with userCallback as the callback.
 * Promise cannot return multiple objects so the return values have to be packed into a single object.
 *
 * @param {(callback: TripleValueCallback<TResult1, TResult2>) => void} callbackOperation - Function taking callback with two return values and an error as a parameter.
 * @param {(result1: TResult1, result2: TResult2) => TPromiseResult} packResults - Function converting two return values from the callback to a single object of {TPromiseResult} type.
 * @param {TripleValueCallback<TResult1, TResult2>} [userCallback] - Optional caller-provided callback. The method will not return a Promise if specified.
 * @returns {Promise<TPromiseResult> | void} Promise with result of TPromiseResult type or void if user's callback provided.
 * @template TResult1 - Type of the first result value.
 * @template TResult2 - Type of the second result value.
 * @template TPromiseResult - Type of the Promise result value.
 * @example
 * // When method takes only callback as the parameter like example:
 * function foo(callback: Function) {[...]}
 * // we call
 * const pack = (result1, result2) => { return { res1: result1, res2: result2 }; };
 * tripleValueCallbackToPromise((_callback) => foo(_callback), pack);
 * // We need to create a lambda expression or an anonymous function because this method has to inject its own callback
 * // and we need to provide a method packing two results into one object which is returned in the Promise.
 *
 * // If converted method takes more than callback as its parameter, we need to create a closure. For method defined like
 * function foo(param: any, callback: Function) {[...]}
 * // we call
 * const pack = (result1, result2) => { return { res1: result1, res2: result2 }; };
 * const param = 42;
 * tripleValueCallbackToPromise((_callback) => foo(param, _callback), pack).then(result => { console.log(result); }, err => { console.error(err); });
 */
export function tripleValueCallbackToPromise<TResult1, TResult2, TPromiseResult>(
  callbackOperation: (callback: TripleValueCallback<TResult1, TResult2>) => void,
  packResults: (result1: TResult1, result2: TResult2) => TPromiseResult,
  userCallback: TripleValueCallback<TResult1, TResult2>): void;
export function tripleValueCallbackToPromise<TResult1, TResult2, TPromiseResult>(
  callbackOperation: (callback: TripleValueCallback<TResult1, TResult2>) => void,
  packResults: (result1: TResult1, result2: TResult2) => TPromiseResult,
): Promise<TPromiseResult>;
export function tripleValueCallbackToPromise<TResult1, TResult2, TPromiseResult>(
  callbackOperation: (callback: TripleValueCallback<TResult1, TResult2>) => void,
  packResults: (result1: TResult1, result2: TResult2) => TPromiseResult,
  userCallback?: TripleValueCallback<TResult1, TResult2>): Promise<TPromiseResult> | void {
  if (userCallback) {
    if (!(typeof userCallback === 'function')) {
      throw new TypeError('Callback has to be a Function');
    }
    return callbackOperation(userCallback);
  }

  return new Promise<TPromiseResult>((resolve, reject) => {
    try {
      callbackOperation((error, result1, result2) => {
        // FIX: return after reject — previously packResults was still
        // invoked (with undefined results) after an error was reported.
        if (error) {
          return reject(error);
        }
        resolve(packResults(result1, result2));
      });
    } catch (error) {
      reject(error);
    }
  });
}

/**
 * @private
 *
 * Converts method taking callback with two result values (response body and HTTP response itself)
 * and an error as a parameter to method returning a Promise if userCallback is not specified.
 * Otherwise, it executes the method with userCallback as the callback.
 *
 * @param {(callback: HttpResponseCallback<TResult>) => void} callbackOperation - Function taking callback with two return values (response body and HTTP response) and an error as a parameter.
 * @param {HttpResponseCallback<TResult>} [callback] - Optional caller-provided callback. The method will not return a Promise if specified.
 * @returns {Promise<ResultWithHttpResponse<TResult>> | void} Promise with result of ResultWithHttpResponse<TResult> type or void if user's callback provided
 * @template TResult - Type of the response body result.
 */
export function httpCallbackToPromise<TResult>(
  callbackOperation: (callback: HttpResponseCallback<TResult>) => void,
  callback: HttpResponseCallback<TResult>): void;
export function httpCallbackToPromise<TResult>(
  callbackOperation: (callback: HttpResponseCallback<TResult>) => void): Promise<ResultWithHttpResponse<TResult>>;
export function httpCallbackToPromise<TResult>(
  callbackOperation: (callback: HttpResponseCallback<TResult>) => void,
  callback?: HttpResponseCallback<TResult>): Promise<ResultWithHttpResponse<TResult>> | void {
  // Pack (body, raw response) into a single ResultWithHttpResponse object.
  return tripleValueCallbackToPromise(callbackOperation, (b, r) => createResultWithHttpResponse(b, r), callback);
}

/**
 * @private
 *
 * Packs a response body and the raw HTTP response into a single result object.
 */
export function createResultWithHttpResponse<TResult>(responseBody: TResult, httpResponse: any): ResultWithHttpResponse<TResult> {
  return {
    responseBody: responseBody,
    httpResponse: httpResponse
  };
}
the_stack
// ANTLR-generated parser for the 'Agtype.g4' grammar (agtype is a JSON-like
// value syntax with an optional '::type' annotation). Generated code: the
// state numbers and the serialized ATN below are machine-produced — do not
// edit them by hand; regenerate from the grammar instead.
import { ATN } from 'antlr4ts/atn/ATN'
import { ATNDeserializer } from 'antlr4ts/atn/ATNDeserializer'
import { FailedPredicateException } from 'antlr4ts/FailedPredicateException'
import { NoViableAltException } from 'antlr4ts/NoViableAltException'
import { Parser } from 'antlr4ts/Parser'
import { ParserRuleContext } from 'antlr4ts/ParserRuleContext'
import { ParserATNSimulator } from 'antlr4ts/atn/ParserATNSimulator'
import { RecognitionException } from 'antlr4ts/RecognitionException'
// import { RuleVersion } from "antlr4ts/RuleVersion";
import { TerminalNode } from 'antlr4ts/tree/TerminalNode'
import { TokenStream } from 'antlr4ts/TokenStream'
import { Vocabulary } from 'antlr4ts/Vocabulary'
import { VocabularyImpl } from 'antlr4ts/VocabularyImpl'

import * as Utils from 'antlr4ts/misc/Utils'

import { AgtypeListener } from './AgtypeListener'

export class AgtypeParser extends Parser {
  // Token type constants (T__n are the anonymous literal tokens listed in
  // _LITERAL_NAMES below, e.g. T__0 = 'true', T__9 = '::').
  public static readonly T__0 = 1;
  public static readonly T__1 = 2;
  public static readonly T__2 = 3;
  public static readonly T__3 = 4;
  public static readonly T__4 = 5;
  public static readonly T__5 = 6;
  public static readonly T__6 = 7;
  public static readonly T__7 = 8;
  public static readonly T__8 = 9;
  public static readonly T__9 = 10;
  public static readonly T__10 = 11;
  public static readonly T__11 = 12;
  public static readonly T__12 = 13;
  public static readonly IDENT = 14;
  public static readonly STRING = 15;
  public static readonly INTEGER = 16;
  public static readonly RegularFloat = 17;
  public static readonly ExponentFloat = 18;
  public static readonly WS = 19;
  // Rule indexes, in grammar order.
  public static readonly RULE_agType = 0;
  public static readonly RULE_agValue = 1;
  public static readonly RULE_value = 2;
  public static readonly RULE_obj = 3;
  public static readonly RULE_pair = 4;
  public static readonly RULE_array = 5;
  public static readonly RULE_typeAnnotation = 6;
  public static readonly RULE_floatLiteral = 7;
  // tslint:disable:no-trailing-whitespace
  public static readonly ruleNames: string[] = [
    'agType', 'agValue', 'value', 'obj', 'pair', 'array', 'typeAnnotation', 'floatLiteral'
  ];

  // Display names for the anonymous literal tokens (index = token type).
  private static readonly _LITERAL_NAMES: Array<string | undefined> = [
    undefined, "'true'", "'false'", "'null'", "'{'", "','", "'}'", "':'",
    "'['", "']'", "'::'", "'-'", "'Infinity'", "'NaN'"
  ];
  private static readonly _SYMBOLIC_NAMES: Array<string | undefined> = [
    undefined, undefined, undefined, undefined, undefined, undefined, undefined,
    undefined, undefined, undefined, undefined, undefined, undefined, undefined,
    'IDENT', 'STRING', 'INTEGER', 'RegularFloat', 'ExponentFloat', 'WS'
  ];
  public static readonly VOCABULARY: Vocabulary = new VocabularyImpl(AgtypeParser._LITERAL_NAMES, AgtypeParser._SYMBOLIC_NAMES, []);

  // @Override
  // @NotNull
  public get vocabulary (): Vocabulary {
    return AgtypeParser.VOCABULARY
  }
  // tslint:enable:no-trailing-whitespace

  // @Override
  public get grammarFileName (): string { return 'Agtype.g4' }

  // @Override
  public get ruleNames (): string[] { return AgtypeParser.ruleNames }

  // @Override
  public get serializedATN (): string { return AgtypeParser._serializedATN }

  protected createFailedPredicateException (predicate?: string, message?: string): FailedPredicateException {
    return new FailedPredicateException(this, predicate, message)
  }

  constructor (input: TokenStream) {
    super(input)
    this._interp = new ParserATNSimulator(AgtypeParser._ATN, this)
  }

  // Entry rule: agType : agValue EOF
  // @RuleVersion(0)
  public agType (): AgTypeContext {
    const _localctx: AgTypeContext = new AgTypeContext(this._ctx, this.state)
    this.enterRule(_localctx, 0, AgtypeParser.RULE_agType)
    try {
      this.enterOuterAlt(_localctx, 1)
      {
        this.state = 16
        this.agValue()
        this.state = 17
        this.match(AgtypeParser.EOF)
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // agValue : value ('::' IDENT)?  — the annotation part is optional.
  // @RuleVersion(0)
  public agValue (): AgValueContext {
    const _localctx: AgValueContext = new AgValueContext(this._ctx, this.state)
    this.enterRule(_localctx, 2, AgtypeParser.RULE_agValue)
    let _la: number
    try {
      this.enterOuterAlt(_localctx, 1)
      {
        this.state = 19
        this.value()
        this.state = 21
        this._errHandler.sync(this)
        _la = this._input.LA(1)
        // T__9 is '::' — the start of an optional type annotation.
        if (_la === AgtypeParser.T__9) {
          {
            this.state = 20
            this.typeAnnotation()
          }
        }
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // value : STRING | INTEGER | floatLiteral | 'true' | 'false' | 'null' | obj | array
  // Dispatches on the lookahead token; each alternative is wrapped in its
  // own labeled context class (StringValueContext, IntegerValueContext, …).
  // @RuleVersion(0)
  public value (): ValueContext {
    let _localctx: ValueContext = new ValueContext(this._ctx, this.state)
    this.enterRule(_localctx, 4, AgtypeParser.RULE_value)
    try {
      this.state = 31
      this._errHandler.sync(this)
      switch (this._input.LA(1)) {
        case AgtypeParser.STRING:
          _localctx = new StringValueContext(_localctx)
          this.enterOuterAlt(_localctx, 1)
          {
            this.state = 23
            this.match(AgtypeParser.STRING)
          }
          break
        case AgtypeParser.INTEGER:
          _localctx = new IntegerValueContext(_localctx)
          this.enterOuterAlt(_localctx, 2)
          {
            this.state = 24
            this.match(AgtypeParser.INTEGER)
          }
          break
        case AgtypeParser.T__10:
        case AgtypeParser.T__11:
        case AgtypeParser.T__12:
        case AgtypeParser.RegularFloat:
        case AgtypeParser.ExponentFloat:
          _localctx = new FloatValueContext(_localctx)
          this.enterOuterAlt(_localctx, 3)
          {
            this.state = 25
            this.floatLiteral()
          }
          break
        case AgtypeParser.T__0:
          _localctx = new TrueBooleanContext(_localctx)
          this.enterOuterAlt(_localctx, 4)
          {
            this.state = 26
            this.match(AgtypeParser.T__0)
          }
          break
        case AgtypeParser.T__1:
          _localctx = new FalseBooleanContext(_localctx)
          this.enterOuterAlt(_localctx, 5)
          {
            this.state = 27
            this.match(AgtypeParser.T__1)
          }
          break
        case AgtypeParser.T__2:
          _localctx = new NullValueContext(_localctx)
          this.enterOuterAlt(_localctx, 6)
          {
            this.state = 28
            this.match(AgtypeParser.T__2)
          }
          break
        case AgtypeParser.T__3:
          _localctx = new ObjectValueContext(_localctx)
          this.enterOuterAlt(_localctx, 7)
          {
            this.state = 29
            this.obj()
          }
          break
        case AgtypeParser.T__7:
          _localctx = new ArrayValueContext(_localctx)
          this.enterOuterAlt(_localctx, 8)
          {
            this.state = 30
            this.array()
          }
          break
        default:
          throw new NoViableAltException(this)
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // obj : '{' pair (',' pair)* '}' | '{' '}'
  // @RuleVersion(0)
  public obj (): ObjContext {
    const _localctx: ObjContext = new ObjContext(this._ctx, this.state)
    this.enterRule(_localctx, 6, AgtypeParser.RULE_obj)
    let _la: number
    try {
      this.state = 46
      this._errHandler.sync(this)
      switch (this.interpreter.adaptivePredict(this._input, 3, this._ctx)) {
        case 1:
          this.enterOuterAlt(_localctx, 1)
          {
            this.state = 33
            this.match(AgtypeParser.T__3)
            this.state = 34
            this.pair()
            this.state = 39
            this._errHandler.sync(this)
            _la = this._input.LA(1)
            // Additional ',' pair entries.
            while (_la === AgtypeParser.T__4) {
              {
                {
                  this.state = 35
                  this.match(AgtypeParser.T__4)
                  this.state = 36
                  this.pair()
                }
              }
              this.state = 41
              this._errHandler.sync(this)
              _la = this._input.LA(1)
            }
            this.state = 42
            this.match(AgtypeParser.T__5)
          }
          break
        case 2:
          // Empty object: '{' '}'
          this.enterOuterAlt(_localctx, 2)
          {
            this.state = 44
            this.match(AgtypeParser.T__3)
            this.state = 45
            this.match(AgtypeParser.T__5)
          }
          break
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // pair : STRING ':' agValue
  // @RuleVersion(0)
  public pair (): PairContext {
    const _localctx: PairContext = new PairContext(this._ctx, this.state)
    this.enterRule(_localctx, 8, AgtypeParser.RULE_pair)
    try {
      this.enterOuterAlt(_localctx, 1)
      {
        this.state = 48
        this.match(AgtypeParser.STRING)
        this.state = 49
        this.match(AgtypeParser.T__6)
        this.state = 50
        this.agValue()
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // array : '[' agValue (',' agValue)* ']' | '[' ']'
  // @RuleVersion(0)
  public array (): ArrayContext {
    const _localctx: ArrayContext = new ArrayContext(this._ctx, this.state)
    this.enterRule(_localctx, 10, AgtypeParser.RULE_array)
    let _la: number
    try {
      this.state = 65
      this._errHandler.sync(this)
      switch (this.interpreter.adaptivePredict(this._input, 5, this._ctx)) {
        case 1:
          this.enterOuterAlt(_localctx, 1)
          {
            this.state = 52
            this.match(AgtypeParser.T__7)
            this.state = 53
            this.agValue()
            this.state = 58
            this._errHandler.sync(this)
            _la = this._input.LA(1)
            // Additional ',' agValue entries.
            while (_la === AgtypeParser.T__4) {
              {
                {
                  this.state = 54
                  this.match(AgtypeParser.T__4)
                  this.state = 55
                  this.agValue()
                }
              }
              this.state = 60
              this._errHandler.sync(this)
              _la = this._input.LA(1)
            }
            this.state = 61
            this.match(AgtypeParser.T__8)
          }
          break
        case 2:
          // Empty array: '[' ']'
          this.enterOuterAlt(_localctx, 2)
          {
            this.state = 63
            this.match(AgtypeParser.T__7)
            this.state = 64
            this.match(AgtypeParser.T__8)
          }
          break
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // typeAnnotation : '::' IDENT
  // @RuleVersion(0)
  public typeAnnotation (): TypeAnnotationContext {
    const _localctx: TypeAnnotationContext = new TypeAnnotationContext(this._ctx, this.state)
    this.enterRule(_localctx, 12, AgtypeParser.RULE_typeAnnotation)
    try {
      this.enterOuterAlt(_localctx, 1)
      {
        this.state = 67
        this.match(AgtypeParser.T__9)
        this.state = 68
        this.match(AgtypeParser.IDENT)
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // floatLiteral : RegularFloat | ExponentFloat | '-'? 'Infinity' | 'NaN'
  // @RuleVersion(0)
  public floatLiteral (): FloatLiteralContext {
    const _localctx: FloatLiteralContext = new FloatLiteralContext(this._ctx, this.state)
    this.enterRule(_localctx, 14, AgtypeParser.RULE_floatLiteral)
    let _la: number
    try {
      this.state = 77
      this._errHandler.sync(this)
      switch (this._input.LA(1)) {
        case AgtypeParser.RegularFloat:
          this.enterOuterAlt(_localctx, 1)
          {
            this.state = 70
            this.match(AgtypeParser.RegularFloat)
          }
          break
        case AgtypeParser.ExponentFloat:
          this.enterOuterAlt(_localctx, 2)
          {
            this.state = 71
            this.match(AgtypeParser.ExponentFloat)
          }
          break
        case AgtypeParser.T__10:
        case AgtypeParser.T__11:
          // Optional '-' sign followed by 'Infinity'.
          this.enterOuterAlt(_localctx, 3)
          {
            this.state = 73
            this._errHandler.sync(this)
            _la = this._input.LA(1)
            if (_la === AgtypeParser.T__10) {
              {
                this.state = 72
                this.match(AgtypeParser.T__10)
              }
            }
            this.state = 75
            this.match(AgtypeParser.T__11)
          }
          break
        case AgtypeParser.T__12:
          this.enterOuterAlt(_localctx, 4)
          {
            this.state = 76
            this.match(AgtypeParser.T__12)
          }
          break
        default:
          throw new NoViableAltException(this)
      }
    } catch (re) {
      if (re instanceof RecognitionException) {
        _localctx.exception = re
        this._errHandler.reportError(this, re)
        this._errHandler.recover(this, re)
      } else {
        throw re
      }
    } finally {
      this.exitRule()
    }
    return _localctx
  }

  // Serialized ATN emitted by the ANTLR tool — opaque machine data; must not
  // be modified by hand.
  public static readonly _serializedATN: string =
    '\x03\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\x03\x15R\x04\x02' +
    '\t\x02\x04\x03\t\x03\x04\x04\t\x04\x04\x05\t\x05\x04\x06\t\x06\x04\x07' +
    '\t\x07\x04\b\t\b\x04\t\t\t\x03\x02\x03\x02\x03\x02\x03\x03\x03\x03\x05' +
    '\x03\x18\n\x03\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04' +
    '\x03\x04\x05\x04"\n\x04\x03\x05\x03\x05\x03\x05\x03\x05\x07\x05(\n\x05' +
    '\f\x05\x0E\x05+\v\x05\x03\x05\x03\x05\x03\x05\x03\x05\x05\x051\n\x05\x03' +
    '\x06\x03\x06\x03\x06\x03\x06\x03\x07\x03\x07\x03\x07\x03\x07\x07\x07;' +
    '\n\x07\f\x07\x0E\x07>\v\x07\x03\x07\x03\x07\x03\x07\x03\x07\x05\x07D\n' +
    '\x07\x03\b\x03\b\x03\b\x03\t\x03\t\x03\t\x05\tL\n\t\x03\t\x03\t\x05\t' +
    'P\n\t\x03\t\x02\x02\x02\n\x02\x02\x04\x02\x06\x02\b\x02\n\x02\f\x02\x0E' +
    '\x02\x10\x02\x02\x02\x02Y\x02\x12\x03\x02\x02\x02\x04\x15\x03\x02\x02' +
    '\x02\x06!\x03\x02\x02\x02\b0\x03\x02\x02\x02\n2\x03\x02\x02\x02\fC\x03' +
    '\x02\x02\x02\x0EE\x03\x02\x02\x02\x10O\x03\x02\x02\x02\x12\x13\x05\x04' +
    '\x03\x02\x13\x14\x07\x02\x02\x03\x14\x03\x03\x02\x02\x02\x15\x17\x05\x06' +
    '\x04\x02\x16\x18\x05\x0E\b\x02\x17\x16\x03\x02\x02\x02\x17\x18\x03\x02' +
    '\x02\x02\x18\x05\x03\x02\x02\x02\x19"\x07\x11\x02\x02\x1A"\x07\x12\x02' +
    '\x02\x1B"\x05\x10\t\x02\x1C"\x07\x03\x02\x02\x1D"\x07\x04\x02\x02\x1E' +
    '"\x07\x05\x02\x02\x1F"\x05\b\x05\x02 "\x05\f\x07\x02!\x19\x03\x02\x02' +
    '\x02!\x1A\x03\x02\x02\x02!\x1B\x03\x02\x02\x02!\x1C\x03\x02\x02\x02!\x1D' +
    '\x03\x02\x02\x02!\x1E\x03\x02\x02\x02!\x1F\x03\x02\x02\x02! \x03\x02\x02' +
    '\x02"\x07\x03\x02\x02\x02#$\x07\x06\x02\x02$)\x05\n\x06\x02%&\x07\x07' +
    "\x02\x02&(\x05\n\x06\x02\'%\x03\x02\x02\x02(+\x03\x02\x02\x02)\'\x03\x02" +
    '\x02\x02)*\x03\x02\x02\x02*,\x03\x02\x02\x02+)\x03\x02\x02\x02,-\x07\b' +
    '\x02\x02-1\x03\x02\x02\x02./\x07\x06\x02\x02/1\x07\b\x02\x020#\x03\x02' +
    '\x02\x020.\x03\x02\x02\x021\t\x03\x02\x02\x0223\x07\x11\x02\x0234\x07' +
    '\t\x02\x0245\x05\x04\x03\x025\v\x03\x02\x02\x0267\x07\n\x02\x027<\x05' +
    '\x04\x03\x0289\x07\x07\x02\x029;\x05\x04\x03\x02:8\x03\x02\x02\x02;>\x03' +
    '\x02\x02\x02<:\x03\x02\x02\x02<=\x03\x02\x02\x02=?\x03\x02\x02\x02><\x03' +
    '\x02\x02\x02?@\x07\v\x02\x02@D\x03\x02\x02\x02AB\x07\n\x02\x02BD\x07\v' +
    '\x02\x02C6\x03\x02\x02\x02CA\x03\x02\x02\x02D\r\x03\x02\x02\x02EF\x07' +
    '\f\x02\x02FG\x07\x10\x02\x02G\x0F\x03\x02\x02\x02HP\x07\x13\x02\x02IP' +
    '\x07\x14\x02\x02JL\x07\r\x02\x02KJ\x03\x02\x02\x02KL\x03\x02\x02\x02L' +
    'M\x03\x02\x02\x02MP\x07\x0E\x02\x02NP\x07\x0F\x02\x02OH\x03\x02\x02\x02' +
    'OI\x03\x02\x02\x02OK\x03\x02\x02\x02ON\x03\x02\x02\x02P\x11\x03\x02\x02' +
    '\x02\n\x17!)0<CKO';

  public static __ATN: ATN;
  // Lazily deserializes the ATN on first access and caches it.
  public static get _ATN (): ATN {
    if (!AgtypeParser.__ATN) {
      AgtypeParser.__ATN = new ATNDeserializer().deserialize(Utils.toCharArray(AgtypeParser._serializedATN))
    }

    return AgtypeParser.__ATN
  }
}

// Parse-tree context for the 'agType' rule (agValue EOF).
export class AgTypeContext extends ParserRuleContext {
  public agValue (): AgValueContext {
    return this.getRuleContext(0, AgValueContext)
  }
  public EOF (): TerminalNode { return this.getToken(AgtypeParser.EOF, 0) }
  constructor (parent: ParserRuleContext | undefined, invokingState: number) {
    super(parent, invokingState)
  }
  // @Override
  public get ruleIndex (): number { return AgtypeParser.RULE_agType }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterAgType) {
      listener.enterAgType(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitAgType) {
      listener.exitAgType(this)
    }
  }
}

// Parse-tree context for the 'agValue' rule (value with optional annotation).
export class AgValueContext extends ParserRuleContext {
  public value (): ValueContext {
    return this.getRuleContext(0, ValueContext)
  }
  // Undefined when the value carries no '::type' annotation.
  public typeAnnotation (): TypeAnnotationContext | undefined {
    return this.tryGetRuleContext(0, TypeAnnotationContext)
  }
  constructor (parent: ParserRuleContext | undefined, invokingState: number) {
    super(parent, invokingState)
  }
  // @Override
  public get ruleIndex (): number { return AgtypeParser.RULE_agValue }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterAgValue) {
      listener.enterAgValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitAgValue) {
      listener.exitAgValue(this)
    }
  }
}

// Base context for the 'value' rule; concrete alternatives subclass this
// and are attached via copyFrom.
export class ValueContext extends ParserRuleContext {
  constructor (parent: ParserRuleContext | undefined, invokingState: number) {
    super(parent, invokingState)
  }
  // @Override
  public get ruleIndex (): number { return AgtypeParser.RULE_value }
  public copyFrom (ctx: ValueContext): void {
    super.copyFrom(ctx)
  }
}
// Labeled alternative: value → STRING
export class StringValueContext extends ValueContext {
  public STRING (): TerminalNode { return this.getToken(AgtypeParser.STRING, 0) }
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterStringValue) {
      listener.enterStringValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitStringValue) {
      listener.exitStringValue(this)
    }
  }
}
// Labeled alternative: value → INTEGER
export class IntegerValueContext extends ValueContext {
  public INTEGER (): TerminalNode { return this.getToken(AgtypeParser.INTEGER, 0) }
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterIntegerValue) {
      listener.enterIntegerValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitIntegerValue) {
      listener.exitIntegerValue(this)
    }
  }
}
// Labeled alternative: value → floatLiteral
export class FloatValueContext extends ValueContext {
  public floatLiteral (): FloatLiteralContext {
    return this.getRuleContext(0, FloatLiteralContext)
  }
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterFloatValue) {
      listener.enterFloatValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitFloatValue) {
      listener.exitFloatValue(this)
    }
  }
}
// Labeled alternative: value → 'true'
export class TrueBooleanContext extends ValueContext {
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterTrueBoolean) {
      listener.enterTrueBoolean(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitTrueBoolean) {
      listener.exitTrueBoolean(this)
    }
  }
}
// Labeled alternative: value → 'false'
export class FalseBooleanContext extends ValueContext {
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterFalseBoolean) {
      listener.enterFalseBoolean(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitFalseBoolean) {
      listener.exitFalseBoolean(this)
    }
  }
}
// Labeled alternative: value → 'null'
export class NullValueContext extends ValueContext {
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterNullValue) {
      listener.enterNullValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitNullValue) {
      listener.exitNullValue(this)
    }
  }
}
// Labeled alternative: value → obj
export class ObjectValueContext extends ValueContext {
  public obj (): ObjContext {
    return this.getRuleContext(0, ObjContext)
  }
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterObjectValue) {
      listener.enterObjectValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitObjectValue) {
      listener.exitObjectValue(this)
    }
  }
}
// Labeled alternative: value → array
export class ArrayValueContext extends ValueContext {
  public array (): ArrayContext {
    return this.getRuleContext(0, ArrayContext)
  }
  constructor (ctx: ValueContext) {
    super(ctx.parent, ctx.invokingState)
    this.copyFrom(ctx)
  }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterArrayValue) {
      listener.enterArrayValue(this)
    }
  }
  // @Override
  public exitRule (listener: AgtypeListener): void {
    if (listener.exitArrayValue) {
      listener.exitArrayValue(this)
    }
  }
}

// Parse-tree context for the 'obj' rule; pair() returns all pairs or the
// i-th pair depending on the argument.
export class ObjContext extends ParserRuleContext {
  public pair(): PairContext[];
  public pair(i: number): PairContext;
  public pair (i?: number): PairContext | PairContext[] {
    if (i === undefined) {
      return this.getRuleContexts(PairContext)
    } else {
      return this.getRuleContext(i, PairContext)
    }
  }
  constructor (parent: ParserRuleContext | undefined, invokingState: number) {
    super(parent, invokingState)
  }
  // @Override
  public get ruleIndex (): number { return AgtypeParser.RULE_obj }
  // @Override
  public enterRule (listener: AgtypeListener): void {
    if (listener.enterObj) {
      listener.enterObj(this)
    }
  }
  // @Override
  public exitRule
(listener: AgtypeListener): void { if (listener.exitObj) { listener.exitObj(this) } } } export class PairContext extends ParserRuleContext { public STRING (): TerminalNode { return this.getToken(AgtypeParser.STRING, 0) } public agValue (): AgValueContext { return this.getRuleContext(0, AgValueContext) } constructor (parent: ParserRuleContext | undefined, invokingState: number) { super(parent, invokingState) } // @Override public get ruleIndex (): number { return AgtypeParser.RULE_pair } // @Override public enterRule (listener: AgtypeListener): void { if (listener.enterPair) { listener.enterPair(this) } } // @Override public exitRule (listener: AgtypeListener): void { if (listener.exitPair) { listener.exitPair(this) } } } export class ArrayContext extends ParserRuleContext { public agValue(): AgValueContext[]; public agValue(i: number): AgValueContext; public agValue (i?: number): AgValueContext | AgValueContext[] { if (i === undefined) { return this.getRuleContexts(AgValueContext) } else { return this.getRuleContext(i, AgValueContext) } } constructor (parent: ParserRuleContext | undefined, invokingState: number) { super(parent, invokingState) } // @Override public get ruleIndex (): number { return AgtypeParser.RULE_array } // @Override public enterRule (listener: AgtypeListener): void { if (listener.enterArray) { listener.enterArray(this) } } // @Override public exitRule (listener: AgtypeListener): void { if (listener.exitArray) { listener.exitArray(this) } } } export class TypeAnnotationContext extends ParserRuleContext { public IDENT (): TerminalNode { return this.getToken(AgtypeParser.IDENT, 0) } constructor (parent: ParserRuleContext | undefined, invokingState: number) { super(parent, invokingState) } // @Override public get ruleIndex (): number { return AgtypeParser.RULE_typeAnnotation } // @Override public enterRule (listener: AgtypeListener): void { if (listener.enterTypeAnnotation) { listener.enterTypeAnnotation(this) } } // @Override public exitRule 
(listener: AgtypeListener): void { if (listener.exitTypeAnnotation) { listener.exitTypeAnnotation(this) } } } export class FloatLiteralContext extends ParserRuleContext { public RegularFloat (): TerminalNode | undefined { return this.tryGetToken(AgtypeParser.RegularFloat, 0) } public ExponentFloat (): TerminalNode | undefined { return this.tryGetToken(AgtypeParser.ExponentFloat, 0) } constructor (parent: ParserRuleContext | undefined, invokingState: number) { super(parent, invokingState) } // @Override public get ruleIndex (): number { return AgtypeParser.RULE_floatLiteral } // @Override public enterRule (listener: AgtypeListener): void { if (listener.enterFloatLiteral) { listener.enterFloatLiteral(this) } } // @Override public exitRule (listener: AgtypeListener): void { if (listener.exitFloatLiteral) { listener.exitFloatLiteral(this) } } }
the_stack
import { JSToken, stringToTokens, tokenToKeywordMap } from "../../javascript"; import { TokenReader, IRenderSettings, makeRenderSettings, IRenderable, defaultRenderSettings } from "../../../helpers"; import { ValueTypes, Value, Type, nullValue } from "./value"; import { ArgumentList } from "../constructs/function"; import { ObjectLiteral } from "./object"; import { FunctionDeclaration } from "../constructs/function"; import { ArrayLiteral } from "./array"; import { TemplateLiteral } from "./template-literal"; import { RegExpLiteral } from "./regex"; import { ClassDeclaration } from "../constructs/class"; import { TypeSignature } from "../types/type-signature"; import { AsExpression } from "../types/statements"; import { Group } from "./group"; /** * All operations: */ export enum Operation { Assign, Index, Group, Initialize, Call, StrictEqual, StrictNotEqual, Equal, NotEqual, Add, Subtract, Multiply, Divide, Modulo, Exponent, GreaterThan, LessThan, LessThanEqual, GreaterThanEqual, InstanceOf, Of, In, Spread, UnaryPlus, UnaryNegation, BitShiftLeft, BitShiftRight, BitUShiftRight, BitNot, BitAnd, BitXOr, BitOr, LogNot, LogAnd, LogOr, AddAssign, SubtractAssign, MultiplyAssign, DivideAssign, ModuloAssign, ExponentAssign, BitShiftLeftAssign, BitShiftRightAssign, BitUShiftRightAssign, BitAndAssign, BitXOrAssign, BitOrAssign, PrefixIncrement, PostfixIncrement, PostfixDecrement, PrefixDecrement, Await, TypeOf, Void, Delete, Yield, DelegatedYield, NullCoalescing, OptionalChain, OptionalCall, OptionalIndex, Ternary, } /** * A map of operations that take part *between* two values */ const binaryOperators = new Map([ [JSToken.Plus, Operation.Add], [JSToken.PlusAssign, Operation.AddAssign], [JSToken.Minus, Operation.Subtract], [JSToken.SubtractAssign, Operation.SubtractAssign], [JSToken.Multiply, Operation.Multiply], [JSToken.MultiplyAssign, Operation.MultiplyAssign], [JSToken.Divide, Operation.Divide], [JSToken.DivideAssign, Operation.DivideAssign], [JSToken.Exponent, 
Operation.Exponent], [JSToken.ExponentAssign, Operation.ExponentAssign], [JSToken.Percent, Operation.Modulo], [JSToken.Equal, Operation.Equal], [JSToken.NotEqual, Operation.NotEqual], [JSToken.StrictEqual, Operation.StrictEqual], [JSToken.StrictNotEqual, Operation.StrictNotEqual], [JSToken.LogicalOr, Operation.LogOr], [JSToken.LogicalAnd, Operation.LogAnd], [JSToken.OpenAngle, Operation.LessThan], [JSToken.LessThanEqual, Operation.LessThanEqual], [JSToken.CloseAngle, Operation.GreaterThan], [JSToken.GreaterThanEqual, Operation.GreaterThanEqual], [JSToken.In, Operation.In], [JSToken.InstanceOf, Operation.InstanceOf], [JSToken.Assign, Operation.Assign], [JSToken.Exponent, Operation.Exponent], [JSToken.BitwiseAnd, Operation.BitAnd], [JSToken.BitwiseOr, Operation.BitOr], [JSToken.BitwiseXor, Operation.BitXOr], [JSToken.BitwiseShiftLeft, Operation.BitShiftLeft], [JSToken.BitwiseShiftRight, Operation.BitShiftRight], [JSToken.UnaryBitwiseShiftRight, Operation.BitUShiftRight], [JSToken.NullishCoalescing, Operation.NullCoalescing], ]); // Create a map to make binary token lookup easy TODO why const binaryOperatorToToken = new Map(Array.from(binaryOperators).map(([t, o]) => [o, t])); // These infix operators require whitespace around when rendering to prevent clashing const nonSymbolBinary = new Set([Operation.In, Operation.InstanceOf]); // These are operations that can be chained const valueThings = new Set([JSToken.OpenBracket, JSToken.OpenSquare, JSToken.Dot, JSToken.Increment, JSToken.Decrement, JSToken.OptionalChain]); const operators = new Map([ [JSToken.Yield, Operation.Yield], [JSToken.DelegatedYield, Operation.DelegatedYield], [JSToken.OpenBracket, Operation.Call], [JSToken.OpenSquare, Operation.Index], [JSToken.Spread, Operation.Spread], [JSToken.New, Operation.Initialize], [JSToken.TypeOf, Operation.TypeOf], [JSToken.LogicalNot, Operation.LogNot], [JSToken.BitwiseNot, Operation.BitNot], [JSToken.Await, Operation.Await], [JSToken.Void, Operation.Void], 
[JSToken.Delete, Operation.Delete], [JSToken.Plus, Operation.UnaryPlus], [JSToken.Minus, Operation.UnaryNegation], [JSToken.OptionalChain, Operation.OptionalChain], ]); const otherOperators = new Set(operators.values()); // TODO temp Added for rendering otherOperators.add(Operation.OptionalChain); otherOperators.add(Operation.OptionalCall); otherOperators.add(Operation.OptionalIndex); otherOperators.add(Operation.PrefixIncrement); otherOperators.add(Operation.PrefixDecrement); otherOperators.add(Operation.PostfixIncrement); otherOperators.add(Operation.PostfixDecrement); /* From: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_Precedence left out: Dot, constructor without (), comma */ const operationPrecedence = new Map([ [Operation.Group, 21], [Operation.Index, 20], [Operation.Initialize, 20], [Operation.Call, 20], [Operation.PostfixIncrement, 18], [Operation.PostfixDecrement, 18], [Operation.LogNot, 17], [Operation.BitNot, 17], [Operation.UnaryPlus, 17], [Operation.UnaryNegation, 17], [Operation.PrefixIncrement, 17], [Operation.PrefixDecrement, 17], [Operation.TypeOf, 17], [Operation.Await, 17], [Operation.Delete, 17], [Operation.Void, 17], [Operation.Exponent, 16], [Operation.Multiply, 15], [Operation.Divide, 15], [Operation.Modulo, 15], [Operation.Add, 14], [Operation.Subtract, 14], [Operation.BitShiftLeft, 13], [Operation.BitShiftRight, 13], [Operation.BitUShiftRight, 13], [Operation.LessThan, 12], [Operation.LessThanEqual, 12], [Operation.GreaterThan, 12], [Operation.GreaterThanEqual, 12], [Operation.In, 12], [Operation.InstanceOf, 12], [Operation.Equal, 11], [Operation.NotEqual, 11], [Operation.StrictEqual, 11], [Operation.StrictNotEqual, 11], [Operation.BitAnd, 10], [Operation.BitXOr, 9], [Operation.BitOr, 8], [Operation.NullCoalescing, 7], [Operation.LogAnd, 6], [Operation.LogOr, 5], [Operation.Ternary, 4], [Operation.Assign, 3], [Operation.AddAssign, 3], [Operation.SubtractAssign, 3], [Operation.ExponentAssign, 3], 
[Operation.MultiplyAssign, 3], [Operation.DivideAssign, 3], [Operation.ModuloAssign, 3], [Operation.BitShiftLeftAssign, 3], [Operation.BitShiftRightAssign, 3], [Operation.BitUShiftRightAssign, 3], [Operation.BitAndAssign, 3], [Operation.BitXOrAssign, 3], [Operation.BitOrAssign, 3], [Operation.Yield, 2], [Operation.DelegatedYield, 2], ]); /** * Represents a expression with a LHS, operation (and a possible RHS) * Note than the LHS is not always left hand side visually. E.g for `!x` -> lhs = x */ export class Expression implements IRenderable { public lhs: ValueTypes; public operation: Operation; public rhs: ValueTypes | ArgumentList | null; constructor({ lhs, operation, rhs = null }: { lhs: ValueTypes, operation: Operation, rhs?: ValueTypes | ArgumentList | null }) { this.lhs = lhs; this.operation = operation; this.rhs = rhs; if ( operation === Operation.Call || operation === Operation.Initialize || operation === Operation.OptionalCall ) { if (rhs && !(rhs instanceof ArgumentList)) { this.rhs = new ArgumentList([rhs]); } } } render(partialSettings: Partial<IRenderSettings> = {}): string { const settings = makeRenderSettings(partialSettings); if (otherOperators.has(this.operation) || this.operation === Operation.Ternary) { switch (this.operation) { case Operation.Call: return this.lhs.render(settings) + (this.rhs?.render?.(settings) ?? "()"); case Operation.Index: return `${this.lhs.render(settings)}[${this.rhs!.render(settings)}]`; case Operation.TypeOf: return `typeof ${this.lhs.render(settings)}`; case Operation.Initialize: return `new ${this.lhs.render(settings)}${this.rhs?.render?.(settings) ?? "()"}`; case Operation.Ternary: let acc = this.lhs.render(settings); if (!settings.minify) acc += " "; acc += "?" const lhs = (this.rhs as ArgumentList).args[0].render(settings); acc += settings.minify ? 
`${lhs}:` : ` ${lhs} : `; acc += (this.rhs as ArgumentList).args[1].render(settings); return acc; case Operation.Await: return `await ${this.lhs.render(settings)}`; case Operation.Delete: return `delete ${this.lhs.render(settings)}`; case Operation.Void: return `void ${this.lhs.render(settings)}`; case Operation.LogNot: return "!" + this.lhs.render(settings); case Operation.PrefixIncrement: return "++" + this.lhs.render(settings); case Operation.PrefixDecrement: return "--" + this.lhs.render(settings); case Operation.PostfixIncrement: return this.lhs.render(settings) + "++"; case Operation.PostfixDecrement: return this.lhs.render(settings) + "--"; case Operation.UnaryPlus: return "+" + this.lhs.render(settings); case Operation.UnaryNegation: return "-" + this.lhs.render(settings); case Operation.Spread: return "..." + this.lhs.render(settings); case Operation.OptionalChain: case Operation.OptionalCall: return this.lhs.render(settings) + "?." + (this.rhs?.render?.(settings) ?? "()"); case Operation.OptionalIndex: return `${this.lhs.render(settings)}?.[${this.rhs!.render(settings)}]`; case Operation.Yield: return `yield ${this.lhs.render(settings)}`; case Operation.DelegatedYield: return `yield* ${this.lhs.render(settings)}`; default: throw Error(`Cannot render operation: ${Operation[this.operation]}`); } } else if (binaryOperatorToToken.has(this.operation)) { const token = binaryOperatorToToken.get(this.operation); const value = tokenToKeywordMap.get(token!); if (nonSymbolBinary.has(this.operation) || !settings.minify) { return `${this.lhs.render(settings)} ${value} ${this.rhs!.render(settings)}`; } else { return `${this.lhs.render(settings)}${value}${this.rhs!.render(settings)}`; } } else { throw Error(`Cannot render operation: ${Operation[this.operation]}`); } } static fromString(string: string) { const reader = stringToTokens(string); const expression = Expression.fromTokens(reader); reader.expect(JSToken.EOF); return expression; } static fromTokens(reader: 
TokenReader<JSToken>, precedence = 0): ValueTypes { let value: ValueTypes | null = null; switch (reader.current.type) { // Value types: case JSToken.NumberLiteral: const number = reader.current.value!; if (number.endsWith("n")) { value = new Value(Type.bigint, number); } else { value = new Value(Type.number, number); } reader.move(); break; case JSToken.Identifier: if (reader.peek()?.type === JSToken.ArrowFunction) { return FunctionDeclaration.fromTokens(reader); } if (reader.peek()?.type === JSToken.TemplateLiteralStart) { value = TemplateLiteral.fromTokens(reader) } else { value = new VariableReference(reader.current.value!); reader.move(); } break; case JSToken.True: case JSToken.False: value = new Value(Type.boolean, reader.current.type === JSToken.True ? "true" : "false"); reader.move(); break; case JSToken.StringLiteral: value = new Value(Type.string, reader.current.value!); reader.move(); break; case JSToken.OpenSquare: value = ArrayLiteral.fromTokens(reader); break; case JSToken.OpenBracket: // Tests for what is after closing bracket () * <- let bracketCount = 0; let isArrowFunction: boolean = false; try { const afterBrackets = reader.run((tokenType) => { if (tokenType === JSToken.OpenBracket) bracketCount++; else if (tokenType === JSToken.CloseBracket) bracketCount--; if (bracketCount === 0) return true; else return false; }, true); isArrowFunction = afterBrackets[0] === JSToken.ArrowFunction; } catch (error) { // TODO temp ??? 
reader.throwError("Unmatched closing brackets"); } if (isArrowFunction) { return FunctionDeclaration.fromTokens(reader); } else { reader.move(); const group = new Group(Expression.fromTokens(reader)); reader.expect(JSToken.CloseBracket); reader.move(); value = group; } break; case JSToken.OpenCurly: return ObjectLiteral.fromTokens(reader); case JSToken.Null: reader.move(); return nullValue; case JSToken.Undefined: reader.move(); return new Value(Type.undefined); case JSToken.Async: case JSToken.Function: return FunctionDeclaration.fromTokens(reader); case JSToken.RegexLiteral: value = RegExpLiteral.fromTokens(reader); break; case JSToken.TemplateLiteralStart: value = TemplateLiteral.fromTokens(reader); break; case JSToken.Class: return ClassDeclaration.fromTokens(reader, { isExpression: true }); // Unary operators / prefixes: case JSToken.Increment: case JSToken.Decrement: const operationType = reader.current.type === JSToken.Increment ? Operation.PrefixIncrement : Operation.PrefixDecrement; reader.move(); value = new Expression({ lhs: Expression.fromTokens(reader, operationPrecedence.get(operationType)), operation: operationType }); break; case JSToken.Spread: reader.move(); return new Expression({ lhs: Expression.fromTokens(reader), operation: Operation.Spread }); case JSToken.Yield: reader.move(); value = new Expression({ lhs: Expression.fromTokens(reader, operationPrecedence.get(Operation.Yield)), operation: Operation.Yield }); break; case JSToken.DelegatedYield: reader.move(); value = new Expression({ lhs: Expression.fromTokens(reader, operationPrecedence.get(Operation.DelegatedYield)), operation: Operation.DelegatedYield }); break; case JSToken.Await: reader.move(); return new Expression({ lhs: Expression.fromTokens(reader, operationPrecedence.get(Operation.Await)), operation: Operation.Await }); case JSToken.LogicalNot: case JSToken.BitwiseNot: case JSToken.Plus: case JSToken.Minus: case JSToken.Await: case JSToken.Void: case JSToken.Delete: case 
JSToken.TypeOf: const operator = operators.get(reader.current.type)!; reader.move(); value = new Expression({ lhs: Expression.fromTokens(reader, operationPrecedence.get(operator)), operation: operator }); break; case JSToken.New: reader.move(); const constructor_ = Expression.fromTokens(reader, operationPrecedence.get(Operation.Initialize)); let args: ArgumentList; if (reader.current.type as JSToken === JSToken.OpenBracket) { args = ArgumentList.fromTokens(reader); } else { args = new ArgumentList; } value = new Expression({ lhs: constructor_, operation: Operation.Initialize, rhs: args }); break; // Other case JSToken.EOF: reader.throwExpect("Expected expression"); default: try { const tokenName = tokenAsIdent(reader.current.type); value = new VariableReference(tokenName); reader.move(); break; } catch { reader.throwExpect("Expected value"); } } while (valueThings.has(reader.current.type)) { switch (reader.current.type as JSToken) { // Chain case JSToken.Dot: { reader.move(); const prop = reader.current.value || tokenAsIdent(reader.current.type); value = new VariableReference(prop, value); reader.move(); break; } // Optional Chain case JSToken.OptionalChain: { reader.move(); if (reader.current.type === JSToken.OpenBracket) { const args = ArgumentList.fromTokens(reader); value = new Expression({ lhs: value, operation: Operation.OptionalCall, rhs: args }); } else if (reader.current.type === JSToken.OpenSquare) { reader.move(); const expr = Expression.fromTokens(reader); reader.move(); value = new Expression({ lhs: value, operation: Operation.OptionalIndex, rhs: expr }); } else { const prop = reader.current.value || tokenAsIdent(reader.current.type); reader.move(); value = new Expression({ lhs: value, operation: Operation.OptionalChain, rhs: new VariableReference(prop) }); } break; } // Call case JSToken.OpenBracket: if (operationPrecedence.get(Operation.Call)! 
<= precedence) { return value; } const args = ArgumentList.fromTokens(reader); value = new Expression({ lhs: value, operation: Operation.Call, rhs: args }); break; // Index case JSToken.OpenSquare: if (operationPrecedence.get(Operation.Call)! <= precedence) { return value; } reader.move(); const indexer = Expression.fromTokens(reader); reader.expectNext(JSToken.CloseSquare); value = new Expression({ lhs: value, operation: Operation.Index, rhs: indexer }); break; // Postfix increment & decrement case JSToken.Increment: case JSToken.Decrement: const operation = reader.current.type === JSToken.Increment ? Operation.PostfixIncrement : Operation.PostfixDecrement; reader.move(); value = new Expression({ lhs: value, operation }); break; } } while (reader.current) { if (binaryOperators.has(reader.current.type)) { const operator = binaryOperators.get(reader.current.type)!; const newPrecedence = operationPrecedence.get(operator)!; if (newPrecedence <= precedence) { break; } reader.move(); value = new Expression({ lhs: value, operation: operator, rhs: Expression.fromTokens(reader, newPrecedence) }); // @ts-ignore } else if (reader.current.type === JSToken.QuestionMark) { if (operationPrecedence.get(Operation.Ternary)! < precedence) { break; } reader.move(); const lhs = Expression.fromTokens(reader); reader.expectNext(JSToken.Colon); const rhs = Expression.fromTokens(reader); return new Expression({ lhs: value, operation: Operation.Ternary, rhs: new ArgumentList([lhs, rhs]) }); } else if (reader.current.type === JSToken.As) { reader.move(); const typeArg = TypeSignature.fromTokens(reader); // TODO does not work for `x as string + ""`. Need to incorporate into binary operators with a special case. 
Not sure of the precedence of the as "operator" return new AsExpression(value, typeArg); } else { break; } } return value; } } // TODO use the reverse tokens map from the tokenizer and complete list export function tokenAsIdent(token: JSToken) { switch (token) { case JSToken.Get: return "get"; case JSToken.Set: return "set"; case JSToken.Void: return "void"; case JSToken.Import: return "import"; case JSToken.This: return "this"; case JSToken.Super: return "super"; case JSToken.Default: return "default"; case JSToken.Class: return "class"; case JSToken.As: return "as"; case JSToken.From: return "from"; case JSToken.Null: return "null"; case JSToken.Type: return "type"; case JSToken.Do: return "do"; case JSToken.Undefined: return "undefined"; case JSToken.Switch: return "switch"; case JSToken.Private: return "private"; case JSToken.True: return "true"; case JSToken.False: return "false"; case JSToken.Type: return "type"; case JSToken.TypeOf: return "typeof"; case JSToken.Try: return "try"; case JSToken.Catch: return "catch"; case JSToken.Delete: return "delete"; default: throw Error(`No conversion for token ${JSToken[token]}`); } } /** * Class that represents a variable reference */ export class VariableReference implements IRenderable { parent?: ValueTypes; name: string; constructor(name: string, parent?: ValueTypes) { this.name = name; if (parent) this.parent = parent; } render(settings: IRenderSettings = defaultRenderSettings): string { let acc = this.name; if (this.parent) { acc = this.parent.render(settings) + "." 
+ acc; } return acc; } /** * Returns the chain of a variable * @example this.data.member -> ["this", "data", "member"] */ toChain(): string[] { const series = [this.name]; let parent = this.parent; while (parent) { if (!(parent instanceof VariableReference)) break; // TODO not sure about this series.unshift(parent.name); // Temp prevents recursion if (parent === parent.parent) throw Error(); parent = parent.parent; } return series; } /** * Returns whether two variable references are equal * @param fuzzy will return true if partial tree match, etc x.y === x.y.z * TODO refactor to not use .toChain() */ isEqual(variable: VariableReference, fuzzy = false): boolean { // If references equal: if (this === variable) return true; // Else test by equating value let variable1chain = this.toChain(), variable2chain = variable.toChain(); if (fuzzy) { const minLength = Math.min(variable1chain.length, variable2chain.length); variable1chain.length = minLength; variable2chain.length = minLength; } return variable1chain.length === variable2chain.length && variable1chain.every((v, i) => v === variable2chain[i]); } /** * Returns left most parent / value variable exists under * Will return self if no parent * @example `a.b.c.d.e` -> `a` */ get tail(): ValueTypes { let cur: ValueTypes = this; while (cur instanceof VariableReference && cur.parent) { cur = cur.parent; } return cur; } static fromTokens(reader: TokenReader<JSToken>) { reader.expect(JSToken.Identifier); // TODO let variable = new VariableReference(reader.current.value!); reader.move(); while (reader.current.type === JSToken.Dot) { reader.expect(JSToken.Identifier) variable = new VariableReference(reader.current.value!, variable); reader.move(2); } return variable; } /** * Helper method for generating a reference to a nested variable * @param items * @example ["this", "data", "member"] -> {name: "member", parent: {name: "data", parent: {...}}} */ static fromChain(...items: Array<string | number | ValueTypes>): ValueTypes { let 
head: ValueTypes; if (typeof items[0] === "number") { throw Error("First arg to VariableReference.FromChain must be string"); } else if (typeof items[0] === "string") { head = new VariableReference(items[0] as string); } else { head = items[0]; } // Iterator through items appending forming linked list for (let i = 1; i < items.length; i++) { const currentProp = items[i]; if (typeof currentProp === "number") { head = new Expression({ lhs: head, operation: Operation.Index, rhs: new Value(Type.number, currentProp) }); } else if (typeof currentProp === "string") { head = new VariableReference(currentProp, head); } else if (currentProp instanceof VariableReference && currentProp.tail instanceof VariableReference) { currentProp.tail.parent = head; head = currentProp; } else { throw Error("Cannot use prop in fromChain"); } } return head; } static fromString(string: string): VariableReference { const reader = stringToTokens(string); const variable = VariableReference.fromTokens(reader); reader.expect(JSToken.EOF); return variable; } }
the_stack
import type { AccountLike, Account, Currency, CryptoCurrency, TokenCurrency, } from "../../types"; import type { CounterValuesState } from "../../countervalues/types"; import { calculate, calculateMany } from "../../countervalues/logic"; import { flattenAccounts, getAccountCurrency, getAccountHistoryBalances, } from "../../account"; import { getEnv } from "../../env"; import type { BalanceHistory, PortfolioRange, BalanceHistoryWithCountervalue, AccountPortfolio, Portfolio, CurrencyPortfolio, AssetsDistribution, ValueChange, } from "./types"; import { getPortfolioRangeConfig, getDates } from "./range"; import { defaultAssetsDistribution } from "../"; import type { AssetsDistributionOpts } from "../"; export function getPortfolioCount( accounts: AccountLike[], range: PortfolioRange ): number { const conf = getPortfolioRangeConfig(range); if (typeof conf.count === "number") return conf.count; if (!accounts.length) return 0; let oldestDate = accounts[0].creationDate; for (let i = 1; i < accounts.length; i++) { const d = accounts[i].creationDate; if (d < oldestDate) { oldestDate = d; } } return getPortfolioCountByDate(oldestDate, range); } export function getPortfolioCountByDate( start: Date, range: PortfolioRange ): number { const conf = getPortfolioRangeConfig(range); const now = Date.now(); const count = Math.ceil((now - (start as any)) / conf.increment) + 2; const defaultYearCount = getPortfolioRangeConfig("year").count ?? 0; // just for type casting return count < defaultYearCount ? 
defaultYearCount : count; } export function getBalanceHistory( account: AccountLike, range: PortfolioRange, count: number ): BalanceHistory { const conf = getPortfolioRangeConfig(range); const balances = getAccountHistoryBalances(account, conf.granularityId); const history: { date: Date; value: number }[] = []; const now = new Date(); history.unshift({ date: now, value: account.balance.toNumber(), }); const t = new Date((conf.startOf(now) as any) - 1).getTime(); // end of yesterday for (let i = 0; i < count - 1; i++) { history.unshift({ date: new Date(t - conf.increment * i), value: balances[balances.length - 1 - i] ?? 0, }); } return history; } export function getBalanceHistoryWithCountervalue( account: AccountLike, range: PortfolioRange, count: number, cvState: CounterValuesState, cvCurrency: Currency ): AccountPortfolio { const balanceHistory = getBalanceHistory(account, range, count); const currency = getAccountCurrency(account); const counterValues = calculateMany(cvState, balanceHistory, { from: currency, to: cvCurrency, }); let countervalueAvailable = false; const history: { date: any; value: any; countervalue: any }[] = []; for (let i = 0; i < balanceHistory.length; i++) { const { date, value } = balanceHistory[i]; const countervalue = counterValues[i]; if (countervalue) countervalueAvailable = true; history.push({ date, value, countervalue, }); } function calcChanges(h: BalanceHistoryWithCountervalue) { const from = h[0]; const to = h[h.length - 1]; return { countervalueReceiveSum: 0, // not available here countervalueSendSum: 0, cryptoChange: { value: to.value - from.value, percentage: null, }, countervalueChange: { value: (to.countervalue || 0) - (from.countervalue || 0), percentage: meaningfulPercentage( (to.countervalue || 0) - (from.countervalue || 0), from.countervalue ), }, }; } return { history, countervalueAvailable, ...calcChanges(history), }; } function meaningfulPercentage( deltaChange: number | null | undefined, balanceDivider: number | null | 
undefined, percentageHighThreshold = 100000 ): number | null | undefined { if (deltaChange && balanceDivider && balanceDivider !== 0) { const percent = deltaChange / balanceDivider; if (percent < percentageHighThreshold) { return percent; } } } type Available = { account: AccountLike; history: BalanceHistoryWithCountervalue; change: ValueChange; countervalueReceiveSum: number; countervalueSendSum: number; }; /** * calculate the total balance history for all accounts in a reference fiat unit * and using a CalculateCounterValue function (see countervalue helper) * NB the last item of the array is actually the current total balance. * @memberof account */ export function getPortfolio( topAccounts: Account[], range: PortfolioRange, cvState: CounterValuesState, cvCurrency: Currency ): Portfolio { const accounts = flattenAccounts(topAccounts); const count = getPortfolioCount(accounts, range); const { availables, unavailableAccounts } = accounts.reduce<{ availables: Available[]; unavailableAccounts: AccountLike[]; }>( (prev, account) => { const p = getBalanceHistoryWithCountervalue( account, range, count, cvState, cvCurrency ); return p.countervalueAvailable ? { ...prev, availables: [ ...prev.availables, { account, history: p.history, change: p.countervalueChange, countervalueReceiveSum: p.countervalueReceiveSum, countervalueSendSum: p.countervalueSendSum, }, ], } : { ...prev, unavailableAccounts: [...prev.unavailableAccounts, account], }; }, { availables: [], unavailableAccounts: [], } ); const histories = availables.map((a) => a.history); const balanceHistory = getDates(range, count).map((date, i) => ({ date, value: histories.reduce((sum, h) => sum + (h[i]?.countervalue ?? 0), 0), })); const [countervalueChangeValue, countervalueReceiveSum, countervalueSendSum] = availables.reduce( (prev, a) => [ prev[0] + a.change.value, // TODO Portfolio: it'll always be 0, no? 
🤔 prev[1] + a.countervalueReceiveSum, prev[2] + a.countervalueSendSum, ], [0, 0, 0] ); // in case there were no receive, we just track the market change // weighted by the current balances const balanceDivider = getEnv("EXPERIMENTAL_ROI_CALCULATION") ? countervalueReceiveSum === 0 ? balanceHistory[0].value + countervalueSendSum : countervalueReceiveSum : balanceHistory[0].value; return { balanceHistory, balanceAvailable: accounts.length === 0 || availables.length > 0, availableAccounts: availables.map((a) => a.account), unavailableCurrencies: [ ...new Set(unavailableAccounts.map(getAccountCurrency)), ] as any[], accounts, range, histories, countervalueReceiveSum, countervalueSendSum, countervalueChange: { percentage: meaningfulPercentage(countervalueChangeValue, balanceDivider), value: countervalueChangeValue, }, }; } export function getCurrencyPortfolio( accounts: AccountLike[], range: PortfolioRange, cvState: CounterValuesState, cvCurrency: Currency ): CurrencyPortfolio { const count = getPortfolioCount(accounts, range); const portfolios = accounts.map((a) => getBalanceHistoryWithCountervalue(a, range, count, cvState, cvCurrency) ); let countervalueAvailable = false; const histories = portfolios.map((p) => { if (p.countervalueAvailable) { countervalueAvailable = true; } return p.history; }); const history = getDates(range, count).map((date, i) => ({ date, value: histories.reduce((sum, h) => sum + h[i]?.value, 0), countervalue: histories.reduce( (sum, h) => sum + (h[i]?.countervalue ?? 
0), 0 ), })); const from = history[0]; const to = history[history.length - 1]; const cryptoChange = { value: to.value - from.value, percentage: null, }; const countervalueChange = { value: (to.countervalue || 0) - (from.countervalue || 0), percentage: meaningfulPercentage( (to.countervalue || 0) - (from.countervalue || 0), from.countervalue ), }; return { history, countervalueAvailable, accounts, range, histories, cryptoChange, countervalueChange, }; } export function getAssetsDistribution( topAccounts: Account[], cvState: CounterValuesState, cvCurrency: Currency, opts?: AssetsDistributionOpts ): AssetsDistribution { const { minShowFirst, maxShowFirst, showFirstThreshold } = { ...defaultAssetsDistribution, ...opts, }; const idBalances: Record<string, number> = {}; const idCurrencies: Record<string, CryptoCurrency | TokenCurrency> = {}; const accounts = flattenAccounts(topAccounts); for (let i = 0; i < accounts.length; i++) { const account = accounts[i]; const cur = getAccountCurrency(account); const id = cur.id; if (account.balance.isGreaterThan(0)) { idCurrencies[id] = cur; idBalances[id] = (idBalances[id] ?? 0) + account.balance.toNumber(); } } const { sum, idCountervalues } = Object.entries(idBalances).reduce( (prev, [id, value]) => { const cv = calculate(cvState, { value: Number(value), // just for casting mixed type. from: idCurrencies[id], to: cvCurrency, }); return cv ? { sum: prev.sum + cv, idCountervalues: { ...prev.idCountervalues, [id]: cv }, } : prev; }, { sum: 0, idCountervalues: {}, } ); const idCurrenciesKeys = Object.keys(idCurrencies); if (idCurrenciesKeys.length === 0) { return assetsDistributionNotAvailable; } const isAvailable = sum !== 0; const list = idCurrenciesKeys .map((id) => { const currency = idCurrencies[id]; const amount = idBalances[id]; const countervalue = idCountervalues[id] ?? 0; return { currency, countervalue, amount, distribution: isAvailable ? 
countervalue / sum : 0, }; }) .sort((a, b) => { const diff = b.countervalue - a.countervalue; return diff === 0 ? a.currency.name.localeCompare(b.currency.name) : diff; }); let i; let acc = 0; for (i = 0; i < maxShowFirst && i < list.length; i++) { if (acc > showFirstThreshold) { break; } acc += list[i].distribution; } const showFirst = Math.max(minShowFirst, i); const data = { isAvailable, list, showFirst, sum, }; return data; } const assetsDistributionNotAvailable: AssetsDistribution = { isAvailable: false, list: [], showFirst: 0, sum: 0, };
the_stack
import * as os from 'os';
import * as util from 'util';
import * as path from 'path';

// Thirdparty
import { UX } from '@salesforce/command';
import { Messages, fs } from '@salesforce/core';
import { has } from 'lodash';
import { get, getObject, getString } from '@salesforce/ts-types';
import { set, isEmpty } from '@salesforce/kit';

import { Config } from '../../lib/core/configApi';
import OrgPrefRegistry = require('./orgPrefRegistry');

const js2xmlparser = require('js2xmlparser');

Messages.importMessagesDirectory(__dirname);
const orgSettingsMessages: Messages = Messages.loadMessages('salesforce-alm', 'org_settings');

/** This is the contents of the package.xml that we will use when we deploy settings to a scratch org. */
const _packageFileContents = `<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
%s
    <version>%s</version>
</Package>`;

/** This is the contents for a single section for a particular metadata type in the package.xml. */
const _packageFileTypeSection = `
    <types>
%s
        <name>%s</name>
    </types>
`;

/**
 * Helper class for dealing with the settings that are defined in a scratch definition file. This class knows how to extract the
 * settings from the definition, how to expand them into a MD directory and how to generate a package.xml.
 */
class SettingsGenerator {
  // Map of settings-type name -> settings values, as extracted from the scratch def.
  private settingData: object;
  // Map of object name -> object-level settings (sharingModel, defaultRecordType, ...).
  private objectSettingsData: object;
  private currentApiVersion = new Config().getApiVersion();

  /** extract the settings from the scratch def file, if they are present. */
  async extract(scratchDef, apiVersion?): Promise<void> {
    if (util.isNullOrUndefined(apiVersion)) {
      apiVersion = this.currentApiVersion;
    }
    // From v47 on, legacy orgPreferenceSettings must be migrated to their new settings objects.
    if (apiVersion >= 47.0 && this.orgPreferenceSettingsMigrationRequired(scratchDef)) {
      await this.extractAndMigrateSettings(scratchDef);
    } else {
      this.settingData = getObject(scratchDef, 'settings');
      this.objectSettingsData = getObject(scratchDef, 'objectSettings');
    }

    // TODO, this is where we will validate the settings.
    // See W-5068155
    // if (this.hasSettings()) { }
  }

  /** True if we are currently tracking setting or object setting data. */
  hasSettings() {
    return !(isEmpty(this.settingData) && isEmpty(this.objectSettingsData));
  }

  /** Check to see if the scratchDef contains orgPreferenceSettings
   * orgPreferenceSettings are no longer supported after api version 46.0
   */
  orgPreferenceSettingsMigrationRequired(scratchDef) {
    return !(
      util.isNullOrUndefined(scratchDef) ||
      util.isNullOrUndefined(scratchDef.settings) ||
      util.isNullOrUndefined(scratchDef.settings.orgPreferenceSettings)
    );
  }

  /** This will copy all of the settings in the scratchOrgInfo orgPreferences mapping into the settings structure.
   * It will also spit out a warning about the pending deprecation of the orgPreferences structure.
   * This returns a failure message in the promise upon critical error for api versions after 46.0.
   * For api versions less than 47.0 it will return a warning.
   */
  async migrate(scratchDef, apiVersion): Promise<void> {
    // Make sure we have old style preferences
    if (!scratchDef.orgPreferences) {
      return;
    }
    if (util.isNullOrUndefined(apiVersion)) {
      apiVersion = this.currentApiVersion;
    }

    // First, let's map the old style tooling preferences into MD-API preferences
    this.settingData = {};
    const ux = await UX.create();

    function lhccmdt(mdt) {
      // lowercase head camel case metadata type
      return util.isNullOrUndefined(mdt) ? mdt : mdt.substring(0, 1).toLowerCase() + mdt.substring(1);
    }

    // Routes one legacy preference into data[<settings api>][<md-api name>] = prefVal,
    // warning (and skipping) when the registry does not know the preference.
    function storePrefs(data, pref, prefVal) {
      const orgPrefApi = lhccmdt(OrgPrefRegistry.whichApi(pref, apiVersion));
      if (util.isNullOrUndefined(orgPrefApi)) {
        ux.warn(`Unsupported org preference: ${pref}, ignored`);
        return;
      }
      const mdApiName = lhccmdt(OrgPrefRegistry.forMdApi(pref, apiVersion));

      if (!has(data, orgPrefApi)) {
        set(data, orgPrefApi, {});
      }
      const apiOrgPrefs: object = getObject(data, orgPrefApi);
      set(apiOrgPrefs, mdApiName, prefVal);
    }

    if (scratchDef.orgPreferences.enabled) {
      scratchDef.orgPreferences.enabled.forEach((pref) => {
        storePrefs(this.settingData, pref, true);
      });
    }
    if (scratchDef.orgPreferences.disabled) {
      scratchDef.orgPreferences.disabled.forEach((pref) => {
        storePrefs(this.settingData, pref, false);
      });
    }
    // It would be nice if cli.ux.styledJSON could return a colorized JSON string instead of logging to stdout.
    const message = orgSettingsMessages.getMessage(
      apiVersion >= 47.0 ? 'deprecatedPrefFormat' : 'deprecatedPrefFormatLegacy',
      [
        JSON.stringify({ orgPreferences: scratchDef.orgPreferences }, null, 4),
        JSON.stringify({ settings: this.settingData }, null, 4),
      ]
    );
    // v47+: legacy format is a hard error; older versions only warn.
    if (apiVersion >= 47.0) {
      throw new Error(message);
    } else {
      ux.warn(message);
    }
    // No longer need these
    delete scratchDef.orgPreferences;
  }

  /** This method converts all orgPreferenceSettings preferences into their respective
   * org settings objects.
   */
  async extractAndMigrateSettings(scratchDef): Promise<void> {
    // Snapshot of the original settings, used in the migration warning message below.
    const oldScratchDef = JSON.stringify({ settings: scratchDef.settings }, null, 4);
    // Make sure we have old style preferences
    if (!this.orgPreferenceSettingsMigrationRequired(scratchDef)) {
      this.settingData = getObject(scratchDef, 'settings');
      return;
    }
    // First, let's map the old style tooling preferences into MD-API preferences
    this.settingData = {};
    const ux = await UX.create();

    // Returns true when the preference was moved OUT of orgPreferenceSettings
    // (i.e. an actual migration happened), false when skipped or left in place.
    function storePrefs(data, pref, prefVal): boolean {
      let mdApiName = OrgPrefRegistry.newPrefNameForOrgSettingsMigration(pref);
      if (util.isNullOrUndefined(mdApiName)) {
        mdApiName = pref;
      }
      const orgPrefApi = OrgPrefRegistry.whichApiFromFinalPrefName(mdApiName);
      if (util.isNullOrUndefined(orgPrefApi)) {
        ux.warn(`Unknown org preference: ${pref}, ignored.`);
        return false;
      }
      if (OrgPrefRegistry.isMigrationDeprecated(orgPrefApi)) {
        ux.warn(`The setting "${pref}" is no longer supported as of API version 47.0`);
        return false;
      }

      if (!has(data, orgPrefApi)) {
        set(data, orgPrefApi, {});
      }
      const apiOrgPrefs = getObject(data, orgPrefApi);
      // check to see if the value is already set
      set(apiOrgPrefs, mdApiName, prefVal);

      return orgPrefApi != OrgPrefRegistry.ORG_PREFERENCE_SETTINGS;
    }

    const orgPreferenceSettings = getObject(scratchDef, 'settings.orgPreferenceSettings');
    delete scratchDef.settings.orgPreferenceSettings;
    this.settingData = getObject(scratchDef, 'settings');

    let migrated = false;
    for (const preference in orgPreferenceSettings) {
      if (storePrefs(this.settingData, preference, orgPreferenceSettings[preference])) {
        migrated = true;
      }
    }

    // Since we could have recommended some preferences that are still in OPS, only warn if any actually got moved there
    if (migrated) {
      // It would be nice if cli.ux.styledJSON could return a colorized JSON string instead of logging to stdout.
      const message = orgSettingsMessages.getMessage('migratedPrefFormat', [
        oldScratchDef,
        JSON.stringify({ settings: this.settingData }, null, 4),
      ]);
      ux.warn(message);
    }
  }

  /** Create temporary deploy directory used to upload the scratch org shape.
   * This will create the dir, generate package.xml and all of the .setting files.
   */
  public async createDeployDir(apiVersion) {
    // The root of our package; use SFDX_MDAPI_TEMP_DIR if set.
    const targetDir = process.env.SFDX_MDAPI_TEMP_DIR || os.tmpdir();
    const destRoot = path.join(targetDir, 'shape');
    const settingsDir = path.join(destRoot, 'settings');
    const objectsDir = path.join(destRoot, 'objects');
    const packageFilePath = path.join(destRoot, 'package.xml');
    const allRecTypes: string[] = [];
    const allBps: string[] = [];

    // NOTE(review): fs.rmdir fails on a non-empty directory and the catch below
    // swallows that error too, so stale files from a previous run may survive
    // in destRoot — confirm whether a recursive removal was intended.
    try {
      await fs.access(destRoot, fs.constants.F_OK);
      await fs.rmdir(destRoot);
    } catch (e) {
      // If access failed, the root dir probably doesn't exist, so we're fine
    }

    await Promise.all([
      this.writeSettingsIfNeeded(settingsDir),
      this.writeObjectSettingsIfNeeded(objectsDir, allRecTypes, allBps),
    ]);
    // allRecTypes/allBps are filled as a side effect of writing the object files.
    await this.writePackageFile(allRecTypes, allBps, packageFilePath, apiVersion);
    return destRoot;
  }

  // Generates package.xml listing every Settings member, CustomObject member,
  // and the RecordType/BusinessProcess components collected while writing objects.
  private async writePackageFile(allRecTypes: string[], allBps: string[], packageFilePath: string, apiVersion: any) {
    let packageContentInternals = '';
    let settingsMemberReferences = '';
    if (this.settingData) {
      Object.keys(this.settingData).forEach((item) => {
        const typeName = this.cap(item).replace('Settings', '');
        settingsMemberReferences += '\n        <members>' + typeName + '</members>';
      });
      packageContentInternals += util.format(_packageFileTypeSection, settingsMemberReferences, 'Settings');
    }
    let objectMemberReferences = '';
    if (this.objectSettingsData) {
      Object.keys(this.objectSettingsData).forEach((item) => {
        objectMemberReferences += '\n        <members>' + this.cap(item) + '</members>';
      });
      packageContentInternals += util.format(_packageFileTypeSection, objectMemberReferences, 'CustomObject');
      packageContentInternals += this.getTypeReferences(allRecTypes, 'RecordType');
      packageContentInternals += this.getTypeReferences(allBps, 'BusinessProcess');
    }
    await fs.writeFile(packageFilePath, util.format(_packageFileContents, packageContentInternals, apiVersion));
  }

  // Writes one <Name>.object file per entry in objectSettingsData,
  // appending any record types / business processes it creates to the accumulators.
  private async writeObjectSettingsIfNeeded(objectsDir: string, allRecTypes: string[], allBps: string[]) {
    if (this.objectSettingsData) {
      await fs.mkdirp(objectsDir);
      for (const item of Object.keys(this.objectSettingsData)) {
        const value: object = getObject(this.objectSettingsData, item);
        const fileContent = this._createObjectFileContent(this.cap(item), value, allRecTypes, allBps);
        await fs.writeFile(path.join(objectsDir, this.cap(item) + '.object'), fileContent);
      }
    }
  }

  // Writes one <Type>.settings file per entry in settingData
  // (file name is the type name with the trailing "Settings" stripped).
  private async writeSettingsIfNeeded(settingsDir: string) {
    if (this.settingData) {
      await fs.mkdirp(settingsDir);
      for (const item of Object.keys(this.settingData)) {
        const value: object = getObject(this.settingData, item);
        const typeName = this.cap(item);
        const fname = typeName.replace('Settings', '');
        const fileContent = this._createSettingsFileContent(typeName, value);
        await fs.writeFile(path.join(settingsDir, fname + '.settings'), fileContent);
      }
    }
  }

  // Renders a package.xml <types> section for the given components,
  // or an empty string when there are none.
  private getTypeReferences(componentNames: string[], componentType: string) {
    return componentNames && componentNames.length > 0
      ? util.format(
          _packageFileTypeSection,
          componentNames.map((item) => '\n        <members>' + item + '</members>').join(''),
          componentType
        )
      : '';
  }

  // Renders the XML body of a .settings file; OrgPreferenceSettings uses a
  // hand-built <preferences> list, everything else goes through js2xmlparser.
  _createSettingsFileContent(name, json) {
    if (name == 'OrgPreferenceSettings') {
      // this is a stupid format
      let res = `<?xml version="1.0" encoding="UTF-8"?>
<OrgPreferenceSettings xmlns="http://soap.sforce.com/2006/04/metadata">
`;
      res += Object.keys(json)
        .map(
          (pref) =>
            `    <preferences>
        <settingName>` +
            this.cap(pref) +
            `</settingName>
        <settingValue>` +
            get(json, pref) +
            `</settingValue>
    </preferences>`
        )
        .join('\n');
      res += '\n</OrgPreferenceSettings>';
      return res;
    } else {
      return js2xmlparser.parse(name, json);
    }
  }

  // Renders a .object file: optional sharingModel, plus a record type (and, for
  // the four picklist-driven objects, its required business process).
  _createObjectFileContent(name: string, json: Object, allRecTypes: string[], allBps: string[]) {
    // name already capped
    let res = `<?xml version="1.0" encoding="UTF-8"?>
<Object xmlns="http://soap.sforce.com/2006/04/metadata">
`;
    const sharingModel = getString(json, 'sharingModel');
    if (sharingModel) {
      res += '    <sharingModel>' + this.cap(sharingModel) + '</sharingModel>\n';
    }

    const defaultRecordType = getString(json, 'defaultRecordType');
    if (defaultRecordType) {
      // We need to keep track of these globally for when we generate the package XML.
      allRecTypes.push(name + '.' + this.cap(defaultRecordType));
      let bpName = null;
      let bpPicklistVal = null;
      // These four objects require any record type to specify a "business process"--
      // a restricted set of items from a standard picklist on the object.
      if (['Case', 'Lead', 'Opportunity', 'Solution'].includes(name)) {
        bpName = this.cap(defaultRecordType) + 'Process';
        switch (name) {
          case 'Case':
            bpPicklistVal = 'New';
            break;
          case 'Lead':
            bpPicklistVal = 'New - Not Contacted';
            break;
          case 'Opportunity':
            bpPicklistVal = 'Prospecting';
            break;
          case 'Solution':
            bpPicklistVal = 'Draft';
        }
      }

      // Create the record type
      res +=
        `    <recordTypes>
        <fullName>` +
        this.cap(defaultRecordType) +
        `</fullName>
        <label>` +
        this.cap(defaultRecordType) +
        `</label>
        <active>true</active>
`;
      if (bpName) {
        // We need to keep track of these globally for the package.xml
        allBps.push(name + '.' + bpName);
        res += '        <businessProcess>' + bpName + '</businessProcess>\n';
      }
      res += '    </recordTypes>\n';

      // If required, create the business processes they refer to
      if (bpName) {
        res +=
          `    <businessProcesses>
        <fullName>` +
          bpName +
          `</fullName>
        <isActive>true</isActive>
        <values>
            <fullName>` +
          bpPicklistVal +
          '</fullName>';
        // For Opportunity you don't have the option to set a default
        if (name != 'Opportunity') {
          res += `
            <default>true</default>`;
        }
        res += `
        </values>
    </businessProcesses>
`;
      }
    }
    res += '</Object>';
    return res;
  }

  // Uppercases the first character; '' for empty string, null for null/undefined input.
  cap(s: string) {
    return s ? (s.length > 0 ? s.charAt(0).toUpperCase() + s.substring(1) : '') : null;
  }
}

export = SettingsGenerator;
the_stack
import React, { useState, useEffect } from 'react'; import * as actions from '../../actions'; import { connect } from "react-redux"; import { Dispatch } from 'redux'; import classNames from 'classnames'; import { Modal, Steps, Form, Button, Spin } from 'antd'; import { FormComponentProps } from 'antd/lib/form'; import CollectObjectConfiguration from './CollectObjectConfiguration'; import CollectLogConfiguration from './CollectLogConfiguration'; import ClientClearSelfMonitor from './ClientClearSelfMonitor'; import ReceiverAdvancedConfiguration from './ReceiverAdvancedConfiguration'; import { NavRouterLink } from '../../component/CustomComponent'; import { addCollectTask, getCollectDetails, editCollectTask } from '../../api/collect'; import { ILogCollectTask, ILogCollectTaskDetail } from '../../interface/collect'; import { RouteComponentProps } from 'react-router-dom'; import { setStepParams, setEditThreeParams, setEditFourParams } from './config'; import { setLimitUnit, useDebounce } from '../../lib/utils'; import moment from 'moment'; import './index.less'; import { getHostListbyServiceId } from '../../api'; import { setHostNameList, setlogFilePathKey } from './dateRegAndGvar'; const { Step } = Steps; const validateStepRegex = { 0: /^step1_/, 1: /^step2_/, 2: /^step3_/, 3: /^step4_/, } as any; interface IStepsFormProps extends FormComponentProps { history?: any; taskId?: any; } const mapDispatchToProps = (dispatch: Dispatch) => ({ setCollectType: (collectType: number) => dispatch(actions.setCollectType(collectType)), setLogType: (logType: string) => dispatch(actions.setLogType(logType)), }); type Props = ReturnType<typeof mapDispatchToProps>; const StepsForm = (props: Props & RouteComponentProps & IStepsFormProps) => { const editUrl = window.location.pathname.includes('/edit-task'); const [loading, setLoading] = useState(false); const [currentStep, setCurrentStep] = useState(0); const { validateFieldsAndScroll, setFieldsValue, validateFields, resetFields } = 
props.form; // step1 编辑时复原页面1的动作 const [collectMode, setCollectMode] = useState(0); const [openHistory, setOpenHistory] = useState(false); const [historyFilter, setHistoryFilter] = useState('current'); const [hostRange, setHostRange] = useState(0); const [hostWhite, setHostWhite] = useState('hostname'); const [hostNames, setHostNames] = useState([]) // step2 编辑时复原页面2的动作 const [collectLogType, setCollectLogType] = useState('file'); const [logFilter, setLogFilter] = useState(0); const [cataPathlist, setCataPathlist] = useState([] as string[]); const [slicingRuleLog, setSlicingRuleLog] = useState(0); // LogRepeatForm cata (cata/file重复)页面 const [filePathList, setFilePathList] = useState(['']); const [slicingRuleLogList, setSlicingRuleLogList] = useState([] as number[]); // LogRepeatForm file 循环 (cata/file重复)页面 const [suffixfilesList, setSuffixfilesList] = useState([] as number[]); // LogFileType (file里面的)循环 页面 const [isNotLogPath, setisNotLogPath] = useState(false) const steps = [{ title: '采集对象配置', content: <CollectObjectConfiguration hostNames={hostNames} form={props.form} collectMode={collectMode} openHistory={openHistory} historyFilter={historyFilter} hostRange={hostRange} hostWhite={hostWhite} isNotLogPath={isNotLogPath} setisNotLogPath={setisNotLogPath} />, }, { title: '采集日志配置', content: <CollectLogConfiguration form={props.form} hostNames={hostNames} collectLogType={collectLogType} logFilter={logFilter} cataPathlist={cataPathlist} slicingRuleLog={slicingRuleLog} filePathList={filePathList} slicingRuleLogList={slicingRuleLogList} suffixfilesList={suffixfilesList} isNotLogPath={isNotLogPath} setisNotLogPath={setisNotLogPath} />, }, { title: '接收端配置与监控', content: <ClientClearSelfMonitor form={props.form} />, }, // { // title: '接收端与高级配置', // content: <ReceiverAdvancedConfiguration form={props.form} />, // } ]; const onStepsChange = (current: number) => { setCurrentStep(current); if (current === 1 || current === 3) { validateFields((errors) => { 
resetFields(Object.keys(errors)); }); } }; const handleNext = () => { // 校验每一步的参数 validateFieldsAndScroll((errors, values) => { if (errors) { const errorReg = validateStepRegex[currentStep]; const err = {} as any; for (let key in errors) { if (key.indexOf(`step${currentStep + 1}`) === -1) { err[key] = errors[key]; }; } resetFields(Object.keys(err)); const contentErrorKey = Object.keys(errors).find(errorItem => { if (errorReg.test(errorItem)) { return true; } return false; }); // 只有相应的步骤出现未填写参数的情况,才会阻断下一步 if (contentErrorKey) { return; } } setCurrentStep(prevStep => { return prevStep + 1; }); }); }; const handlePrev = () => { setCurrentStep(prevStep => { return prevStep - 1; }); }; const processParameters = (values: any) => { const params = setStepParams(values); if (editUrl) { params.id = props.taskId; return editTask(params); } return addTask(params); } const addTask = (params: ILogCollectTask) => { addCollectTask(params).then((res: any) => { if (!res.stateus) { Modal.success({ content: '新增成功!', okText: '确认', onOk: () => props.history.push({ pathname: '/collect' }), }); } else { Modal.error({ content: `${res.message}`, okText: '确认', // onCancel: () => {}, }); } }).catch((err: any) => { // console.log(err); }); } const editTask = (params: ILogCollectTask) => { editCollectTask(params).then((res: any) => { Modal.success({ content: '修改成功!', okText: '确认', onOk: () => props.history.push({ pathname: '/collect' }), }); }).catch((err: any) => { // console.log(err); }); } const handleSubmit = () => { validateFieldsAndScroll((errors, values) => { if (errors) { const indexs = { 'step1': 0, 'step2': 1, 'step3': 2, 'step4': 3, } as any; const err = Object.keys(errors); const currentStep = indexs[err[0]?.split('_')[0]]; setCurrentStep(currentStep); return false; } return processParameters(values); }); } const handleAddTaskSubmit = useDebounce(() => { // 新建按钮 handleSubmit(); }, 600); const handleEditTaskSubmit = useDebounce(() => { // 编辑按钮 handleSubmit(); }, 600); const 
getCollectDetail = () => { setLoading(true); getCollectDetails(props.taskId).then((res: ILogCollectTaskDetail) => { assignmentParameters(res); setLoading(false); }).catch((err: any) => { // console.log(err); setLoading(false); }); } const setPathObj = (list: string[]) => { const arr = list?.map((ele, index) => { let pathlog = {} as any; pathlog[`step2_catalog_path_${index}`] = ele; return pathlog; }); let objs = {}; arr?.forEach(ele => { objs = { ...objs, ...ele }; }) return objs; } //处理编辑传参 EditStep1 const setEditOneParams = async (objs: ILogCollectTaskDetail) => { const serviceIdList = objs.services?.map(ele => ele.id) as number[]; let step1_collectBusinessTime = [] as unknown; let step1_openHistory = false as boolean; let step1_historyFilter = '' as string; let step1_collectStartBusinessTime = moment(); setCollectMode(objs.logCollectTaskType) props.setCollectType(objs.logCollectTaskType); if (objs.logCollectTaskType === 0) { if (objs.oldDataFilterType === 0) { step1_collectBusinessTime = []; } else if (objs.oldDataFilterType === 1) { step1_openHistory = true; setOpenHistory(step1_openHistory); step1_historyFilter = 'current'; setHistoryFilter(step1_historyFilter); step1_collectStartBusinessTime = moment(new Date(objs.collectStartBusinessTime)); } else if (objs.oldDataFilterType === 2) { step1_openHistory = true; setOpenHistory(step1_openHistory); step1_historyFilter = 'custom'; setHistoryFilter(step1_historyFilter); step1_collectStartBusinessTime = moment(new Date(objs.collectStartBusinessTime)); } } else { step1_collectBusinessTime = [moment(new Date(objs.collectStartBusinessTime)), moment(new Date(objs.collectEndBusinessTime))]; step1_openHistory = false; setOpenHistory(step1_openHistory); } const step1_needHostFilterRule = objs.hostFilterRuleVO?.needHostFilterRule; setHostRange(step1_needHostFilterRule); let step1_hostWhiteList = '' as string; let step1_hostNames: any = []; let step1_filterSQL = '' as string; let result = [] if (step1_needHostFilterRule === 
1) { if (objs.hostFilterRuleVO?.filterSQL) { step1_hostWhiteList = 'sql'; setHostWhite(step1_hostWhiteList); step1_filterSQL = objs.hostFilterRuleVO?.filterSQL; } else { step1_hostWhiteList = 'hostname'; setHostWhite(step1_hostWhiteList); result = await getHostListbyServiceId(serviceIdList[0]) step1_hostNames = objs.hostFilterRuleVO?.hostNames?.map(ele => Number(ele)); } } // const hostNameList = result.hostList?.filter((item: any) => { // return objs.hostFilterRuleVO?.hostNames.map(ele => Number(ele)).includes(item.id) // }) if (result?.hostList?.length) { setHostNameList([...result.hostList]) setHostNames(result.hostList) } return { step1_logCollectTaskName: objs.logCollectTaskName, // 日志采集任务名 step1_serviceId: serviceIdList[0], // 采集应用 number[] step1_logCollectTaskType: objs.logCollectTaskType, // 采集模式 // 0:流式 1:时间段 step1_collectBusinessTime, // collectStartBusinessTime logCollectTaskType = 1 时间段 step1_openHistory, // 历史数据过滤 默认false 开启true step1_historyFilter, // 选择历史数据过滤 // current从当前开始采集 custom自定义采集开始时间 step1_collectStartBusinessTime, // 默认自定义采集开始时间 step1_needHostFilterRule, // 0全部 1部分 step1_hostWhiteList, // 选择主机白名单 // hostname主机名 sqlSQL step1_hostNames, //.filter((item: any) => step1_hostNames.includes(item.id)), // 主机名 step1_filterSQL, // SQL匹配 // step1_logCollectTaskRemark: objs.logCollectTaskRemark, // 日志采集任务备注 } as any; } //处理编辑传参 EditStep2 const setEditTwoParams = (objs: ILogCollectTaskDetail) => { const logObj = { step2_logContentFilterExpression: objs.logContentFilterRuleVO?.logContentFilterExpression, // 日志内容过滤表达式,needLogContentFilter为1时必填 step2_logContentFilterType: objs.logContentFilterRuleVO?.logContentFilterType, // 日志内容过滤类型 0:包含 1:不包含,needLogContentFilter为1时必填 step2_needLogContentFilter: objs.logContentFilterRuleVO?.needLogContentFilter, // 是否需要对日志内容进行过滤 0:否 1:是 // step2_maxBytesPerLogEvent: objs.maxBytesPerLogEvent * 1 > flowUnitList[1].value ? 
(objs.maxBytesPerLogEvent * 1 / Number(flowUnitList[1].value)) : (objs.maxBytesPerLogEvent * 1 / Number(flowUnitList[0].value)),// 单条日志大小上限 // Number(values.step2_maxBytesPerLogEvent) * Number(values.step2_flowunit) step2_maxBytesPerLogEvent: setLimitUnit(objs.maxBytesPerLogEvent)?.maxBytesPerLogEvent,// 单条日志大小上限 step2_file_sliceTimestampPrefixStringIndex: objs.logContentSliceRule?.sliceTimestampPrefixStringIndex, // 左起第几个匹配 step2_file_sliceTimestampPrefixString: objs.logContentSliceRule?.sliceTimestampPrefixString, // 左起第几个匹配 step2_file_sliceTimestampFormat: objs.logContentSliceRule?.sliceTimestampFormat, // 时间戳格式 step2_file_sliceRegular: objs.logContentSliceRule?.sliceRegular, // 日志切片规则选1 出现 切片正则 step2_file_suffixMatchRegular: objs.fileNameSuffixMatchRule?.suffixMatchRegular || '', // 选1出现采集文件后缀匹配 } setLogFilter(logObj.step2_needLogContentFilter); // collect if (objs.directoryLogCollectPathList?.length) { setCollectLogType('catalog'); props.setLogType('catalog'); let cataBase = {} as unknown; const cata = objs.directoryLogCollectPathList[0]; const collectwhites = cata.filterRuleChain?.filter(ele => ele.key === 0); const collectblacks = cata.filterRuleChain?.filter(ele => ele.key === 1); const paths = objs.directoryLogCollectPathList.map(ele => ele.path); setCataPathlist(paths); const pathObj = setPathObj(paths); cata.logSliceRuleVO?.sliceRegular ? 
setSlicingRuleLog(1) : setSlicingRuleLog(0); cataBase = { step2_collectionLogType: 'catalog', // 采集日志类型 step2_charset: cata.charset, // 编码格式 step2_catalog_path: cata.path, // 目录路径 step2_catalog_directoryCollectDepth: cata.directoryCollectDepth, // 采集深度 step2_catalog_collectwhitelist: collectwhites[0]?.value, // 采集文件白名单 step2_catalog_collectblacklist: collectblacks[0]?.value, // 采集文件黑名单 step2_catalog_maxBytesPerLogEvent_: setLimitUnit(cata?.maxBytesPerLogEvent, 2)?.maxBytesPerLogEvent, // 单条日志大小上限 step2_catalog_flowunit_: setLimitUnit(cata?.maxBytesPerLogEvent, 2)?.flowunit, // 单位 1024 KB step2_catalog_sliceType_: cata.logSliceRuleVO?.sliceType, // 日志切片规则 0时间戳 1正则匹配 step2_catalog_sliceTimestampPrefixStringIndex_: cata.logSliceRuleVO?.sliceTimestampPrefixStringIndex || 0, // 左起第几个匹配 step2_catalog_sliceTimestampPrefixString_: cata.logSliceRuleVO?.sliceTimestampPrefixString || '', // 切片时间戳前缀字符串 step2_catalog_sliceTimestampFormat_: cata.logSliceRuleVO?.sliceTimestampFormat || '', // 时间戳格式 step2_catalog_sliceRegular_: cata.logSliceRuleVO?.sliceRegular || '', // 日志切片规则选1 出现 切片正则 } const cataObj = {}; Object.assign(cataObj, logObj, pathObj, cataBase); return cataObj; } else { // file setCollectLogType('file'); props.setLogType('file'); const file = objs.fileLogCollectPathList; let filePathArr = [] as string[]; let slicingRuleLogArr = [] as number[]; let suffixLengthArr = [] as number[]; const fileBase = { step2_collectionLogType: 'file', // 采集日志类型 step2_charset: file[0].charset, // 编码格式 } const fileArr = file?.map((ele, index) => { let filelog = {} as any; filePathArr.push(ele.path); slicingRuleLogArr.push(ele.logSliceRuleVO?.sliceType ? 
1 : 0); suffixLengthArr.push(ele.fileNameSuffixMatchRuleVO?.suffixMatchType); filelog[`step2_file_path_${index}`] = ele.path;// 文件日志路径 filelog[`step2_file_suffixSeparationCharacter_${index}`] = ele.fileNameSuffixMatchRuleVO?.suffixSeparationCharacter; // 文件名后缀分隔字符 // filelog[`step2_file_suffixMatchType_${index}`] = ele.fileNameSuffixMatchRuleVO?.suffixMatchType; // 采集文件后缀匹配 0固定格式匹配 1正则匹配 filelog[`step2_file_suffixLength_${index}`] = ele.fileNameSuffixMatchRuleVO?.suffixLength || '';// 选0出现采集文件后缀匹配 filelog[`step2_file_maxBytesPerLogEvent_${index}`] = setLimitUnit(ele?.maxBytesPerLogEvent, 2)?.maxBytesPerLogEvent;// 单条日志大小上限 filelog[`step2_file_flowunit_${index}`] = setLimitUnit(ele?.maxBytesPerLogEvent, 2)?.flowunit; // 单位 1024 KB filelog[`step2_file_sliceType_${index}`] = ele.logSliceRuleVO?.sliceType; // 日志切片规则 0时间戳 1正则匹配 return filelog; }) setFilePathList(filePathArr); setSlicingRuleLogList(slicingRuleLogArr); setSuffixfilesList(suffixLengthArr); let fileOth = {}; fileArr?.forEach(ele => { fileOth = { ...fileOth, ...ele }; }) const fileObj = {}; Object.assign(fileObj, logObj, fileBase, fileOth); return fileObj; } } const assignmentParameters = async (objs: ILogCollectTaskDetail) => { const obj_step1 = await setEditOneParams(objs); // step1 const obj_step2 = await setEditTwoParams(objs); // step2 const obj_step3 = await setEditThreeParams(objs); // step3 const obj_step4 = await setEditFourParams(objs); // step4 const params = {} as any; Object.assign(params, obj_step1, obj_step2, obj_step3, obj_step4); setFieldsValue(params); } useEffect(() => { if (editUrl) { getCollectDetail() } setHostNameList([]); // 清除映射主机列表,进入新增任务时不显示 setlogFilePathKey(0); // 清除保存日志路径的下标 return; }, []); return ( <Spin spinning={loading}> <div className="p-steps-form steps-form"> <Steps progressDot current={currentStep} onChange={onStepsChange}> {steps?.map(item => ( <Step key={item.title} title={item.title} /> ))} </Steps> <div className="p-steps-form-content"> {steps?.map((item, itemIdx) => 
{ const itemCls = classNames( 'p-steps-form-content-item', currentStep === itemIdx && 'p-steps-form-content-item-active', ); return (<div key={`step-${itemIdx}`} className={itemCls}>{item.content}</div>); })} </div> <div className="p-steps-form-action p-steps-edit"> {!editUrl ? <> {currentStep > 0 && (<Button className='mr-10' onClick={handlePrev}>上一步</Button>)} {currentStep < steps.length - 1 && (<Button type="primary" onClick={handleNext}>下一步</Button>)} {currentStep === steps.length - 1 && (<Button type="primary" onClick={handleAddTaskSubmit}>完成</Button>)} <div className="edit-btns"> <Button><NavRouterLink element='取消' href="/collect" /></Button> </div> </> : <> {currentStep > 0 && (<Button className='mr-10' onClick={handlePrev}>上一步</Button>)} {currentStep < steps.length - 1 && (<Button type="primary" onClick={handleNext}>下一步</Button>)} {/* {currentStep === steps.length - 1 && (<Button type="primary" onClick={handleAddTaskSubmit}>完成</Button>)} */} <div className="edit-btns"> <Button type="primary" className='mr-10' onClick={handleEditTaskSubmit}>确认</Button> <Button><NavRouterLink element='取消' href="/collect" /></Button> </div> </>} </div> </div> </Spin> ); }; export default Form.create<FormComponentProps & IStepsFormProps>({})(connect(null, mapDispatchToProps)(StepsForm));
the_stack
// Generated parser module — the `$ebnf$` / `$subexpression$` / `$string$` rule
// names and the Grammar/NearleyRule scaffolding match nearley's compiled
// output (presumably compiled from a .ne grammar — TODO confirm; prefer
// regenerating from the source grammar over hand-editing this file).
//
// The grammar parses a search-query language: `field:value` terms, AND/OR/NOT
// combinators, relational operators (=, <, >, <=, >=), `[x TO y]` / `{x TO y}`
// ranges with per-bracket inclusivity, /regex/flags literals, and quoted or
// unquoted values.

// Standard nearley postprocessor: return the first parse child unchanged.
function id(d: any[]): any { return d[0]; }

// Builds a postprocessor for a binary expression node:
// child 0 is the left operand, child 2 the right (child 1 is the operator token).
const opExpr = (operator) => {
  return d => ({
    operator: operator,
    left: d[0],
    right: d[2]
  });
}

// Postprocessor for unary NOT; the operand is parse child 1 (child 0 is "NOT").
const notOp = (d) => {
  return {
    operator: 'NOT',
    operand: d[1]
  };
}

// Postprocessor for bare (unquoted) values. Joins the matched characters and
// coerces the literals 'true'/'false'/'null' to their JavaScript counterparts.
// `location` and `reject` are nearley's standard postprocess arguments
// (unused here).
const unquotedValue = (d, location, reject) => {
  let query = d.join('');

  if (query === 'true') {
    query = true;
  } else if (query === 'false') {
    query = false;
  } else if (query === 'null') {
    query = null;
  }

  return {
    quoted: false,
    query,
  };
}

// Builds a range postprocessor with the given end inclusivity (chosen by the
// bracket style of the production). min/max are parse children 2 and 6 of the
// 9-symbol range rule.
const range = (minInclusive, maxInclusive) => {
  return (d) => {
    return {
      range: {
        min: d[2],
        minInclusive,
        maxInclusive,
        max: d[6],
      }
    }
  };
}

// Postprocessor for `field ":" _ query`: records the raw field name, a dotted
// path split on '.', and spreads the query object (parse child 3) on top.
const field = d => {
  return {
    field: d[0],
    fieldPath: d[0].split('.').filter(Boolean),
    ...d[3]
  }
};

interface NearleyToken { value: any; [key: string]: any; };

interface NearleyLexer { reset: (chunk: string, info: any) => void; next: () => NearleyToken | undefined; save: () => any; formatError: (token: never) => string; has: (tokenType: string) => boolean; };

interface NearleyRule { name: string; symbols: NearleySymbol[]; postprocess?: (d: any[], loc?: number, reject?: {}) => any; };

type NearleySymbol = string | { literal: any } | { test: (token: any) => boolean };

interface Grammar { Lexer: NearleyLexer | undefined; ParserRules: NearleyRule[]; ParserStart: string; };

const grammar: Grammar = {
  Lexer: undefined,
  ParserRules: [
    // --- whitespace: `_` = optional, `__` = required (nearley builtin style) ---
    {"name": "_$ebnf$1", "symbols": []},
    {"name": "_$ebnf$1", "symbols": ["_$ebnf$1", "wschar"], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "_", "symbols": ["_$ebnf$1"], "postprocess": function(d) {return null;}},
    {"name": "__$ebnf$1", "symbols": ["wschar"]},
    {"name": "__$ebnf$1", "symbols": ["__$ebnf$1", "wschar"], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "__", "symbols": ["__$ebnf$1"], "postprocess": function(d) {return null;}},
    {"name": "wschar", "symbols": [/[ \t\n\v\f]/], "postprocess": id},
    // --- string literals: double-, single-, and backtick-quoted with escapes ---
    {"name": "dqstring$ebnf$1", "symbols": []},
    {"name": "dqstring$ebnf$1", "symbols": ["dqstring$ebnf$1", "dstrchar"], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "dqstring", "symbols": [{"literal":"\""}, "dqstring$ebnf$1", {"literal":"\""}], "postprocess": function(d) {return d[1].join(""); }},
    {"name": "sqstring$ebnf$1", "symbols": []},
    {"name": "sqstring$ebnf$1", "symbols": ["sqstring$ebnf$1", "sstrchar"], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "sqstring", "symbols": [{"literal":"'"}, "sqstring$ebnf$1", {"literal":"'"}], "postprocess": function(d) {return d[1].join(""); }},
    {"name": "btstring$ebnf$1", "symbols": []},
    {"name": "btstring$ebnf$1", "symbols": ["btstring$ebnf$1", /[^`]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "btstring", "symbols": [{"literal":"`"}, "btstring$ebnf$1", {"literal":"`"}], "postprocess": function(d) {return d[1].join(""); }},
    {"name": "dstrchar", "symbols": [/[^\\"\n]/], "postprocess": id},
    {"name": "dstrchar", "symbols": [{"literal":"\\"}, "strescape"], "postprocess": function(d) { return JSON.parse("\""+d.join("")+"\""); } },
    {"name": "sstrchar", "symbols": [/[^\\'\n]/], "postprocess": id},
    {"name": "sstrchar", "symbols": [{"literal":"\\"}, "strescape"], "postprocess": function(d) { return JSON.parse("\""+d.join("")+"\""); }},
    {"name": "sstrchar$string$1", "symbols": [{"literal":"\\"}, {"literal":"'"}], "postprocess": (d) => d.join('')},
    {"name": "sstrchar", "symbols": ["sstrchar$string$1"], "postprocess": function(d) {return "'"; }},
    {"name": "strescape", "symbols": [/["\\/bfnrt]/], "postprocess": id},
    {"name": "strescape", "symbols": [{"literal":"u"}, /[a-fA-F0-9]/, /[a-fA-F0-9]/, /[a-fA-F0-9]/, /[a-fA-F0-9]/], "postprocess": function(d) { return d.join(""); } },
    // --- numeric literals: ints, decimals, percentages, JSON-style floats ---
    {"name": "unsigned_int$ebnf$1", "symbols": [/[0-9]/]},
    {"name": "unsigned_int$ebnf$1", "symbols": ["unsigned_int$ebnf$1", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "unsigned_int", "symbols": ["unsigned_int$ebnf$1"], "postprocess": function(d) { return parseInt(d[0].join("")); } },
    {"name": "int$ebnf$1$subexpression$1", "symbols": [{"literal":"-"}]},
    {"name": "int$ebnf$1$subexpression$1", "symbols": [{"literal":"+"}]},
    {"name": "int$ebnf$1", "symbols": ["int$ebnf$1$subexpression$1"], "postprocess": id},
    {"name": "int$ebnf$1", "symbols": [], "postprocess": () => null},
    {"name": "int$ebnf$2", "symbols": [/[0-9]/]},
    {"name": "int$ebnf$2", "symbols": ["int$ebnf$2", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "int", "symbols": ["int$ebnf$1", "int$ebnf$2"], "postprocess": function(d) { if (d[0]) { return parseInt(d[0][0]+d[1].join("")); } else { return parseInt(d[1].join("")); } } },
    {"name": "unsigned_decimal$ebnf$1", "symbols": [/[0-9]/]},
    {"name": "unsigned_decimal$ebnf$1", "symbols": ["unsigned_decimal$ebnf$1", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "unsigned_decimal$ebnf$2$subexpression$1$ebnf$1", "symbols": [/[0-9]/]},
    {"name": "unsigned_decimal$ebnf$2$subexpression$1$ebnf$1", "symbols": ["unsigned_decimal$ebnf$2$subexpression$1$ebnf$1", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "unsigned_decimal$ebnf$2$subexpression$1", "symbols": [{"literal":"."}, "unsigned_decimal$ebnf$2$subexpression$1$ebnf$1"]},
    {"name": "unsigned_decimal$ebnf$2", "symbols": ["unsigned_decimal$ebnf$2$subexpression$1"], "postprocess": id},
    {"name": "unsigned_decimal$ebnf$2", "symbols": [], "postprocess": () => null},
    {"name": "unsigned_decimal", "symbols": ["unsigned_decimal$ebnf$1", "unsigned_decimal$ebnf$2"], "postprocess": function(d) { return parseFloat( d[0].join("") + (d[1] ? "."+d[1][1].join("") : "") ); } },
    {"name": "decimal$ebnf$1", "symbols": [{"literal":"-"}], "postprocess": id},
    {"name": "decimal$ebnf$1", "symbols": [], "postprocess": () => null},
    {"name": "decimal$ebnf$2", "symbols": [/[0-9]/]},
    {"name": "decimal$ebnf$2", "symbols": ["decimal$ebnf$2", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "decimal$ebnf$3$subexpression$1$ebnf$1", "symbols": [/[0-9]/]},
    {"name": "decimal$ebnf$3$subexpression$1$ebnf$1", "symbols": ["decimal$ebnf$3$subexpression$1$ebnf$1", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "decimal$ebnf$3$subexpression$1", "symbols": [{"literal":"."}, "decimal$ebnf$3$subexpression$1$ebnf$1"]},
    {"name": "decimal$ebnf$3", "symbols": ["decimal$ebnf$3$subexpression$1"], "postprocess": id},
    {"name": "decimal$ebnf$3", "symbols": [], "postprocess": () => null},
    {"name": "decimal", "symbols": ["decimal$ebnf$1", "decimal$ebnf$2", "decimal$ebnf$3"], "postprocess": function(d) { return parseFloat( (d[0] || "") + d[1].join("") + (d[2] ? "."+d[2][1].join("") : "") ); } },
    {"name": "percentage", "symbols": ["decimal", {"literal":"%"}], "postprocess": function(d) { return d[0]/100; } },
    {"name": "jsonfloat$ebnf$1", "symbols": [{"literal":"-"}], "postprocess": id},
    {"name": "jsonfloat$ebnf$1", "symbols": [], "postprocess": () => null},
    {"name": "jsonfloat$ebnf$2", "symbols": [/[0-9]/]},
    {"name": "jsonfloat$ebnf$2", "symbols": ["jsonfloat$ebnf$2", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "jsonfloat$ebnf$3$subexpression$1$ebnf$1", "symbols": [/[0-9]/]},
    {"name": "jsonfloat$ebnf$3$subexpression$1$ebnf$1", "symbols": ["jsonfloat$ebnf$3$subexpression$1$ebnf$1", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "jsonfloat$ebnf$3$subexpression$1", "symbols": [{"literal":"."}, "jsonfloat$ebnf$3$subexpression$1$ebnf$1"]},
    {"name": "jsonfloat$ebnf$3", "symbols": ["jsonfloat$ebnf$3$subexpression$1"], "postprocess": id},
    {"name": "jsonfloat$ebnf$3", "symbols": [], "postprocess": () => null},
    {"name": "jsonfloat$ebnf$4$subexpression$1$ebnf$1", "symbols": [/[+-]/], "postprocess": id},
    {"name": "jsonfloat$ebnf$4$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null},
    {"name": "jsonfloat$ebnf$4$subexpression$1$ebnf$2", "symbols": [/[0-9]/]},
    {"name": "jsonfloat$ebnf$4$subexpression$1$ebnf$2", "symbols": ["jsonfloat$ebnf$4$subexpression$1$ebnf$2", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "jsonfloat$ebnf$4$subexpression$1", "symbols": [/[eE]/, "jsonfloat$ebnf$4$subexpression$1$ebnf$1", "jsonfloat$ebnf$4$subexpression$1$ebnf$2"]},
    {"name": "jsonfloat$ebnf$4", "symbols": ["jsonfloat$ebnf$4$subexpression$1"], "postprocess": id},
    {"name": "jsonfloat$ebnf$4", "symbols": [], "postprocess": () => null},
    {"name": "jsonfloat", "symbols": ["jsonfloat$ebnf$1", "jsonfloat$ebnf$2", "jsonfloat$ebnf$3", "jsonfloat$ebnf$4"], "postprocess": function(d) { return parseFloat( (d[0] || "") + d[1].join("") + (d[2] ? "."+d[2][1].join("") : "") + (d[3] ? "e" + (d[3][1] || "+") + d[3][2].join("") : "") ); } },
    // --- query language: boolean expressions over field:query terms ---
    {"name": "main", "symbols": ["expr"], "postprocess": id},
    {"name": "expr", "symbols": ["two_op_expr"], "postprocess": id},
    {"name": "two_op_expr$string$1", "symbols": [{"literal":"O"}, {"literal":"R"}], "postprocess": (d) => d.join('')},
    {"name": "two_op_expr", "symbols": ["pre_two_op_expr", "two_op_expr$string$1", "post_one_op_expr"], "postprocess": opExpr('OR')},
    {"name": "two_op_expr$string$2", "symbols": [{"literal":"A"}, {"literal":"N"}, {"literal":"D"}], "postprocess": (d) => d.join('')},
    {"name": "two_op_expr", "symbols": ["pre_two_op_expr", "two_op_expr$string$2", "post_one_op_expr"], "postprocess": opExpr('AND')},
    {"name": "two_op_expr", "symbols": ["one_op_expr"], "postprocess": d => d[0]},
    {"name": "pre_two_op_expr", "symbols": ["two_op_expr", "__"], "postprocess": d => d[0]},
    {"name": "pre_two_op_expr", "symbols": [{"literal":"("}, "_", "two_op_expr", "_", {"literal":")"}], "postprocess": d => d[2]},
    {"name": "one_op_expr", "symbols": [{"literal":"("}, "_", "two_op_expr", "_", {"literal":")"}], "postprocess": d => d[2]},
    {"name": "one_op_expr$string$1", "symbols": [{"literal":"N"}, {"literal":"O"}, {"literal":"T"}], "postprocess": (d) => d.join('')},
    {"name": "one_op_expr", "symbols": ["one_op_expr$string$1", "post_boolean_primary"], "postprocess": notOp},
    {"name": "one_op_expr", "symbols": ["boolean_primary"], "postprocess": d => d[0]},
    {"name": "post_one_op_expr", "symbols": ["__", "one_op_expr"], "postprocess": d => d[1]},
    {"name": "post_one_op_expr", "symbols": [{"literal":"("}, "_", "one_op_expr", "_", {"literal":")"}], "postprocess": d => d[2]},
    {"name": "boolean_primary", "symbols": ["side"], "postprocess": id},
    {"name": "post_boolean_primary", "symbols": [{"literal":"("}, "_", "boolean_primary", "_", {"literal":")"}], "postprocess": d => d[2]},
    {"name": "post_boolean_primary", "symbols": ["__", "boolean_primary"], "postprocess": d => d[1]},
    // A "side" is either an explicit `field:query` pair or a bare query, which
    // gets the sentinel field name '<implicit>'.
    {"name": "side", "symbols": ["field", {"literal":":"}, "_", "query"], "postprocess": field},
    {"name": "side", "symbols": ["query"], "postprocess": d => ({field: '<implicit>', ...d[0]})},
    {"name": "field$ebnf$1", "symbols": []},
    {"name": "field$ebnf$1", "symbols": ["field$ebnf$1", /[a-zA-Z\d_$.]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "field", "symbols": [/[_a-zA-Z$]/, "field$ebnf$1"], "postprocess": d => d[0] + d[1].join('')},
    {"name": "field", "symbols": ["sqstring"], "postprocess": id},
    {"name": "field", "symbols": ["dqstring"], "postprocess": id},
    {"name": "query", "symbols": ["relational_operator", "_", "decimal"], "postprocess": d => ({quoted: false, query: d[2], relationalOperator: d[0][0]})},
    {"name": "query", "symbols": ["decimal"], "postprocess": d => ({quoted: false, query: d.join('')})},
    {"name": "query", "symbols": ["regex"], "postprocess": d => ({quoted: false, regex: true, query: d.join('')})},
    {"name": "query", "symbols": ["range"], "postprocess": id},
    {"name": "query", "symbols": ["unquoted_value"], "postprocess": unquotedValue},
    {"name": "query", "symbols": ["sqstring"], "postprocess": d => ({quoted: true, query: d.join('')})},
    {"name": "query", "symbols": ["dqstring"], "postprocess": d => ({quoted: true, query: d.join('')})},
    // Four bracket combinations map to the four inclusive/exclusive range kinds.
    {"name": "range$string$1", "symbols": [{"literal":"T"}, {"literal":"O"}], "postprocess": (d) => d.join('')},
    {"name": "range", "symbols": [{"literal":"["}, "_", "decimal", "_", "range$string$1", "_", "decimal", "_", {"literal":"]"}], "postprocess": range(true, true)},
    {"name": "range$string$2", "symbols": [{"literal":"T"}, {"literal":"O"}], "postprocess": (d) => d.join('')},
    {"name": "range", "symbols": [{"literal":"{"}, "_", "decimal", "_", "range$string$2", "_", "decimal", "_", {"literal":"]"}], "postprocess": range(false, true)},
    {"name": "range$string$3", "symbols": [{"literal":"T"}, {"literal":"O"}], "postprocess": (d) => d.join('')},
    {"name": "range", "symbols": [{"literal":"["}, "_", "decimal", "_", "range$string$3", "_", "decimal", "_", {"literal":"}"}], "postprocess": range(true, false)},
    {"name": "range$string$4", "symbols": [{"literal":"T"}, {"literal":"O"}], "postprocess": (d) => d.join('')},
    {"name": "range", "symbols": [{"literal":"{"}, "_", "decimal", "_", "range$string$4", "_", "decimal", "_", {"literal":"}"}], "postprocess": range(false, false)},
    {"name": "relational_operator", "symbols": [{"literal":"="}]},
    {"name": "relational_operator", "symbols": [{"literal":">"}]},
    {"name": "relational_operator", "symbols": [{"literal":"<"}]},
    {"name": "relational_operator$string$1", "symbols": [{"literal":">"}, {"literal":"="}], "postprocess": (d) => d.join('')},
    {"name": "relational_operator", "symbols": ["relational_operator$string$1"]},
    {"name": "relational_operator$string$2", "symbols": [{"literal":"<"}, {"literal":"="}], "postprocess": (d) => d.join('')},
    {"name": "relational_operator", "symbols": ["relational_operator$string$2"]},
    // Regex literal: /body/flags, with backslash escapes inside the body.
    {"name": "regex", "symbols": ["regex_body", "regex_flags"], "postprocess": d => d.join('')},
    {"name": "regex_body$ebnf$1", "symbols": []},
    {"name": "regex_body$ebnf$1", "symbols": ["regex_body$ebnf$1", "regex_body_char"], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "regex_body", "symbols": [{"literal":"/"}, "regex_body$ebnf$1", {"literal":"/"}], "postprocess": d => '/' + d[1].join('') + '/'},
    {"name": "regex_body_char", "symbols": [/[^\\]/], "postprocess": id},
    {"name": "regex_body_char", "symbols": [{"literal":"\\"}, /[^\\]/], "postprocess": d => '\\' + d[1]},
    {"name": "regex_flags", "symbols": []},
    {"name": "regex_flags$ebnf$1", "symbols": [/[gmiyusd]/]},
    {"name": "regex_flags$ebnf$1", "symbols": ["regex_flags$ebnf$1", /[gmiyusd]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "regex_flags", "symbols": ["regex_flags$ebnf$1"], "postprocess": d => d[0].join('')},
    {"name": "unquoted_value$ebnf$1", "symbols": [/[a-zA-Z\-_*]/]},
    {"name": "unquoted_value$ebnf$1", "symbols": ["unquoted_value$ebnf$1", /[a-zA-Z\-_*]/], "postprocess": (d) => d[0].concat([d[1]])},
    {"name": "unquoted_value", "symbols": ["unquoted_value$ebnf$1"], "postprocess": d => d[0].join('')}
  ],
  ParserStart: "main",
};

export default grammar;
the_stack
import { derived, get, Readable, writable } from 'svelte/store'
// NOTE(review): 'persistent' appears unused in this module — confirm before removing.
import { persistent } from '../helpers'
import { networkStatus } from '../networkStatus'
import { NodePlugin } from '../typings/node'
import { MILLISECONDS_PER_SECOND, SECONDS_PER_MILESTONE } from '../time'
import { wallet } from '../wallet'
import type { WalletAccount } from '../typings/wallet'
import { ASSEMBLY_EVENT_ID, SHIMMER_EVENT_ID, STAKING_EVENT_IDS } from './constants'
import {
    AccountParticipationOverview,
    ParticipateResponsePayload,
    ParticipationAction,
    ParticipationEvent,
    ParticipationEventState,
    ParticipationOverview,
    PendingParticipation,
    StakingAirdrop,
} from './types'

/**
 * The store for keeping track of pending participations.
 */
export const pendingParticipations = writable<PendingParticipation[]>([])

/**
 * The store for an account that is selected to participate in an event. This is
 * mostly useful for showing background participation progress, otherwise it can
 * just be shown within a designated component (i.e. popup or dashboard tile).
 *
 * If this store is empty (e.g. undefined or null), then there is NOT an account
 * currently trying to participate (or stop) in an event.
 */
export const accountToParticipate = writable<WalletAccount>(null)

/**
 * The store for the participation action to perform for the "accountToParticipate". Similar
 * to the "accountToParticipate", this is mostly useful for showing background participation
 * progress.
 *
 * If this store is empty (e.g. undefined or null), then there is NOT an account
 * currently trying to participate (or stop) in an event.
 */
export const participationAction = writable<ParticipationAction>(null)

/**
 * The overview / statistics about participation. See #AccountParticipationOverview for more details.
 */
export const participationOverview = writable<ParticipationOverview>([])

/**
 * Whether the user is currently staking or unstaking
 */
export const isPerformingParticipation = writable<boolean>(false)

/**
 * The store for accounts that are currently staked. This is NOT to hold accounts
 * that have been selected for staking / unstaking or have staked in the past.
 *
 * This is updated regularly by the polling
 * in `wallet.rs`.
 */
export const stakedAccounts: Readable<WalletAccount[]> = derived(
    [participationOverview],
    ([$participationOverview]) => {
        // An account counts as staked when it has funds staked for either airdrop.
        const activeAccountIndices = $participationOverview
            .filter((overview) => overview.shimmerStakedFunds > 0 || overview.assemblyStakedFunds > 0)
            // .filter((overview) => overview.participations.length > 0)
            .map((overview) => overview.accountIndex)
        /**
         * CAUTION: Ideally the accounts Svelte store would
         * be derived, but doing so results in a "cannot
         * access _ before initialization" error.
         */
        const accounts = get(wallet).accounts
        if (!get(accounts)) return []
        else return get(accounts).filter((wa) => activeAccountIndices.includes(wa.index))
    }
)

/**
 * The amount of funds that are currently staked. This amount may differ
 * between airdrops, so we pick the highest number (this is only possible
 * because the same funds may be staked for both airdrops).
 */
export const stakedAmount: Readable<number> = derived(participationOverview, (overview) => {
    return overview.reduce((total, accountOverview) => {
        const { shimmerStakedFunds, assemblyStakedFunds } = accountOverview
        if (shimmerStakedFunds > 0 && assemblyStakedFunds > 0) {
            // Same funds may back both airdrops — count them once via the max.
            total += Math.max(shimmerStakedFunds, assemblyStakedFunds)
        } else {
            total += shimmerStakedFunds
            total += assemblyStakedFunds
        }
        return total
    }, 0)
})

/**
 * The amount of funds that are currently unstaked. This amount may differ
 * between airdrops, so we pick the lowest number (this is only possible
 * because the same funds may be staked for both airdrops).
 */
export const unstakedAmount: Readable<number> = derived(participationOverview, (overview) => {
    return overview.reduce((total, accountOverview) => {
        const { shimmerUnstakedFunds, assemblyUnstakedFunds } = accountOverview
        total += Math.min(shimmerUnstakedFunds, assemblyUnstakedFunds)
        return total
    }, 0)
})

/**
 * The store for accounts that contain partially staked funds.
 *
 * Accounts are added if upon receiving a new transaction they
 * are currently staked (checks stakedAccounts). Accounts are removed
 * within the staking flow.
 */
export const partiallyStakedAccounts: Readable<WalletAccount[]> = derived(
    [participationOverview],
    ([$participationOverview]) =>
        $participationOverview
            // "Partially staked" = an airdrop has both staked AND unstaked funds.
            .filter(
                (apo) =>
                    (apo.assemblyStakedFunds > 0 && apo.assemblyUnstakedFunds > 0) ||
                    (apo.shimmerStakedFunds > 0 && apo.shimmerUnstakedFunds > 0)
            )
            .map((apo) => get(get(wallet).accounts).find((wa) => wa.index === apo.accountIndex))
)

/**
 * The store for the total amount of funds that are partially (un)staked for
 * all accounts.
 */
export const partiallyUnstakedAmount: Readable<number> = derived(
    [participationOverview, partiallyStakedAccounts],
    ([$participationOverview, $partiallyStakedAccounts]) => {
        if ($partiallyStakedAccounts.length <= 0) return 0
        // Per-account partial amount: the unstaked remainder of whichever
        // airdrop is partially staked (max covers the both-airdrops case).
        const _eval = (overview: AccountParticipationOverview): number => {
            const assemblyPartialFunds =
                overview?.assemblyStakedFunds > 0 && overview?.assemblyUnstakedFunds > 0
                    ? overview?.assemblyUnstakedFunds
                    : 0
            const shimmerPartialFunds =
                overview?.shimmerStakedFunds > 0 && overview?.shimmerUnstakedFunds > 0
                    ? overview?.shimmerUnstakedFunds
                    : 0
            return Math.max(assemblyPartialFunds, shimmerPartialFunds)
        }
        return $partiallyStakedAccounts
            .map((psa) => $participationOverview.find((apo) => apo.accountIndex === psa.index))
            .reduce((total, apo) => total + _eval(apo), 0)
    }
)

/**
 * Sums an airdrop's accumulated rewards across every account overview.
 * Rewards are read from the dynamic `<airdrop>Rewards` and
 * `<airdrop>RewardsBelowMinimum` keys of each overview.
 */
const sumStakingRewards = (airdrop: StakingAirdrop, overview: ParticipationOverview): number => {
    if (!overview || overview?.length < 1) return 0

    const rewardsKey = `${airdrop}Rewards`
    const rewardsBelowMinimumKey = `${airdrop}RewardsBelowMinimum`

    const rewards = overview.reduce(
        (total, accountOverview) => total + (rewardsKey in accountOverview ? accountOverview[rewardsKey] : 0),
        0
    )
    const rewardsBelowMinimum = overview.reduce(
        (total, accountOverview) =>
            total + (rewardsBelowMinimumKey in accountOverview ? accountOverview[rewardsBelowMinimumKey] : 0),
        0
    )

    if (rewards <= 0) {
        return rewardsBelowMinimum
    } else {
        /**
         * NOTE: We return the sum of rewards and rewardsBelowMinimum here because it is possible that an
         * account has accumulated more than min rewards for an airdrop, but has unstaked and moved the funds
         * to another address that has NOT reach the minimum.
         */
        return rewards + rewardsBelowMinimum
    }
}

/**
 * The total accumulated Assembly rewards for all
 * accounts that have been staked at some point (even
 * if they are currently unstaked).
 *
 * Be cautious that this value is in microASMB, so it is likely to be larger.
 */
export const assemblyStakingRewards: Readable<number> = derived(participationOverview, (overview) =>
    sumStakingRewards(StakingAirdrop.Assembly, overview)
)

/**
 * The total accumulated Shimmer rewards for all
 * accounts that have been staked at some point (even
 * if they are currently unstaked).
 */
export const shimmerStakingRewards: Readable<number> = derived(participationOverview, (overview) =>
    sumStakingRewards(StakingAirdrop.Shimmer, overview)
)

/**
 * The available participation events (staking AND voting).
 */
export const participationEvents = writable<ParticipationEvent[]>([])

/**
 * The status of the staking event, calculated from the milestone information.
 */
export const stakingEventState: Readable<ParticipationEventState> = derived(
    [networkStatus, participationEvents],
    ([$networkStatus, $participationEvents]) => {
        const stakingEvent = $participationEvents.filter((pe) => STAKING_EVENT_IDS.includes(pe.eventId))[0]
        // No event or node lacks the participation plugin -> treat as inactive.
        if (!stakingEvent || !$networkStatus.nodePlugins.includes(NodePlugin.Participation)) {
            return ParticipationEventState.Inactive
        }
        const { milestoneIndexCommence, milestoneIndexStart, milestoneIndexEnd } = stakingEvent?.information
        const currentMilestone = $networkStatus?.currentMilestone
        // Event phases in milestone order: upcoming -> commencing -> holding -> ended.
        if (currentMilestone < milestoneIndexCommence) {
            return ParticipationEventState.Upcoming
        } else if (currentMilestone < milestoneIndexStart) {
            return ParticipationEventState.Commencing
        } else if (currentMilestone < milestoneIndexEnd) {
            return ParticipationEventState.Holding
        } else {
            return ParticipationEventState.Ended
        }
    }
)

/**
 * Milliseconds remaining until the staking event's next phase boundary
 * (commence, start, or end), relative to the given milestone; 0 once the
 * event has ended or when no event is given. Presumably an estimate, since
 * it is based on the fixed SECONDS_PER_MILESTONE cadence — confirm.
 */
export const calculateRemainingStakingTime = (currentMilestone: number, stakingEvent: ParticipationEvent): number => {
    if (!stakingEvent) return 0

    const commenceMilestone = stakingEvent?.information?.milestoneIndexCommence
    const startMilestone = stakingEvent?.information?.milestoneIndexStart
    const endMilestone = stakingEvent?.information?.milestoneIndexEnd

    const _calculateRemainingTime = (firstMilestone: number, secondMilestone: number): number =>
        Math.abs(secondMilestone - firstMilestone) * SECONDS_PER_MILESTONE * MILLISECONDS_PER_SECOND

    if (currentMilestone < commenceMilestone) {
        return _calculateRemainingTime(currentMilestone, commenceMilestone)
    } else if (currentMilestone < startMilestone) {
        return _calculateRemainingTime(currentMilestone, startMilestone)
    } else if (currentMilestone < endMilestone) {
        return _calculateRemainingTime(currentMilestone, endMilestone)
    } else {
        return 0
    }
}

/**
 * The remaining time until the Assembly staking event ends (in milliseconds).
 */
export const assemblyStakingRemainingTime: Readable<number> = derived(
    [networkStatus, participationEvents],
    ([$networkStatus, $participationEvents]) =>
        calculateRemainingStakingTime(
            $networkStatus?.currentMilestone,
            $participationEvents.find((pe) => pe.eventId === ASSEMBLY_EVENT_ID)
        )
)

/**
 * The remaining time until the Shimmer staking event ends (in milliseconds).
 */
export const shimmerStakingRemainingTime: Readable<number> = derived(
    [networkStatus, participationEvents],
    ([$networkStatus, $participationEvents]) =>
        calculateRemainingStakingTime(
            $networkStatus?.currentMilestone,
            $participationEvents.find((pe) => pe.eventId === SHIMMER_EVENT_ID)
        )
)

/**
 * Adds newly broadcasted (yet unconfirmed) participations
 *
 * @method addNewPendingParticipation
 *
 * @param {ParticipateResponsePayload} payload
 * @param {string} accountId
 * @param {ParticipationAction} action
 *
 * @returns {void}
 */
export const addNewPendingParticipation = (
    payload: ParticipateResponsePayload,
    accountId: string,
    action: ParticipationAction
): void => {
    const _pendingParticipation = {
        accountId,
        action,
    }
    // One pending entry per transaction in the payload, keyed by message id.
    pendingParticipations.update((participations) => [
        ...participations,
        ...payload.map((tx) => Object.assign({}, _pendingParticipation, { messageId: tx.id })),
    ])
}

/**
 * Removes pending participation (after it has confirmed)
 *
 * @method removePendingParticipations
 *
 * @param {string[]} ids
 *
 * @returns {void}
 */
export const removePendingParticipations = (ids: string[]): void => {
    pendingParticipations.update((participations) =>
        participations.filter((participation) => !ids.includes(participation.messageId))
    )
}

/**
 * Determines if has a pending participation
 *
 * @method hasPendingParticipation
 *
 * @param {string} id
 *
 * @returns {boolean}
 */
export const hasPendingParticipation = (id: string): boolean =>
    get(pendingParticipations).some((participation) => participation.messageId === id)

/**
 * Gets a pending participation
 *
 * @method getPendingParticipation
 *
 * @param {string} id
 *
 * @returns {PendingParticipation | undefined}
 */
export const getPendingParticipation = (id: string): PendingParticipation | undefined =>
    get(pendingParticipations).find((participation) => participation.messageId === id)
the_stack
/**
 * @fileoverview Browser tests for the print-preview custom margin controls:
 * visibility, sticky-setting restore, drag interaction, textbox input
 * (imperial and metric), clearing on media-size/layout changes, PDF
 * behavior, scroll-into-view requests, and the disabled-on-error state.
 */
import {CustomMarginsOrientation, Margins, MarginsSetting, MarginsType, MeasurementSystem, MeasurementSystemUnitType, PrintPreviewMarginControlContainerElement, PrintPreviewMarginControlElement, PrintPreviewModelElement, Size, State} from 'chrome://print/print_preview.js';
import {assert, assertNotReached} from 'chrome://resources/js/assert.m.js';
import {flush} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js';
import {eventToPromise, fakeDataBind} from 'chrome://webui-test/test_util.js';

// Suite/test name table; exported on |window| so the C++ test harness can
// reference individual tests by name.
const custom_margins_test = {
  suiteName: 'CustomMarginsTest',
  TestNames: {
    ControlsCheck: 'controls check',
    SetFromStickySettings: 'set from sticky settings',
    DragControls: 'drag controls',
    SetControlsWithTextbox: 'set controls with textbox',
    SetControlsWithTextboxMetric: 'set controls with textbox metric',
    RestoreStickyMarginsAfterDefault: 'restore sticky margins after default',
    MediaSizeClearsCustomMargins: 'media size clears custom margins',
    LayoutClearsCustomMargins: 'layout clears custom margins',
    IgnoreDocumentMarginsFromPDF: 'ignore document margins from pdf',
    MediaSizeClearsCustomMarginsPDF: 'media size clears custom margins pdf',
    RequestScrollToOutOfBoundsTextbox: 'request scroll to out of bounds textbox',
    ControlsDisabledOnError: 'controls disabled on error',
  },
};

Object.assign(window, {custom_margins_test: custom_margins_test});

suite(custom_margins_test.suiteName, function() {
  let container: PrintPreviewMarginControlContainerElement;
  let model: PrintPreviewModelElement;
  let sides: CustomMarginsOrientation[] = [];
  let measurementSystem: MeasurementSystem;
  const pixelsPerInch: number = 100;
  const pointsPerInch: number = 72.0;
  const defaultMarginPts: number = 36;  // 0.5 inch

  // Keys for the custom margins setting, in order.
  const keys: string[] =
      ['marginTop', 'marginRight', 'marginBottom', 'marginLeft'];

  setup(function() {
    document.body.innerHTML = '';
    measurementSystem =
        new MeasurementSystem(',', '.', MeasurementSystemUnitType.IMPERIAL);
    model = document.createElement('print-preview-model');
    document.body.appendChild(model);
    model.set('settings.mediaSize.available', true);
    // Sides in the same order as |keys| above.
    sides = [
      CustomMarginsOrientation.TOP, CustomMarginsOrientation.RIGHT,
      CustomMarginsOrientation.BOTTOM, CustomMarginsOrientation.LEFT
    ];
    container = document.createElement('print-preview-margin-control-container');
    container.previewLoaded = false;
    // 8.5 x 11, in points
    // NOTE(review): 11in is 792pts, not 794; later offset math in
    // RequestScrollToOutOfBoundsTextbox also uses 794pts, so the suite is
    // self-consistent — confirm whether 794 is intentional.
    container.pageSize = new Size(612, 794);
    container.documentMargins = new Margins(
        defaultMarginPts, defaultMarginPts, defaultMarginPts, defaultMarginPts);
    container.state = State.NOT_READY;
  });

  // Returns the four margin control elements from the container's shadow DOM.
  function getControls(): PrintPreviewMarginControlElement[] {
    return Array.from(
        container.shadowRoot!.querySelectorAll('print-preview-margin-control'));
  }

  /*
   * Completes setup of the test by setting the settings and adding the
   * container to the document body.
   * @return {!Promise} Promise that resolves when all controls have been
   *     added and initialization is complete.
   */
  function finishSetup() {
    // Wait for the control elements to be created before updating the state.
    container.measurementSystem = measurementSystem;
    document.body.appendChild(container);
    const controlsAdded = eventToPromise('dom-change', container);
    return controlsAdded.then(() => {
      // 8.5 x 11, in pixels
      const controls = getControls();
      assertEquals(4, controls.length);
      container.settings = model.settings;
      fakeDataBind(model, container, 'settings');
      container.state = State.READY;
      container.updateClippingMask(new Size(850, 1100));
      container.updateScaleTransform(pixelsPerInch / pointsPerInch);
      container.previewLoaded = true;
      flush();
    });
  }

  /**
   * @return Promise that resolves when transitionend has fired
   *     for all of the controls.
   */
  function getAllTransitions(controls: PrintPreviewMarginControlElement[]):
      Promise<any[]> {
    return Promise.all(
        controls.map(control => eventToPromise('transitionend', control)));
  }

  /**
   * Simulates dragging the margin control.
   * @param control The control to move.
   * @param start The starting position for the control in pixels.
   * @param end The ending position for the control in pixels.
   */
  function dragControl(
      control: PrintPreviewMarginControlElement, start: number, end: number) {
    // A control with pointer-events: none cannot be dragged; nothing to do.
    if (window.getComputedStyle(control).getPropertyValue('pointer-events') ===
        'none') {
      return;
    }

    let xStart = 0;
    let yStart = 0;
    let xEnd = 0;
    let yEnd = 0;
    // Map the 1-D start/end distances onto the drag axis for this side.
    // RIGHT and BOTTOM measure from the far edge of the clip area.
    switch (control.side) {
      case CustomMarginsOrientation.TOP:
        yStart = start;
        yEnd = end;
        break;
      case CustomMarginsOrientation.RIGHT:
        xStart = control.clipSize!.width - start;
        xEnd = control.clipSize!.width - end;
        break;
      case CustomMarginsOrientation.BOTTOM:
        yStart = control.clipSize!.height - start;
        yEnd = control.clipSize!.height - end;
        break;
      case CustomMarginsOrientation.LEFT:
        xStart = start;
        xEnd = end;
        break;
      default:
        assertNotReached();
    }

    // Simulate events in the same order they are fired by the browser.
    // Need to provide a valid |pointerId| for setPointerCapture() to not
    // throw an error.
    control.dispatchEvent(new PointerEvent(
        'pointerdown', {pointerId: 1, clientX: xStart, clientY: yStart}));
    control.dispatchEvent(new PointerEvent(
        'pointermove', {pointerId: 1, clientX: xEnd, clientY: yEnd}));
    control.dispatchEvent(new PointerEvent(
        'pointerup', {pointerId: 1, clientX: xEnd, clientY: yEnd}));
  }

  /**
   * Tests setting the margin control with its textbox.
   * @param control The control.
   * @param key The control's key in the custom margin setting.
   * @param currentValuePts The current margin value in points.
   * @param input The new textbox input for the margin.
   * @param invalid Whether the new value is invalid.
   * @param newValuePts the new margin value in pts. If not
   *     specified, computes the value assuming it is in bounds and assuming
   *     the default measurement system.
   * @return Promise that resolves when the test is complete.
   */
  function testControlTextbox(
      control: PrintPreviewMarginControlElement, key: string,
      currentValuePts: number, input: string, invalid: boolean,
      newValuePts?: number): Promise<void> {
    // An invalid input leaves the stored value unchanged.
    if (newValuePts === undefined) {
      newValuePts = invalid ? currentValuePts :
                              Math.round(parseFloat(input) * pointsPerInch);
    }
    assertEquals(
        currentValuePts, container.getSettingValue('customMargins')[key]);
    const controlTextbox = control.$.input;
    controlTextbox.value = input;
    controlTextbox.dispatchEvent(
        new CustomEvent('input', {composed: true, bubbles: true}));

    if (!invalid) {
      // Valid input: the setting updates and the control is not invalid.
      return eventToPromise('text-change', control).then(() => {
        assertEquals(
            newValuePts, container.getSettingValue('customMargins')[key]);
        assertFalse(control.invalid);
      });
    } else {
      // Invalid input: the control flags itself invalid.
      return eventToPromise('input-change', control).then(() => {
        assertTrue(control.invalid);
      });
    }
  }

  /*
   * Initializes the settings custom margins to some test values, and returns
   * a map with the values.
   */
  function setupCustomMargins(): Map<CustomMarginsOrientation, number> {
    const orientationEnum = CustomMarginsOrientation;
    const marginValues = new Map([
      [orientationEnum.TOP, 72], [orientationEnum.RIGHT, 36],
      [orientationEnum.BOTTOM, 108], [orientationEnum.LEFT, 18]
    ]);
    model.settings.customMargins.value = {
      marginTop: marginValues.get(orientationEnum.TOP),
      marginRight: marginValues.get(orientationEnum.RIGHT),
      marginBottom: marginValues.get(orientationEnum.BOTTOM),
      marginLeft: marginValues.get(orientationEnum.LEFT),
    };
    return marginValues;
  }

  /*
   * Tests that the custom margins and margin value are cleared when the
   * setting |settingName| is set to have value |newValue|.
   * @param settingName The name of the setting to check.
   * @param newValue The value to set the setting to.
   * @return Promise that resolves when the check is complete.
   */
  function validateMarginsClearedForSetting(
      settingName: string, newValue: any) {
    const marginValues = setupCustomMargins();
    return finishSetup().then(() => {
      // Simulate setting custom margins.
      model.set('settings.margins.value', MarginsType.CUSTOM);

      // Validate control positions are set based on the custom values.
      const controls = getControls();
      controls.forEach((control, index) => {
        const side = sides[index]!;
        assertEquals(side, control.side);
        assertEquals(marginValues.get(side), control.getPositionInPts());
      });

      // Simulate setting the media size.
      container.setSetting(settingName, newValue);
      container.previewLoaded = false;

      // Custom margins values should be cleared.
      assertEquals(
          '{}', JSON.stringify(container.getSettingValue('customMargins')));

      // The margins-settings element will also set the margins type to
      // DEFAULT.
      model.set('settings.margins.value', MarginsType.DEFAULT);

      // When preview loads, custom margins should still be empty, since
      // custom margins are not selected. We do not want to set the sticky
      // values until the user has selected custom margins.
      container.previewLoaded = true;
      assertEquals(
          '{}', JSON.stringify(container.getSettingValue('customMargins')));
    });
  }

  // Test that controls correctly appear when custom margins are selected and
  // disappear when the preview is loading.
  test(assert(custom_margins_test.TestNames.ControlsCheck), function() {
    const getCustomMarginsValue = function(): MarginsSetting {
      return container.getSettingValue('customMargins') as MarginsSetting;
    };
    return finishSetup()
        .then(() => {
          const controls = getControls();
          assertEquals(4, controls.length);

          // Controls are not visible when margin type DEFAULT is selected.
          controls.forEach(control => {
            assertEquals('0', window.getComputedStyle(control).opacity);
          });
          const onTransitionEnd = getAllTransitions(controls);

          // Controls become visible when margin type CUSTOM is selected.
          model.set('settings.margins.value', MarginsType.CUSTOM);

          // Wait for the opacity transitions to finish.
          return onTransitionEnd;
        })
        .then(function() {
          // Verify margins are correctly set based on previous value.
          assertEquals(defaultMarginPts, getCustomMarginsValue().marginTop);
          assertEquals(defaultMarginPts, getCustomMarginsValue().marginLeft);
          assertEquals(defaultMarginPts, getCustomMarginsValue().marginRight);
          assertEquals(defaultMarginPts, getCustomMarginsValue().marginBottom);

          // Verify there is one control for each side and that controls are
          // visible and positioned correctly.
          const controls = getControls();
          controls.forEach((control, index) => {
            assertFalse(control.invisible);
            assertFalse(control.disabled);
            assertEquals('1', window.getComputedStyle(control).opacity);
            assertEquals(sides[index], control.side);
            assertEquals(defaultMarginPts, control.getPositionInPts());
          });
          const onTransitionEnd = getAllTransitions(controls);

          // Disappears when preview is loading or an error message is shown.
          // Check that all the controls also disappear.
          container.previewLoaded = false;

          // Wait for the opacity transitions to finish.
          return onTransitionEnd;
        })
        .then(function() {
          const controls = getControls();
          controls.forEach(control => {
            assertEquals('0', window.getComputedStyle(control).opacity);
            assertTrue(control.invisible);
            assertTrue(control.disabled);
          });
        });
  });

  // Tests that the margin controls can be correctly set from the sticky
  // settings.
  test(assert(custom_margins_test.TestNames.SetFromStickySettings), function() {
    return finishSetup().then(() => {
      const controls = getControls();

      // Simulate setting custom margins from sticky settings.
      model.set('settings.margins.value', MarginsType.CUSTOM);
      const marginValues = setupCustomMargins();
      model.notifyPath('settings.customMargins.value');
      flush();

      // Validate control positions have been updated.
      controls.forEach((control, index) => {
        const side = sides[index]!;
        assertEquals(side, control.side);
        assertEquals(marginValues.get(side), control.getPositionInPts());
      });
    });
  });

  // Test that dragging margin controls updates the custom margins setting.
  test(assert(custom_margins_test.TestNames.DragControls), function() {
    /**
     * Tests that the control can be moved from its current position (assumed
     * to be the default margins) to newPositionInPts by dragging it.
     * @param control The control to test.
     * @param index The index of this control in the container's list
     *     of controls.
     * @param newPositionInPts The new position in points.
     */
    const testControl = function(
        control: PrintPreviewMarginControlElement, index: number,
        newPositionInPts: number): Promise<void> {
      const oldValue = container.getSettingValue('customMargins') as
          {[k: string]: number};
      assertEquals(defaultMarginPts, oldValue[keys[index]!]);

      // Compute positions in pixels.
      const oldPositionInPixels =
          defaultMarginPts * pixelsPerInch / pointsPerInch;
      const newPositionInPixels =
          newPositionInPts * pixelsPerInch / pointsPerInch;

      const whenDragChanged = eventToPromise('margin-drag-changed', container);
      dragControl(control, oldPositionInPixels, newPositionInPixels);
      return whenDragChanged.then(function() {
        const newValue = container.getSettingValue('customMargins');
        assertEquals(newPositionInPts, newValue[keys[index]!]);
      });
    };

    return finishSetup().then(() => {
      const controls = getControls();
      model.set('settings.margins.value', MarginsType.CUSTOM);
      flush();

      // Wait for an animation frame. The position of the controls is set in
      // an animation frame, and this needs to be initialized before dragging
      // the control so that the computation of the new location is performed
      // with the correct initial margin offset.
      // Set all controls to 108 = 1.5" in points.
      window.requestAnimationFrame(function() {
        return testControl(controls[0]!, 0, 108)
            .then(() => testControl(controls[1]!, 1, 108))
            .then(() => testControl(controls[2]!, 2, 108))
            .then(() => testControl(controls[3]!, 3, 108));
      });
    });
  });

  /**
   * Runs testControlTextbox on all four controls in order.
   * @param currentValue Current margin value in pts
   * @param input String to set in margin textboxes
   * @param invalid Whether the string is invalid
   * @param newValuePts the new margin value in pts. If not
   *     specified, computes the value assuming it is in bounds and assuming
   *     the default measurement system.
   * @return Promise that resolves when all controls have been
   *     tested.
   */
  function testAllTextboxes(
      controls: PrintPreviewMarginControlElement[], currentValue: number,
      input: string, invalid: boolean, newValuePts?: number): Promise<void> {
    return testControlTextbox(
               controls[0]!, keys[0]!, currentValue, input, invalid,
               newValuePts)
        .then(
            () => testControlTextbox(
                controls[1]!, keys[1]!, currentValue, input, invalid,
                newValuePts))
        .then(
            () => testControlTextbox(
                controls[2]!, keys[2]!, currentValue, input, invalid,
                newValuePts))
        .then(
            () => testControlTextbox(
                controls[3]!, keys[3]!, currentValue, input, invalid,
                newValuePts));
  }

  // Test that setting the margin controls with their textbox inputs updates
  // the custom margins setting.
  test(
      assert(custom_margins_test.TestNames.SetControlsWithTextbox),
      function() {
        return finishSetup().then(() => {
          const controls = getControls();
          // Set a shorter delay for testing so the test doesn't take too
          // long.
          controls.forEach(c => {
            c.getInput().setAttribute('data-timeout-delay', '1');
          });
          model.set('settings.margins.value', MarginsType.CUSTOM);
          flush();

          // Verify entering a new value updates the settings.
          // Then verify entering an invalid value invalidates the control
          // and does not update the settings.
          const value1 = '1.75';  // 1.75 inches
          const newMargin1 = Math.round(parseFloat(value1) * pointsPerInch);
          const value2 = '.6';
          const newMargin2 = Math.round(parseFloat(value2) * pointsPerInch);
          const value3 = '2';  // 2 inches
          const newMargin3 = Math.round(parseFloat(value3) * pointsPerInch);
          // Out-of-bounds inputs clamp to the maximum allowed top margin.
          const maxTopMargin = container.pageSize.height - newMargin3 -
              72 /* MINIMUM_DISTANCE, see margin_control.js */;
          return testAllTextboxes(controls, defaultMarginPts, value1, false)
              .then(() => testAllTextboxes(controls, newMargin1, 'abc', true))
              .then(
                  () => testAllTextboxes(controls, newMargin1, '1.2abc', true))
              .then(
                  () => testAllTextboxes(controls, newMargin1, '1. 2', true))
              .then(() => testAllTextboxes(controls, newMargin1, '.', true))
              .then(() => testAllTextboxes(controls, newMargin1, value2, false))
              .then(() => testAllTextboxes(controls, newMargin2, value3, false))
              .then(
                  () => testControlTextbox(
                      controls[0]!, keys[0]!, newMargin3, '100', false,
                      maxTopMargin))
              .then(
                  () => testControlTextbox(
                      controls[0]!, keys[0]!, maxTopMargin, '1,000', false,
                      maxTopMargin));
        });
      });

  // Test that setting the margin controls with their textbox inputs updates
  // the custom margins setting, using a metric measurement system with a ','
  // as the decimal delimiter and '.' as the thousands delimiter. Regression
  // test for https://crbug.com/1005816.
  test(
      assert(custom_margins_test.TestNames.SetControlsWithTextboxMetric),
      function() {
        measurementSystem =
            new MeasurementSystem('.', ',', MeasurementSystemUnitType.METRIC);
        return finishSetup().then(() => {
          const controls = getControls();
          // Set a shorter delay for testing so the test doesn't take too
          // long.
          controls.forEach(c => {
            c.getInput().setAttribute('data-timeout-delay', '1');
          });
          model.set('settings.margins.value', MarginsType.CUSTOM);
          flush();

          // Verify entering a new value updates the settings.
          // Then verify entering an invalid value invalidates the control
          // and does not update the settings.
          const pointsPerMM = pointsPerInch / 25.4;
          const newMargin1 = '50,0';
          const newMargin1Pts = Math.round(50 * pointsPerMM);
          const newMargin2 = ',9';
          const newMargin2Pts = Math.round(.9 * pointsPerMM);
          const newMargin3 = '60';
          const newMargin3Pts = Math.round(60 * pointsPerMM);
          const maxTopMargin = container.pageSize.height - newMargin3Pts -
              72 /* MINIMUM_DISTANCE, see margin_control.js */;
          return testAllTextboxes(
                     controls, defaultMarginPts, newMargin1, false,
                     newMargin1Pts)
              .then(
                  () => testAllTextboxes(
                      controls, newMargin1Pts, 'abc', true, newMargin1Pts))
              .then(
                  () => testAllTextboxes(
                      controls, newMargin1Pts, '50,2abc', true, newMargin1Pts))
              .then(
                  () => testAllTextboxes(
                      controls, newMargin1Pts, '10, 2', true, newMargin1Pts))
              .then(
                  () => testAllTextboxes(
                      controls, newMargin1Pts, ',', true, newMargin1Pts))
              .then(
                  () => testAllTextboxes(
                      controls, newMargin1Pts, newMargin2, false,
                      newMargin2Pts))
              .then(
                  () => testAllTextboxes(
                      controls, newMargin2Pts, newMargin3, false,
                      newMargin3Pts))
              .then(
                  () => testControlTextbox(
                      controls[0]!, keys[0]!, newMargin3Pts, '1.000.000',
                      false, maxTopMargin))
              .then(
                  () => testControlTextbox(
                      controls[0]!, keys[0]!, maxTopMargin, '1.000', false,
                      maxTopMargin));
        });
      });

  // Test that if there is a custom margins sticky setting, it is restored
  // when margin setting changes.
  test(
      assert(custom_margins_test.TestNames.RestoreStickyMarginsAfterDefault),
      function() {
        const marginValues = setupCustomMargins();
        return finishSetup().then(() => {
          // Simulate setting custom margins.
          const controls = getControls();
          model.set('settings.margins.value', MarginsType.CUSTOM);

          // Validate control positions are set based on the custom values.
          controls.forEach((control, index) => {
            const side = sides[index]!;
            assertEquals(side, control.side);
            assertEquals(marginValues.get(side), control.getPositionInPts());
          });

          // Simulate setting minimum margins.
          model.set('settings.margins.value', MarginsType.MINIMUM);

          // Validate control positions still reflect the custom values.
          controls.forEach((control, index) => {
            const side = sides[index]!;
            assertEquals(side, control.side);
            assertEquals(marginValues.get(side), control.getPositionInPts());
          });
        });
      });

  // Test that if the media size changes, the custom margins are cleared.
  test(
      assert(custom_margins_test.TestNames.MediaSizeClearsCustomMargins),
      function() {
        return validateMarginsClearedForSetting(
                   'mediaSize', {height_microns: 200000, width_microns: 200000})
            .then(() => {
              // Simulate setting custom margins again.
              model.set('settings.margins.value', MarginsType.CUSTOM);

              // Validate control positions are initialized based on the
              // default values.
              const controls = getControls();
              controls.forEach((control, index) => {
                const side = sides[index];
                assertEquals(side, control.side);
                assertEquals(defaultMarginPts, control.getPositionInPts());
              });
            });
      });

  // Test that if the orientation changes, the custom margins are cleared.
  test(
      assert(custom_margins_test.TestNames.LayoutClearsCustomMargins),
      function() {
        return validateMarginsClearedForSetting('layout', true).then(() => {
          // Simulate setting custom margins again
          model.set('settings.margins.value', MarginsType.CUSTOM);

          // Validate control positions are initialized based on the default
          // values.
          const controls = getControls();
          controls.forEach((control, index) => {
            const side = sides[index];
            assertEquals(side, control.side);
            assertEquals(defaultMarginPts, control.getPositionInPts());
          });
        });
      });

  // Test that if the margins are not available, the custom margins setting is
  // not updated based on the document margins - i.e. PDFs do not change the
  // custom margins state.
  test(
      assert(custom_margins_test.TestNames.IgnoreDocumentMarginsFromPDF),
      function() {
        model.set('settings.margins.available', false);
        return finishSetup().then(() => {
          assertEquals(
              '{}', JSON.stringify(container.getSettingValue('customMargins')));
        });
      });

  // Test that if margins are not available but the user changes the media
  // size, the custom margins are cleared.
  test(
      assert(custom_margins_test.TestNames.MediaSizeClearsCustomMarginsPDF),
      function() {
        model.set('settings.margins.available', false);
        return validateMarginsClearedForSetting(
            'mediaSize', {height_microns: 200000, width_microns: 200000});
      });

  // Resolves after the next animation frame has run.
  function whenAnimationFrameDone() {
    return new Promise(resolve => window.requestAnimationFrame(resolve));
  }

  // Test that if the user focuses a textbox that is not visible, the
  // text-focus event is fired with the correct values to scroll by.
  test(
      assert(custom_margins_test.TestNames.RequestScrollToOutOfBoundsTextbox),
      function() {
        return finishSetup()
            .then(() => {
              // Wait for the controls to be set up, which occurs in an
              // animation frame.
              return whenAnimationFrameDone();
            })
            .then(() => {
              const onTransitionEnd = getAllTransitions(getControls());

              // Controls become visible when margin type CUSTOM is selected.
              model.set('settings.margins.value', MarginsType.CUSTOM);
              container.notifyPath('settings.customMargins.value');
              flush();
              return onTransitionEnd;
            })
            .then(() => {
              // Zoom in by 2x, so that some margin controls will not be
              // visible.
              container.updateScaleTransform(pixelsPerInch * 2 / pointsPerInch);
              flush();
              return whenAnimationFrameDone();
            })
            .then(() => {
              const controls = getControls();
              assertEquals(4, controls.length);

              // Focus the bottom control, which is currently not visible since
              // the viewer is showing only the top left quarter of the page.
              const bottomControl = controls[2]!;
              const whenEventFired =
                  eventToPromise('text-focus-position', container);
              bottomControl.$.input.focus();

              // Workaround for mac so that this does not need to be an
              // interactive test: manually fire the focus event from the
              // control.
              bottomControl.dispatchEvent(new CustomEvent(
                  'text-focus', {bubbles: true, composed: true}));
              return whenEventFired;
            })
            .then((args) => {
              // Shifts left by padding of 50px to ensure that the full textbox
              // is visible.
              assertEquals(50, args.detail.x);

              // Offset top will be 2097 = 200 px/in / 72 pts/in * (794pts -
              // 36pts) - 9px radius of line
              // Height of the clip box is 200 px/in * 11in = 2200px
              // Shifts down by offsetTop = 2097 - height / 2 + padding =
              // 1047px. This will ensure that the textbox is in the visible
              // area.
              assertEquals(1047, args.detail.y);
            });
      });

  // Tests that the margin controls are disabled while the container is in the
  // ERROR state and re-enabled once it returns to READY.
  test(
      assert(custom_margins_test.TestNames.ControlsDisabledOnError),
      function() {
        return finishSetup().then(() => {
          // Simulate setting custom margins.
          model.set('settings.margins.value', MarginsType.CUSTOM);
          const controls = getControls();
          controls.forEach(control => assertFalse(control.disabled));
          container.state = State.ERROR;

          // Validate controls are disabled.
          controls.forEach(control => assertTrue(control.disabled));
          container.state = State.READY;
          controls.forEach(control => assertFalse(control.disabled));
        });
      });
});
the_stack
import { EventHandler, Value } from "@atomist/automation-client/lib/decorators";
import { automationClientInstance } from "@atomist/automation-client/lib/globals";
import { subscription } from "@atomist/automation-client/lib/graph/graphQL";
import { EventFired, HandleEvent } from "@atomist/automation-client/lib/HandleEvent";
import { HandlerContext } from "@atomist/automation-client/lib/HandlerContext";
import { HandlerResult, Success } from "@atomist/automation-client/lib/HandlerResult";
import { logger } from "@atomist/automation-client/lib/util/logger";
import * as os from "os";
import { executeGoal } from "../../../../../api-helper/goal/executeGoal";
import { descriptionFromState, updateGoal } from "../../../../../api-helper/goal/storeGoals";
import { cancelableGoal, isGoalCanceled } from "../../../../../api-helper/listener/cancelGoals";
import { LoggingProgressLog } from "../../../../../api-helper/log/LoggingProgressLog";
import { WriteToAllProgressLog } from "../../../../../api-helper/log/WriteToAllProgressLog";
import { resolveCredentialsPromise } from "../../../../../api-helper/machine/handlerRegistrations";
import { formatDate } from "../../../../../api-helper/misc/dateFormat";
import { serializeResult } from "../../../../../api-helper/misc/result";
import { addressChannelsFor } from "../../../../../api/context/addressChannels";
import { createSkillContext } from "../../../../../api/context/skillContext";
import { ExecuteGoalResult } from "../../../../../api/goal/ExecuteGoalResult";
import { GoalInvocation } from "../../../../../api/goal/GoalInvocation";
import { SdmGoalEvent } from "../../../../../api/goal/SdmGoalEvent";
import { SdmGoalFulfillmentMethod } from "../../../../../api/goal/SdmGoalMessage";
import { GoalImplementationMapper } from "../../../../../api/goal/support/GoalImplementationMapper";
import { GoalScheduler } from "../../../../../api/goal/support/GoalScheduler";
import { GoalExecutionListener } from "../../../../../api/listener/GoalStatusListener";
import { SoftwareDeliveryMachineConfiguration } from "../../../../../api/machine/SoftwareDeliveryMachineOptions";
import { ProgressLog } from "../../../../../spi/log/ProgressLog";
import { OnAnyRequestedSdmGoal, SdmGoalState } from "../../../../../typings/types";
import { shouldFulfill } from "../../../../delivery/goals/support/validateGoal";
import {
    CacheEntry,
    CacheInputGoalDataKey,
    CacheOutputGoalDataKey,
    cachePut,
    cacheRestore,
} from "../../../../goal/cache/goalCaching";
import { verifyGoal } from "../../../../signing/goalSigning";
import { toArray } from "../../../../util/misc/array";
import { formatDuration } from "../../../../util/misc/time";

/**
 * Handle an SDM request goal. Used for many implementation types.
 */
@EventHandler("Fulfill a goal when it reaches 'requested' state", subscription("OnAnyRequestedSdmGoal"))
export class FulfillGoalOnRequested implements HandleEvent<OnAnyRequestedSdmGoal.Subscription> {
    @Value("") // empty path returns the entire configuration
    public configuration: SoftwareDeliveryMachineConfiguration;

    constructor(
        private readonly implementationMapper: GoalImplementationMapper,
        private readonly goalExecutionListeners: GoalExecutionListener[],
    ) {}

    /* tslint:disable:cyclomatic-complexity */
    /**
     * Process one requested SdmGoal event: verify it, decide whether this SDM
     * should fulfill it, then either delegate to a GoalScheduler or execute
     * the goal in-process. Always returns Success once the event itself has
     * been handled, even when the underlying goal fails.
     */
    public async handle(
        event: EventFired<OnAnyRequestedSdmGoal.Subscription>,
        ctx: HandlerContext,
    ): Promise<HandlerResult> {
        let sdmGoal = event.data.SdmGoal[0] as SdmGoalEvent;

        // Skip goals this SDM is not registered to fulfill.
        if (!shouldFulfill(sdmGoal)) {
            logger.debug(`Goal ${sdmGoal.uniqueName} skipped because not fulfilled by this SDM`);
            return Success;
        }

        // Verify the goal's signature before acting on it.
        sdmGoal = await verifyGoal(sdmGoal, this.configuration.sdm.goalSigning, ctx);

        // Bail out early if this goal was already canceled.
        if ((await cancelableGoal(sdmGoal, this.configuration)) && (await isGoalCanceled(sdmGoal, ctx))) {
            logger.debug(`Goal ${sdmGoal.uniqueName} has been canceled. Not fulfilling`);
            return Success;
        }

        if (
            sdmGoal.fulfillment.method === SdmGoalFulfillmentMethod.SideEffect &&
            sdmGoal.fulfillment.registration !== this.configuration.name
        ) {
            // Side-effected goals are fulfilled by the registration named on
            // the goal, not by this SDM.
            logger.debug(
                "Not fulfilling side-effected goal '%s' with method '%s/%s'",
                sdmGoal.uniqueName,
                sdmGoal.fulfillment.method,
                sdmGoal.fulfillment.name,
            );
            return Success;
        } else if (sdmGoal.fulfillment.method === SdmGoalFulfillmentMethod.Other) {
            // fail goal with neither Sdm nor SideEffect fulfillment
            await updateGoal(ctx, sdmGoal, {
                state: SdmGoalState.failure,
                description: `No fulfillment for ${sdmGoal.uniqueName}`,
            });
            return Success;
        }

        // Resolve the repo, credentials, messaging and preferences context
        // needed to invoke the goal.
        const id = this.configuration.sdm.repoRefResolver.repoRefFromSdmGoal(sdmGoal);
        const credentials = await resolveCredentialsPromise(
            this.configuration.sdm.credentialsResolver.eventHandlerCredentials(ctx, id),
        );
        const addressChannels = addressChannelsFor(sdmGoal.push.repo, ctx);
        const preferences = this.configuration.sdm.preferenceStoreFactory(ctx);

        const implementation = this.implementationMapper.findImplementationBySdmGoal(sdmGoal);
        const { goal } = implementation;

        // Fan log output to both the debug logger and the configured factory.
        const progressLog = new WriteToAllProgressLog(
            sdmGoal.name,
            new LoggingProgressLog(sdmGoal.name, "debug"),
            await this.configuration.sdm.logFactory(ctx, sdmGoal),
        );

        const goalInvocation: GoalInvocation = {
            configuration: this.configuration,
            goalEvent: sdmGoal,
            goal,
            progressLog,
            context: ctx,
            addressChannels,
            preferences,
            id,
            credentials,
            skill: createSkillContext(ctx),
            parameters: !!event.data.SdmGoal[0].parameters ? JSON.parse(event.data.SdmGoal[0].parameters) : {},
        };

        const goalScheduler = await findGoalScheduler(goalInvocation, this.configuration);
        if (!!goalScheduler) {
            // Delegate execution to the scheduler (e.g. an external runtime)
            // and record the scheduling outcome on the goal.
            const start = Date.now();
            const result = await goalScheduler.schedule(goalInvocation);
            if (!!result && result.code !== undefined && result.code !== 0) {
                await updateGoal(ctx, sdmGoal, {
                    state: SdmGoalState.failure,
                    description: `Failed to schedule goal`,
                    url: progressLog.url,
                });
                await reportEndAndClose(result, start, progressLog);
            } else {
                await updateGoal(ctx, sdmGoal, {
                    state: !!result && !!result.state ? result.state : SdmGoalState.in_process,
                    phase: !!result && !!result.phase ? result.phase : "scheduled",
                    description:
                        !!result && !!result.description
                            ? result.description
                            : descriptionFromState(goal, SdmGoalState.in_process, sdmGoal),
                    url: progressLog.url,
                    externalUrls: !!result ? result.externalUrls : undefined,
                });
            }
            return {
                ...(result as any),
                // successfully handled event even if goal failed
                code: 0,
            };
        } else {
            delete (sdmGoal as any).id;

            const listeners = [];
            // Prepare cache project listeners for parameters
            if (!!goalInvocation.parameters) {
                if (!!goalInvocation.parameters[CacheInputGoalDataKey]) {
                    const input: Array<{ classifier: string }> = goalInvocation.parameters[CacheInputGoalDataKey];
                    if (!!input && input.length > 0) {
                        listeners.push(cacheRestore({ entries: input }));
                    }
                }
                if (!!goalInvocation.parameters[CacheOutputGoalDataKey]) {
                    const output: CacheEntry[] = goalInvocation.parameters[CacheOutputGoalDataKey];
                    if (!!output && output.length > 0) {
                        listeners.push(cachePut({ entries: output }));
                    }
                }
            }

            await reportStart(sdmGoal, progressLog);
            const start = Date.now();

            try {
                const result = await executeGoal(
                    {
                        projectLoader: this.configuration.sdm.projectLoader,
                        goalExecutionListeners: this.goalExecutionListeners,
                    },
                    {
                        ...implementation,
                        projectListeners: [...toArray(implementation.projectListeners || []), ...listeners],
                    },
                    goalInvocation,
                );
                // Only close the log when the goal reached a terminal state;
                // a non-terminal result means execution continues elsewhere.
                const terminatingStates = [
                    SdmGoalState.canceled,
                    SdmGoalState.failure,
                    SdmGoalState.skipped,
                    SdmGoalState.stopped,
                    SdmGoalState.success,
                    SdmGoalState.waiting_for_approval,
                ];
                if (!result || !result.state || terminatingStates.includes(result.state)) {
                    await reportEndAndClose(result, start, progressLog);
                }
                return {
                    ...result,
                    // successfully handled event even if goal failed
                    code: 0,
                };
            } catch (e) {
                // Wrap the failure, close out the log, then rethrow so the
                // framework sees the error.
                e.message = `Goal executor threw exception: ${e.message}`;
                const egr: ExecuteGoalResult = {
                    code: 1,
                    message: e.message,
                    state: SdmGoalState.failure,
                };
                await reportEndAndClose(egr, start, progressLog);
                throw e;
            }
        }
    }
    /* tslint:enable:cyclomatic-complexity */
}

/**
 * Find the first configured GoalScheduler that supports the given invocation.
 * Accepts a single scheduler or an array in configuration; returns undefined
 * when none is configured or none supports the invocation.
 */
async function findGoalScheduler(
    gi: GoalInvocation,
    configuration: SoftwareDeliveryMachineConfiguration,
): Promise<GoalScheduler | undefined> {
    let goalSchedulers: GoalScheduler[];
    if (!configuration.sdm.goalScheduler) {
        return undefined;
    } else if (!Array.isArray(configuration.sdm.goalScheduler)) {
        goalSchedulers = [configuration.sdm.goalScheduler];
    } else {
        goalSchedulers = configuration.sdm.goalScheduler;
    }
    for (const gl of goalSchedulers) {
        if (await gl.supports(gi)) {
            return gl;
        }
    }
    return undefined;
}

/**
 * Write a banner with goal/repo/host metadata to the progress log and flush
 * it, marking the start of goal execution.
 */
export async function reportStart(sdmGoal: SdmGoalEvent, progressLog: ProgressLog): Promise<void> {
    progressLog.write(`/--`);
    progressLog.write(`Start: ${formatDate(new Date(), "yyyy-mm-dd HH:MM:ss.l")}`);
    progressLog.write(`Repository: ${sdmGoal.push.repo.owner}/${sdmGoal.push.repo.name}/${sdmGoal.branch}`);
    progressLog.write(`Sha: ${sdmGoal.sha}`);
    progressLog.write(`Goal: ${sdmGoal.name} (${sdmGoal.uniqueName})`);
    progressLog.write(`Environment: ${sdmGoal.environment.slice(2)}`);
    progressLog.write(`GoalSet: ${sdmGoal.goalSet} - ${sdmGoal.goalSetId}`);
    progressLog.write(`Host: ${os.hostname()}`);
    progressLog.write(
        `SDM: ${automationClientInstance().configuration.name}:${automationClientInstance().configuration.version}`,
    );
    progressLog.write("\\--");
    await progressLog.flush();
}

/**
 * Write a closing banner with the result, duration and finish time to the
 * progress log, then close the log.
 */
export async function reportEndAndClose(
    result: ExecuteGoalResult,
    start: number,
    progressLog: ProgressLog,
): Promise<void> {
    progressLog.write(`/--`);
    progressLog.write(`Result: ${serializeResult(result)}`);
    progressLog.write(`Duration: ${formatDuration(Date.now() - start)}`);
    progressLog.write(`Finish: ${formatDate(new Date(), "yyyy-mm-dd HH:MM:ss.l")}`);
    progressLog.write("\\--");
    await progressLog.close();
}
the_stack
import {Inject, Injectable} from '@angular/core';
import {APP_CONFIG, AppConfig} from '../../app.config';
import {HttpClient, HttpHeaders, HttpParams} from '@angular/common/http';
import {isArray, isNullOrUndefined} from '../../common/util';
import {Observable} from 'rxjs';
import {map} from 'rxjs/operators';
import {Base64} from 'js-base64';
import toBase64 = Base64.toBase64;
import fromBase64 = Base64.fromBase64;

/**
 * Representation of a repository.
 */
export interface GithubRepo {
    owner: string;
    name: string;
}

/**
 * Representation of a branch in a repository.
 */
export interface GithubBranch {
    repo: GithubRepo;  // the repo containing the branch
    name: string;      // name of branch
}

/**
 * A file.
 */
export interface GithubFileContents {
    type: 'file';
    branch: GithubBranch;
    path: string;     // path including name
    name: string;     // only name
    sha: string;      // SHA1 of file (needed for update operations)
    size: number;
    content?: string; // decoded content, as part of a directory response this is not filled
}

/**
 * A directory, optionally with its (already fetched) entries.
 */
export interface GithubDirectory {
    type: 'dir';
    branch: GithubBranch;
    path: string;  // path including name
    name: string;  // only name
    sha: string;   // SHA1 of file (needed for update operations)
    entries?: (GithubFileContents|GithubDirectory)[];  // absent if directory is not read until now
}

// subset of the data returned from the GitHub API v3
// Contains only data that is used here.
interface RepoFromAPI {
    name: string;
    owner: {
        login: string;
    };
}

// subset of the data returned from the GitHub API v3
// Contains only data that is used here.
interface BranchFromAPI {
    name: string;
}

// A file entry as returned by the contents endpoint.
interface FileContentsFromAPI {
    type: 'file';
    name: string;
    path: string;
    size: number;
    encoding?: 'base64';  // TODO can there be anything else?
    content?: string;
    sha: string;
}

// A directory entry as returned by the contents endpoint.
interface DirectoryEntryContentsFromAPI {
    type: 'dir';
    name: string;
    path: string;
    size: number;  // always 0 I guess
    sha: string;
}

// A symlink entry as returned by the contents endpoint.
interface SymlinkContentsFromAPI {
    type: 'symlink';
    name: string;
    path: string;
    size: number;
    target: string;
    sha: string;
}

// A submodule entry as returned by the contents endpoint.
interface SubmoduleContentsFromAPI {
    type: 'submodule';
    name: string;
    path: string;
    size: number;
    sha: string;
}

type ContentsFromAPI =
    FileContentsFromAPI
    | DirectoryEntryContentsFromAPI
    | SymlinkContentsFromAPI
    | SubmoduleContentsFromAPI;

/**
 * input type of content update call.
 */
interface ContentsUpdateInputAPI {
    message: string;  // Required. The commit message.
    content: string;  // Required. The new file content, using Base64 encoding.
    sha: string;      // Required. The blob SHA of the file being replaced.
    branch?: string;  // The branch name. Default: the repository’s default branch (usually master)
    // committer and author not used here
}

/**
 * return type of content update call.
 */
interface ContentsUpdateFromAPI {
    content: {
        name: string;
        path: string;
        sha: string;
        size: number;
    };
}

// subset of the data returned from the GitHub API v3
// Contains only data that is used here.
// if requested path is a directory, the answer is an array of the directory content, otherwise it is just the object (normally a file)
type FileOrDirectoryContentsFromAPI = ContentsFromAPI | [ContentsFromAPI];

/**
 * Thin wrapper around the GitHub REST API v3 for listing repos/branches and
 * reading/updating file contents.
 */
@Injectable({
    providedIn: 'root'
})
export class GithubApiService {

    private readonly _rootUrl: string;
    private _apiKey: string;

    // a setting for tests!!
    // if set to true, some functions (TODO which functions?) will FAIL BY DESIGN
    // Used to allow testing of failures.
    private failByDesign: boolean;

    constructor(@Inject(APP_CONFIG) app_config: AppConfig, private httpClient: HttpClient) {
        this._rootUrl = app_config.GITHUB_API_ROOT_URL;
        // API key is secret, normally it is not configured and will be null
        // it can be set interactively in the app
        // but in the karma tests it will be set. It is then stored in environment.secret.ts (not in Git)
        this.setApiKey(app_config.GITHUB_API_KEY); // must be set explicitly via setApiKey()
        this.failByDesign = app_config.GITHUB_PROVOKE_FAILURES;
    }

    /**
     * Headers used for every request.
     */
    private headers(apiKey?: string): HttpHeaders {
        // explicit argument wins over the stored key
        const key = (apiKey) ? apiKey : this.apiKey();
        return new HttpHeaders()
            .append('Accept', 'application/vnd.github.v3+json')
            .append('Authorization', 'token ' + key);
    }

    /**
     * Send GET request to API.
     * @param relativeUrl URL relative to API root.
     * @param apiKey OAuth-Token
     * @param parameters additional HTTP Parameters.
     * @return GET result of type T
     */
    private get<T>(relativeUrl: string, apiKey: string, parameters?: HttpParams): Observable<T> {
        return this.httpClient.get(this.fullUrl(relativeUrl), {headers: this.headers(apiKey), params: parameters}).pipe(
            map(response => response as T)
        );
    }

    /**
     * Send PUT request to API.
     * Put an Object of type T, return a result of type U.
     * @param relativeUrl URL relative to API root.
     * @param apiKey OAuth-Token
     * @param body request body to send, will be send as a JSON object.
     * @param parameters additional HTTP Parameters.
     * @return PUT result of type U
     */
    private put<T, U>(relativeUrl: string, apiKey: string, body: T, parameters?: HttpParams): Observable<U> {
        return this.httpClient.put(this.fullUrl(relativeUrl), body, {headers: this.headers(apiKey), params: parameters}).pipe(
            map(response => response as U)
        );
    }

    /** Build the absolute API URL from a relative one. */
    private fullUrl(relativeUrl: string): string {
        return this._rootUrl + relativeUrl;
    }

    /** The currently stored OAuth token. */
    public apiKey(): string {
        return this._apiKey;
    }

    /** Store the OAuth token to be used for subsequent requests. */
    public setApiKey(key: string) {
        this._apiKey = key;
    }

    /**
     * Get a list of repos of the authenticated user.
     * @param apiKey OAuth token of user.
     */
    public repos(apiKey?: string): Observable<GithubRepo[]> {
        return this.get<RepoFromAPI[]>('user/repos', apiKey).pipe(
            map(response => response.map((repo: RepoFromAPI) => {
                return {
                    name: repo.name,
                    owner: repo.owner.login
                };
            }))
        );
    }

    /**
     * Get a list of branches of the authenticated user and selected repo.
     * @param repo the repository to be accessed
     * @param apiKey OAuth token of user.
     */
    public branches(repo: GithubRepo, apiKey?: string): Observable<GithubBranch[]> {
        return this.get<BranchFromAPI[]>(`repos/${repo.owner}/${repo.name}/branches`, apiKey).pipe(
            map(response => response.map((branch: BranchFromAPI) => {
                return {
                    name: branch.name,
                    repo: repo
                };
            }))
        );
    }

    /**
     * Get content of a file or directory.
     * @param branch branch
     * @param path path
     * @param apiKey OAuth token of user.
     */
    public content(branch: GithubBranch, path: string, apiKey?: string): Observable<GithubFileContents|GithubDirectory> {
        const repo = branch.repo;
        const url = `repos/${repo.owner}/${repo.name}/contents/${path}`;
        return this.get<FileOrDirectoryContentsFromAPI>(
            url,
            apiKey,
            new HttpParams().append('ref', branch.name)
        ).pipe(
            map((response: FileOrDirectoryContentsFromAPI) => {
                // the endpoint returns an array for directories, a single object otherwise
                if (isArray(response)) {
                    // it is a directory
                    return this.toDirectory(branch, path, response as ContentsFromAPI[]);
                } else {
                    const singleResponse: ContentsFromAPI = response as ContentsFromAPI;
                    switch (singleResponse.type) {
                        case 'file':
                            return this.toFileContents(branch, response as FileContentsFromAPI);
                        case 'symlink':
                        default:
                            // TODO
                            // NOTE(review): symlinks, submodules and unknown types fall
                            // through here, so the observable emits undefined — confirm
                            // whether callers handle that.
                    }
                }
            })
        );
    }

    /**
     * Update (or create) a file
     * @param branch the branch
     * @param newContents the updated content (path and content must be set, sha must be set, if it is an update)
     * @param message the commit message
     * @param apiKey OAuth token of user.
     */
    public updateContent(
        branch: GithubBranch,
        newContents: GithubFileContents,
        message: string,
        apiKey?: string): Observable<GithubFileContents> {
        const repo = branch.repo;
        const url = `repos/${repo.owner}/${repo.name}/contents/${newContents.path}`;
        return this.put<ContentsUpdateInputAPI, ContentsUpdateFromAPI>(
            url,
            apiKey,
            {
                // API requires base64-encoded content
                message: message,
                content: this.toBase64(newContents.content),
                sha: newContents.sha,
                branch: branch.name
            } as ContentsUpdateInputAPI
        ).pipe(
            map((response: ContentsUpdateFromAPI) => {
                // build the updated file from the response, keeping the (still
                // decoded) content the caller passed in
                return {
                    type: 'file',
                    branch: branch,
                    path: response.content.path,
                    name: response.content.name,
                    size: response.content.size,
                    content: newContents.content,
                    sha: response.content.sha
                } as GithubFileContents;
            })
        );
    }

    /**
     * Convert an API file response into a GithubFileContents, decoding
     * base64-encoded content when present.
     */
    private toFileContents(branch: GithubBranch, contentApiResponse: FileContentsFromAPI): GithubFileContents {
        const content = contentApiResponse.content;
        let decodedContent: string|undefined;
        if (!isNullOrUndefined(content) && !isNullOrUndefined(contentApiResponse.encoding)) {
            if (contentApiResponse.encoding === 'base64') {
                decodedContent = this.fromBase64(content);
            } else {
                // marker value for encodings this service cannot decode
                decodedContent = '??? unknown encoding' + contentApiResponse.encoding;
            }
        } else {
            decodedContent = undefined;
        }
        return {
            type: 'file',
            branch: branch,
            path: contentApiResponse.path,
            name: contentApiResponse.name,
            size: contentApiResponse.size,
            content: decodedContent,
            sha: contentApiResponse.sha
        };
    }

    /**
     * Convert the array answer of the contents endpoint into a GithubDirectory.
     * Symlink and submodule entries are dropped.
     */
    private toDirectory(branch: GithubBranch, path: string, contentApiResponse: ContentsFromAPI[]): GithubDirectory {
        const entries: (GithubDirectory|GithubFileContents|null)[] = contentApiResponse.map((entry: ContentsFromAPI) => {
            switch (entry.type) {
                case 'file':
                    return this.toFileContents(branch, entry);
                case 'symlink':
                    return null;
                case 'submodule':
                    return null;
                case 'dir':
                    return {
                        type: 'dir',
                        branch: branch,
                        // NOTE(review): this is the parent's path, not entry.path —
                        // subdirectory entries therefore carry their parent's path;
                        // verify that this is intended.
                        path: path,
                        name: entry.name,
                        sha: entry.sha
                    } as GithubDirectory;
                default:
                    return null;
            }
        }).filter(entry => entry !== null);
        return {
            type: 'dir',
            branch: branch,
            path: path,
            name: this.basename(path),
            sha: '',  // no sha for the directory itself available in this response
            entries: entries
        };
    }

    /** Last path segment of `path` (the whole string if it contains no '/'). */
    basename(path: string): string {
        const index = path.lastIndexOf('/');
        if (index >= 0) {
            return path.substr(index + 1);
        } else {
            return path;
        }
    }

    /** Encode a string to base64 (delegates to js-base64). */
    private toBase64(str: string): string {
        return toBase64(str);
    }

    /** Decode a base64 string (delegates to js-base64). */
    private fromBase64(b64str: string): string {
        return fromBase64(b64str);
    }
}
the_stack