/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
import _ from 'lodash';
import RowParser from '../../../lib/row_parser';
import { XJson } from '../../../../../opensearch_ui_shared/public';
import * as utils from '../../../lib/utils';
// @ts-ignore
import * as opensearch from '../../../lib/opensearch/opensearch';
import { CoreEditor, Position, Range } from '../../../types';
import { createTokenIterator } from '../../factories';
import Autocomplete from '../../../lib/autocomplete/autocomplete';
const { collapseLiteralStrings } = XJson;
export class SenseEditor {
currentReqRange: (Range & { markerRef: any }) | null;
parser: any;
// @ts-ignore
private readonly autocomplete: any;
constructor(private readonly coreEditor: CoreEditor) {
this.currentReqRange = null;
this.parser = new RowParser(this.coreEditor);
this.autocomplete = new (Autocomplete as any)({
coreEditor,
parser: this.parser,
});
this.coreEditor.registerAutocompleter(this.autocomplete.getCompletions);
this.coreEditor.on(
'tokenizerUpdate',
this.highlightCurrentRequestsAndUpdateActionBar.bind(this)
);
this.coreEditor.on('changeCursor', this.highlightCurrentRequestsAndUpdateActionBar.bind(this));
this.coreEditor.on('changeScrollTop', this.updateActionsBar.bind(this));
}
prevRequestStart = (rowOrPos?: number | Position): Position => {
let curRow: number;
if (rowOrPos == null) {
curRow = this.coreEditor.getCurrentPosition().lineNumber;
} else if (_.isObject(rowOrPos)) {
curRow = (rowOrPos as Position).lineNumber;
} else {
curRow = rowOrPos as number;
}
while (curRow > 0 && !this.parser.isStartRequestRow(curRow, this.coreEditor)) curRow--;
return {
lineNumber: curRow,
column: 1,
};
};
nextRequestStart = (rowOrPos?: number | Position) => {
let curRow: number;
if (rowOrPos == null) {
curRow = this.coreEditor.getCurrentPosition().lineNumber;
} else if (_.isObject(rowOrPos)) {
curRow = (rowOrPos as Position).lineNumber;
} else {
curRow = rowOrPos as number;
}
const maxLines = this.coreEditor.getLineCount();
for (; curRow < maxLines - 1; curRow++) {
if (this.parser.isStartRequestRow(curRow, this.coreEditor)) {
break;
}
}
return {
row: curRow,
column: 0,
};
};
autoIndent = _.debounce(async () => {
await this.coreEditor.waitForLatestTokens();
const reqRange = await this.getRequestRange();
if (!reqRange) {
return;
}
const parsedReq = await this.getRequest();
if (!parsedReq) {
return;
}
if (parsedReq.data && parsedReq.data.length > 0) {
let indent = parsedReq.data.length === 1; // unindent multi docs by default
let formattedData = utils.formatRequestBodyDoc(parsedReq.data, indent);
if (!formattedData.changed) {
// toggle.
indent = !indent;
formattedData = utils.formatRequestBodyDoc(parsedReq.data, indent);
}
parsedReq.data = formattedData.data;
this.replaceRequestRange(parsedReq, reqRange);
}
}, 25);
update = async (data: string, reTokenizeAll = false) => {
return this.coreEditor.setValue(data, reTokenizeAll);
};
replaceRequestRange = (newRequest: any, requestRange: Range) => {
const text = utils.textFromRequest(newRequest);
if (requestRange) {
this.coreEditor.replaceRange(requestRange, text);
} else {
// just insert where we are
this.coreEditor.insert(this.coreEditor.getCurrentPosition(), text);
}
};
getRequestRange = async (lineNumber?: number): Promise<Range | null> => {
await this.coreEditor.waitForLatestTokens();
if (this.parser.isInBetweenRequestsRow(lineNumber)) {
return null;
}
const reqStart = this.prevRequestStart(lineNumber);
const reqEnd = this.nextRequestEnd(reqStart);
return {
start: {
...reqStart,
},
end: {
...reqEnd,
},
};
};
expandRangeToRequestEdges = async (
range = this.coreEditor.getSelectionRange()
): Promise<Range | null> => {
await this.coreEditor.waitForLatestTokens();
let startLineNumber = range.start.lineNumber;
let endLineNumber = range.end.lineNumber;
const maxLine = Math.max(1, this.coreEditor.getLineCount());
if (this.parser.isInBetweenRequestsRow(startLineNumber)) {
/* Do nothing... */
} else {
for (; startLineNumber >= 1; startLineNumber--) {
if (this.parser.isStartRequestRow(startLineNumber)) {
break;
}
}
}
if (startLineNumber < 1 || startLineNumber > endLineNumber) {
return null;
}
// move end row to the previous request end if between requests, otherwise walk forward
if (this.parser.isInBetweenRequestsRow(endLineNumber)) {
for (; endLineNumber >= startLineNumber; endLineNumber--) {
if (this.parser.isEndRequestRow(endLineNumber)) {
break;
}
}
} else {
for (; endLineNumber <= maxLine; endLineNumber++) {
if (this.parser.isEndRequestRow(endLineNumber)) {
break;
}
}
}
if (endLineNumber < startLineNumber || endLineNumber > maxLine) {
return null;
}
const endColumn =
(this.coreEditor.getLineValue(endLineNumber) || '').replace(/\s+$/, '').length + 1;
return {
start: {
lineNumber: startLineNumber,
column: 1,
},
end: {
lineNumber: endLineNumber,
column: endColumn,
},
};
};
getRequestInRange = async (range?: Range) => {
await this.coreEditor.waitForLatestTokens();
if (!range) {
return null;
}
const request: {
method: string;
data: string[];
url: string | null;
range: Range;
} = {
method: '',
data: [],
url: null,
range,
};
const pos = range.start;
const tokenIter = createTokenIterator({ editor: this.coreEditor, position: pos });
let t = tokenIter.getCurrentToken();
if (this.parser.isEmptyToken(t)) {
// if the row starts with some spaces, skip them.
t = this.parser.nextNonEmptyToken(tokenIter);
}
if (t == null) {
return null;
}
request.method = t.value;
t = this.parser.nextNonEmptyToken(tokenIter);
if (!t || t.type === 'method') {
return null;
}
request.url = '';
while (t && t.type && t.type.indexOf('url') === 0) {
request.url += t.value;
t = tokenIter.stepForward();
}
if (this.parser.isEmptyToken(t)) {
// if the url row ends with some spaces, skip them.
t = this.parser.nextNonEmptyToken(tokenIter);
}
let bodyStartLineNumber = (t ? 0 : 1) + tokenIter.getCurrentPosition().lineNumber; // artificially increase end of docs.
let dataEndPos: Position;
while (
bodyStartLineNumber < range.end.lineNumber ||
(bodyStartLineNumber === range.end.lineNumber && 1 < range.end.column)
) {
dataEndPos = this.nextDataDocEnd({
lineNumber: bodyStartLineNumber,
column: 1,
});
const bodyRange: Range = {
start: {
lineNumber: bodyStartLineNumber,
column: 1,
},
end: dataEndPos,
};
const data = this.coreEditor.getValueInRange(bodyRange)!;
request.data.push(data.trim());
bodyStartLineNumber = dataEndPos.lineNumber + 1;
}
return request;
};
getRequestsInRange = async (
range = this.coreEditor.getSelectionRange(),
includeNonRequestBlocks = false
): Promise<any[]> => {
await this.coreEditor.waitForLatestTokens();
if (!range) {
return [];
}
const expandedRange = await this.expandRangeToRequestEdges(range);
if (!expandedRange) {
return [];
}
const requests: any = [];
let rangeStartCursor = expandedRange.start.lineNumber;
const endLineNumber = expandedRange.end.lineNumber;
// move to the next request start (during the second iterations this may not be exactly on a request
let currentLineNumber = expandedRange.start.lineNumber;
const flushNonRequestBlock = () => {
if (includeNonRequestBlocks) {
const nonRequestPrefixBlock = this.coreEditor
.getLines(rangeStartCursor, currentLineNumber - 1)
.join('\n');
if (nonRequestPrefixBlock) {
requests.push(nonRequestPrefixBlock);
}
}
};
while (currentLineNumber <= endLineNumber) {
if (this.parser.isStartRequestRow(currentLineNumber)) {
flushNonRequestBlock();
const request = await this.getRequest(currentLineNumber);
if (!request) {
// Something has probably gone wrong.
return requests;
} else {
requests.push(request);
rangeStartCursor = currentLineNumber = request.range.end.lineNumber + 1;
}
} else {
++currentLineNumber;
}
}
flushNonRequestBlock();
return requests;
};
getRequest = async (row?: number) => {
await this.coreEditor.waitForLatestTokens();
if (this.parser.isInBetweenRequestsRow(row)) {
return null;
}
const range = await this.getRequestRange(row);
return this.getRequestInRange(range!);
};
moveToPreviousRequestEdge = async () => {
await this.coreEditor.waitForLatestTokens();
const pos = this.coreEditor.getCurrentPosition();
for (
pos.lineNumber--;
pos.lineNumber > 1 && !this.parser.isRequestEdge(pos.lineNumber);
pos.lineNumber--
) {
// loop for side effects
}
this.coreEditor.moveCursorToPosition({
lineNumber: pos.lineNumber,
column: 1,
});
};
moveToNextRequestEdge = async (moveOnlyIfNotOnEdge: boolean) => {
await this.coreEditor.waitForLatestTokens();
const pos = this.coreEditor.getCurrentPosition();
const maxRow = this.coreEditor.getLineCount();
if (!moveOnlyIfNotOnEdge) {
pos.lineNumber++;
}
for (
;
pos.lineNumber < maxRow && !this.parser.isRequestEdge(pos.lineNumber);
pos.lineNumber++
) {
// loop for side effects
}
this.coreEditor.moveCursorToPosition({
lineNumber: pos.lineNumber,
column: 1,
});
};
nextRequestEnd = (pos: Position): Position => {
pos = pos || this.coreEditor.getCurrentPosition();
const maxLines = this.coreEditor.getLineCount();
let curLineNumber = pos.lineNumber;
for (; curLineNumber <= maxLines; ++curLineNumber) {
const curRowMode = this.parser.getRowParseMode(curLineNumber);
// eslint-disable-next-line no-bitwise
if ((curRowMode & this.parser.MODE.REQUEST_END) > 0) {
break;
}
// eslint-disable-next-line no-bitwise
if (curLineNumber !== pos.lineNumber && (curRowMode & this.parser.MODE.REQUEST_START) > 0) {
break;
}
}
const column =
(this.coreEditor.getLineValue(curLineNumber) || '').replace(/\s+$/, '').length + 1;
return {
lineNumber: curLineNumber,
column,
};
};
nextDataDocEnd = (pos: Position): Position => {
pos = pos || this.coreEditor.getCurrentPosition();
let curLineNumber = pos.lineNumber;
const maxLines = this.coreEditor.getLineCount();
for (; curLineNumber < maxLines; curLineNumber++) {
const curRowMode = this.parser.getRowParseMode(curLineNumber);
// eslint-disable-next-line no-bitwise
if ((curRowMode & this.parser.MODE.REQUEST_END) > 0) {
break;
}
// eslint-disable-next-line no-bitwise
if ((curRowMode & this.parser.MODE.MULTI_DOC_CUR_DOC_END) > 0) {
break;
}
// eslint-disable-next-line no-bitwise
if (curLineNumber !== pos.lineNumber && (curRowMode & this.parser.MODE.REQUEST_START) > 0) {
break;
}
}
const column =
(this.coreEditor.getLineValue(curLineNumber) || '').length +
1; /* Range goes to 1 after last char */
return {
lineNumber: curLineNumber,
column,
};
};
highlightCurrentRequestsAndUpdateActionBar = _.debounce(async () => {
await this.coreEditor.waitForLatestTokens();
const expandedRange = await this.expandRangeToRequestEdges();
if (expandedRange === null && this.currentReqRange === null) {
return;
}
if (
expandedRange !== null &&
this.currentReqRange !== null &&
expandedRange.start.lineNumber === this.currentReqRange.start.lineNumber &&
expandedRange.end.lineNumber === this.currentReqRange.end.lineNumber
) {
// same request, now see if we are on the first line and update the action bar
const cursorLineNumber = this.coreEditor.getCurrentPosition().lineNumber;
if (cursorLineNumber === this.currentReqRange.start.lineNumber) {
this.updateActionsBar();
}
return; // nothing to do..
}
if (this.currentReqRange) {
this.coreEditor.removeMarker(this.currentReqRange.markerRef);
}
this.currentReqRange = expandedRange as any;
if (this.currentReqRange) {
this.currentReqRange.markerRef = this.coreEditor.addMarker(this.currentReqRange);
}
this.updateActionsBar();
}, 25);
getRequestsAsCURL = async (opensearchBaseUrl: string, range?: Range): Promise<string> => {
const requests = await this.getRequestsInRange(range, true);
const result = _.map(requests, (req) => {
if (typeof req === 'string') {
// no request block
return req;
}
const opensearchPath = req.url;
const opensearchMethod = req.method;
const opensearchData = req.data;
// this is the first url defined in opensearch.hosts
const url = opensearch.constructOpenSearchUrl(opensearchBaseUrl, opensearchPath);
let ret = 'curl -X' + opensearchMethod + ' "' + url + '"';
if (opensearchData && opensearchData.length) {
ret += " -H 'Content-Type: application/json' -d'\n";
const dataAsString = collapseLiteralStrings(opensearchData.join('\n'));
// We escape single quoted strings that that are wrapped in single quoted strings
ret += dataAsString.replace(/'/g, "'\\''");
if (opensearchData.length > 1) {
ret += '\n';
} // end with a new line
ret += "'";
}
return ret;
});
return result.join('\n');
};
updateActionsBar = () => this.coreEditor.legacyUpdateUI(this.currentReqRange);
getCoreEditor() {
return this.coreEditor;
}
} | the_stack |
import {absoluteFrom, AbsoluteFsPath, FileSystem, getFileSystem, PathSegment} from '../../../../src/ngtsc/file_system';
import {runInEachFileSystem} from '../../../../src/ngtsc/file_system/testing';
import {EntryPointPackageJson} from '../../../src/packages/entry_point';
import {BackupFileCleaner, NgccDirectoryCleaner, PackageJsonCleaner} from '../../../src/writing/cleaning/cleaning_strategies';
runInEachFileSystem(() => {
describe('cleaning strategies', () => {
let fs: FileSystem;
let _abs: typeof absoluteFrom;
beforeEach(() => {
fs = getFileSystem();
_abs = absoluteFrom;
});
describe('PackageJsonCleaner', () => {
let packageJsonPath: AbsoluteFsPath;
beforeEach(() => {
packageJsonPath = _abs('/node_modules/pkg/package.json');
});
describe('canClean()', () => {
it('should return true if the basename is package.json', () => {
const strategy = new PackageJsonCleaner(fs);
expect(strategy.canClean(packageJsonPath, fs.basename(packageJsonPath))).toBe(true);
});
it('should return false if the basename is not package.json', () => {
const filePath = _abs('/node_modules/pkg/index.js');
const fileName = fs.basename(filePath);
const strategy = new PackageJsonCleaner(fs);
expect(strategy.canClean(filePath, fileName)).toBe(false);
});
});
describe('clean()', () => {
it('should not touch the file if there is no build marker', () => {
const strategy = new PackageJsonCleaner(fs);
const packageJson: EntryPointPackageJson = {name: 'test-package'};
fs.ensureDir(fs.dirname(packageJsonPath));
fs.writeFile(packageJsonPath, JSON.stringify(packageJson));
strategy.clean(packageJsonPath, fs.basename(packageJsonPath));
const newPackageJson = JSON.parse(fs.readFile(packageJsonPath)) as EntryPointPackageJson;
expect(newPackageJson).toEqual({name: 'test-package'});
});
it('should remove the processed marker', () => {
const strategy = new PackageJsonCleaner(fs);
const packageJson: EntryPointPackageJson = {
name: 'test-package',
__processed_by_ivy_ngcc__: {'fesm2015': '8.0.0'}
};
fs.ensureDir(fs.dirname(packageJsonPath));
fs.writeFile(packageJsonPath, JSON.stringify(packageJson));
strategy.clean(packageJsonPath, fs.basename(packageJsonPath));
const newPackageJson = JSON.parse(fs.readFile(packageJsonPath)) as EntryPointPackageJson;
expect(newPackageJson).toEqual({name: 'test-package'});
});
it('should remove the new entry points', () => {
const strategy = new PackageJsonCleaner(fs);
const packageJson: EntryPointPackageJson = {
name: 'test-package',
__processed_by_ivy_ngcc__: {'fesm2015': '8.0.0'}
};
fs.ensureDir(fs.dirname(packageJsonPath));
fs.writeFile(packageJsonPath, JSON.stringify(packageJson));
strategy.clean(packageJsonPath, fs.basename(packageJsonPath));
const newPackageJson = JSON.parse(fs.readFile(packageJsonPath)) as EntryPointPackageJson;
expect(newPackageJson).toEqual({name: 'test-package'});
});
it('should remove the prepublish script if there was a processed marker', () => {
const strategy = new PackageJsonCleaner(fs);
const packageJson: EntryPointPackageJson = {
name: 'test-package',
__processed_by_ivy_ngcc__: {'fesm2015': '8.0.0'},
scripts: {prepublishOnly: 'added by ngcc', test: 'do testing'},
};
fs.ensureDir(fs.dirname(packageJsonPath));
fs.writeFile(packageJsonPath, JSON.stringify(packageJson));
strategy.clean(packageJsonPath, fs.basename(packageJsonPath));
const newPackageJson = JSON.parse(fs.readFile(packageJsonPath)) as EntryPointPackageJson;
expect(newPackageJson).toEqual({
name: 'test-package',
scripts: {test: 'do testing'},
});
});
it('should revert and remove the backup for the prepublish script if there was a processed marker',
() => {
const strategy = new PackageJsonCleaner(fs);
const packageJson: EntryPointPackageJson = {
name: 'test-package',
__processed_by_ivy_ngcc__: {'fesm2015': '8.0.0'},
scripts: {
prepublishOnly: 'added by ngcc',
prepublishOnly__ivy_ngcc_bak: 'original',
test: 'do testing'
},
};
fs.ensureDir(fs.dirname(packageJsonPath));
fs.writeFile(packageJsonPath, JSON.stringify(packageJson));
strategy.clean(packageJsonPath, fs.basename(packageJsonPath));
const newPackageJson =
JSON.parse(fs.readFile(packageJsonPath)) as EntryPointPackageJson;
expect(newPackageJson).toEqual({
name: 'test-package',
scripts: {prepublishOnly: 'original', test: 'do testing'},
});
});
it('should not touch the scripts if there was not processed marker', () => {
const strategy = new PackageJsonCleaner(fs);
const packageJson: EntryPointPackageJson = {
name: 'test-package',
scripts: {
prepublishOnly: 'added by ngcc',
prepublishOnly__ivy_ngcc_bak: 'original',
test: 'do testing'
},
};
fs.ensureDir(fs.dirname(packageJsonPath));
fs.writeFile(packageJsonPath, JSON.stringify(packageJson));
strategy.clean(packageJsonPath, fs.basename(packageJsonPath));
const newPackageJson = JSON.parse(fs.readFile(packageJsonPath)) as EntryPointPackageJson;
expect(newPackageJson).toEqual({
name: 'test-package',
scripts: {
prepublishOnly: 'added by ngcc',
prepublishOnly__ivy_ngcc_bak: 'original',
test: 'do testing'
}
});
});
});
});
describe('BackupFileCleaner', () => {
let filePath: AbsoluteFsPath;
let backupFilePath: AbsoluteFsPath;
beforeEach(() => {
filePath = _abs('/node_modules/pkg/index.js');
backupFilePath = _abs('/node_modules/pkg/index.js.__ivy_ngcc_bak');
});
describe('canClean()', () => {
it('should return true if the file name ends in .__ivy_ngcc_bak and the processed file exists',
() => {
const strategy = new BackupFileCleaner(fs);
fs.ensureDir(fs.dirname(filePath));
fs.writeFile(filePath, 'processed file');
fs.writeFile(backupFilePath, 'original file');
expect(strategy.canClean(backupFilePath, fs.basename(backupFilePath))).toBe(true);
});
it('should return false if the file does not end in .__ivy_ngcc_bak', () => {
const strategy = new BackupFileCleaner(fs);
fs.ensureDir(fs.dirname(filePath));
fs.writeFile(filePath, 'processed file');
fs.writeFile(backupFilePath, 'original file');
expect(strategy.canClean(filePath, fs.basename(filePath))).toBe(false);
});
it('should return false if the file ends in .__ivy_ngcc_bak but the processed file does not exist',
() => {
const strategy = new BackupFileCleaner(fs);
fs.ensureDir(fs.dirname(filePath));
fs.writeFile(backupFilePath, 'original file');
expect(strategy.canClean(backupFilePath, fs.basename(backupFilePath))).toBe(false);
});
});
describe('clean()', () => {
it('should move the backup file back to its original file path', () => {
const strategy = new BackupFileCleaner(fs);
fs.ensureDir(fs.dirname(filePath));
fs.writeFile(filePath, 'processed file');
fs.writeFile(backupFilePath, 'original file');
strategy.clean(backupFilePath, fs.basename(backupFilePath));
expect(fs.exists(backupFilePath)).toBe(false);
expect(fs.readFile(filePath)).toEqual('original file');
});
});
});
describe('NgccDirectoryCleaner', () => {
let ivyDirectory: AbsoluteFsPath;
beforeEach(() => {
ivyDirectory = _abs('/node_modules/pkg/__ivy_ngcc__');
});
describe('canClean()', () => {
it('should return true if the path is a directory and is called __ivy_ngcc__', () => {
const strategy = new NgccDirectoryCleaner(fs);
fs.ensureDir(ivyDirectory);
expect(strategy.canClean(ivyDirectory, fs.basename(ivyDirectory))).toBe(true);
});
it('should return false if the path is a directory and not called __ivy_ngcc__', () => {
const strategy = new NgccDirectoryCleaner(fs);
const filePath = _abs('/node_modules/pkg/other');
fs.ensureDir(ivyDirectory);
expect(strategy.canClean(filePath, fs.basename(filePath))).toBe(false);
});
it('should return false if the path is called __ivy_ngcc__ but does not exist', () => {
const strategy = new NgccDirectoryCleaner(fs);
expect(strategy.canClean(ivyDirectory, fs.basename(ivyDirectory))).toBe(false);
});
it('should return false if the path is called __ivy_ngcc__ but is not a directory', () => {
const strategy = new NgccDirectoryCleaner(fs);
fs.ensureDir(fs.dirname(ivyDirectory));
fs.writeFile(ivyDirectory, 'some contents');
expect(strategy.canClean(ivyDirectory, fs.basename(ivyDirectory))).toBe(false);
});
});
describe('clean()', () => {
it('should remove the __ivy_ngcc__ directory', () => {
const strategy = new NgccDirectoryCleaner(fs);
fs.ensureDir(ivyDirectory);
fs.ensureDir(fs.resolve(ivyDirectory, 'subfolder'));
fs.writeFile(fs.resolve(ivyDirectory, 'subfolder', 'file.txt'), 'file contents');
strategy.clean(ivyDirectory, fs.basename(ivyDirectory));
expect(fs.exists(ivyDirectory)).toBe(false);
});
});
});
});
}); | the_stack |
// --- Module-level state shared by the sidebar intersection observers below ---
// Names of the sidebar entries currently intersecting the viewport, top-most first.
let globalCurrentView: string[] = [];
// Name of the sidebar entry that is currently highlighted as active.
let previouslyActive: string = '';
// Entry pending deactivation when it was the only element in view.
let toRemove = '';
// Document order of each entry name; used to tell scroll direction apart.
let order = new Map<string, number>();
// True while handling a navigation that was triggered by a sidebar click.
let isSideBarClick = false;
let isFirstOpen = true; // is true on the first observation of all entries
let everyEntry: any[] = []; // a list of all attributes/properties etc.

// The three DOM element ids that make up one sidebar entry: the entry itself,
// its subcategory header (h4) and its category header (h3).
interface SidebarIds {
  name: string, subcategory: string, category: string
}
/** Adds the 'active' class to an entry, its subcategory and its category. */
function activateSidebar(sidebarIds: SidebarIds) {
  const selectors = [
    `div[id=${sidebarIds.name}]`,
    `h4[id=${sidebarIds.subcategory}]`,
    `h3[id=${sidebarIds.category}]`,
  ];
  for (const selector of selectors) {
    document.querySelector(selector)!.classList.add('active');
  }
}
/** Removes the 'active' class from an entry, its subcategory and its category. */
function deactivateSidebar(sidebarIds: SidebarIds) {
  const selectors = [
    `div[id=${sidebarIds.name}]`,
    `h4[id=${sidebarIds.subcategory}]`,
    `h3[id=${sidebarIds.category}]`,
  ];
  for (const selector of selectors) {
    document.querySelector(selector)!.classList.remove('active');
  }
}
/** Marks a single sidebar entry as de-active (hidden). */
function addDeactive(sidebarIds: SidebarIds) {
  const entry = document.querySelector(`div[id=${sidebarIds.name}]`)!;
  entry.classList.add('de-active');
}
/** Marks an entry's subcategory header as de-active (hidden). */
function addDeactiveCategory(sidebarIds: SidebarIds) {
  const header = document.querySelector(`h4[id=${sidebarIds.subcategory}]`)!;
  header.classList.add('de-active');
}
/** Clears the de-active mark from a single sidebar entry. */
function removeDeactive(sidebarIds: SidebarIds) {
  const entry = document.querySelector(`div[id=${sidebarIds.name}]`)!;
  entry.classList.remove('de-active');
}
/** Clears the de-active mark from an entry's subcategory header. */
function removeDeactiveCategory(sidebarIds: SidebarIds) {
  const header = document.querySelector(`h4[id=${sidebarIds.subcategory}]`)!;
  header.classList.remove('de-active');
}
/** Returns the category segment of the currently active sidebar entry name. */
export function getSidebarCategoryForNewPage(): string {
  const [category] = previouslyActive.split('-');
  return category;
}
/**
 * Derives the sidebar element ids for an entry name.
 * e.g. 'loading-attributes-src' ->
 *   { name: 'loading-attributes-src',
 *     subcategory: 'loading-attributes-sidebar',
 *     category: 'loading-sidebar' }
 */
function getSidebarIdsFromSidebarName(name: string): SidebarIds {
  const sb = 'sidebar';
  // Split once instead of twice; the first one/two segments identify the
  // category/subcategory, each suffixed with '-sidebar'.
  const parts = name.split('-');
  return {
    name,
    subcategory: [...parts.slice(0, 2), sb].join('-'),
    category: [...parts.slice(0, 1), sb].join('-'),
  };
}
/**
 * Derives the sidebar element ids from a content element id, which carries a
 * leading prefix segment (e.g. 'entrydocs-') that is stripped off.
 * e.g. 'entrydocs-loading-attributes-src' ->
 *   { name: 'loading-attributes-src',
 *     subcategory: 'loading-attributes-sidebar',
 *     category: 'loading-sidebar' }
 */
function getSidebarIdsFromId(id: string): SidebarIds {
  const sb = 'sidebar';
  // Split once instead of three times; segment 0 is the prefix, segments
  // 1.. form the entry name (slice(1, 10) caps the name at nine segments,
  // matching the original behavior).
  const parts = id.split('-');
  return {
    name: parts.slice(1, 10).join('-'),
    subcategory: [...parts.slice(1, 3), sb].join('-'),
    category: [...parts.slice(1, 2), sb].join('-'),
  };
}
/*
* sidebarSubcategory: string of the old subcategory being replaced
* newSidebarSubcategory: string of the new subcategory
* example:
* sidebarSubcategory = loading-attributes-sidebar
* newSidebarSubcategory = loading-cssProperties-sidebar
*/
/**
 * Hides every entry outside `newSidebarSubcategory` and every subcategory
 * header outside the corresponding category. No-op when the subcategory is
 * unchanged.
 */
function updateSidebarView(
    sidebarSubcategory: string, newSidebarSubcategory: string) {
  if (sidebarSubcategory === newSidebarSubcategory) {
    return;
  }
  const newSidebarCategory =
      newSidebarSubcategory.split('-')[0].concat('-sidebar');
  for (const entry of everyEntry) {
    const sidebarIds = getSidebarIdsFromId(entry.target.getAttribute('id'));
    if (sidebarIds.subcategory === newSidebarSubcategory) {
      removeDeactive(sidebarIds);
    } else {
      addDeactive(sidebarIds);
    }
    if (sidebarIds.category === newSidebarCategory) {
      removeDeactiveCategory(sidebarIds);
    } else {
      addDeactiveCategory(sidebarIds);
    }
  }
}
/*
* Hide all of the entries not within the current subcategory
* entries should be every entry on the page when this is called
*/
/**
 * First-observation setup: records all entries globally, then hides every
 * entry outside the currently active subcategory.
 */
function updateSidebarViewFirstTime(entries: any[]) {
  isFirstOpen = false;  // global
  everyEntry = entries; // Sets global variable for use in updateSidebarView
  const activeIds = getSidebarIdsFromSidebarName(previouslyActive);
  updateSidebarView('', activeIds.subcategory);
}
/** Moves the active highlight from entry `prev` to `sidebarIds`. */
function updateFromOldToNew(prev: string, sidebarIds: SidebarIds) {
  const previousIds = getSidebarIdsFromSidebarName(prev);
  deactivateSidebar(previousIds);
  activateSidebar(sidebarIds);
  updateSidebarView(previousIds.subcategory, sidebarIds.subcategory);
}
/**
 * Deactivates the given entry; when at least one other entry remains in view,
 * promotes the next one to active.
 */
function removeActiveEntry(sidebarIds: SidebarIds) {
  deactivateSidebar(sidebarIds);
  if (globalCurrentView.length < 2) {
    return;
  }
  const nextIds = getSidebarIdsFromSidebarName(globalCurrentView[1]);
  activateSidebar(nextIds);
  updateSidebarView(sidebarIds.subcategory, nextIds.subcategory);
  previouslyActive = nextIds.name;
}
/**
 * Rewrites the page's outer h1 header as "Category: Subcategory" based on the
 * currently active sidebar entry.
 */
function updateHeader() {
  const sidebarIds = getSidebarIdsFromSidebarName(previouslyActive);
  const subCatHeader =
      document.querySelector(`h4[id=${sidebarIds.subcategory}]`)!;
  const catHeader = document.querySelector(`h3[id=${sidebarIds.category}]`)!;
  const subCat = subCatHeader.firstElementChild!.innerHTML;
  const cat = catHeader.firstElementChild!.innerHTML;
  const outerHeaderId = sidebarIds.category.split('-')[0];
  const outerHeader = document.querySelector(`h1[id=${outerHeaderId}]`)!;
  outerHeader.innerHTML = cat.concat(': ', subCat);
}
/*
 * Scroll-driven state machine for one intersection change: keeps
 * globalCurrentView / previouslyActive / toRemove in sync and moves the
 * active highlight accordingly. Statement order and the shared globals are
 * load-bearing here.
 */
function handleHTMLEntry(htmlEntry: IntersectionObserverEntry) {
  const id = htmlEntry.target.getAttribute('id')!;
  const sidebarIds = getSidebarIdsFromId(id);
  // entry inside viewing window
  if (htmlEntry.intersectionRatio > 0) {
    if (toRemove.length > 0) {
      // inside a large div
      updateFromOldToNew(toRemove, sidebarIds);
      toRemove = '';
    } else if (globalCurrentView.length === 0) {
      // empty globalCurrentView, add to view
      activateSidebar(sidebarIds);
      previouslyActive = sidebarIds.name;
      globalCurrentView.push(sidebarIds.name);
    } else if (order.get(previouslyActive)! > order.get(sidebarIds.name)!) {
      // scrolling up
      updateFromOldToNew(globalCurrentView[0], sidebarIds);
      globalCurrentView.unshift(sidebarIds.name);
      previouslyActive = sidebarIds.name;
    } else {
      // an entry is in view under the current active entry
      globalCurrentView.push(sidebarIds.name);
    }
  } else if (globalCurrentView.length === 1) {
    // entry outside viewing window, but entry is the only element
    // (keep it highlighted until something else scrolls in; see toRemove)
    toRemove = previouslyActive;
  } else {
    // entry outside viewing window, active entry now out of view
    if (previouslyActive === sidebarIds.name) {
      // entry being removed from view is currently active
      removeActiveEntry(sidebarIds);
    }
    // always remove entry when out of view
    globalCurrentView = globalCurrentView.filter(e => e !== sidebarIds.name);
  }
}
/*
* for page jump its just easier to restart, so deactivate everything, clear
* the global view, then only update with whats in view
*/
function handlePageJump(entries: IntersectionObserverEntry[]) {
  isSideBarClick = false;
  toRemove = '';
  // 'null' never matches a real subcategory id, so this forces every entry
  // into the de-active state before rebuilding the view.
  updateSidebarView('', 'null');
  // deactivate all of the entries
  for (const entry of everyEntry) {
    const id = entry.target.getAttribute('id');
    const sidebarIds = getSidebarIdsFromId(id);
    deactivateSidebar(sidebarIds);
  }
  // remove entries not in view, add entries that are in view
  for (const entry of entries) {
    const id = entry.target.getAttribute('id')!;
    const sidebarIds = getSidebarIdsFromId(id);
    if (!entry.isIntersecting) {
      globalCurrentView = globalCurrentView.filter(e => e !== sidebarIds.name);
    } else {
      globalCurrentView.push(sidebarIds.name);
    }
  }
  // sort current view by document order (top-most entry first); the numeric
  // comparator is equivalent to the previous three-branch comparison
  globalCurrentView.sort((nameA, nameB) => order.get(nameA)! - order.get(nameB)!);
  // update current view based on the current highest view
  // NOTE(review): assumes at least one entry intersects after the jump —
  // globalCurrentView[0] would be undefined otherwise; confirm.
  const sidebarIds = getSidebarIdsFromSidebarName(globalCurrentView[0]);
  const prevSidebarIds = getSidebarIdsFromSidebarName(previouslyActive);
  deactivateSidebar(prevSidebarIds);
  activateSidebar(sidebarIds);
  updateSidebarView('', sidebarIds.subcategory);
  previouslyActive = sidebarIds.name;
}
// Latest observed intersection ratio for each example section, keyed by
// element id. Persisted across observer callbacks.
let intersectionRatios = new Map<string, number>();

/*
 * Observer callback for the examples page: highlights the sidebar header of
 * the example section with the largest visible fraction of the viewport.
 */
function handleExamples(entries: IntersectionObserverEntry[], _observer: any) {
  if (isFirstOpen) {
    everyEntry = entries;
    isFirstOpen = false;
    // NOTE(review): selector is missing its closing "]" — browsers tolerate
    // the unclosed attribute selector, but consider fixing it.
    document.querySelector(`h3[id="active-container-sidebar"`)!.classList.add(
        'active');
  }
  // Record the latest ratio for every section that changed this tick.
  for (const entry of entries) {
    const id = entry.target.getAttribute('id')!;
    intersectionRatios.set(id, entry.intersectionRatio);
  }
  // Find the section with the highest recorded visible ratio.
  let maxRatio = 0;
  let maxName = '';
  for (const name of intersectionRatios.keys()) {
    const ratio = intersectionRatios.get(name)!;
    if (ratio > maxRatio) {
      maxRatio = ratio;
      maxName = name;
    }
  }
  // Toggle 'active' so only the winning section's sidebar header keeps it.
  for (const entry of everyEntry) {
    const id = entry.target.getAttribute('id')!;
    const sidebarName = `container-${id}-sidebar`;
    // NOTE(review): selector is missing its closing "]"; also the `return`
    // below (rather than `continue`) aborts the whole loop when a single
    // sidebar element is absent — confirm that is intended.
    const sidebarElement = document.querySelector(`h4[id=${sidebarName}`);
    if (sidebarElement == null) {
      return;
    }
    if (id === maxName) {
      sidebarElement.classList.add('active');
    } else {
      sidebarElement.classList.remove('active');
    }
  }
}
/*
* Update the table of contents based on how the page is viewed.
*/
/**
 * Installs the IntersectionObserver that keeps the table of contents in sync
 * with scrolling. `docsOrExample` selects between the docs page (per-entry
 * tracking with scroll/click/page-load handling) and the examples page
 * (largest-visible-section tracking via handleExamples).
 */
export function sidebarObserver(docsOrExample: string) {
  if (docsOrExample === 'docs') {
    const observer = new IntersectionObserver(entries => {
      if (isSideBarClick) { // sidebar click
        handlePageJump(entries);
      } else { // scroll
        for (const htmlEntry of entries) {
          handleHTMLEntry(htmlEntry);
        }
      }
      if (isFirstOpen) { // page load
        updateSidebarViewFirstTime(entries);
      }
      updateHeader();
    });
    // i.e. attributes, properties, events, methods, slots, custom css.
    // Record each entry's document order so scroll direction can be detected,
    // then start observing it.
    let orderIndex = 0;
    document.querySelectorAll('div[id*="entrydocs"]').forEach((section) => {
      const idSplitList = section.getAttribute('id')!.split('-');
      const id = idSplitList.slice(1, 10).join('-');
      order.set(id, orderIndex);
      orderIndex += 1;
      observer.observe(section);
    });
  } else {
    // Examples page: multiple thresholds so ratio updates arrive as sections
    // scroll, letting handleExamples pick the most-visible one.
    const options = {
      root: null,
      rootMargin: '0px',
      threshold: [0, 0.25, 0.5, 0.75, 1],
    };
    const observer = new IntersectionObserver(handleExamples, options);
    document.querySelectorAll('div[class="demo"]').forEach((section) => {
      observer.observe(section);
    });
  }
}
/**
 * Click handler for sidebar links: flags the next observer callback as a
 * page jump, and collapses the sidebar on narrow (mobile) viewports.
 */
export function sidebarClick() {
  isSideBarClick = true;
  // close sidebar if click in sidebar on mobile
  const isMobileViewport = window.innerWidth <= 800;
  if (isMobileViewport) {
    document.documentElement.style.setProperty('--sidebar-width', '0px');
  }
}
// Expose the handler on the global object so inline HTML onclick attributes
// can reach it even after bundling.
(self as any).sidebarClick = sidebarClick;
import { TestProjectSpec } from '../framework/frameworkTestSupport';
import { VariablesWizard } from '../wizards/variables/variablesWizard';
import { LaunchProject } from '../fixtures/launchProject';
import { testUsing } from '../fixtures/testUsing';
// Scopes' kinds: 'global' | 'local' | 'with' | 'closure' | 'catch' | 'block' | 'script' | 'eval' | 'module'
// TODO: Test several scopes at the same time. They can be repeated, and the order does matter
suite('Variables scopes', function () {
    // Each test pauses on a `debugger` statement placed so that a specific
    // scope kind is on top of the stack, then asserts the full variable
    // listing for that scope. The template literals below are the exact
    // expected renderings (name = value (type)) — do not reformat them.
    // Local scope: function-level vars, including `this` and `arguments`.
    testUsing('local', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/localScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            local: `
                this = Window (Object)
                arguments = Arguments(0) [] (Object)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                consoleDotLog = function consoleDotLog(m) { … } (Function)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                fn = () => { … } (Function)
                fn2 = function () { … } (Function)
                globalCode = "page loaded" (string)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a\nstring with\nnewlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                qqq = undefined (undefined)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)`}
        );
    });
    // Global scope: resumes once so the page finishes declaring globals, then
    // diffs the *new* global variables against the baseline.
    testUsing('globals', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/globalScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertNewGlobalVariariablesAre(async () => {
            await launchProject.pausedWizard.resume();
            await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        },
            // The variables declared with const, and let aren't global variables so they won't appear here
            `
            b = body {text: "", link: "", vLink: "", …} (Object)
            bool = true (boolean)
            buffer = ArrayBuffer(8) {} (Object)
            buffView = Int32Array(2) [234, 0] (Object)
            consoleDotLog = function consoleDotLog(m) { … } (Function)
            e = Error: hi (Object)
            element = p {align: "", title: "", lang: "", …} (Object)
            evalVar1 = 16 (number)
            evalVar2 = "sdlfk" (string)
            evalVar3 = Array(3) [1, 2, 3] (Object)
            fn = () => { … } (Function)
            fn2 = function () { … } (Function)
            globalCode = "page loaded" (string)
            i = 101 (number)
            inf = Infinity (number)
            infStr = "Infinity" (string)
            longStr = "this is a\nstring with\nnewlines" (string)
            m = Map(1) {} (Object)
            manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
            myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
            nan = NaN (number)
            obj = Object {a: 2, thing: <accessor>} (Object)
            qqq = undefined (undefined)
            r = /^asdf.*$/g {lastIndex: 0} (Object) // TODO: This and other types seems wrong. Investigate
            s = Symbol(hi) (symbol)
            str = "hello" (string)
            xyz = 4 (number)`);
    });
    // Script scope: top-level `let`/`const` of a classic script.
    testUsing('script', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/scriptScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            script: `
                this = Window (Object)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                fn = () => { … } (Function)
                fn2 = function () { … } (Function)
                globalCode = "page loaded" (string)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a\nstring with\nnewlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                qqq = undefined (undefined)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)`}
        );
    });
    // Block scope: variables declared inside a `{ … }` block.
    testUsing('block', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/blockScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre(
            {
                block: `
                this = Window (Object)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                consoleDotLog = function consoleDotLog(m) { … } (Function)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                fn = () => { … } (Function)
                fn2 = function () { … } (Function)
                globalCode = "page loaded" (string)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a\nstring with\nnewlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                qqq = undefined (undefined)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)`
            }
        );
    });
    // Catch scope: only the caught exception binding is visible.
    testUsing('catch', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/catchScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            catch: `
                exception = Error: Something went wrong (Object)`}
        );
    });
    // Closure scope: variables captured from the enclosing function
    // (note: no `this`, but the outer function `pauseInside` is captured).
    testUsing('closure', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/closureScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            closure: `
                arguments = Arguments(0) [] (Object)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                consoleDotLog = function consoleDotLog(m) { … } (Function)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                fn = () => { … } (Function)
                fn2 = function () { … } (Function)
                globalCode = "page loaded" (string)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a\nstring with\nnewlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                pauseInside = function pauseInside() { … } (Function)
                qqq = undefined (undefined)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)`}
        );
    });
    // Eval scope: variables declared inside an eval'd string.
    testUsing('eval', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/evalScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            eval: `
                this = Window (Object)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                fn = () => { … } (Function)
                fn2 = function () { … } (Function)
                globalCode = "page loaded" (string)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a\nstring with\nnewlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                qqq = undefined (undefined)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)`}
        );
    });
    // With scope: the object supplied to `with (…)`, so its properties
    // (including __proto__) appear; longStr renders with literal newlines here.
    testUsing('with', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/withScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            with: `
                this = Window (Object)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                consoleDotLog = function (m) { … } (Function)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                evalVar1 = 16 (number)
                evalVar2 = "sdlfk" (string)
                evalVar3 = Array(3) [1, 2, 3] (Object)
                fn = () => { … } (Function)
                fn2 = function () { … } (Function)
                globalCode = "page loaded" (string)
                i = 101 (number)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a
string with
newlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)
                __proto__ = Object {constructor: , __defineGetter__: , __defineSetter__: , …} (Object)`
        });
    });
    // Module scope: ES module top level, so `this` is undefined.
    testUsing('module', context => LaunchProject.create(context, TestProjectSpec.fromTestPath('variablesScopes/moduleScope')), async (launchProject) => {
        await launchProject.pausedWizard.waitUntilPausedOnDebuggerStatement();
        await new VariablesWizard(launchProject.debugClient).assertTopFrameVariablesAre({
            module: `
                this = undefined (undefined)
                b = body {text: "", link: "", vLink: "", …} (Object)
                bool = true (boolean)
                buffer = ArrayBuffer(8) {} (Object)
                buffView = Int32Array(2) [234, 0] (Object)
                consoleDotLog = function consoleDotLog(m2) { … } (Function)
                e = Error: hi (Object)
                element = body {text: "", link: "", vLink: "", …} (Object)
                fn = () => { … } (Function)
                fn2 = function (param) { … } (Function)
                globalCode = "page loaded" (string)
                inf = Infinity (number)
                infStr = "Infinity" (string)
                longStr = "this is a
string with
newlines" (string)
                m = Map(1) {} (Object)
                manyPropsObj = Object {0: 1, 1: 3, 2: 5, …} (Object)
                myVar = Object {num: 1, str: "Global", obj: Object, …} (Object)
                nan = NaN (number)
                obj = Object {a: 2, thing: <accessor>} (Object)
                qqq = undefined (undefined)
                r = /^asdf.*$/g {lastIndex: 0} (Object)
                s = Symbol(hi) (symbol)
                str = "hello" (string)
                xyz = 4 (number)`
        });
    });
});
import { when } from 'jest-when';
import { KVStore } from '@liskhq/lisk-db';
import { codec } from '@liskhq/lisk-codec';
import { Block, Chain } from '@liskhq/lisk-chain';
import { BFT } from '@liskhq/lisk-bft';
import { getAddressFromPublicKey, getRandomBytes } from '@liskhq/lisk-cryptography';
import { FastChainSwitchingMechanism, Errors } from '../../../../../src/node/synchronizer';
import { Processor } from '../../../../../src/node/processor';
import { constants } from '../../../../utils';
import {
defaultNetworkIdentifier,
genesisBlock as getGenesisBlock,
createValidDefaultBlock,
createFakeBlockHeader,
defaultAccountSchema,
} from '../../../../fixtures';
import { TokenModule } from '../../../../../src/modules';
import {
getHighestCommonBlockRequestSchema,
getHighestCommonBlockResponseSchema,
} from '../../../../../src/node/transport/schemas';
const { InMemoryChannel: ChannelMock } = jest.createMockFromModule(
'../../../../../src/controller/channels/in_memory_channel',
);
jest.mock('@liskhq/lisk-db');
describe('fast_chain_switching_mechanism', () => {
const genesisBlock = getGenesisBlock();
const finalizedHeight = genesisBlock.header.height + 1;
let finalizedBlock: Block;
let lastBlock: Block;
let bftModule: any;
let chainModule: any;
let processorModule: any;
let fastChainSwitchingMechanism: FastChainSwitchingMechanism;
let channelMock: any;
let loggerMock: any;
let networkMock: any;
let dataAccessMock;
	beforeEach(() => {
		// Plain jest.fn() stand-ins for the logger and the network module.
		loggerMock = {
			info: jest.fn(),
			debug: jest.fn(),
			error: jest.fn(),
			trace: jest.fn(),
		};
		networkMock = {
			requestFromPeer: jest.fn(),
			applyPenaltyOnPeer: jest.fn(),
		};
		channelMock = new ChannelMock();
		// KVStore is fully mocked via jest.mock('@liskhq/lisk-db') above, so no
		// real database file is opened.
		const blockchainDB = new KVStore('blockchain.db');
		chainModule = new Chain({
			networkIdentifier: defaultNetworkIdentifier,
			db: blockchainDB,
			genesisBlock,
			accountSchemas: defaultAccountSchema,
			maxPayloadLength: constants.maxPayloadLength,
			rewardDistance: constants.rewards.distance,
			rewardOffset: constants.rewards.offset,
			rewardMilestones: constants.rewards.milestones,
			blockTime: constants.blockTime,
			minFeePerByte: constants.minFeePerByte,
			baseFees: constants.baseFees,
		});
		// 103 validators => one round is 103 blocks; the "two rounds" checks in
		// the mechanism derive from this value.
		chainModule['_numberOfValidators'] = 103;
		// Replace data access with jest mocks, but keep the real encode/decode
		// helpers (bound to the original dataAccess) so codec round-trips work.
		dataAccessMock = {
			getConsensusState: jest.fn(),
			setConsensusState: jest.fn(),
			getTempBlocks: jest.fn(),
			clearTempBlocks: jest.fn(),
			getBlockHeadersWithHeights: jest.fn(),
			getBlockByID: jest.fn(),
			getBlockHeaderByHeight: jest.fn(),
			getBlockHeaderByID: jest.fn(),
			getLastBlock: jest.fn(),
			getBlockHeadersByHeightBetween: jest.fn(),
			addBlockHeader: jest.fn(),
			getLastBlockHeader: jest.fn(),
			decode: chainModule.dataAccess.decode.bind(chainModule.dataAccess),
			decodeBlockHeader: chainModule.dataAccess.decodeBlockHeader.bind(chainModule.dataAccess),
			encodeBlockHeader: chainModule.dataAccess.encodeBlockHeader.bind(chainModule.dataAccess),
			decodeTransaction: chainModule.dataAccess.decodeTransaction.bind(chainModule.dataAccess),
		};
		chainModule.dataAccess = dataAccessMock;
		bftModule = new BFT({
			chain: chainModule,
			threshold: constants.bftThreshold,
			genesisHeight: genesisBlock.header.height,
		});
		// Pin finalizedHeight to the suite-level constant (genesis height + 1).
		Object.defineProperty(bftModule, 'finalizedHeight', {
			get: jest.fn(() => finalizedHeight),
		});
		processorModule = new Processor({
			channel: channelMock,
			chainModule,
			logger: loggerMock,
			bftModule,
		});
		// processValidated only advances the in-memory last block; no real
		// state machine execution happens in these tests.
		processorModule.processValidated = jest.fn().mockImplementation(block => {
			chainModule._lastBlock = block;
		});
		processorModule.validate = jest.fn();
		processorModule.deleteLastBlock = jest.fn();
		processorModule.register(new TokenModule(constants));
		// System under test, wired to the mocks above.
		fastChainSwitchingMechanism = new FastChainSwitchingMechanism({
			logger: loggerMock,
			channel: channelMock,
			chain: chainModule,
			bft: bftModule,
			processor: processorModule,
			networkModule: networkMock,
		});
	});
describe('isValidFor', () => {
const defaultGenerator = {
address: Buffer.from('76986142c56e589a35ac2a78c64f6cc4d5df2d28', 'hex'),
publicKey: Buffer.from(
'20d381308d9a809455567af249dddd68bd2e23753e69913961fe04ac07732594',
'hex',
),
};
beforeEach(() => {
jest.spyOn(chainModule, 'getValidators');
chainModule._lastBlock = { header: { height: 310 } };
});
describe('when receivedBlock is within the two rounds of the last block', () => {
it('should return true when the receivedBlock is from consensus participant', async () => {
chainModule.getValidators.mockResolvedValue([
{
address: getAddressFromPublicKey(defaultGenerator.publicKey),
isConsensusParticipant: true,
},
]);
const isValid = await fastChainSwitchingMechanism.isValidFor(
{
header: {
generatorPublicKey: defaultGenerator.publicKey,
height: 515,
},
} as Block,
'peer-id',
);
expect(isValid).toEqual(true);
});
it('should return true when the receivedBlock is not from consensus participant', async () => {
chainModule.getValidators.mockResolvedValue([
{
address: getAddressFromPublicKey(defaultGenerator.publicKey),
isConsensusParticipant: false,
},
]);
const isValid = await fastChainSwitchingMechanism.isValidFor(
{
header: {
generatorPublicKey: defaultGenerator.publicKey,
height: 515,
},
} as Block,
'peer-id',
);
expect(isValid).toEqual(false);
});
it('should return true when the receivedBlock is not current validator', async () => {
chainModule.getValidators.mockResolvedValue([
{ address: getRandomBytes(20), isConsensusParticipant: false },
]);
const isValid = await fastChainSwitchingMechanism.isValidFor(
{
header: {
generatorPublicKey: defaultGenerator.publicKey,
height: 515,
},
} as Block,
'peer-id',
);
expect(isValid).toEqual(false);
});
});
describe('when receivedBlock is not within two rounds of the last block', () => {
it('should return false even when the block is from consensus participant', async () => {
chainModule.getValidators.mockResolvedValue([
{
address: getAddressFromPublicKey(defaultGenerator.publicKey),
isConsensusParticipant: true,
},
]);
const isValid = await fastChainSwitchingMechanism.isValidFor(
{
header: {
generatorPublicKey: defaultGenerator.publicKey,
height: 619,
},
} as Block,
'peer-id',
);
expect(isValid).toEqual(false);
});
});
});
describe('async run()', () => {
const aPeerId = '127.0.0.1:5000';
let aBlock: Block;
const checkApplyPenaltyAndAbortIsCalled = (peerId: string, err: any) => {
expect(loggerMock.info).toHaveBeenCalledWith(
{ peerId, reason: err.reason },
'Applying penalty to peer and restarting synchronizer',
);
expect(networkMock.applyPenaltyOnPeer).toHaveBeenCalledWith({
peerId,
penalty: 100,
});
};
const checkIfAbortIsCalled = (error: any) => {
expect(loggerMock.info).toHaveBeenCalledWith(
{
err: error,
reason: error.reason,
},
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Aborting synchronization mechanism with reason: ${error.reason}`,
);
};
		beforeEach(async () => {
			// Chain fixture: genesis -> finalizedBlock (at finalizedHeight) ->
			// lastBlock; aBlock is the block "received" from the peer.
			finalizedBlock = await createValidDefaultBlock({
				header: { height: finalizedHeight },
			});
			aBlock = await createValidDefaultBlock();
			// chainModule.init will check whether the genesisBlock in storage matches the genesisBlock in
			// memory. The following mock fakes this to be true
			// chainModule.init will load the last block from storage and store it in ._lastBlock variable. The following mock
			// simulates the last block in storage. So the storage has 2 blocks, the genesis block + a new one.
			lastBlock = await createValidDefaultBlock({
				header: { height: finalizedHeight + 1 },
			});
			// aBlock's generator is always an active consensus participant, so
			// validity pre-checks pass by default in these tests.
			jest.spyOn(chainModule, 'getValidators').mockResolvedValue([
				{
					address: getAddressFromPublicKey(aBlock.header.generatorPublicKey),
					isConsensusParticipant: true,
				},
			]);
			chainModule._lastBlock = lastBlock;
			when(chainModule.dataAccess.getBlockHeaderByID)
				.calledWith(genesisBlock.header.id)
				.mockResolvedValue(genesisBlock.header as never);
			when(chainModule.dataAccess.getBlockHeaderByID)
				.calledWith(finalizedBlock.header.id)
				.mockResolvedValue(finalizedBlock.header as never);
			when(chainModule.dataAccess.getLastBlock)
				.calledWith()
				.mockResolvedValue(lastBlock as never);
			when(chainModule.dataAccess.getBlockHeadersByHeightBetween)
				.calledWith(genesisBlock.header.height, lastBlock.header.height)
				.mockResolvedValue([genesisBlock, finalizedBlock, lastBlock] as never);
			when(chainModule.dataAccess.addBlockHeader)
				.calledWith(lastBlock)
				.mockResolvedValue([] as never);
			when(chainModule.dataAccess.getLastBlockHeader)
				.calledWith()
				.mockResolvedValue(lastBlock as never);
			when(chainModule.dataAccess.getBlockHeadersWithHeights)
				.calledWith([2, 1])
				.mockResolvedValue([genesisBlock.header, lastBlock.header] as never);
			// Simulate finalized height stored in ConsensusState table is 0
			// Spy on the mechanism's private phases so each test can assert
			// which phase ran and with what arguments.
			jest.spyOn(fastChainSwitchingMechanism, '_queryBlocks' as never);
			jest.spyOn(fastChainSwitchingMechanism, '_switchChain' as never);
			jest.spyOn(fastChainSwitchingMechanism, '_validateBlocks' as never);
			await chainModule.init(genesisBlock);
		});
		describe('when fail to request the common block', () => {
			it('should give up after trying 10 times, apply penalty and restart the mechanism', async () => {
				// Arrange
				const storageReturnValue = [
					{
						id: genesisBlock.header.id,
					},
					{
						id: chainModule.lastBlock.header.id,
					},
				];
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				// Simulate peer not sending back a common block
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: Buffer.alloc(0) }),
					} as never);
				// Act
				try {
					await fastChainSwitchingMechanism.run(aBlock, aPeerId);
				} catch (err) {
					// Expected Error
				}
				// Assert
				// NOTE(review): the title says "trying 10 times" but only 9 calls
				// are asserted here, while the sibling test at the bottom of this
				// file expects 10 + 1 calls — confirm the mechanism's retry count.
				expect(networkMock.requestFromPeer).toHaveBeenCalledTimes(9);
				expect(networkMock.applyPenaltyOnPeer).toHaveBeenCalledTimes(1);
				checkApplyPenaltyAndAbortIsCalled(
					aPeerId,
					new Errors.ApplyPenaltyAndAbortError(aPeerId, "Peer didn't return a common block"),
				);
			});
		});
		describe('given that the highest common block is found', () => {
			// The peer reports a common block below the finalized height: the
			// peer must be lying or far behind, so it is penalized.
			it('should apply penalty to the peer and restart syncing mechanisms if the height of the common block is smaller than the finalized height', async () => {
				// Arrange
				const storageReturnValue = [
					{
						id: genesisBlock.header.id,
					},
					{
						id: chainModule.lastBlock.header.id,
					},
				];
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				// height of the common block is smaller than the finalized height:
				const highestCommonBlock = createFakeBlockHeader({
					height: bftModule.finalizedHeight - 1,
				});
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
					} as never);
				when(chainModule.dataAccess.getBlockHeaderByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue(highestCommonBlock as never);
				// Act
				try {
					await fastChainSwitchingMechanism.run(aBlock, aPeerId);
				} catch (err) {
					// Expected error
				}
				// Assert
				checkApplyPenaltyAndAbortIsCalled(
					aPeerId,
					new Errors.ApplyPenaltyAndAbortError(
						aPeerId,
						'Common block height 0 is lower than the finalized height of the chain 1',
					),
				);
				expect(fastChainSwitchingMechanism['_queryBlocks']).toHaveBeenCalledWith(
					aBlock,
					highestCommonBlock,
					aPeerId,
				);
			});
			// Received block too far ahead of the common block: abort without
			// penalty (the peer is honest, the chains just diverged too much).
			it('should abort the syncing mechanism if the difference in height between the common block and the received block is > delegatesPerRound*2', async () => {
				// Arrange
				const storageReturnValue = [
					{
						id: genesisBlock.header.id,
					},
					{
						id: chainModule.lastBlock.header.id,
					},
				];
				// Common block between system and peer corresponds to last block in system (To make things easier)
				const highestCommonBlock = createFakeBlockHeader({
					height: chainModule.lastBlock.header.height,
				});
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
					} as never);
				when(chainModule.dataAccess.getBlockHeaderByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue(highestCommonBlock as never);
				// Act
				// the difference in height between the common block and the received block is > delegatesPerRound*2
				const receivedBlock = await createValidDefaultBlock({
					header: {
						height: highestCommonBlock.height + chainModule.numberOfValidators * 2 + 1,
					},
				});
				await fastChainSwitchingMechanism.run(receivedBlock, aPeerId);
				// Assert
				checkIfAbortIsCalled(
					new Errors.AbortError(
						`Height difference between both chains is higher than ${
							chainModule.numberOfValidators * 2
						}`,
					),
				);
				expect(fastChainSwitchingMechanism['_queryBlocks']).toHaveBeenCalledWith(
					receivedBlock,
					highestCommonBlock,
					aPeerId,
				);
			});
			// Our own last block too far ahead of the common block: same abort,
			// measured against the local chain instead of the received block.
			it('should abort the syncing mechanism if the difference in height between the common block and the last block is > delegatesPerRound*2', async () => {
				// Arrange
				const highestCommonBlock = createFakeBlockHeader({
					height: lastBlock.header.height + 1,
				});
				// Difference in height between the common block and the last block is > delegatesPerRound*2
				lastBlock = await createValidDefaultBlock({
					header: {
						height: highestCommonBlock.height + chainModule.numberOfValidators * 2 + 1,
					},
				});
				when(chainModule.dataAccess.getBlockHeaderByHeight)
					.calledWith(1)
					.mockResolvedValue(genesisBlock.header as never);
				when(chainModule.dataAccess.getLastBlock)
					.calledWith()
					.mockResolvedValue(lastBlock as never);
				when(chainModule.dataAccess.getBlockHeadersByHeightBetween)
					.calledWith(
						expect.objectContaining({
							fromHeight: expect.any(Number),
							toHeight: expect.any(Number),
						}),
					)
					.mockResolvedValue([lastBlock] as never);
				when(chainModule.dataAccess.addBlockHeader)
					.calledWith(lastBlock)
					.mockResolvedValue([lastBlock] as never);
				when(chainModule.dataAccess.getLastBlockHeader)
					.calledWith()
					.mockResolvedValue(lastBlock as never);
				when(chainModule.dataAccess.getBlockHeadersWithHeights)
					.calledWith([2, 1])
					.mockResolvedValue([genesisBlock.header, lastBlock.header] as never);
				when(chainModule.dataAccess.getBlockHeadersByHeightBetween)
					.calledWith(1, 205)
					.mockResolvedValue([lastBlock] as never);
				// Heights the mechanism will ask headers for: the last
				// min(2 rounds, chain height) blocks, newest first.
				const heightList = new Array(
					Math.min(chainModule.numberOfValidators * 2, chainModule.lastBlock.header.height),
				)
					.fill(0)
					.map((_, index) => chainModule.lastBlock.header.height - index);
				const storageReturnValue = heightList.map(height => createFakeBlockHeader({ height }));
				when(chainModule.dataAccess.getBlockHeadersWithHeights)
					.calledWith(heightList)
					.mockResolvedValue(storageReturnValue as never);
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
					} as never);
				when(chainModule.dataAccess.getBlockHeaderByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue(highestCommonBlock as never);
				// Act
				const receivedBlock = await createValidDefaultBlock({
					header: {
						height: highestCommonBlock.height + chainModule.numberOfValidators * 2 + 1,
					},
				});
				await fastChainSwitchingMechanism.run(receivedBlock, aPeerId);
				// Assert
				checkIfAbortIsCalled(
					new Errors.AbortError(
						`Height difference between both chains is higher than ${
							chainModule.numberOfValidators * 2
						}`,
					),
				);
				expect(fastChainSwitchingMechanism['_queryBlocks']).toHaveBeenCalledWith(
					receivedBlock,
					highestCommonBlock,
					aPeerId,
				);
			});
		});
		describe('request and validate blocks', () => {
			// getBlocksFromId keeps failing: the mechanism retries, then
			// penalizes the peer and restarts.
			it('should retry to request blocks for 10 times then apply penalty and restart', async () => {
				// Arrange
				const storageReturnValue = [
					{
						id: genesisBlock.header.id,
					},
					{
						id: finalizedBlock.header.id,
					},
					{
						id: chainModule.lastBlock.header.id,
					},
				];
				const highestCommonBlock = createFakeBlockHeader({
					height: finalizedBlock.header.height,
				});
				when(chainModule.dataAccess.getBlockHeadersWithHeights)
					.calledWith([2, 1])
					.mockResolvedValue(storageReturnValue as never);
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				// Common-block lookup succeeds, but every getBlocksFromId request
				// is rejected to exercise the retry loop.
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
					} as never)
					.calledWith({
						procedure: 'getBlocksFromId',
						peerId: aPeerId,
						data: expect.anything(),
					})
					.mockRejectedValue(new Error('Invalid connection') as never);
				when(chainModule.dataAccess.getBlockHeaderByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue(highestCommonBlock as never);
				// Act
				await fastChainSwitchingMechanism.run(aBlock, aPeerId);
				// Assert
				checkApplyPenaltyAndAbortIsCalled(
					aPeerId,
					new Errors.ApplyPenaltyAndAbortError(
						aPeerId,
						`Peer didn't return any requested block within IDs ${highestCommonBlock.id.toString(
							'hex',
						)} and ${aBlock.header.id.toString('hex')}`,
					),
				);
				// 10 times with getBlocksFromId and 1 time with getHighestCommonBlock
				expect(networkMock.requestFromPeer).toHaveBeenCalledTimes(11);
			});
			// Happy path up to validation: every downloaded block is validated
			// individually and in order.
			it('should request blocks within a range of IDs [commonBlock.id <-> receivedBlock.id] and validate them', async () => {
				// Arrange
				const storageReturnValue = [
					{
						id: genesisBlock.header.id,
					},
					{
						id: finalizedBlock.header.id,
					},
					{
						id: chainModule.lastBlock.header.id,
					},
				];
				const highestCommonBlock = createFakeBlockHeader({
					height: finalizedBlock.header.height,
				});
				// 36 blocks: one chained to the common block, 34 fillers, then
				// the received block itself.
				const requestedBlocks = [
					await createValidDefaultBlock({
						header: {
							height: highestCommonBlock.height + 1,
							previousBlockID: highestCommonBlock.id,
						},
					}),
					...(await Promise.all(new Array(34).fill(0).map(async () => createValidDefaultBlock()))),
					aBlock,
				];
				fastChainSwitchingMechanism['_requestBlocksWithinIDs'] = jest
					.fn()
					.mockResolvedValue(requestedBlocks);
				when(chainModule.dataAccess.getBlockHeadersWithHeights)
					.calledWith([2, 1])
					.mockResolvedValue(storageReturnValue as never);
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
					} as never);
				when(chainModule.dataAccess.getBlockHeaderByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue(highestCommonBlock as never);
				when(processorModule.deleteLastBlock)
					.calledWith({
						saveTempBlock: true,
					})
					.mockImplementation(() => {
						chainModule._lastBlock = genesisBlock;
					});
				when(chainModule.dataAccess.getBlockByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue({
						header: highestCommonBlock,
						payload: [],
					} as never);
				// Act
				await fastChainSwitchingMechanism.run(aBlock, aPeerId);
				// Assert
				for (const block of requestedBlocks) {
					expect(processorModule.validate).toHaveBeenCalledWith(block);
					expect(loggerMock.trace).toHaveBeenCalledWith(
						{ blockId: block.header.id, height: block.header.height },
						'Validating block',
					);
				}
				expect(loggerMock.debug).toHaveBeenCalledWith('Successfully validated blocks');
				expect(fastChainSwitchingMechanism['_validateBlocks']).toHaveBeenCalledWith(
					requestedBlocks,
					aPeerId,
				);
			});
			// Validation failure: the peer supplied bad blocks, so it is
			// penalized and the mechanism aborts.
			it('should apply penalty and abort if any of the blocks fail to validate', async () => {
				// Arrange
				const storageReturnValue = [
					{
						id: genesisBlock.header.id,
					},
					{
						id: chainModule.lastBlock.header.id,
					},
				];
				const highestCommonBlock = createFakeBlockHeader({
					height: finalizedBlock.header.height,
				});
				const requestedBlocks = [
					await createValidDefaultBlock({
						header: {
							height: highestCommonBlock.height + 1,
							previousBlockID: highestCommonBlock.id,
						},
					}),
					...(await Promise.all(new Array(34).fill(0).map(async () => createValidDefaultBlock()))),
					aBlock,
				];
				fastChainSwitchingMechanism['_requestBlocksWithinIDs'] = jest
					.fn()
					.mockResolvedValue(requestedBlocks);
				const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
					ids: storageReturnValue.map(blocks => blocks.id),
				});
				when(networkMock.requestFromPeer)
					.calledWith({
						procedure: 'getHighestCommonBlock',
						peerId: aPeerId,
						data: blockIds,
					})
					.mockResolvedValue({
						data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
					} as never);
				when(chainModule.dataAccess.getBlockHeaderByID)
					.calledWith(highestCommonBlock.id)
					.mockResolvedValue(highestCommonBlock as never);
				// Make every validation attempt throw.
				processorModule.validate.mockImplementation(() => {
					throw new Error('validation error');
				});
				// Act
				try {
					await fastChainSwitchingMechanism.run(aBlock, aPeerId);
				} catch (err) {
					// Expected error
				}
				// Assert
				checkApplyPenaltyAndAbortIsCalled(
					aPeerId,
					new Errors.ApplyPenaltyAndAbortError(aPeerId, 'Block validation failed'),
				);
				expect(fastChainSwitchingMechanism['_validateBlocks']).toHaveBeenCalledWith(
					requestedBlocks,
					aPeerId,
				);
			});
		});
describe('switch to a different chain', () => {
it('should switch to a different chain (apply list of blocks returned by the peer) and cleanup blocks temp table', async () => {
// Arrange
const storageReturnValue = [
{
id: genesisBlock.header.id,
},
{ id: finalizedBlock.header.id },
{
id: chainModule.lastBlock.header.id,
},
];
const highestCommonBlock = createFakeBlockHeader({
height: finalizedBlock.header.height,
});
const requestedBlocks = [
await createValidDefaultBlock({
header: {
height: highestCommonBlock.height + 1,
previousBlockID: highestCommonBlock.id,
},
}),
...(await Promise.all(new Array(34).fill(0).map(async () => createValidDefaultBlock()))),
aBlock,
];
fastChainSwitchingMechanism['_requestBlocksWithinIDs'] = jest
.fn()
.mockResolvedValue(requestedBlocks);
const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
ids: storageReturnValue.map(blocks => blocks.id),
});
when(networkMock.requestFromPeer)
.calledWith({
procedure: 'getHighestCommonBlock',
peerId: aPeerId,
data: blockIds,
})
.mockResolvedValue({
data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
} as never);
when(chainModule.dataAccess.getBlockHeaderByID)
.calledWith(highestCommonBlock.id)
.mockResolvedValue(highestCommonBlock as never);
when(processorModule.deleteLastBlock)
.calledWith({
saveTempBlock: true,
})
.mockImplementation(() => {
chainModule._lastBlock = genesisBlock;
});
when(chainModule.dataAccess.getBlockHeadersWithHeights)
.calledWith([2, 1])
.mockResolvedValue(storageReturnValue as never);
when(chainModule.dataAccess.getBlockByID)
.calledWith(highestCommonBlock.id)
.mockResolvedValue({
header: highestCommonBlock,
payload: [],
} as never);
// Act
await fastChainSwitchingMechanism.run(aBlock, aPeerId);
// Assert
expect(fastChainSwitchingMechanism['_switchChain']).toHaveBeenCalledWith(
highestCommonBlock,
requestedBlocks,
aPeerId,
);
expect(loggerMock.info).toHaveBeenCalledWith('Switching chain');
expect(loggerMock.debug).toHaveBeenCalledWith(
{ height: highestCommonBlock.height },
`Deleting blocks after height ${highestCommonBlock.height}`,
);
expect(processorModule.deleteLastBlock).toHaveBeenCalledWith({
saveTempBlock: true,
});
expect(processorModule.deleteLastBlock).toHaveBeenCalledTimes(1);
expect(loggerMock.debug).toHaveBeenCalledWith(
{
blocks: requestedBlocks.map(block => ({
blockId: block.header.id,
height: block.header.height,
})),
},
'Applying blocks',
);
for (const block of requestedBlocks) {
expect(loggerMock.trace).toHaveBeenCalledWith(
{
blockId: block.header.id,
height: block.header.height,
},
'Applying blocks',
);
expect(processorModule.processValidated).toHaveBeenCalledWith(block);
expect(loggerMock.debug).toHaveBeenCalledWith('Cleaning blocks temp table');
expect(chainModule.dataAccess.clearTempBlocks).toHaveBeenCalled();
expect(loggerMock.info).toHaveBeenCalledWith(
{
currentHeight: chainModule.lastBlock.header.height,
highestCommonBlockHeight: highestCommonBlock.height,
},
'Successfully switched chains. Node is now up to date',
);
}
});
it('should delete blocks after highest common block height, restore blocks from temp table and cleanup temp table if any of the blocks returned by peer fails to apply', async () => {
// Arrange
const storageReturnValue = [
{
id: genesisBlock.header.id,
},
{
id: finalizedBlock.header.id,
},
{
id: chainModule.lastBlock.header.id,
},
];
const highestCommonBlock = createFakeBlockHeader({
height: finalizedBlock.header.height,
});
const requestedBlocks = [
await createValidDefaultBlock({
header: {
height: highestCommonBlock.height + 1,
previousBlockID: highestCommonBlock.id,
},
}),
...(await Promise.all(new Array(34).fill(0).map(async () => createValidDefaultBlock()))),
aBlock,
];
fastChainSwitchingMechanism['_requestBlocksWithinIDs'] = jest
.fn()
.mockResolvedValue(requestedBlocks);
const blockIds = codec.encode(getHighestCommonBlockRequestSchema, {
ids: storageReturnValue.map(blocks => blocks.id),
});
when(networkMock.requestFromPeer)
.calledWith({
procedure: 'getHighestCommonBlock',
peerId: aPeerId,
data: blockIds,
})
.mockResolvedValue({
data: codec.encode(getHighestCommonBlockResponseSchema, { id: highestCommonBlock.id }),
} as never);
when(chainModule.dataAccess.getBlockHeaderByID)
.calledWith(highestCommonBlock.id)
.mockResolvedValue(highestCommonBlock as never);
when(chainModule.dataAccess.getBlockHeadersWithHeights)
.calledWith([2, 1])
.mockResolvedValue(storageReturnValue as never);
when(chainModule.dataAccess.getBlockByID)
.calledWith(highestCommonBlock.id)
.mockResolvedValue(highestCommonBlock as never);
processorModule.deleteLastBlock.mockImplementation(async () => {
chainModule._lastBlock = await createValidDefaultBlock({
header: { height: chainModule._lastBlock.header.height - 1 },
});
});
const blocksInTempTable = [chainModule.lastBlock];
chainModule.dataAccess.getTempBlocks.mockResolvedValue(blocksInTempTable);
const processingError = new Errors.BlockProcessingError();
processorModule.processValidated.mockRejectedValueOnce(processingError);
// Act
try {
await fastChainSwitchingMechanism.run(aBlock, aPeerId);
} catch (err) {
// Expected error
}
// Assert
expect(fastChainSwitchingMechanism['_switchChain']).toHaveBeenCalledWith(
highestCommonBlock,
requestedBlocks,
aPeerId,
);
expect(processorModule.processValidated).toHaveBeenCalled();
expect(loggerMock.error).toHaveBeenCalledWith(
{ err: processingError },
'Error while processing blocks',
);
expect(loggerMock.debug).toHaveBeenCalledWith(
{
height: highestCommonBlock.height,
},
'Deleting blocks after height',
);
expect(loggerMock.debug).toHaveBeenCalledWith('Restoring blocks from temporary table');
expect(loggerMock.debug).toHaveBeenCalledWith('Cleaning blocks temp table');
// Restore blocks from temp table:
expect(processorModule.processValidated).toHaveBeenCalledWith(blocksInTempTable[0], {
removeFromTempTable: true,
});
// Clear temp table:
expect(chainModule.dataAccess.clearTempBlocks).toHaveBeenCalled();
});
});
});
}); | the_stack |
import { Component } from "react"
import * as React from "react"
import { createFragmentContainer, graphql } from "react-relay"
import { Title } from "react-head"
import { Router, Match } from "found"
import {
Button,
Flex,
Join,
Message,
Text,
Spacer,
media,
} from "@artsy/palette"
import styled from "styled-components"
import { RouterLink } from "v2/System/Router/RouterLink"
import { TransactionDetailsSummaryItemFragmentContainer as TransactionDetailsSummaryItem } from "v2/Apps/Order/Components/TransactionDetailsSummaryItem"
import { TwoColumnLayout } from "v2/Apps/Order/Components/TwoColumnLayout"
import createLogger from "v2/Utils/logger"
import { ArtworkSummaryItemFragmentContainer as ArtworkSummaryItem } from "../../Components/ArtworkSummaryItem"
import { CreditCardSummaryItemFragmentContainer as CreditCardSummaryItem } from "../../Components/CreditCardSummaryItem"
import { ShippingSummaryItemFragmentContainer as ShippingSummaryItem } from "../../Components/ShippingSummaryItem"
import { SystemContextConsumer } from "v2/System/SystemContext"
import { Status_order } from "v2/__generated__/Status_order.graphql"
const logger = createLogger("Order/Routes/Status/index.tsx")
interface StatusPageConfig {
title: React.ReactNode
description: React.ReactNode
// default showTransactionSummary is true
showTransactionSummary?: boolean
}
interface ShipmentData {
shipperName: string | null
trackingId: string | null
trackingUrl: string | null
estimatedDelivery: string | null
}
export interface StatusProps {
order: Status_order
router: Router
match: Match
}
export class StatusRoute extends Component<StatusProps> {
getStatusCopy(): StatusPageConfig {
const {
displayState,
state,
requestedFulfillment,
mode,
stateReason,
stateExpiresAt,
} = this.props.order
const isOfferFlow = mode === "OFFER"
const isPickup = requestedFulfillment?.__typename === "CommercePickup"
const isArtaShipped: boolean =
requestedFulfillment?.__typename === "CommerceShipArta"
switch (displayState) {
case "SUBMITTED":
return isOfferFlow
? {
title: "Your offer has been submitted",
description: (
<>
The seller will respond to your offer by {stateExpiresAt}.
Keep in mind making an offer doesn’t guarantee you the work.
</>
),
}
: {
title: "Your order has been submitted",
description: (
<>
Thank you for your purchase. You will receive a confirmation
email by {stateExpiresAt}.{this.covidNote()}
</>
),
}
case "APPROVED":
return {
title: this.approvedTitle(isOfferFlow),
description: isPickup ? (
<>
Thank you for your purchase. A specialist will contact you within
2 business days to coordinate pickup.
{this.covidNote()}
</>
) : (
<>
Thank you for your purchase. You will be notified when the work
has shipped, typically within 5–7 business days.
{this.covidNote()}
</>
),
}
case "PROCESSING":
return {
title: this.approvedTitle(isOfferFlow),
description: (
<>
Thank you for your purchase. {this.deliverText()}More delivery
information will be available once your order ships.
{this.covidNote()}
</>
),
}
case "IN_TRANSIT":
return {
title: "Your order has shipped",
description: this.shipmentDescription(isArtaShipped, false),
}
case "FULFILLED": {
return isPickup
? {
title: "Your order has been picked up",
description: null,
}
: {
title: isArtaShipped
? "Your order is complete"
: "Your order has shipped",
description: this.shipmentDescription(isArtaShipped, true),
}
}
case "CANCELED":
case "REFUNDED":
if (!isOfferFlow || state === "REFUNDED" || stateReason === null) {
// stateReason === null for offer orders only if the order was rejected
// after the offer was accepted.
return {
title: "Your order was canceled and refunded",
description: (
<>
Please allow 5–7 business days for the refund to appear on your
bank statement. Contact{" "}
<a href="mailto:orders@artsy.net">orders@artsy.net</a> with any
questions.
</>
),
}
}
// otherwise this was an offer order that was rejected before being
// accepted
return this.canceledOfferOrderCopy()
default:
// This should not happen. Check the order displayState here:
// https://github.com/artsy/exchange/blob/master/app/models/order.rb
// (Aside from PENDING and ABANDONED)
logger.error(`Unhandled order state: ${displayState} in ${state} state`)
return {
title: "Your order",
description: null,
}
}
}
covidNote(): React.ReactNode {
return (
<>
<Spacer mb={1} />
Disruptions caused by COVID-19 may cause delays — we appreciate your
understanding.
</>
)
}
approvedTitle(isOferFlow): string {
return isOferFlow ? "Offer accepted" : "Your order is confirmed"
}
deliverText(): React.ReactNode {
const selectedShipping = this.props.order?.lineItems?.edges?.[0]?.node
?.selectedShippingQuote?.displayName
let daysToDeliver: string | null = null
switch (selectedShipping) {
case "Rush":
daysToDeliver = "1 business day "
break
case "Express":
daysToDeliver = "2 business days"
break
case "Standard":
daysToDeliver = "3-5 business days"
}
return daysToDeliver
? `Your order will be delivered in ${daysToDeliver} once shipped, plus up to 7 days processing time. `
: null
}
trackingInfo(trackingId, trackingUrl): React.ReactNode | null {
const node = trackingUrl ? (
<RouterLink to={trackingUrl} target="_blank">
{trackingId ? trackingId : "info"}
</RouterLink>
) : (
trackingId
)
return (
<>
Tracking: {node}
<Spacer mb={1} />
</>
)
}
shipmentDescription(
isArtaShipped: boolean,
isDelivered: boolean
): React.ReactNode {
const shipmentData: ShipmentData | null = this.getShipmentInfo()
if (!shipmentData) {
return null
}
const hasTrackingInfo =
shipmentData.trackingId?.length || shipmentData.trackingUrl?.length
return (
<>
{isArtaShipped && isDelivered
? "Your order has been delivered."
: "Your work is on its way."}
{isArtaShipped &&
!hasTrackingInfo &&
!isDelivered &&
" " +
"Our delivery provider will call you to provide a delivery window when it arrives in your area."}
<Spacer mb={2} />
{shipmentData.shipperName && (
<>
Shipper: {shipmentData.shipperName}
<Spacer mb={1} />
</>
)}
{hasTrackingInfo &&
this.trackingInfo(shipmentData.trackingId, shipmentData.trackingUrl)}
{shipmentData.estimatedDelivery && (
<>
{isArtaShipped && isDelivered
? "Delivery date:"
: "Estimated delivery:"}{" "}
{shipmentData.estimatedDelivery}
</>
)}
</>
)
}
getShipmentInfo(): ShipmentData | null {
const fulfillment = this.props.order?.lineItems?.edges?.[0]?.node
?.fulfillments?.edges?.[0]?.node
const shipment = this.props.order?.lineItems?.edges?.[0]?.node?.shipment
if (!fulfillment && !shipment) return null
return {
shipperName: shipment?.carrierName || fulfillment?.courier || null,
trackingId: shipment?.trackingNumber || fulfillment?.trackingId || null,
trackingUrl: shipment?.trackingUrl || null,
estimatedDelivery:
shipment?.estimatedDeliveryWindow ||
fulfillment?.estimatedDelivery ||
null,
}
}
canceledOfferOrderCopy(): StatusPageConfig {
const { stateReason } = this.props.order
switch (stateReason) {
case "buyer_rejected":
return {
title: "Offer declined",
description: (
<>
Thank you for your response. The seller will be informed of your
decision to end the negotiation process.
<Spacer mb={2} />
We’d love to get your feedback. Contact{" "}
<a href="mailto:orders@artsy.net">orders@artsy.net</a> with any
comments you have.
</>
),
showTransactionSummary: false,
}
case "seller_rejected_offer_too_low":
case "seller_rejected_shipping_unavailable":
case "seller_rejected":
case "seller_rejected_artwork_unavailable":
case "seller_rejected_other":
return {
title: "Offer declined",
description: (
<>
Sorry, the seller declined your offer and has ended the
negotiation process.
</>
),
showTransactionSummary: false,
}
case "buyer_lapsed":
return {
title: "Offer expired",
description: (
<>The seller’s offer expired because you didn’t respond in time.</>
),
showTransactionSummary: false,
}
case "seller_lapsed":
return {
title: "Offer expired",
description: (
<>
Your offer expired because the seller didn’t respond to your offer
in time.
</>
),
showTransactionSummary: false,
}
default:
// This should not happen. Check the cancel reasons are all accounted for:
// https://github.com/artsy/exchange/blob/master/app/models/order.rb
logger.error(`Unhandled cancellation reason: ${stateReason}`)
return {
title: "Offer declined",
description: null,
showTransactionSummary: false,
}
}
}
shouldButtonDisplay(): React.ReactNode | null {
const {
match,
order: { stateReason },
} = this.props
const isModal = !!match?.location.query.isModal
const declinedStatuses = [
"buyer_rejected",
"seller_rejected_offer_too_low",
"seller_rejected_shipping_unavailable",
"seller_rejected",
"seller_rejected_artwork_unavailable",
"seller_rejected_other",
]
const isDeclined = declinedStatuses.includes(stateReason!)
if (isModal || isDeclined) {
return null
}
return (
// @ts-ignore
<Button as={RouterLink} to="/" variant="primaryBlack" width="100%">
Back to Artsy
</Button>
)
}
render() {
const { order } = this.props
const flowName = order.mode === "OFFER" ? "Offer" : "Order"
const {
title,
description,
showTransactionSummary = true,
} = this.getStatusCopy()
const showOfferNote = order.mode === "OFFER" && order.state === "SUBMITTED"
return (
<SystemContextConsumer>
{({ isEigen }) => {
return (
<>
<Text variant="lg" fontWeight="regular" color="black100">
{title}
</Text>
<Text
variant="xs"
fontWeight="regular"
color="black60"
mb={[2, 4]}
>
{flowName} <span data-test="OrderCode">#{order.code}</span>
</Text>
<TwoColumnLayout
Content={
<>
<Title>{flowName} status | Artsy</Title>
<Join separator={<Spacer mb={[2, 4]} />}>
{description && (
<Message p={[2, 4]}>{description}</Message>
)}
{showTransactionSummary ? (
<Flex flexDirection="column">
<ArtworkSummaryItem order={order} />
<StyledTransactionDetailsSummaryItem
order={order}
useLastSubmittedOffer
showOfferNote={showOfferNote}
showCongratulationMessage={
order.state === "SUBMITTED"
}
/>
</Flex>
) : (
isEigen && this.shouldButtonDisplay()
)}
</Join>
</>
}
Sidebar={
showTransactionSummary && (
<Flex flexDirection="column">
<Flex flexDirection="column">
<StyledShippingSummaryItem order={order} />
<CreditCardSummaryItem order={order} />
</Flex>
</Flex>
)
}
/>
</>
)
}}
</SystemContextConsumer>
)
}
}
const StyledShippingSummaryItem = styled(ShippingSummaryItem)`
${media.xs`
&&& {
border-top-left-radius: 0;
border-top-right-radius: 0;
}
`};
`
const StyledTransactionDetailsSummaryItem = styled(
TransactionDetailsSummaryItem
)`
${media.xs`
&&& {
border-bottom: none;
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
}
`};
`
export const StatusFragmentContainer = createFragmentContainer(StatusRoute, {
order: graphql`
fragment Status_order on CommerceOrder {
__typename
internalID
code
displayState
state
mode
stateReason
stateExpiresAt(format: "MMM D")
requestedFulfillment {
... on CommerceShip {
__typename
}
... on CommercePickup {
__typename
}
... on CommerceShipArta {
__typename
}
}
...ArtworkSummaryItem_order
...TransactionDetailsSummaryItem_order
...ShippingSummaryItem_order
...CreditCardSummaryItem_order
lineItems {
edges {
node {
shipment {
trackingNumber
trackingUrl
carrierName
estimatedDeliveryWindow
}
selectedShippingQuote {
displayName
}
fulfillments {
edges {
node {
courier
trackingId
estimatedDelivery(format: "MMM Do, YYYY")
}
}
}
}
}
}
... on CommerceOfferOrder {
myLastOffer {
internalID
amount(precision: 2)
amountCents
shippingTotal(precision: 2)
shippingTotalCents
taxTotal(precision: 2)
taxTotalCents
}
}
}
`,
}) | the_stack |
* This file was automatically generated by https://github.com/Bolisov/google-api-typings-generator. Please do not edit it manually.
* In case of any problems please post issue to https://github.com/Bolisov/google-api-typings-generator
**/
gapi.load('client', () => {
/** now we can use gapi.client */
gapi.client.load('sqladmin', 'v1beta4', () => {
/** now we can use gapi.client.sqladmin */
/** don't forget to authenticate your client before sending any request to resources: */
/** declare client_id registered in Google Developers Console */
const client_id = '<<PUT YOUR CLIENT ID HERE>>';
const scope = [
/** View and manage your data across Google Cloud Platform services */
'https://www.googleapis.com/auth/cloud-platform',
/** Manage your Google SQL Service instances */
'https://www.googleapis.com/auth/sqlservice.admin',
];
const immediate = true;
gapi.auth.authorize({ client_id, scope, immediate }, authResult => {
if (authResult && !authResult.error) {
/** handle succesfull authorization */
run();
} else {
/** handle authorization error */
}
});
run();
});
async function run() {
/** Deletes the backup taken by a backup run. */
await gapi.client.backupRuns.delete({
id: "id",
instance: "instance",
project: "project",
});
/** Retrieves a resource containing information about a backup run. */
await gapi.client.backupRuns.get({
id: "id",
instance: "instance",
project: "project",
});
/** Creates a new backup run on demand. This method is applicable only to Second Generation instances. */
await gapi.client.backupRuns.insert({
instance: "instance",
project: "project",
});
/** Lists all backup runs associated with a given instance and configuration in the reverse chronological order of the enqueued time. */
await gapi.client.backupRuns.list({
instance: "instance",
maxResults: 2,
pageToken: "pageToken",
project: "project",
});
/** Deletes a database from a Cloud SQL instance. */
await gapi.client.databases.delete({
database: "database",
instance: "instance",
project: "project",
});
/** Retrieves a resource containing information about a database inside a Cloud SQL instance. */
await gapi.client.databases.get({
database: "database",
instance: "instance",
project: "project",
});
/** Inserts a resource containing information about a database inside a Cloud SQL instance. */
await gapi.client.databases.insert({
instance: "instance",
project: "project",
});
/** Lists databases in the specified Cloud SQL instance. */
await gapi.client.databases.list({
instance: "instance",
project: "project",
});
/** Updates a resource containing information about a database inside a Cloud SQL instance. This method supports patch semantics. */
await gapi.client.databases.patch({
database: "database",
instance: "instance",
project: "project",
});
/** Updates a resource containing information about a database inside a Cloud SQL instance. */
await gapi.client.databases.update({
database: "database",
instance: "instance",
project: "project",
});
/** List all available database flags for Google Cloud SQL instances. */
await gapi.client.flags.list({
databaseVersion: "databaseVersion",
});
/** Creates a Cloud SQL instance as a clone of the source instance. The API is not ready for Second Generation instances yet. */
await gapi.client.instances.clone({
instance: "instance",
project: "project",
});
/** Deletes a Cloud SQL instance. */
await gapi.client.instances.delete({
instance: "instance",
project: "project",
});
/** Exports data from a Cloud SQL instance to a Google Cloud Storage bucket as a MySQL dump file. */
await gapi.client.instances.export({
instance: "instance",
project: "project",
});
/** Failover the instance to its failover replica instance. */
await gapi.client.instances.failover({
instance: "instance",
project: "project",
});
/** Retrieves a resource containing information about a Cloud SQL instance. */
await gapi.client.instances.get({
instance: "instance",
project: "project",
});
/** Imports data into a Cloud SQL instance from a MySQL dump file in Google Cloud Storage. */
await gapi.client.instances.import({
instance: "instance",
project: "project",
});
/** Creates a new Cloud SQL instance. */
await gapi.client.instances.insert({
project: "project",
});
/** Lists instances under a given project in the alphabetical order of the instance name. */
await gapi.client.instances.list({
filter: "filter",
maxResults: 2,
pageToken: "pageToken",
project: "project",
});
/**
* Updates settings of a Cloud SQL instance. Caution: This is not a partial update, so you must include values for all the settings that you want to
* retain. For partial updates, use patch.. This method supports patch semantics.
*/
await gapi.client.instances.patch({
instance: "instance",
project: "project",
});
/** Promotes the read replica instance to be a stand-alone Cloud SQL instance. */
await gapi.client.instances.promoteReplica({
instance: "instance",
project: "project",
});
/**
* Deletes all client certificates and generates a new server SSL certificate for the instance. The changes will not take effect until the instance is
* restarted. Existing instances without a server certificate will need to call this once to set a server certificate.
*/
await gapi.client.instances.resetSslConfig({
instance: "instance",
project: "project",
});
/** Restarts a Cloud SQL instance. */
await gapi.client.instances.restart({
instance: "instance",
project: "project",
});
/** Restores a backup of a Cloud SQL instance. */
await gapi.client.instances.restoreBackup({
instance: "instance",
project: "project",
});
/** Starts the replication in the read replica instance. */
await gapi.client.instances.startReplica({
instance: "instance",
project: "project",
});
/** Stops the replication in the read replica instance. */
await gapi.client.instances.stopReplica({
instance: "instance",
project: "project",
});
/** Truncate MySQL general and slow query log tables */
await gapi.client.instances.truncateLog({
instance: "instance",
project: "project",
});
/**
* Updates settings of a Cloud SQL instance. Caution: This is not a partial update, so you must include values for all the settings that you want to
* retain. For partial updates, use patch.
*/
await gapi.client.instances.update({
instance: "instance",
project: "project",
});
/** Retrieves an instance operation that has been performed on an instance. */
await gapi.client.operations.get({
operation: "operation",
project: "project",
});
/** Lists all instance operations that have been performed on the given Cloud SQL instance in the reverse chronological order of the start time. */
await gapi.client.operations.list({
instance: "instance",
maxResults: 2,
pageToken: "pageToken",
project: "project",
});
/**
* Generates a short-lived X509 certificate containing the provided public key and signed by a private key specific to the target instance. Users may use
* the certificate to authenticate as themselves when connecting to the database.
*/
await gapi.client.sslCerts.createEphemeral({
instance: "instance",
project: "project",
});
/** Deletes the SSL certificate. The change will not take effect until the instance is restarted. */
await gapi.client.sslCerts.delete({
instance: "instance",
project: "project",
sha1Fingerprint: "sha1Fingerprint",
});
/**
* Retrieves a particular SSL certificate. Does not include the private key (required for usage). The private key must be saved from the response to
* initial creation.
*/
await gapi.client.sslCerts.get({
instance: "instance",
project: "project",
sha1Fingerprint: "sha1Fingerprint",
});
/**
* Creates an SSL certificate and returns it along with the private key and server certificate authority. The new certificate will not be usable until the
* instance is restarted.
*/
await gapi.client.sslCerts.insert({
instance: "instance",
project: "project",
});
/** Lists all of the current SSL certificates for the instance. */
await gapi.client.sslCerts.list({
instance: "instance",
project: "project",
});
/** Lists all available service tiers for Google Cloud SQL, for example D1, D2. For related information, see Pricing. */
await gapi.client.tiers.list({
project: "project",
});
/** Deletes a user from a Cloud SQL instance. */
await gapi.client.users.delete({
host: "host",
instance: "instance",
name: "name",
project: "project",
});
/** Creates a new user in a Cloud SQL instance. */
await gapi.client.users.insert({
instance: "instance",
project: "project",
});
/** Lists users in the specified Cloud SQL instance. */
await gapi.client.users.list({
instance: "instance",
project: "project",
});
/** Updates an existing user in a Cloud SQL instance. */
await gapi.client.users.update({
host: "host",
instance: "instance",
name: "name",
project: "project",
});
}
}); | the_stack |
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import ServicePaths from '../services/service.paths';
import Logger from '../tools/env.logger';
import ServicePackage from './service.package';
import ServiceElectron, { Subscription, IPCMessages } from './service.electron';
import ServiceProduction from './service.production';
import GitHubClient from '../tools/env.github.client';
import ServiceSettings from './service.settings';
import { IReleaseAsset, IReleaseData } from '../tools/env.github.client';
import { getPlatform, EPlatforms } from '../tools/env.os';
import { IService } from '../interfaces/interface.service';
import { StandardBoolean } from '../controllers/settings/settings.standard.boolean';
import { IApplication, EExitCodes } from '../interfaces/interface.app';
import { sequences } from '../tools/sequences';
import { Entry, ESettingType } from '../../../common/settings/field.store';
const CHooks = {
alias: '<alias>',
version: '<version>',
platform: '<platform>',
};
const CReleaseNameAliases = [ 'logviewer', 'chipmunk' ];
const CAssetFilePattern = `${CHooks.alias}@${CHooks.version}-${CHooks.platform}-portable.tgz`;
const CSettings: {
repo: string,
} = {
repo: 'chipmunk',
};
/**
* @class ServiceUpdate
* @description Log information about state of application
*/
class ServiceUpdate implements IService {
private _logger: Logger = new Logger('ServiceUpdate');
private _target: string | undefined;
private _tgzfile: string | undefined;
private _subscription: { [key: string]: Subscription } = {};
private _app: IApplication | undefined;
private _settings: {
updates: StandardBoolean,
} = {
updates: new StandardBoolean({
key: 'ApplicationUpdates',
name: 'Automatically update chipmunk',
desc: 'Update chipmunk automatically if update is available',
path: 'general.updates',
type: ESettingType.standard },
'ApplicationUpdates',
),
};
/**
* Initialization function
* @returns Promise<void>
*/
public init(app: IApplication): Promise<void> {
return new Promise((resolve, reject) => {
if (app === undefined) {
return reject(new Error(`Instance of main process is required.`));
}
this._app = app;
Promise.all([
ServiceElectron.IPC.subscribe(ServiceElectron.IPCMessages.RenderState, this._onRenderState.bind(this)).then((subscription: Subscription) => {
this._subscription.RenderState = subscription;
}),
ServiceElectron.IPC.subscribe(ServiceElectron.IPCMessages.UpdateRequest, this._onUpdateRequest.bind(this)).then((subscription: Subscription) => {
this._subscription.UpdateRequest = subscription;
}),
]).catch((error: Error) => {
this._logger.warn(`Fail to make subscriptions to due error: ${error.message}`);
}).finally(() => {
this._setSettings().catch((settingErr: Error) => {
this._logger.warn(`Fail to register settings due error: ${settingErr.message}`);
}).finally(() => {
resolve();
});
});
});
}
public destroy(): Promise<void> {
return new Promise((resolve) => {
resolve();
});
}
public getName(): string {
return 'ServiceUpdate';
}
private _setSettings(): Promise<void> {
return new Promise((resolve, reject) => {
sequences([
ServiceSettings.register.bind(ServiceSettings, new Entry({ key: 'general', name: 'General', desc: 'General setting of chipmunk', path: '', type: ESettingType.standard })),
ServiceSettings.register.bind(ServiceSettings, new Entry({ key: 'updates', name: 'Updates', desc: 'Configure application update workflow', path: 'general', type: ESettingType.standard })),
]).then(() => {
Promise.all([
ServiceSettings.register(this._settings.updates).catch((regErr: Error) => {
this._logger.error(regErr.message);
}),
]).then(() => {
resolve();
}).catch((error: Error) => {
reject(error);
});
}).catch((structErr: Error) => {
reject(structErr);
});
});
}
private _check() {
if (!this._settings.updates.get()) {
// Autoupdate is off
return;
}
if (!ServiceProduction.isProduction()) {
// In dev mode do not check for updates
this._logger.debug(`Updates would not check becuase developing mode is ON`);
return;
}
GitHubClient.getReleases({ repo: CSettings.repo }).then((releases: IReleaseData[]) => {
const current: string | undefined = ServicePackage.get().version;
if (typeof current !== 'string' || current.trim() === '') {
return this._logger.warn(`Fail to detect version of current app.`);
}
let latest: string = current;
let info: IReleaseData | undefined;
releases.forEach((release: IReleaseData) => {
if (this._isVersionNewer(latest, release.name)) {
latest = release.name;
info = release;
}
});
if (info === undefined) {
// No update found
this._logger.debug(`Current version "${current}" is newest, no update needed.`);
return;
}
this._logger.debug(`New version is released...: ${info.name}`);
const targets: string[] | Error = this._getAssetFileName(latest);
this._logger.debug(`Asset file names:...: ${info.name}`);
if (targets instanceof Error) {
return this._logger.warn(`Fail to get targets due error: ${targets.message}`);
}
let compressedFile: string | undefined;
info.assets.forEach((asset: IReleaseAsset) => {
if (targets.indexOf(asset.name) !== -1) {
compressedFile = asset.name;
this._logger.debug(`package to download:...: ${compressedFile}`);
}
});
if (compressedFile === undefined) {
return this._logger.warn(`Fail to find archive-file with release for current platform.`);
}
this._target = latest;
const file: string = path.resolve(ServicePaths.getDownloads(), compressedFile);
if (fs.existsSync(file)) {
// File was already downloaded
this._logger.debug(`File was already downloaded "${file}". latest: ${latest}.`);
this._tgzfile = file;
this._notify(latest);
} else {
this._logger.debug(`Found new version "${latest}". Starting downloading: ${compressedFile}.`);
GitHubClient.download({
repo: CSettings.repo,
}, {
version: latest,
name: compressedFile,
dest: ServicePaths.getDownloads(),
}).then((_tgzfile: string) => {
this._tgzfile = _tgzfile;
this._notify(latest);
this._logger.debug(`File ${compressedFile} is downloaded into: ${_tgzfile}.`);
}).catch((downloadError: Error) => {
this._logger.error(`Fail to download "${compressedFile}" due error: ${downloadError.message}`);
});
}
}).catch((gettingReleasesError: Error) => {
this._logger.warn(`Fail to get releases list due error: ${gettingReleasesError.message}`);
});
}
private _notify(version: string) {
ServiceElectron.IPC.send(new ServiceElectron.IPCMessages.Notification({
caption: `Update`,
message: `New version of chipmunk "${version}" is available.`,
type: ServiceElectron.IPCMessages.Notification.Types.info,
session: '*',
actions: [
{
type: ServiceElectron.IPCMessages.ENotificationActionType.ipc,
value: 'UpdateRequest',
caption: 'Update',
},
],
})).catch((error: Error) => {
this._logger.warn(`Fail send Notification due error: ${error.message}`);
});
}
private _getAssetFileName(version: string): string[] | Error {
const platform: EPlatforms = getPlatform();
if (platform === EPlatforms.undefined) {
return new Error(`Fail to detect supported platform for (${os.platform()}).`);
}
return CReleaseNameAliases.map((alias: string) => {
const pattern = CAssetFilePattern;
return pattern.replace(CHooks.alias, alias).replace(CHooks.version, version).replace(CHooks.platform, platform);
});
}
private _versplit(version: string): number[] {
return version.split('.').map((part: string) => {
return parseInt(part, 10);
}).filter((value: number) => {
return isNaN(value) ? false : isFinite(value);
});
}
private _isVersionNewer(current: string, target: string): boolean {
const cParts: number[] = this._versplit(current);
const tParts: number[] = this._versplit(target);
if (cParts.length !== 3 || tParts.length !== 3) {
return false;
}
const diff: number[] = cParts.map((xxx: number, i: number) => {
return tParts[i] - xxx;
});
if (diff[0] > 0) {
return true;
}
if (diff[0] === 0 && diff[1] > 0) {
return true;
}
if (diff[0] === 0 && diff[1] === 0 && diff[2] > 0) {
return true;
}
return false;
}
private _onRenderState(message: IPCMessages.TMessage) {
if ((message as IPCMessages.RenderState).state !== IPCMessages.ERenderState.ready) {
return;
}
this._check();
}
private _getLauncherFile(): Promise<string> {
return new Promise((resolve, reject) => {
// process.noAsar = true;
const updater: string = path.resolve(ServicePaths.getRoot(), `apps/${os.platform() === 'win32' ? 'updater.exe' : 'updater'}`);
if (!fs.existsSync(updater)) {
return reject(new Error(`Fail to find an updater in package "${updater}".`));
}
const existed: string = path.resolve(ServicePaths.getApps(), (os.platform() === 'win32' ? 'updater.exe' : 'updater'));
if (fs.existsSync(existed)) {
try {
this._logger.debug(`Found existed updater "${existed}". It will be removed.`);
fs.unlinkSync(existed);
} catch (e) {
return reject(e);
}
}
fs.copyFile(updater, existed, (error: NodeJS.ErrnoException | null) => {
if (error) {
return reject(error);
}
this._logger.debug(`Updater "${existed}" is delivered.`);
resolve(existed);
});
});
}
private _onUpdateRequest(message: IPCMessages.TMessage) {
if (this._tgzfile === undefined) {
return;
}
this._getLauncherFile().then((updater: string) => {
this._update(updater);
}).catch((gettingLauncherErr: Error) => {
this._logger.error(`Fail to get updater due error: ${gettingLauncherErr.message}`);
});
}
    // Triggers the actual update by shutting the application down with the
    // `update` exit code so an external process can take over.
    // NOTE(review): the `updater` path argument is never used here —
    // presumably the launcher reacts to the exit code instead; confirm
    // against the launcher/startup flow.
    private _update(updater: string) {
        // Require both a downloaded archive and a live app handle.
        if (this._tgzfile === undefined || this._app === undefined) {
            return;
        }
        this._app?.destroy(EExitCodes.update).catch((error: Error) => {
            this._logger.warn(`Fail destroy app due error: ${error.message}`);
        });
    }
}
export default (new ServiceUpdate());
import { balance, ether, expectEvent, expectRevert } from '@openzeppelin/test-helpers'
import BN from 'bn.js'
import chai from 'chai'
import { toBN } from 'web3-utils'
import { decodeRevertReason, getEip712Signature, removeHexPrefix } from '@opengsn/common/dist/Utils'
import { RelayRequest, cloneRelayRequest } from '@opengsn/common/dist/EIP712/RelayRequest'
import { defaultEnvironment } from '@opengsn/common/dist/Environments'
import { TypedRequestData } from '@opengsn/common/dist/EIP712/TypedRequestData'
import {
RelayHubInstance,
PenalizerInstance,
StakeManagerInstance,
TestRecipientInstance,
ForwarderInstance,
TestPaymasterEverythingAcceptedInstance,
TestPaymasterConfigurableMisbehaviorInstance,
GatewayForwarderInstance, TestTokenInstance
} from '@opengsn/contracts/types/truffle-contracts'
import { deployHub, encodeRevertReason, revert, snapshot } from './TestUtils'
import { registerForwarderForGsn } from '@opengsn/common/dist/EIP712/ForwarderUtil'
import chaiAsPromised from 'chai-as-promised'
import { RelayRegistrarInstance } from '@opengsn/contracts'
import { constants, splitRelayUrlForRegistrar } from '@opengsn/common'
const { expect, assert } = chai.use(chaiAsPromised)
const RelayHub = artifacts.require('RelayHub')
const StakeManager = artifacts.require('StakeManager')
const Forwarder = artifacts.require('Forwarder')
const Penalizer = artifacts.require('Penalizer')
const GatewayForwarder = artifacts.require('GatewayForwarder')
const TestPaymasterEverythingAccepted = artifacts.require('TestPaymasterEverythingAccepted')
const TestToken = artifacts.require('TestToken')
const TestRelayHub = artifacts.require('TestRelayHub')
const TestRecipient = artifacts.require('TestRecipient')
const TestPaymasterStoreContext = artifacts.require('TestPaymasterStoreContext')
const TestPaymasterConfigurableMisbehavior = artifacts.require('TestPaymasterConfigurableMisbehavior')
const RelayRegistrar = artifacts.require('RelayRegistrar')
contract('RelayHub', function ([paymasterOwner, relayOwner, relayManager, relayWorker, senderAddress, other, dest, incorrectWorker]) { // eslint-disable-line no-unused-vars
const baseRelayFee = '10000'
const pctRelayFee = '10'
const gasPrice = 1e9.toString()
const maxFeePerGas = 1e9.toString()
const maxPriorityFeePerGas = 1e9.toString()
const gasLimit = '1000000'
const senderNonce = '0'
let sharedRelayRequestData: RelayRequest
const paymasterData = '0x'
const clientId = '1'
const RelayCallStatusCodes = {
OK: new BN('0'),
RelayedCallFailed: new BN('1'),
RejectedByPreRelayed: new BN('2'),
RejectedByForwarder: new BN('3'),
RejectedByRecipientRevert: new BN('4'),
PostRelayedFailed: new BN('5'),
PaymasterBalanceChanged: new BN('6')
}
const chainId = defaultEnvironment.chainId
const oneEther = ether('1')
let relayHub: string
let testToken: TestTokenInstance
let stakeManager: StakeManagerInstance
let penalizer: PenalizerInstance
let relayHubInstance: RelayHubInstance
let relayRegistrar: RelayRegistrarInstance
let recipientContract: TestRecipientInstance
let paymasterContract: TestPaymasterEverythingAcceptedInstance
let forwarderInstance: ForwarderInstance
let target: string
let paymaster: string
let forwarder: string
async function mintApproveSetOwnerStake (token: TestTokenInstance = testToken, stake: BN = oneEther, unstakeDelay: number = 15000): Promise<void> {
await token.mint(stake, { from: relayOwner })
await token.approve(stakeManager.address, stake, { from: relayOwner })
await stakeManager.setRelayManagerOwner(relayOwner, { from: relayManager })
await stakeManager.stakeForRelayManager(token.address, relayManager, unstakeDelay, stake, {
from: relayOwner
})
}
beforeEach(async function () {
testToken = await TestToken.new()
stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
penalizer = await Penalizer.new(defaultEnvironment.penalizerConfiguration.penalizeBlockDelay, defaultEnvironment.penalizerConfiguration.penalizeBlockExpiration)
relayHubInstance = await deployHub(stakeManager.address, penalizer.address, constants.ZERO_ADDRESS, testToken.address, oneEther.toString())
relayRegistrar = await RelayRegistrar.at(await relayHubInstance.getRelayRegistrar())
paymasterContract = await TestPaymasterEverythingAccepted.new()
forwarderInstance = await Forwarder.new()
forwarder = forwarderInstance.address
recipientContract = await TestRecipient.new(forwarder)
// register hub's RelayRequest with forwarder, if not already done.
await registerForwarderForGsn(forwarderInstance)
target = recipientContract.address
paymaster = paymasterContract.address
relayHub = relayHubInstance.address
await paymasterContract.setTrustedForwarder(forwarder)
await paymasterContract.setRelayHub(relayHub)
})
it('should retrieve version number', async function () {
const version = await relayHubInstance.versionHub()
assert.match(version, /2\.\d*\.\d*-?.*\+opengsn\.hub\.irelayhub/)
})
describe('balances', function () {
async function testDeposit (sender: string, paymaster: string, amount: BN): Promise<void> {
const senderBalanceTracker = await balance.tracker(sender)
const relayHubBalanceTracker = await balance.tracker(relayHub)
const gasPrice = new BN(1e9)
const res = await relayHubInstance.depositFor(paymaster, {
from: sender,
value: amount,
gasPrice
})
expectEvent.inLogs(res.logs, 'Deposited', {
paymaster,
from: sender,
amount
})
const txCost = (new BN(res.receipt.gasUsed)).mul(gasPrice)
expect(await relayHubInstance.balanceOf(paymaster)).to.be.bignumber.equal(amount)
expect(await senderBalanceTracker.delta()).to.be.bignumber.equal(amount.neg().sub(txCost))
expect(await relayHubBalanceTracker.delta()).to.be.bignumber.equal(amount)
}
it('can deposit for a valid IPaymaster', async function () {
await testDeposit(other, paymaster, ether('1'))
})
it('can deposit multiple times and have a total deposit larger than the limit', async function () {
await relayHubInstance.depositFor(paymaster, {
from: other,
value: ether('1'),
gasPrice: 1e9
})
await relayHubInstance.depositFor(paymaster, {
from: other,
value: ether('1'),
gasPrice: 1e9
})
await relayHubInstance.depositFor(paymaster, {
from: other,
value: ether('1'),
gasPrice: 1e9
})
expect(await relayHubInstance.balanceOf(paymaster)).to.be.bignumber.equals(ether('3'))
})
it('accounts with deposits can withdraw partially', async function () {
const amount = ether('1')
await testDeposit(other, paymaster, amount)
const { tx } = await paymasterContract.withdrawRelayHubDepositTo(amount.divn(2), dest, { from: paymasterOwner })
await expectEvent.inTransaction(tx, RelayHub, 'Withdrawn', {
account: paymaster,
dest,
amount: amount.divn(2)
})
})
it('accounts with deposits can withdraw all their balance', async function () {
const amount = ether('1')
await testDeposit(other, paymaster, amount)
const { tx } = await paymasterContract.withdrawRelayHubDepositTo(amount, dest, { from: paymasterOwner })
await expectEvent.inTransaction(tx, RelayHub, 'Withdrawn', {
account: paymaster,
dest,
amount
})
})
it('accounts with deposits can withdraw to multiple destinations', async function () {
const testRelayHubInstance = await deployHub(stakeManager.address, penalizer.address, constants.ZERO_ADDRESS, testToken.address, oneEther.toString(), undefined, undefined, TestRelayHub)
const amount = ether('1')
await testRelayHubInstance.depositFor(paymasterOwner, {
from: paymasterOwner,
value: amount
})
const address1BalanceTracker = await balance.tracker(other)
const address2BalanceTracker = await balance.tracker(dest)
const balanceBefore = await testRelayHubInstance.balanceOf(paymasterOwner)
const withdrawAmount1 = amount.divn(3).toString()
const withdrawAmount2 = amount.divn(7).toString()
const { tx } = await testRelayHubInstance.withdrawMultiple(
[other, dest],
[withdrawAmount1, withdrawAmount2],
{ from: paymasterOwner })
const balanceAfter = await testRelayHubInstance.balanceOf(paymasterOwner)
assert.equal(balanceAfter.toString(), balanceBefore.sub(toBN(withdrawAmount1)).sub(toBN(withdrawAmount2)).toString())
await expectEvent.inTransaction(tx, RelayHub, 'Withdrawn', {
account: paymasterOwner,
dest: other,
amount: withdrawAmount1
})
await expectEvent.inTransaction(tx, RelayHub, 'Withdrawn', {
account: paymasterOwner,
dest: dest,
amount: withdrawAmount2
})
expect(await address1BalanceTracker.delta()).to.be.bignumber.equal(withdrawAmount1)
expect(await address2BalanceTracker.delta()).to.be.bignumber.equal(withdrawAmount2)
})
it('accounts cannot withdraw more than their balance', async function () {
const amount = ether('1')
await testDeposit(other, paymaster, amount)
await expectRevert(paymasterContract.withdrawRelayHubDepositTo(amount.addn(1), dest, { from: paymasterOwner }), 'insufficient funds')
})
it('should reject depositFor for an address that does not implement IPaymaster', async function () {
await expectRevert(relayHubInstance.depositFor(target, {
value: ether('1')
}), 'target is not a valid IPaymaster')
})
})
describe('relayCall', function () {
beforeEach(function () {
sharedRelayRequestData = {
request: {
to: target,
data: '',
from: senderAddress,
nonce: senderNonce,
value: '0',
gas: gasLimit,
validUntilTime: '0'
},
relayData: {
pctRelayFee,
baseRelayFee,
transactionCalldataGasUsed: 7e6.toString(),
maxFeePerGas,
maxPriorityFeePerGas,
relayWorker,
forwarder,
paymaster,
paymasterData,
clientId
}
}
})
context('with unknown worker', function () {
const signature = '0xdeadbeef'
const approvalData = '0x'
const gas = 4e6
let relayRequest: RelayRequest
beforeEach(async function () {
relayRequest = cloneRelayRequest(sharedRelayRequestData)
relayRequest.request.data = '0xdeadbeef'
await relayHubInstance.depositFor(paymaster, {
from: other,
value: ether('1'),
gasPrice: 1e9
})
})
it('should not accept a relay call', async function () {
await expectRevert(
relayHubInstance.relayCall(10e6, relayRequest, signature, approvalData, {
from: relayWorker,
gas
}),
'Unknown relay worker')
})
context('#setMinimumStakes()', function () {
it('should assign values correctly with arrays of any size', async function () {
const tokens = [
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
'0x1f9840a85d5af5bf1d1762f925bdaddc4201f984',
'0xc944e90c64b2c07662a292be6244bdf05cda44a7',
'0x6b175474e89094c44da98b954eedeac495271d0f',
'0xeb4c2781e4eba804ce9a9803c67d0893436bb27d',
'0x8dae6cb04688c62d939ed9b68d32bc62e49970b1',
'0xba100000625a3754423978a60c9317c58a424e3d',
'0x111111111117dc0aa78b770fa6a738034120c302'
]
const minimums = [100, 200, 300, 400, 500, 600, 700, 8000]
assert.equal(tokens.length, minimums.length)
await relayHubInstance.setMinimumStakes(tokens, minimums)
for (let i = 0; i < tokens.length; i++) {
const min = await relayHubInstance.getMinimumStakePerToken(tokens[i])
assert.equal(min.toNumber(), minimums[i])
}
})
it('should revert if array lengths do not match', async function () {
await expectRevert(
relayHubInstance.setMinimumStakes([relayOwner], [0, 0]),
'setMinimumStakes: wrong length'
)
})
})
context('#verifyRelayManagerStaked()', function () {
let id: string
function testRejectsAddRelayWorkers (expectedError: string): void {
it('should not accept a relay call with error: ' + expectedError, async function () {
await expectRevert(
relayHubInstance.addRelayWorkers([relayWorker], {
from: relayManager
}),
expectedError
)
})
}
afterEach(async function () {
await revert(id)
})
context('with no stake at all', function () {
testRejectsAddRelayWorkers('relay manager not staked')
})
context('with manager stake in forbidden token', function () {
beforeEach(async function () {
id = (await snapshot()).result
const forbiddenToken = await TestToken.new()
await mintApproveSetOwnerStake(forbiddenToken)
})
testRejectsAddRelayWorkers('staking this token is forbidden')
})
context('with manager stake that is too small', function () {
beforeEach(async function () {
id = (await snapshot()).result
await mintApproveSetOwnerStake(testToken, ether('0.001'))
await relayHubInstance.setMinimumStakes([testToken.address], [oneEther])
})
testRejectsAddRelayWorkers('stake amount is too small')
})
context('with manager stake that unlocks too soon', function () {
beforeEach(async function () {
id = (await snapshot()).result
await mintApproveSetOwnerStake(testToken, ether('1'), 10)
await relayHubInstance.setMinimumStakes([testToken.address], [oneEther])
})
testRejectsAddRelayWorkers('unstake delay is too small')
})
context('with manager stake with authorized hub', function () {
let unauthorizedHub: RelayHubInstance
beforeEach(async function () {
id = (await snapshot()).result
unauthorizedHub = await deployHub(stakeManager.address, penalizer.address, constants.ZERO_ADDRESS, testToken.address, oneEther.toString())
await mintApproveSetOwnerStake()
await relayHubInstance.setMinimumStakes([testToken.address], [oneEther])
})
it('should not accept a relay call', async function () {
await expectRevert(
unauthorizedHub.addRelayWorkers([relayWorker], {
from: relayManager
}),
'this hub is not authorized by SM'
)
})
})
context('with manager stake unlocked', function () {
beforeEach(async function () {
id = (await snapshot()).result
await mintApproveSetOwnerStake()
await relayHubInstance.setMinimumStakes([testToken.address], [oneEther])
await stakeManager.authorizeHubByOwner(relayManager, relayHub, { from: relayOwner })
await stakeManager.unlockStake(relayManager, { from: relayOwner })
})
testRejectsAddRelayWorkers('stake has been withdrawn')
})
})
})
context('with staked and registered relay', function () {
const url = 'http://relay.com'
const message = 'GSN RelayHub'
const messageWithNoParams = 'Method with no parameters'
let relayRequest: RelayRequest
let encodedFunction: string
let signatureWithPermissivePaymaster: string
beforeEach(async function () {
await testToken.mint(ether('2'), { from: relayOwner })
await testToken.approve(stakeManager.address, ether('2'), { from: relayOwner })
await stakeManager.setRelayManagerOwner(relayOwner, { from: relayManager })
await stakeManager.stakeForRelayManager(testToken.address, relayManager, 15000, ether('2'), {
from: relayOwner
})
await stakeManager.authorizeHubByOwner(relayManager, relayHub, { from: relayOwner })
// truffle-contract doesn't let us create method data from the class, we need an actual instance
encodedFunction = recipientContract.contract.methods.emitMessage(message).encodeABI()
await relayHubInstance.addRelayWorkers([relayWorker], { from: relayManager })
await relayRegistrar.registerRelayServer(relayHub, baseRelayFee, pctRelayFee, splitRelayUrlForRegistrar(url), { from: relayManager })
relayRequest = cloneRelayRequest(sharedRelayRequestData)
relayRequest.request.data = encodedFunction
const dataToSign = new TypedRequestData(
chainId,
forwarder,
relayRequest
)
signatureWithPermissivePaymaster = await getEip712Signature(
web3,
dataToSign
)
await relayHubInstance.depositFor(paymaster, {
value: ether('1'),
from: other
})
})
context('with relay worker that is not externally-owned account', function () {
it('should not accept relay requests', async function () {
const signature = '0xdeadbeef'
const gas = 4e6
const TestRelayWorkerContract = artifacts.require('TestRelayWorkerContract')
const testRelayWorkerContract = await TestRelayWorkerContract.new()
await relayHubInstance.addRelayWorkers([testRelayWorkerContract.address], {
from: relayManager
})
await expectRevert(
testRelayWorkerContract.relayCall(
relayHubInstance.address,
10e6,
relayRequest,
signature,
{
gas
}),
'relay worker must be EOA')
})
})
context('with view functions only', function () {
let misbehavingPaymaster: TestPaymasterConfigurableMisbehaviorInstance
let relayRequestMisbehavingPaymaster: RelayRequest
beforeEach(async function () {
misbehavingPaymaster = await TestPaymasterConfigurableMisbehavior.new()
await misbehavingPaymaster.setTrustedForwarder(forwarder)
await misbehavingPaymaster.setRelayHub(relayHub)
await relayHubInstance.depositFor(misbehavingPaymaster.address, {
value: ether('1'),
from: other
})
relayRequestMisbehavingPaymaster = cloneRelayRequest(relayRequest)
relayRequestMisbehavingPaymaster.relayData.paymaster = misbehavingPaymaster.address
})
context('in dry-run view mode without requiring client signature or valid worker', function () {
function clearRelayRequest (relayRequest: RelayRequest): RelayRequest {
const clone = cloneRelayRequest(relayRequest)
clone.relayData.relayWorker = constants.ZERO_ADDRESS
clone.relayData.pctRelayFee = ''
clone.relayData.baseRelayFee = ''
clone.relayData.transactionCalldataGasUsed = ''
return clone
}
it('should get \'paymasterAccepted = true\' and no revert reason as view call result of \'relayCall\' for a valid transaction', async function () {
const clearedRelayRequest = clearRelayRequest(relayRequest)
const relayCallView = await relayHubInstance.contract.methods.relayCall(
10e6,
clearedRelayRequest,
'0x', '0x')
.call({
from: constants.DRY_RUN_ADDRESS,
gas: 7e6,
gasPrice: 1e9
})
assert.equal(relayCallView.returnValue, null)
assert.equal(relayCallView.paymasterAccepted, true)
})
it('should return failure if forwarder rejects for incorrect nonce', async function () {
const relayRequestWrongNonce = clearRelayRequest(relayRequest)
relayRequestWrongNonce.request.nonce = (parseInt(relayRequestWrongNonce.request.nonce) - 1).toString()
const relayCallView =
await relayHubInstance.contract.methods
.relayCall(10e6, relayRequestWrongNonce, '0x', '0x')
.call({ from: constants.DRY_RUN_ADDRESS, gas: 7e6, gasPrice: 1e9 })
assert.equal(relayCallView.paymasterAccepted, false)
assert.equal(decodeRevertReason(relayCallView.returnValue), 'FWD: nonce mismatch')
})
})
it('should get \'paymasterAccepted = true\' and no revert reason as view call result of \'relayCall\' for a valid transaction', async function () {
const relayCallView = await relayHubInstance.contract.methods.relayCall(
10e6,
relayRequest,
signatureWithPermissivePaymaster, '0x')
.call({
from: relayWorker,
gas: 7e6,
gasPrice: 1e9
})
assert.equal(relayCallView.returnValue, null)
assert.equal(relayCallView.paymasterAccepted, true)
})
it('should get Paymaster\'s reject reason from view call result of \'relayCall\' for a transaction before checking signature', async function () {
await misbehavingPaymaster.setReturnInvalidErrorCode(true)
const relayCallView =
await relayHubInstance.contract.methods
.relayCall(10e6, relayRequestMisbehavingPaymaster, '0x00', '0x')
.call({ from: relayWorker, gas: 7e6, gasPrice: 1e9 })
assert.equal(relayCallView.paymasterAccepted, false)
assert.equal(relayCallView.returnValue, encodeRevertReason('invalid code'))
assert.equal(decodeRevertReason(relayCallView.returnValue), 'invalid code')
})
})
context('with funded paymaster', function () {
let signature
let paymasterWithContext
let misbehavingPaymaster: TestPaymasterConfigurableMisbehaviorInstance
let relayRequestPaymasterWithContext: RelayRequest
let signatureWithContextPaymaster: string
let signatureWithMisbehavingPaymaster: string
let relayRequestMisbehavingPaymaster: RelayRequest
const gas = 4e6
beforeEach(async function () {
paymasterWithContext = await TestPaymasterStoreContext.new()
misbehavingPaymaster = await TestPaymasterConfigurableMisbehavior.new()
await paymasterWithContext.setTrustedForwarder(forwarder)
await misbehavingPaymaster.setTrustedForwarder(forwarder)
await paymasterWithContext.setRelayHub(relayHub)
await misbehavingPaymaster.setRelayHub(relayHub)
await relayHubInstance.depositFor(paymasterWithContext.address, {
value: ether('1'),
from: other
})
await relayHubInstance.depositFor(misbehavingPaymaster.address, {
value: ether('1'),
from: other
})
let dataToSign = new TypedRequestData(
chainId,
forwarder,
relayRequest
)
signature = await getEip712Signature(
web3,
dataToSign
)
relayRequestMisbehavingPaymaster = cloneRelayRequest(relayRequest)
relayRequestMisbehavingPaymaster.relayData.paymaster = misbehavingPaymaster.address
dataToSign = new TypedRequestData(
chainId,
forwarder,
relayRequestMisbehavingPaymaster
)
signatureWithMisbehavingPaymaster = await getEip712Signature(
web3,
dataToSign
)
relayRequestPaymasterWithContext = cloneRelayRequest(relayRequest)
relayRequestPaymasterWithContext.relayData.paymaster = paymasterWithContext.address
dataToSign = new TypedRequestData(
chainId,
forwarder,
relayRequestPaymasterWithContext
)
signatureWithContextPaymaster = await getEip712Signature(
web3,
dataToSign
)
})
it('should revert if encoded function contains extra bytes', async () => {
const encoded = await relayHubInstance.contract.methods.relayCall(10e6, relayRequest, signatureWithPermissivePaymaster, '0x').encodeABI() as string
await expectRevert(web3.eth.call({
data: encoded + '1234',
from: relayWorker,
to: relayHubInstance.address,
gas,
gasPrice
}), 'Error: VM Exception while processing transaction: reverted with reason string \'extra msg.data bytes\'')
})
it('relayCall executes the transaction and increments sender nonce on hub', async function () {
const nonceBefore = await forwarderInstance.getNonce(senderAddress)
const {
tx,
logs
} = await relayHubInstance.relayCall(10e6, relayRequest, signatureWithPermissivePaymaster, '0x', {
from: relayWorker,
gas,
gasPrice
})
const nonceAfter = await forwarderInstance.getNonce(senderAddress)
assert.equal(nonceBefore.addn(1).toNumber(), nonceAfter.toNumber())
await expectEvent.inTransaction(tx, TestRecipient, 'SampleRecipientEmitted', {
message,
realSender: senderAddress,
msgSender: forwarder,
origin: relayWorker
})
const expectedReturnValue = web3.eth.abi.encodeParameter('string', 'emitMessage return value')
expectEvent.inLogs(logs, 'TransactionResult', {
status: RelayCallStatusCodes.OK,
returnValue: expectedReturnValue
})
expectEvent.inLogs(logs, 'TransactionRelayed', {
status: RelayCallStatusCodes.OK
})
})
it('relayCall executes type 2 transaction and increments sender nonce on hub', async function () {
const nonceBefore = await forwarderInstance.getNonce(senderAddress)
const eip1559relayRequest = cloneRelayRequest(relayRequest)
eip1559relayRequest.relayData.maxFeePerGas = 1e12.toString()
eip1559relayRequest.relayData.maxPriorityFeePerGas = 1e9.toString()
const gasPrice = 1e10.toString()
const dataToSign = new TypedRequestData(
chainId,
eip1559relayRequest.relayData.forwarder,
eip1559relayRequest
)
const signature = await getEip712Signature(
web3,
dataToSign
)
const {
tx,
logs
} = await relayHubInstance.relayCall(10e6, eip1559relayRequest, signature, '0x', {
from: relayWorker,
gas,
gasPrice
})
const nonceAfter = await forwarderInstance.getNonce(senderAddress)
assert.equal(nonceBefore.addn(1).toNumber(), nonceAfter.toNumber())
await expectEvent.inTransaction(tx, TestRecipient, 'SampleRecipientEmitted', {
message,
realSender: senderAddress,
msgSender: forwarder,
origin: relayWorker
})
const expectedReturnValue = web3.eth.abi.encodeParameter('string', 'emitMessage return value')
expectEvent.inLogs(logs, 'TransactionResult', {
status: RelayCallStatusCodes.OK,
returnValue: expectedReturnValue
})
expectEvent.inLogs(logs, 'TransactionRelayed', {
status: RelayCallStatusCodes.OK
})
})
it('relayCall should refuse to re-send transaction with same nonce', async function () {
const { tx } = await relayHubInstance.relayCall(10e6, relayRequest, signatureWithPermissivePaymaster, '0x', {
from: relayWorker,
gas,
gasPrice
})
await expectEvent.inTransaction(tx, TestRecipient, 'SampleRecipientEmitted')
const ret = await relayHubInstance.relayCall(10e6, relayRequest, signatureWithPermissivePaymaster, '0x', {
from: relayWorker,
gas,
gasPrice
})
await expectEvent(ret, 'TransactionRejectedByPaymaster', { reason: encodeRevertReason('FWD: nonce mismatch') })
})
// This test is added due to a regression that almost slipped to production.
it('relayCall executes the transaction with no parameters', async function () {
const encodedFunction = recipientContract.contract.methods.emitMessageNoParams().encodeABI()
const relayRequestNoCallData = cloneRelayRequest(relayRequest)
relayRequestNoCallData.request.data = encodedFunction
const dataToSign = new TypedRequestData(
chainId,
forwarder,
relayRequestNoCallData
)
signature = await getEip712Signature(
web3,
dataToSign
)
const { tx } = await relayHubInstance.relayCall(10e6, relayRequestNoCallData, signature, '0x', {
from: relayWorker,
gas,
gasPrice
})
await expectEvent.inTransaction(tx, TestRecipient, 'SampleRecipientEmitted', {
message: messageWithNoParams,
realSender: senderAddress,
msgSender: forwarder,
origin: relayWorker
})
})
it('relayCall executes a transaction even if recipient call reverts', async function () {
const encodedFunction = recipientContract.contract.methods.testRevert().encodeABI()
const relayRequestRevert = cloneRelayRequest(relayRequest)
relayRequestRevert.request.data = encodedFunction
const dataToSign = new TypedRequestData(
chainId,
forwarder,
relayRequestRevert
)
signature = await getEip712Signature(
web3,
dataToSign
)
const { logs } = await relayHubInstance.relayCall(10e6, relayRequestRevert, signature, '0x', {
from: relayWorker,
gas,
gasPrice
})
const expectedReturnValue = '0x08c379a0' + removeHexPrefix(web3.eth.abi.encodeParameter('string', 'always fail'))
expectEvent.inLogs(logs, 'TransactionResult', {
status: RelayCallStatusCodes.RelayedCallFailed,
returnValue: expectedReturnValue
})
expectEvent.inLogs(logs, 'TransactionRelayed', {
status: RelayCallStatusCodes.RelayedCallFailed
})
})
it('postRelayedCall receives values returned in preRelayedCall', async function () {
const { tx } = await relayHubInstance.relayCall(10e6, relayRequestPaymasterWithContext,
signatureWithContextPaymaster, '0x', {
from: relayWorker,
gas,
gasPrice
})
await expectEvent.inTransaction(tx, TestPaymasterStoreContext, 'SampleRecipientPostCallWithValues', {
context: 'context passed from preRelayedCall to postRelayedCall'
})
})
it('relaying is aborted if the paymaster reverts the preRelayedCall', async function () {
await misbehavingPaymaster.setReturnInvalidErrorCode(true)
const { logs } = await relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster,
signatureWithMisbehavingPaymaster, '0x', {
from: relayWorker,
gas,
gasPrice
})
expectEvent.inLogs(logs, 'TransactionRejectedByPaymaster', { reason: encodeRevertReason('invalid code') })
})
it('should revert with out-of-gas if gas limit is too low for a relayed transaction', async function () {
const gas = '200000' // not enough for a 'relayCall' transaction
await expectRevert(
relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster, signatureWithMisbehavingPaymaster, '0x', {
from: relayWorker,
gasPrice,
gas: gas
}),
'revert')
})
it('should not accept relay requests with incorrect relay worker', async function () {
await relayHubInstance.addRelayWorkers([incorrectWorker], { from: relayManager })
await expectRevert(
relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster, signatureWithMisbehavingPaymaster, '0x', {
from: incorrectWorker,
gasPrice,
gas
}),
'Not a right worker')
})
it('should not accept relay requests if destination recipient doesn\'t have a balance to pay for it',
async function () {
const paymaster2 = await TestPaymasterEverythingAccepted.new()
await paymaster2.setTrustedForwarder(forwarder)
await paymaster2.setRelayHub(relayHub)
const maxPossibleCharge = (await relayHubInstance.calculateCharge(gasLimit, {
maxFeePerGas,
maxPriorityFeePerGas,
pctRelayFee,
baseRelayFee,
transactionCalldataGasUsed: 7e6.toString(),
relayWorker,
forwarder,
paymaster: paymaster2.address,
paymasterData: '0x',
clientId: '1'
})).toNumber()
await paymaster2.deposit({ value: (maxPossibleCharge - 1).toString() }) // TODO: replace with correct margin calculation
const relayRequestPaymaster2 = cloneRelayRequest(relayRequest)
relayRequestPaymaster2.relayData.paymaster = paymaster2.address
await expectRevert(
relayHubInstance.relayCall(10e6, relayRequestPaymaster2, signatureWithMisbehavingPaymaster, '0x', {
from: relayWorker,
gas,
gasPrice
}),
'Paymaster balance too low')
})
it('should not execute the \'relayedCall\' if \'preRelayedCall\' reverts', async function () {
await misbehavingPaymaster.setRevertPreRelayCall(true)
// @ts-ignore (there is a problem with web3 types annotations that must be solved)
const startBlock = await web3.eth.getBlockNumber()
const { logs } = await relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster,
signatureWithMisbehavingPaymaster, '0x', {
from: relayWorker,
gas,
gasPrice: gasPrice
})
// There should not be an event emitted, which means the result of 'relayCall' was indeed reverted
const logsMessages = await recipientContract.contract.getPastEvents('SampleRecipientEmitted', {
fromBlock: startBlock,
toBlock: 'latest'
})
assert.equal(0, logsMessages.length)
// const expectedReturnValue = '0x08c379a0' + removeHexPrefix(web3.eth.abi.encodeParameter('string', 'You asked me to revert, remember?'))
expectEvent.inLogs(logs, 'TransactionRejectedByPaymaster', {
reason: encodeRevertReason('You asked me to revert, remember?')
})
})
it('should fail a transaction if paymaster.getGasAndDataLimits is too expensive', async function () {
await misbehavingPaymaster.setExpensiveGasLimits(true)
await expectRevert(relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster,
signatureWithMisbehavingPaymaster, '0x', {
from: relayWorker,
gas,
gasPrice: gasPrice
}), 'revert')
})
it('should revert the \'relayedCall\' if \'postRelayedCall\' reverts', async function () {
  await misbehavingPaymaster.setRevertPostRelayCall(true)
  // Capture the starting block BEFORE sending the relayed transaction, so the
  // getPastEvents query below is guaranteed to cover the transaction's block.
  // (Previously this was read after the call, matching the tx's block only by
  // accident; this also makes the test consistent with the preRelayedCall test.)
  // @ts-ignore (there is a problem with web3 types annotations that must be solved)
  const startBlock = await web3.eth.getBlockNumber()
  const { logs } = await relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster,
    signatureWithMisbehavingPaymaster, '0x', {
      from: relayWorker,
      gas,
      gasPrice: gasPrice
    })
  // There should not be an event emitted, which means the result of 'relayCall' was indeed reverted
  const logsMessages = await recipientContract.contract.getPastEvents('SampleRecipientEmitted', {
    fromBlock: startBlock,
    toBlock: 'latest'
  })
  assert.equal(0, logsMessages.length)
  expectEvent.inLogs(logs, 'TransactionRelayed', { status: RelayCallStatusCodes.PostRelayedFailed })
})
// Verifies the hub rejects any relayed call during which the paymaster's hub
// balance changed (a withdrawal mid-call would let a recipient dodge payment).
describe('recipient balance withdrawal ban', function () {
  let misbehavingPaymaster: TestPaymasterConfigurableMisbehaviorInstance
  let relayRequestMisbehavingPaymaster: RelayRequest
  let signature: string
  // Fresh misbehaving paymaster per test: wire it to the forwarder/hub, fund
  // it, clone the shared relay request to point at it, and EIP-712-sign it.
  beforeEach(async function () {
    misbehavingPaymaster = await TestPaymasterConfigurableMisbehavior.new()
    await misbehavingPaymaster.setTrustedForwarder(forwarder)
    await misbehavingPaymaster.setRelayHub(relayHub)
    await relayHubInstance.depositFor(misbehavingPaymaster.address, {
      value: ether('1'),
      from: other
    })
    relayRequestMisbehavingPaymaster = cloneRelayRequest(relayRequest)
    relayRequestMisbehavingPaymaster.relayData.paymaster = misbehavingPaymaster.address
    const dataToSign = new TypedRequestData(
      chainId,
      forwarder,
      relayRequestMisbehavingPaymaster
    )
    signature = await getEip712Signature(
      web3,
      dataToSign
    )
  })
  it('reverts relayed call if recipient withdraws balance during preRelayedCall', async function () {
    await misbehavingPaymaster.setWithdrawDuringPreRelayedCall(true)
    await assertRevertWithPaymasterBalanceChanged()
  })
  it('reverts relayed call if recipient withdraws balance during the relayed call', async function () {
    await recipientContract.setWithdrawDuringRelayedCall(misbehavingPaymaster.address)
    await assertRevertWithPaymasterBalanceChanged()
  })
  it('reverts relayed call if recipient withdraws balance during postRelayedCall', async function () {
    await misbehavingPaymaster.setWithdrawDuringPostRelayedCall(true)
    await assertRevertWithPaymasterBalanceChanged()
  })
  // Shared assertion: the relayCall tx itself succeeds, but the hub flags the
  // relayed call as rejected with status PaymasterBalanceChanged.
  async function assertRevertWithPaymasterBalanceChanged (): Promise<void> {
    const { logs } = await relayHubInstance.relayCall(10e6, relayRequestMisbehavingPaymaster, signature, '0x', {
      from: relayWorker,
      gas,
      gasPrice
    })
    expectEvent.inLogs(logs, 'TransactionRelayed', { status: RelayCallStatusCodes.PaymasterBalanceChanged })
  }
})
// Covers the BatchGateway flow: a hub deployed with a trusted gateway address
// may accept relayCall from that gateway (with or without a signature), while
// regular workers must always provide a valid signature.
context('with BatchGateway configured', function () {
  // Reuse the 'other' account as the trusted gateway address.
  const batchGateway = other
  let gatewayForwarder: GatewayForwarderInstance
  let relayHubInstance: RelayHubInstance
  let recipientContract: TestRecipientInstance
  let relayRequest: RelayRequest
  // One-time setup: deploy a hub that trusts `batchGateway`, a gateway-aware
  // forwarder and recipient, fund the paymaster, and register the relay worker.
  before(async function () {
    relayRequest = cloneRelayRequest(sharedRelayRequestData)
    gatewayForwarder = await GatewayForwarder.new()
    await registerForwarderForGsn(gatewayForwarder)
    relayHubInstance = await deployHub(stakeManager.address, penalizer.address, batchGateway, testToken.address, oneEther.toString())
    recipientContract = await TestRecipient.new(gatewayForwarder.address)
    await gatewayForwarder.setTrustedRelayHub(relayHubInstance.address)
    await paymasterContract.setTrustedForwarder(gatewayForwarder.address)
    await paymasterContract.setRelayHub(relayHubInstance.address)
    await relayHubInstance.depositFor(paymasterContract.address, {
      from: senderAddress,
      value: ether('1')
    })
    // register relay manager and worker
    await stakeManager.authorizeHubByOwner(relayManager, relayHubInstance.address, { from: relayOwner })
    await relayHubInstance.addRelayWorkers([relayWorker], {
      from: relayManager
    })
    relayRequest.request.to = recipientContract.address
    relayRequest.request.data = recipientContract.contract.methods.emitMessageNoParams().encodeABI()
    relayRequest.relayData.paymaster = paymasterContract.address
    relayRequest.relayData.forwarder = gatewayForwarder.address
  })
  it('should reject relayCall with incorrect non-empty signature coming from the BatchGateway', async function () {
    const {
      logs
    } = await relayHubInstance.relayCall(10e6, relayRequest, '0xdeadbeef', '0x', {
      from: batchGateway,
      gas
    })
    // A garbage (wrong-length) signature still gets verified and must fail.
    // @ts-ignore
    const reasonHex: string = logs[1].args?.reason as string
    const rejectReason = decodeRevertReason(reasonHex)
    assert.equal(rejectReason, 'ECDSA: invalid signature length')
  })
  it('should relay relayCall with correct non-empty signature coming from the BatchGateway', async function () {
    // A valid EIP-712 signature from the request sender is honored even when
    // the call arrives via the gateway.
    const dataToSign = new TypedRequestData(
      chainId,
      gatewayForwarder.address,
      relayRequest
    )
    signatureWithPermissivePaymaster = await getEip712Signature(
      web3,
      dataToSign
    )
    const {
      tx
    } = await relayHubInstance.relayCall(10e6, relayRequest, signatureWithPermissivePaymaster, '0x', {
      from: batchGateway,
      gas
    })
    await expectEvent.inTransaction(tx, TestRecipient, 'SampleRecipientEmitted', {
      message: 'Method with no parameters'
    })
  })
  it('should reject relayCall with empty signature coming from a valid worker', async function () {
    // Signature-less calls are a gateway-only privilege; workers must sign.
    await expectRevert(
      relayHubInstance.relayCall(10e6, relayRequest, '0x', '0x', {
        from: relayWorker,
        gas
      }),
      'missing signature or bad gateway')
  })
  it('should reject relayCall that reimburses an invalid worker', async function () {
    const relayRequestWithInvalidWorker = cloneRelayRequest(relayRequest)
    relayRequestWithInvalidWorker.relayData.relayWorker = incorrectWorker
    await expectRevert(
      relayHubInstance.relayCall(10e6, relayRequestWithInvalidWorker, signatureWithPermissivePaymaster, '0x', {
        from: batchGateway,
        gas
      }),
      'Unknown relay worker')
  })
  it('should accept relayCall with empty signature coming from the BatchGateway', async function () {
    // With an up-to-date nonce, the trusted gateway may relay with an empty
    // signature; the recipient still sees the original sender.
    const relayRequestWithNonce = cloneRelayRequest(relayRequest)
    relayRequestWithNonce.request.nonce = (await gatewayForwarder.getNonce(relayRequest.request.from)).toString()
    const dataToSign = new TypedRequestData(
      chainId,
      gatewayForwarder.address,
      relayRequestWithNonce
    )
    signatureWithPermissivePaymaster = await getEip712Signature(
      web3,
      dataToSign
    )
    const {
      tx
    } = await relayHubInstance.relayCall(10e6, relayRequestWithNonce, '0x', '0x', {
      from: batchGateway,
      gas
    })
    await expectEvent.inTransaction(tx, TestRecipient, 'SampleRecipientEmitted', {
      message: 'Method with no parameters',
      realSender: senderAddress,
      msgSender: gatewayForwarder.address,
      origin: batchGateway
    })
  })
})
})
})
})
}) | the_stack |
import { MessageItem, QuickInputButtons, Uri, window } from 'vscode';
import { configuration } from '../../configuration';
import { Container } from '../../container';
import { PlusFeatures } from '../../features';
import {
WorktreeCreateError,
WorktreeCreateErrorReason,
WorktreeDeleteError,
WorktreeDeleteErrorReason,
} from '../../git/errors';
import { GitReference, GitWorktree, Repository } from '../../git/models';
import { Messages } from '../../messages';
import { QuickPickItemOfT, QuickPickSeparator } from '../../quickpicks/items/common';
import { Directive } from '../../quickpicks/items/directive';
import { FlagsQuickPickItem } from '../../quickpicks/items/flags';
import { basename, isDescendent } from '../../system/path';
import { pluralize, truncateLeft } from '../../system/string';
import { OpenWorkspaceLocation } from '../../system/utils';
import { ViewsWithRepositoryFolders } from '../../views/viewBase';
import { GitActions } from '../gitCommands.actions';
import {
appendReposToTitle,
AsyncStepResultGenerator,
CustomStep,
ensureAccessStep,
inputBranchNameStep,
PartialStepState,
pickBranchOrTagStep,
pickRepositoryStep,
pickWorktreesStep,
pickWorktreeStep,
QuickCommand,
QuickPickStep,
StepGenerator,
StepResult,
StepResultGenerator,
StepSelection,
StepState,
} from '../quickCommand';
// Shared mutable context threaded through every step of the worktree command.
interface Context {
	repos: Repository[];
	associatedView: ViewsWithRepositoryFolders;
	// Default root folder offered when choosing where to create a worktree.
	defaultUri?: Uri;
	// The uri the user actually picked (before recommended-path adjustments).
	pickedUri?: Uri;
	showTags: boolean;
	title: string;
	worktrees?: GitWorktree[];
}
// '--direct' is a pseudo-flag meaning "create directly in the picked folder".
type CreateFlags = '--force' | '-b' | '--detach' | '--direct';
interface CreateState {
	subcommand: 'create';
	repo: string | Repository;
	uri: Uri;
	reference?: GitReference;
	createBranch: string;
	flags: CreateFlags[];
}
type DeleteFlags = '--force';
interface DeleteState {
	subcommand: 'delete';
	repo: string | Repository;
	uris: Uri[];
	flags: DeleteFlags[];
}
type OpenFlags = '--new-window' | '--reveal-explorer';
interface OpenState {
	subcommand: 'open';
	repo: string | Repository;
	uri: Uri;
	flags: OpenFlags[];
}
// Union of all per-subcommand state shapes, discriminated on `subcommand`.
type State = CreateState | DeleteState | OpenState;
// Step-state variants that guarantee `subcommand` is set and `repo` is resolved
// to a Repository (no longer a string id).
type WorktreeStepState<T extends State> = SomeNonNullable<StepState<T>, 'subcommand'>;
type CreateStepState<T extends CreateState = CreateState> = WorktreeStepState<ExcludeSome<T, 'repo', string>>;
type DeleteStepState<T extends DeleteState = DeleteState> = WorktreeStepState<ExcludeSome<T, 'repo', string>>;
type OpenStepState<T extends OpenState = OpenState> = WorktreeStepState<ExcludeSome<T, 'repo', string>>;
// Display titles for each subcommand, used by getTitle below.
const subcommandToTitleMap = new Map<State['subcommand'], string>([
	['create', 'Create'],
	['delete', 'Delete'],
	['open', 'Open'],
]);
/** Prefixes `title` with the subcommand's display name (e.g. 'Create Worktree'); returns it unchanged when no subcommand is chosen yet. */
function getTitle(title: string, subcommand: State['subcommand'] | undefined) {
	if (subcommand == null) return title;
	return `${subcommandToTitleMap.get(subcommand)} ${title}`;
}
/** Arguments accepted when the 'worktree' git command is invoked programmatically; `state` pre-fills steps and `confirm` controls the confirmation prompt. */
export interface WorktreeGitCommandArgs {
	readonly command: 'worktree';
	confirm?: boolean;
	state?: Partial<State>;
}
/**
 * Quick-pick driven git command for opening, creating, and deleting worktrees.
 * Implemented as a generator-based step machine (see `steps`): each step yields
 * a quick-pick/input definition and resumes with the user's selection, so the
 * user can navigate back and forth between steps.
 */
export class WorktreeGitCommand extends QuickCommand<State> {
	// The currently executing subcommand; drives canConfirm/skipConfirmKey.
	private subcommand: State['subcommand'] | undefined;
	// Temporary override used by the create flow to re-enter without a confirm.
	private canSkipConfirmOverride: boolean | undefined;
	constructor(container: Container, args?: WorktreeGitCommandArgs) {
		super(container, 'worktree', 'worktree', 'Worktree', {
			description: 'open, create, or delete worktrees',
		});
		// `counter` marks how many leading steps are already satisfied by args,
		// so those steps are skipped when the command starts.
		let counter = 0;
		if (args?.state?.subcommand != null) {
			counter++;
			switch (args.state.subcommand) {
				case 'create':
					if (args.state.uri != null) {
						counter++;
					}
					if (args.state.reference != null) {
						counter++;
					}
					break;
				case 'delete':
					if (args.state.uris != null && (!Array.isArray(args.state.uris) || args.state.uris.length !== 0)) {
						counter++;
					}
					break;
				case 'open':
					if (args.state.uri != null) {
						counter++;
					}
					break;
			}
		}
		if (args?.state?.repo != null) {
			counter++;
		}
		this.initialState = {
			counter: counter,
			confirm: args?.confirm,
			...args?.state,
		};
	}
	// Confirmation only makes sense once a subcommand has been chosen.
	override get canConfirm(): boolean {
		return this.subcommand != null;
	}
	override get canSkipConfirm(): boolean {
		return this.canSkipConfirmOverride ?? false;
	}
	// Per-subcommand key so "don't ask again" is remembered per subcommand.
	override get skipConfirmKey() {
		return `${this.key}${this.subcommand == null ? '' : `-${this.subcommand}`}:${this.pickedVia}`;
	}
	/**
	 * Top-level step loop: pick subcommand -> pick repo -> verify feature
	 * access -> dispatch to the subcommand's own step generator.
	 */
	protected async *steps(state: PartialStepState<State>): StepGenerator {
		const context: Context = {
			repos: Container.instance.git.openRepositories,
			associatedView: Container.instance.worktreesView,
			showTags: false,
			title: this.title,
		};
		let skippedStepTwo = false;
		while (this.canStepsContinue(state)) {
			context.title = this.title;
			if (state.counter < 1 || state.subcommand == null) {
				this.subcommand = undefined;
				const result = yield* this.pickSubcommandStep(state);
				// Always break on the first step (so we will go back)
				if (result === StepResult.Break) break;
				state.subcommand = result;
			}
			this.subcommand = state.subcommand;
			if (state.counter < 2 || state.repo == null || typeof state.repo === 'string') {
				skippedStepTwo = false;
				if (context.repos.length === 1) {
					// Only one repo open: auto-select it and remember we skipped,
					// so backing up later skips past this step too.
					skippedStepTwo = true;
					state.counter++;
					state.repo = context.repos[0];
				} else {
					const result = yield* pickRepositoryStep(state, context);
					if (result === StepResult.Break) continue;
					state.repo = result;
				}
			}
			// Worktrees are a gated (plus) feature; bail if access is denied.
			const result = yield* ensureAccessStep(state as any, context, PlusFeatures.Worktrees);
			if (result === StepResult.Break) break;
			context.title = getTitle(state.subcommand === 'delete' ? 'Worktrees' : this.title, state.subcommand);
			switch (state.subcommand) {
				case 'create': {
					yield* this.createCommandSteps(state as CreateStepState, context);
					// Clear any chosen path, since we are exiting this subcommand
					state.uri = undefined;
					break;
				}
				case 'delete': {
					// Normalize a single uri into an array.
					if (state.uris != null && !Array.isArray(state.uris)) {
						state.uris = [state.uris];
					}
					yield* this.deleteCommandSteps(state as DeleteStepState, context);
					break;
				}
				case 'open': {
					yield* this.openCommandSteps(state as OpenStepState, context);
					break;
				}
				default:
					QuickCommand.endSteps(state);
					break;
			}
			// If we skipped the previous step, make sure we back up past it
			if (skippedStepTwo) {
				state.counter--;
			}
		}
		return state.counter < 0 ? StepResult.Break : undefined;
	}
	// Step: choose between open/create/delete.
	private *pickSubcommandStep(state: PartialStepState<State>): StepResultGenerator<State['subcommand']> {
		const step = QuickCommand.createPickStep<QuickPickItemOfT<State['subcommand']>>({
			title: this.title,
			placeholder: `Choose a ${this.label} command`,
			items: [
				{
					label: 'open',
					description: 'opens the specified worktree',
					picked: state.subcommand === 'open',
					item: 'open',
				},
				{
					label: 'create',
					description: 'creates a new worktree',
					picked: state.subcommand === 'create',
					item: 'create',
				},
				{
					label: 'delete',
					description: 'deletes the specified worktrees',
					picked: state.subcommand === 'delete',
					item: 'delete',
				},
			],
			buttons: [QuickInputButtons.Back],
		});
		const selection: StepSelection<typeof step> = yield step;
		return QuickCommand.canPickStepContinue(step, state, selection) ? selection[0].item : StepResult.Break;
	}
	/**
	 * Create flow: pick a branch/tag, pick a location, confirm flags, optionally
	 * name a new branch, then create the worktree — with interactive recovery
	 * when the ref is already checked out or the folder already exists.
	 */
	private async *createCommandSteps(state: CreateStepState, context: Context): AsyncStepResultGenerator<void> {
		if (context.defaultUri == null) {
			context.defaultUri = await state.repo.getWorktreesDefaultUri();
		}
		if (state.flags == null) {
			state.flags = [];
		}
		context.pickedUri = undefined;
		// Don't allow skipping the confirm step
		state.confirm = true;
		this.canSkipConfirmOverride = undefined;
		while (this.canStepsContinue(state)) {
			if (state.counter < 3 || state.reference == null) {
				const result = yield* pickBranchOrTagStep(state, context, {
					placeholder: context =>
						`Choose a branch${context.showTags ? ' or tag' : ''} to create the new worktree for`,
					picked: state.reference?.ref ?? (await state.repo.getBranch())?.ref,
					titleContext: ' for',
					value: GitReference.isRevision(state.reference) ? state.reference.ref : undefined,
				});
				// Always break on the first step (so we will go back)
				if (result === StepResult.Break) break;
				state.reference = result;
			}
			if (state.counter < 4 || state.uri == null) {
				if (
					state.reference != null &&
					!configuration.get('worktrees.promptForLocation', state.repo.folder) &&
					context.defaultUri != null
				) {
					// Setting says don't prompt: use the configured default location.
					state.uri = context.defaultUri;
				} else {
					const result = yield* this.createCommandChoosePathStep(state, context, {
						titleContext: ` for ${GitReference.toString(state.reference, {
							capitalize: true,
							icon: false,
							label: state.reference.refType !== 'branch',
						})}`,
					});
					if (result === StepResult.Break) continue;
					state.uri = result;
					// Keep track of the actual uri they picked, because we will modify it in later steps
					context.pickedUri = state.uri;
				}
			}
			if (this.confirm(state.confirm)) {
				const result = yield* this.createCommandConfirmStep(state, context);
				if (result === StepResult.Break) continue;
				[state.uri, state.flags] = result;
			}
			// Reset any confirmation overrides
			state.confirm = true;
			this.canSkipConfirmOverride = undefined;
			if (state.flags.includes('-b') && state.createBranch == null) {
				const result = yield* inputBranchNameStep(state, context, {
					placeholder: 'Please provide a name for the new branch',
					titleContext: ` from ${GitReference.toString(state.reference, {
						capitalize: true,
						icon: false,
						label: state.reference.refType !== 'branch',
					})}`,
					value: state.createBranch ?? GitReference.getNameWithoutRemote(state.reference),
				});
				if (result === StepResult.Break) {
					// Clear the flags, since we can backup after the confirm step below (which is non-standard)
					state.flags = [];
					continue;
				}
				state.createBranch = result;
			}
			// Unless creating "directly", nest the worktree in a folder named
			// after the branch (slashes become subfolders).
			const uri = state.flags.includes('--direct')
				? state.uri
				: Uri.joinPath(
						state.uri,
						...(state.createBranch ?? state.reference.name).replace(/\\/g, '/').split('/'),
				  );
			try {
				await state.repo.createWorktree(uri, {
					commitish: state.reference?.name,
					createBranch: state.flags.includes('-b') ? state.createBranch : undefined,
					detach: state.flags.includes('--detach'),
					force: state.flags.includes('--force'),
				});
			} catch (ex) {
				if (
					WorktreeCreateError.is(ex, WorktreeCreateErrorReason.AlreadyCheckedOut) &&
					!state.flags.includes('--force')
				) {
					// Ref already checked out elsewhere: offer a new branch or a forced create.
					const createBranch: MessageItem = { title: 'Create New Branch' };
					const force: MessageItem = { title: 'Create Anyway' };
					const cancel: MessageItem = { title: 'Cancel', isCloseAffordance: true };
					const result = await window.showWarningMessage(
						`Unable to create the new worktree because ${GitReference.toString(state.reference, {
							icon: false,
							quoted: true,
						})} is already checked out.\n\nWould you like to create a new branch for this worktree or forcibly create it anyway?`,
						{ modal: true },
						createBranch,
						force,
						cancel,
					);
					if (result === createBranch) {
						// Loop again with '-b'; skip re-confirmation.
						state.flags.push('-b');
						this.canSkipConfirmOverride = true;
						state.confirm = false;
						continue;
					}
					if (result === force) {
						state.flags.push('--force');
						this.canSkipConfirmOverride = true;
						state.confirm = false;
						continue;
					}
				} else if (WorktreeCreateError.is(ex, WorktreeCreateErrorReason.AlreadyExists)) {
					void window.showErrorMessage(
						`Unable to create a new worktree in '${GitWorktree.getFriendlyPath(
							uri,
						)}' because the folder already exists and is not empty.`,
						'OK',
					);
				} else {
					// NOTE(review): this message appears to be missing its closing quote
					// (reads "...in '<path>."); confirm intended text before changing.
					void Messages.showGenericErrorMessage(
						`Unable to create a new worktree in '${GitWorktree.getFriendlyPath(uri)}.`,
					);
				}
			}
			QuickCommand.endSteps(state);
		}
	}
	// Step: native folder picker for the new worktree's location.
	private async *createCommandChoosePathStep(
		state: CreateStepState,
		context: Context,
		options?: { titleContext?: string },
	): AsyncStepResultGenerator<Uri> {
		const step = QuickCommand.createCustomStep<Uri>({
			show: async (_step: CustomStep<Uri>) => {
				const uris = await window.showOpenDialog({
					canSelectFiles: false,
					canSelectFolders: true,
					canSelectMany: false,
					defaultUri: context.pickedUri ?? state.uri ?? context.defaultUri,
					openLabel: 'Select Worktree Location',
					title: `${appendReposToTitle(
						`Choose Worktree Location${options?.titleContext ?? ''}`,
						state,
						context,
					)}`,
				});
				// Cancelled dialog is treated as "go back", not as an error.
				if (uris == null || uris.length === 0) return Directive.Back;
				return uris[0];
			},
		});
		const value: StepSelection<typeof step> = yield step;
		if (
			!QuickCommand.canStepContinue(step, state, value) ||
			!(await QuickCommand.canInputStepContinue(step, state, value))
		) {
			return StepResult.Break;
		}
		return value;
	}
	// Step: confirm create options; returns the chosen root uri and flags.
	private *createCommandConfirmStep(
		state: CreateStepState,
		context: Context,
	): StepResultGenerator<[Uri, CreateFlags[]]> {
		/**
		 * Here are the rules for creating the recommended path for the new worktree:
		 *
		 * If the user picks a folder outside the repo, it will be `<chosen-path>/<repo>.worktrees/<?branch>`
		 * If the user picks the repo folder, it will be `<repo>/../<repo>.worktrees/<?branch>`
		 * If the user picks a folder inside the repo, it will be `<repo>/../<repo>.worktrees/<?branch>`
		 */
		const pickedUri = context.pickedUri ?? state.uri;
		const pickedFriendlyPath = truncateLeft(GitWorktree.getFriendlyPath(pickedUri), 60);
		let canCreateDirectlyInPicked = true;
		let recommendedRootUri;
		const repoUri = state.repo.uri;
		if (repoUri.toString() !== pickedUri.toString()) {
			if (isDescendent(pickedUri, repoUri)) {
				recommendedRootUri = Uri.joinPath(repoUri, '..', `${basename(repoUri.path)}.worktrees`);
			} else {
				recommendedRootUri = Uri.joinPath(pickedUri, `${basename(repoUri.path)}.worktrees`);
			}
		} else {
			recommendedRootUri = Uri.joinPath(repoUri, '..', `${basename(repoUri.path)}.worktrees`);
			// Don't allow creating directly into the main worktree folder
			canCreateDirectlyInPicked = false;
		}
		const recommendedUri =
			state.reference != null
				? Uri.joinPath(recommendedRootUri, ...state.reference.name.replace(/\\/g, '/').split('/'))
				: recommendedRootUri;
		const recommendedFriendlyPath = truncateLeft(GitWorktree.getFriendlyPath(recommendedUri), 65);
		const recommendedNewBranchFriendlyPath = truncateLeft(
			GitWorktree.getFriendlyPath(Uri.joinPath(recommendedRootUri, '<new-branch-name>')),
			60,
		);
		const step: QuickPickStep<FlagsQuickPickItem<CreateFlags, Uri>> = QuickCommand.createConfirmStep(
			appendReposToTitle(`Confirm ${context.title}`, state, context),
			[
				FlagsQuickPickItem.create<CreateFlags, Uri>(
					state.flags,
					[],
					{
						label: context.title,
						description: ` for ${GitReference.toString(state.reference)}`,
						detail: `Will create worktree in $(folder) ${recommendedFriendlyPath}`,
					},
					recommendedRootUri,
				),
				FlagsQuickPickItem.create<CreateFlags, Uri>(
					state.flags,
					['-b'],
					{
						label: 'Create New Branch and Worktree',
						description: ` from ${GitReference.toString(state.reference)}`,
						detail: `Will create worktree in $(folder) ${recommendedNewBranchFriendlyPath}`,
					},
					recommendedRootUri,
				),
				...(canCreateDirectlyInPicked
					? [
							QuickPickSeparator.create(),
							FlagsQuickPickItem.create<CreateFlags, Uri>(
								state.flags,
								['--direct'],
								{
									label: `${context.title} (directly in folder)`,
									description: ` for ${GitReference.toString(state.reference)}`,
									detail: `Will create worktree directly in $(folder) ${pickedFriendlyPath}`,
								},
								pickedUri,
							),
							FlagsQuickPickItem.create<CreateFlags, Uri>(
								state.flags,
								['-b', '--direct'],
								{
									label: 'Create New Branch and Worktree (directly in folder)',
									description: ` from ${GitReference.toString(state.reference)}`,
									detail: `Will create worktree directly in $(folder) ${pickedFriendlyPath}`,
								},
								pickedUri,
							),
					  ]
					: []),
			] as FlagsQuickPickItem<CreateFlags, Uri>[],
			context,
		);
		const selection: StepSelection<typeof step> = yield step;
		return QuickCommand.canPickStepContinue(step, state, selection)
			? [selection[0].context, selection[0].item]
			: StepResult.Break;
	}
	/**
	 * Delete flow: pick worktrees, confirm (optionally forced), then delete
	 * each — re-prompting to force when a delete fails due to changes.
	 */
	private async *deleteCommandSteps(state: DeleteStepState, context: Context): StepGenerator {
		context.worktrees = await state.repo.getWorktrees();
		if (state.flags == null) {
			state.flags = [];
		}
		while (this.canStepsContinue(state)) {
			if (state.counter < 3 || state.uris == null || state.uris.length === 0) {
				context.title = getTitle('Worktrees', state.subcommand);
				const result = yield* pickWorktreesStep(state, context, {
					filter: wt => wt.main || !wt.opened, // Can't delete the main or opened worktree
					includeStatus: true,
					picked: state.uris?.map(uri => uri.toString()),
					placeholder: 'Choose worktrees to delete',
				});
				// Always break on the first step (so we will go back)
				if (result === StepResult.Break) break;
				state.uris = result.map(w => w.uri);
			}
			context.title = getTitle(pluralize('Worktree', state.uris.length, { only: true }), state.subcommand);
			const result = yield* this.deleteCommandConfirmStep(state, context);
			if (result === StepResult.Break) continue;
			state.flags = result;
			QuickCommand.endSteps(state);
			for (const uri of state.uris) {
				let retry = false;
				do {
					retry = false;
					const force = state.flags.includes('--force');
					try {
						if (force) {
							// Even when forcing, warn again if the worktree has
							// uncommitted changes (they would be lost forever).
							const worktree = context.worktrees.find(wt => wt.uri.toString() === uri.toString());
							const status = await worktree?.getStatus();
							if (status?.hasChanges ?? false) {
								const confirm: MessageItem = { title: 'Force Delete' };
								const cancel: MessageItem = { title: 'Cancel', isCloseAffordance: true };
								const result = await window.showWarningMessage(
									`The worktree in '${uri.fsPath}' has uncommitted changes.\n\nDeleting it will cause those changes to be FOREVER LOST.\nThis is IRREVERSIBLE!\n\nAre you sure you still want to delete it?`,
									{ modal: true },
									confirm,
									cancel,
								);
								if (result !== confirm) return;
							}
						}
						await state.repo.deleteWorktree(uri, { force: force });
					} catch (ex) {
						if (WorktreeDeleteError.is(ex)) {
							if (ex.reason === WorktreeDeleteErrorReason.MainWorkingTree) {
								void window.showErrorMessage('Unable to delete the main worktree');
							} else if (!force) {
								// Offer to retry the same worktree with --force.
								const confirm: MessageItem = { title: 'Force Delete' };
								const cancel: MessageItem = { title: 'Cancel', isCloseAffordance: true };
								const result = await window.showErrorMessage(
									ex.reason === WorktreeDeleteErrorReason.HasChanges
										? `Unable to delete worktree because there are UNCOMMITTED changes in '${uri.fsPath}'.\n\nForcibly deleting it will cause those changes to be FOREVER LOST.\nThis is IRREVERSIBLE!\n\nWould you like to forcibly delete it?`
										: `Unable to delete worktree in '${uri.fsPath}'.\n\nWould you like to try to forcibly delete it?`,
									{ modal: true },
									confirm,
									cancel,
								);
								if (result === confirm) {
									state.flags.push('--force');
									retry = true;
								}
							}
						} else {
							// NOTE(review): message appears to be missing its closing quote
							// (reads "...in '<path>."); confirm intended text before changing.
							void Messages.showGenericErrorMessage(`Unable to delete worktree in '${uri.fsPath}.`);
						}
					}
				} while (retry);
			}
		}
	}
	// Step: confirm delete, with an explicit force option.
	private *deleteCommandConfirmStep(state: DeleteStepState, context: Context): StepResultGenerator<DeleteFlags[]> {
		const step: QuickPickStep<FlagsQuickPickItem<DeleteFlags>> = QuickCommand.createConfirmStep(
			appendReposToTitle(`Confirm ${context.title}`, state, context),
			[
				FlagsQuickPickItem.create<DeleteFlags>(state.flags, [], {
					label: context.title,
					detail: `Will delete ${pluralize('worktree', state.uris.length, {
						only: state.uris.length === 1,
					})}${state.uris.length === 1 ? ` in $(folder) ${GitWorktree.getFriendlyPath(state.uris[0])}` : ''}`,
				}),
				FlagsQuickPickItem.create<DeleteFlags>(state.flags, ['--force'], {
					label: `Force ${context.title}`,
					description: 'including ANY UNCOMMITTED changes',
					detail: `Will forcibly delete ${pluralize('worktree', state.uris.length, {
						only: state.uris.length === 1,
					})} ${
						state.uris.length === 1 ? ` in $(folder) ${GitWorktree.getFriendlyPath(state.uris[0])}` : ''
					}`,
				}),
			],
			context,
		);
		const selection: StepSelection<typeof step> = yield step;
		return QuickCommand.canPickStepContinue(step, state, selection) ? selection[0].item : StepResult.Break;
	}
	/**
	 * Open flow: pick a worktree, confirm how to open it (current window, new
	 * window, or reveal in the OS file explorer), then open it.
	 */
	private async *openCommandSteps(state: OpenStepState, context: Context): StepGenerator {
		context.worktrees = await state.repo.getWorktrees();
		if (state.flags == null) {
			state.flags = [];
		}
		while (this.canStepsContinue(state)) {
			if (state.counter < 3 || state.uri == null) {
				context.title = getTitle('Worktree', state.subcommand);
				const result = yield* pickWorktreeStep(state, context, {
					includeStatus: true,
					picked: state.uri?.toString(),
					placeholder: 'Choose worktree to open',
				});
				// Always break on the first step (so we will go back)
				if (result === StepResult.Break) break;
				state.uri = result.uri;
			}
			context.title = getTitle('Worktree', state.subcommand);
			const result = yield* this.openCommandConfirmStep(state, context);
			if (result === StepResult.Break) continue;
			state.flags = result;
			QuickCommand.endSteps(state);
			const worktree = context.worktrees.find(wt => wt.uri.toString() === state.uri.toString())!;
			if (state.flags.includes('--reveal-explorer')) {
				void GitActions.Worktree.revealInFileExplorer(worktree);
			} else {
				GitActions.Worktree.open(worktree, {
					location: state.flags.includes('--new-window')
						? OpenWorkspaceLocation.NewWindow
						: OpenWorkspaceLocation.CurrentWindow,
				});
			}
		}
	}
	// Step: confirm how to open the chosen worktree.
	private *openCommandConfirmStep(state: OpenStepState, context: Context): StepResultGenerator<OpenFlags[]> {
		const step: QuickPickStep<FlagsQuickPickItem<OpenFlags>> = QuickCommand.createConfirmStep(
			appendReposToTitle(`Confirm ${context.title}`, state, context),
			[
				FlagsQuickPickItem.create<OpenFlags>(state.flags, [], {
					label: context.title,
					detail: `Will open, in the current window, the worktree in $(folder) ${GitWorktree.getFriendlyPath(
						state.uri,
					)}`,
				}),
				FlagsQuickPickItem.create<OpenFlags>(state.flags, ['--new-window'], {
					label: `${context.title} in a New Window`,
					detail: `Will open, in a new window, the worktree in $(folder) ${GitWorktree.getFriendlyPath(
						state.uri,
					)}`,
				}),
				FlagsQuickPickItem.create<OpenFlags>(state.flags, ['--reveal-explorer'], {
					label: `Reveal in File Explorer`,
					detail: `Will open, in the File Explorer, the worktree in $(folder) ${GitWorktree.getFriendlyPath(
						state.uri,
					)}`,
				}),
			],
			context,
		);
		const selection: StepSelection<typeof step> = yield step;
		return QuickCommand.canPickStepContinue(step, state, selection) ? selection[0].item : StepResult.Break;
	}
}
* Copyright 2015 Dev Shop Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// notice_end
import {EventContext, ModelRecord, ObservationStage, SingleModelRouter, State, Status} from './';
import {Observable, RouterObservable, RouterSubject, Subject} from '../reactive';
import {Guard, Logger} from '../system';
import {CompositeDisposable, Disposable, DisposableBase} from '../system/disposables';
import {EspDecoratorUtil, ObserveEventPredicate} from '../decorators';
import {DecoratorObservationRegister} from './decoratorObservationRegister';
import {CompositeDiagnosticMonitor} from './devtools';
import {EventProcessors} from './eventProcessors';
import {DispatchType, EventEnvelope, ModelEnvelope} from './envelopes';
import {EventStreamsRegistration} from './modelRecord';
import {DefaultEventContext} from './eventContext';
import {DecoratorTypes} from '../decorators';
// Shared logger instance for all Router diagnostics.
let _log = Logger.create('Router');
// Union of the two envelope kinds that flow over the router's dispatch subject.
type Envelope = ModelEnvelope<any> | EventEnvelope<any, any>;
// Internal event-type name; not referenced in this visible chunk — presumably
// used by runAction's dispatch later in the file (TODO confirm).
const RUN_ACTION_EVENT_NAME = '__runAction';
/** Central event router: models register by id, events are enqueued per-model and dispatched through a single subject. */
export class Router extends DisposableBase {
    // All registered model records, keyed by model id.
    private _models: Map<string, ModelRecord>;
    // Single stream over which both model updates and events are dispatched.
    private _dispatchSubject: Subject<Envelope>;
    // Set when the router halts due to an unhandled error; makes further use throw.
    private _haltingException: Error;
    private _state: State;
    private _onErrorHandlers: Array<(error: Error) => void>;
    private _diagnosticMonitor: CompositeDiagnosticMonitor;
    private _decoratorObservationRegister: DecoratorObservationRegister;
    // Initializes empty registries and wires the diagnostic monitor into both
    // the router's disposal chain and its state machine.
    public constructor() {
        super();
        this._models = new Map();
        this._haltingException = undefined;
        this._dispatchSubject = new Subject<Envelope>();
        this._onErrorHandlers = [];
        this._diagnosticMonitor = new CompositeDiagnosticMonitor();
        // Ensure the monitor is torn down when the router is disposed.
        this.addDisposable(this._diagnosticMonitor);
        this._state = new State(this._diagnosticMonitor);
        this._decoratorObservationRegister = new DecoratorObservationRegister();
    }
    /** The router's current dispatch status (idle, dispatching, executing, etc. — as defined by State). */
    public get currentStatus(): Status {
        return this._state.currentStatus;
    }
    /**
     * Registers a model with the router under `modelId` and immediately
     * dispatches a Model envelope so existing observers receive it.
     * Throws if the router is halted/disposed, or if a model is already
     * registered under the same id.
     */
    public addModel(modelId: string, model: any, eventProcessors?: EventProcessors) {
        this._throwIfHaltedOrDisposed();
        Guard.isString(modelId, 'The modelId argument should be a string');
        Guard.isDefined(model, 'The model argument must be defined');
        if (eventProcessors) {
            Guard.isObject(eventProcessors, `The eventProcessors argument provided with the model (of id ${modelId}) should be an object`);
        }
        let modelRecord = this._models.get(modelId);
        if (modelRecord) {
            // It's possible the model was observed first, thus has a model record but not yet an actual model.
            // If there is a record, we just ensure it's model isn't there yet.
            Guard.isFalsey(modelRecord.model, 'The model with id [' + modelId + '] is already registered');
        }
        this._getOrCreateModelRecord(modelId, model, eventProcessors);
        // Push the model onto the dispatch stream so observers get the initial model.
        this._dispatchSubject.onNext({modelId: modelId, model: model, dispatchType: DispatchType.Model});
        this._diagnosticMonitor.addModel(modelId);
    }
public removeModel(modelId: string) {
Guard.isString(modelId, 'The modelId argument should be a string');
let modelRecord = this._models.get(modelId);
if (modelRecord) {
this._diagnosticMonitor.removeModel(modelId);
modelRecord.wasRemoved = true;
this._models.delete(modelId);
modelRecord.dispose();
}
}
    /** True when a model record exists for `modelId` (the model itself may not be set yet — see addModel). */
    public isModelRegistered(modelId: string): boolean {
        Guard.isString(modelId, 'The modelId argument should be a string');
        return this._models.has(modelId);
    }
/**
* Exists for read only access to a model.
*
* Note: given this is JavaScript, it's up to the caller to not write against the model.
* If you want to modify the model, publish an event to it.
*
* @param modelId
*/
public getModel(modelId: string): any {
Guard.isString(modelId, 'The modelId argument should be a string');
if (this._models.get(modelId)) {
let modelRecord = this._models.get(modelId);
if (!modelRecord.hasModel) {
throw new Error(`Model with id ${modelId} is registered, however it's model has not yet been set. Can not retrieve`);
}
return modelRecord.model;
}
return null;
}
/**
* Exists to find a model for read only access.
*
* Note: given this is JavaScript, it's up to the caller to not write against the model.
* If you want to modify the model, publish an event to it.
*
* Returns the found model else null.
*
* @param predicate = a predicate which is used as a test against each model. Will stop on first match
*/
public findModel(predicate: (model: any) => boolean) {
Guard.isFunction(predicate, 'predicate should be a function');
for (let [key, value] of this._models) {
if (value.hasModel) {
if (predicate(value.model)) {
return value.model;
}
}
}
return null;
}
public publishEvent(modelId: string, eventType: string, event: any) {
Guard.isString(modelId, 'The modelId argument should be a string');
Guard.isString(eventType, 'The eventType argument should be a string');
Guard.isDefined(event, 'The event argument must be defined');
this._throwIfHaltedOrDisposed();
if (this._state.currentStatus === Status.EventExecution) {
throw new Error('You can not publish further events when performing an event execution. modelId1: [' + modelId + '], eventType:[' + eventType + ']');
}
this._diagnosticMonitor.publishEvent(modelId, eventType, event);
this._tryEnqueueEvent(modelId, eventType, event);
}
public broadcastEvent(eventType: string, event: any) {
Guard.isString(eventType, 'The eventType argument should be a string');
Guard.isDefined(event, 'The event argument should be defined');
this._diagnosticMonitor.broadcastEvent(eventType);
for (let [key, value] of this._models) {
this._tryEnqueueEvent(value.modelId, eventType, event);
}
try {
this._purgeEventQueues();
} catch (err) {
this._halt(err);
}
}
public executeEvent(eventType: string, event: any) {
this._throwIfHaltedOrDisposed();
Guard.isString(eventType, 'The eventType argument should be a string');
Guard.isDefined(event, 'The event argument should be defined');
this._diagnosticMonitor.executingEvent(eventType);
this._state.executeEvent(() => {
this._dispatchEventToEventProcessors(
this._state.currentModelRecord,
event,
eventType
);
});
}
public runAction<TModel>(modelId: string, action: (model: TModel) => void) {
this._throwIfHaltedOrDisposed();
Guard.isString(modelId, 'modelId must be a string');
Guard.isTruthy(modelId !== '', 'modelId must not be empty');
Guard.isFunction(action, 'the argument passed to runAction must be a function and can not be null|undefined');
this._diagnosticMonitor.runAction(modelId);
let modelRecord = this._models.get(modelId);
if (!modelRecord) {
throw new Error('Can not run action as model with id [' + modelId + '] not registered');
} else {
modelRecord.eventQueue.push({eventType: RUN_ACTION_EVENT_NAME, action: action});
try {
this._purgeEventQueues();
} catch (err) {
this._halt(err);
}
}
}
public getEventObservable<TEvent, TModel>(modelId: string, eventType: string, stage?: ObservationStage): Observable<EventEnvelope<TEvent, TModel>> {
return Observable.create<EventEnvelope<TEvent, TModel>>(o => {
this._throwIfHaltedOrDisposed();
Guard.isString(modelId, 'The modelId argument should be a string');
Guard.isString(eventType, 'The eventType must be a string');
Guard.isDefined(modelId, 'The modelId argument should be defined');
stage = this._tryDefaultObservationStage(stage);
let modelRecord = this._getOrCreateModelRecord(modelId);
let eventStreamDetails: EventStreamsRegistration = modelRecord.getOrCreateEventStreamsRegistration(
eventType,
<Observable<EventEnvelope<any, any>>>this._dispatchSubject
);
switch (stage) {
case ObservationStage.preview:
return eventStreamDetails.preview.subscribe(o);
case ObservationStage.normal:
return eventStreamDetails.normal.subscribe(o);
case ObservationStage.committed:
return eventStreamDetails.committed.subscribe(o);
case ObservationStage.final:
return eventStreamDetails.final.subscribe(o);
case ObservationStage.all:
return eventStreamDetails.all.subscribe(o);
default:
throw new Error(`Unknown stage '${stage}' requested for eventType ${eventType} and modelId: ${modelId}`);
}
});
}
    // all events, optionally provide a stage to filter on
    public getAllEventsObservable<TModel>(stage?: ObservationStage): Observable<EventEnvelope<any, TModel>>;
    // given events, optionally provide a stage to filter on
    public getAllEventsObservable<TModel>(eventTypes: string[], stage?: ObservationStage): Observable<EventEnvelope<any, TModel>>;
    /**
     * Returns an observable of every event dispatched through the router,
     * optionally restricted to a set of event types and/or an observation
     * stage (defaults to ObservationStage.normal; ObservationStage.all
     * passes envelopes for every stage).
     */
    public getAllEventsObservable<TModel>(...args: any[]): Observable<EventEnvelope<any, TModel>> {
        let eventFilter: (eventType?: string) => boolean;
        let stage: ObservationStage;
        // Builds a predicate that accepts only the listed event types.
        const buildFilter = (eventTypes: string[]) => {
            Guard.lengthIsAtLeast(eventTypes, 1, 'eventTypes.length must be > 0');
            let set = new Set(eventTypes);
            return eventType => set.has(eventType);
        };
        // try figure out which overload was used
        if (!args || args.length === 0) {
            stage = ObservationStage.normal;
            eventFilter = () => true;
        } else if (args.length === 1) {
            // first param could be an array or an observation stage
            if (ObservationStage.isObservationStage(args[0])) {
                stage = this._tryDefaultObservationStage(args[0]);
                eventFilter = () => true;
            } else {
                // else assume it's an array
                stage = ObservationStage.normal;
                eventFilter = buildFilter(args[0]);
            }
        } else if (args.length === 2) {
            // with this overload, the first param should be an event array
            eventFilter = buildFilter(args[0]);
            stage = this._tryDefaultObservationStage(args[1]);
        } else {
            throw new Error(`unsupported overload called for getAllEventsObservable. Received ${args}.`);
        }
        stage = this._tryDefaultObservationStage(stage);
        return Observable.create(o => {
            this._throwIfHaltedOrDisposed();
            return this._dispatchSubject
                .filter(envelope => envelope.dispatchType === DispatchType.Event)
                .cast<EventEnvelope<any, any>>()
                .filter(envelope => {
                    // 'all' observes every stage; otherwise match the requested one.
                    if (ObservationStage.isAll(stage)) {
                        return true;
                    } else {
                        return envelope.observationStage === stage;
                    }
                })
                .cast<EventEnvelope<any, any>>()
                .filter(envelope => eventFilter(envelope.eventType))
                .subscribe(o);
        });
    }
public getModelObservable<TModel>(modelId: string): Observable<TModel> {
return Observable.create(o => {
this._throwIfHaltedOrDisposed();
Guard.isString(modelId, 'The modelId should be a string');
let modelRecord = this._getOrCreateModelRecord(modelId);
return modelRecord.modelObservationStream
.map(envelope => envelope.model)
.subscribe(o);
});
}
public createObservableFor<TModel>(modelId: string, observer): RouterObservable<TModel> {
return Observable
.create<TModel>(observer)
.asRouterObservable(this)
.subscribeOn(modelId);
}
public createSubject<T>(): RouterSubject<T> {
return new RouterSubject<T>(this);
}
public createModelRouter<TModel>(targetModelId: string) {
Guard.isString(targetModelId, 'The targetModelId argument should be a string');
return SingleModelRouter.createWithRouter<TModel>(this, targetModelId);
}
public observeEventsOn(modelId: string, object: any): Disposable {
if (EspDecoratorUtil.hasMetadata(object)) {
return this._observeEventsUsingDirectives(modelId, object);
}
return new DisposableBase();
}
public addOnErrorHandler(handler: (error: Error) => void) {
this._onErrorHandlers.push(handler);
}
public removeOnErrorHandler(handler) {
let index = this._onErrorHandlers.indexOf(handler);
if (index >= 0) {
delete this._onErrorHandlers[index];
} else {
throw new Error('Unknown error handler.');
}
}
    /** Returns the diagnostic monitor's logging summary for the dispatch loop. */
    public getDispatchLoopDiagnostics() {
        return this._diagnosticMonitor.getLoggingDiagnosticSummary();
    }
    /** Whether verbose diagnostic logging is currently enabled on the monitor. */
    public get enableDiagnosticLogging() {
        return this._diagnosticMonitor.enableDiagnosticLogging;
    }
    /** Enables or disables verbose diagnostic logging on the monitor. */
    public set enableDiagnosticLogging(isEnabled: boolean) {
        this._diagnosticMonitor.enableDiagnosticLogging = isEnabled;
    }
public isOnDispatchLoopFor(modelId: string) {
Guard.isString(modelId, 'modelId must be a string');
Guard.isFalsey(modelId === '', 'modelId must not be empty');
return this._state.currentModelId === modelId;
}
private _getOrCreateModelRecord(modelId: string, model?: any, eventProcessors?: EventProcessors): ModelRecord {
let modelRecord: ModelRecord = this._models.get(modelId);
if (modelRecord) {
if (!modelRecord.hasModel) {
modelRecord.setModel(model, eventProcessors);
}
} else {
let modelObservationStream = this._dispatchSubject
.cast<ModelEnvelope<any>>()
.filter(envelope => envelope.dispatchType === DispatchType.Model && envelope.modelId === modelId)
.share(true);
modelRecord = new ModelRecord(modelId, model, modelObservationStream, eventProcessors);
this._models.set(modelId, modelRecord);
}
return modelRecord;
}
private _tryEnqueueEvent(modelId: string, eventType: string, event: any) {
// we allow for lazy model registration, you can observe a model but then register it later,
// this means at this point when publishing an event we need to ensure the actual model is there.
if (!this._models.has(modelId) || !this._models.get(modelId).model) {
throw new Error('Can not publish event of type [' + eventType + '] as model with id [' + modelId + '] not registered');
} else {
try {
if (this._models.has(modelId)) {
let modelRecord = this._getOrCreateModelRecord(modelId);
modelRecord.enqueueEvent(eventType, event);
this._diagnosticMonitor.eventEnqueued(modelId, eventType);
this._purgeEventQueues();
}
} catch (err) {
this._halt(err);
}
}
}
    /**
     * The router's core dispatch loop.
     *
     * Only runs when the router status is Idle; re-entrant calls while a loop
     * is in flight are no-ops (the running loop drains newly enqueued work).
     * For each model with queued events: run the pre-processor, drain that
     * model's event queue through the processor pipeline (including events
     * raised during dispatch), run the post-processor, then dispatch model
     * updates before moving on to the next model with queued events.
     */
    private _purgeEventQueues() {
        if (this._state.currentStatus === Status.Idle) {
            let modelRecord = this._getNextModelRecordWithQueuedEvents();
            let hasEvents = !!modelRecord;
            this._diagnosticMonitor.dispatchLoopStart();
            while (hasEvents) {
                let eventRecord = modelRecord.eventQueue.shift();
                this._diagnosticMonitor.startingModelEventLoop(modelRecord.modelId, eventRecord.eventType);
                this._state.moveToPreProcessing(modelRecord.modelId, modelRecord);
                this._diagnosticMonitor.preProcessingModel();
                modelRecord.preEventProcessor(modelRecord.model);
                if (!modelRecord.wasRemoved) {
                    this._state.moveToEventDispatch();
                    this._diagnosticMonitor.dispatchingEvents();
                    while (hasEvents) {
                        if (eventRecord.eventType === RUN_ACTION_EVENT_NAME) {
                            // Actions run directly against the model rather than
                            // going through the observation-stage pipeline.
                            this._diagnosticMonitor.dispatchingAction();
                            modelRecord.eventDispatchProcessor(modelRecord.model, null, RUN_ACTION_EVENT_NAME);
                            eventRecord.action(modelRecord.model);
                            modelRecord.eventDispatchedProcessor(modelRecord.model, null, RUN_ACTION_EVENT_NAME);
                        } else {
                            this._state.eventsProcessed.push(eventRecord.eventType);
                            this._dispatchEventToEventProcessors(
                                modelRecord,
                                eventRecord.event,
                                eventRecord.eventType
                            );
                        }
                        // A processor may remove the model mid-dispatch; stop immediately.
                        if (modelRecord.wasRemoved) {
                            break;
                        }
                        modelRecord.hasReceivedEvent = true;
                        hasEvents = modelRecord.eventQueue.length > 0;
                        if (hasEvents) {
                            eventRecord = modelRecord.eventQueue.shift();
                        }
                    } // keep looping until any events from the dispatch to processors stage are processed
                    this._diagnosticMonitor.finishDispatchingEvent();
                    if (!modelRecord.wasRemoved) {
                        this._diagnosticMonitor.postProcessingModel();
                        this._state.moveToPostProcessing();
                        modelRecord.postEventProcessor(modelRecord.model, this._state.eventsProcessed);
                        this._state.clearEventDispatchQueue();
                    }
                }
                modelRecord.eventQueuePurged();
                // we now dispatch updates before processing the next model, if any
                this._state.moveToDispatchModelUpdates();
                this._dispatchModelUpdates();
                modelRecord = this._getNextModelRecordWithQueuedEvents();
                hasEvents = !!modelRecord;
                this._diagnosticMonitor.endingModelEventLoop();
            } // keep looping until any events raised during post event processing OR event that have come in for other models are processed
            this._state.moveToIdle();
            this._diagnosticMonitor.dispatchLoopEnd();
        }
    }
    /**
     * Runs a single event through the observation-stage pipeline:
     * preview -> normal -> (committed, only if committed at normal) -> final.
     *
     * Enforces the stage contract: an event can be canceled only at preview
     * and committed only at normal; violations throw.
     */
    private _dispatchEventToEventProcessors(modelRecord: ModelRecord, event, eventType): void {
        let eventContext = new DefaultEventContext(
            modelRecord.modelId,
            eventType
        );
        this._dispatchEvent(modelRecord, event, eventType, eventContext, ObservationStage.preview);
        if (eventContext.isCommitted) {
            throw new Error('You can\'t commit an event at the preview stage. Event: [' + eventContext.eventType + '], ModelId: [' + modelRecord.modelId + ']');
        }
        // Cancellation at preview skips every later stage.
        if (!eventContext.isCanceled) {
            let wasCommittedAtNormalStage;
            eventContext.updateCurrentState(ObservationStage.normal);
            this._dispatchEvent(modelRecord, event, eventType, eventContext, ObservationStage.normal);
            if (eventContext.isCanceled) {
                throw new Error('You can\'t cancel an event at the normal stage. Event: [' + eventContext.eventType + '], ModelId: [' + modelRecord.modelId + ']');
            }
            wasCommittedAtNormalStage = eventContext.isCommitted;
            if (wasCommittedAtNormalStage) {
                eventContext.updateCurrentState(ObservationStage.committed);
                this._dispatchEvent(modelRecord, event, eventType, eventContext, ObservationStage.committed);
                if (eventContext.isCanceled) {
                    throw new Error('You can\'t cancel an event at the committed stage. Event: [' + eventContext.eventType + '], ModelId: [' + modelRecord.modelId + ']');
                }
            }
            eventContext.updateCurrentState(ObservationStage.final);
            this._dispatchEvent(modelRecord, event, eventType, eventContext, ObservationStage.final);
            if (eventContext.isCanceled) {
                throw new Error('You can\'t cancel an event at the final stage. Event: [' + eventContext.eventType + '], ModelId: [' + modelRecord.modelId + ']');
            }
            if (!wasCommittedAtNormalStage && eventContext.isCommitted) {
                throw new Error('You can\'t commit an event at the final stage. Event: [' + eventContext.eventType + '], ModelId: [' + modelRecord.modelId + ']');
            }
        }
    }
private _dispatchEvent(modelRecord: ModelRecord, event: any, eventType: string, context: EventContext, stage: ObservationStage) {
this._diagnosticMonitor.dispatchingEvent(eventType, stage);
modelRecord.eventDispatchProcessor(modelRecord.model, eventType, event, stage);
this._dispatchSubject.onNext({
event: event,
eventType: eventType,
modelId: modelRecord.modelId,
model: modelRecord.model,
context: context,
observationStage: stage,
dispatchType: DispatchType.Event
});
modelRecord.eventDispatchedProcessor(modelRecord.model, eventType, event, stage);
}
private _dispatchModelUpdates() {
let updates: ModelRecord[] = [];
for (let [key, value] of this._models) {
if (value.hasReceivedEvent) {
value.hasReceivedEvent = false;
updates.push(value);
}
}
for (let i = 0, len = updates.length; i < len; i++) {
let modelRecord: ModelRecord = updates[i];
this._diagnosticMonitor.dispatchingModelUpdates(modelRecord.modelId);
this._dispatchSubject.onNext({
modelId: modelRecord.modelId,
model: modelRecord.model,
dispatchType: DispatchType.Model
});
}
}
/**
* Tries to find the a ModelRecord with pending events.
* ModelRecord's with older enqueued events are returned first.
* @private
*/
private _getNextModelRecordWithQueuedEvents(): ModelRecord {
let candidate: ModelRecord = null;
let dirtyEpochMs: number = Date.now();
for (let [key, value] of this._models) {
if (value.eventQueue.length > 0 && value.eventQueueDirtyEpochMs <= dirtyEpochMs) {
candidate = value;
dirtyEpochMs = value.eventQueueDirtyEpochMs;
}
}
return candidate;
}
    /**
     * Subscribes each decorated member of `object` to its declared event
     * against the given model, honouring any predicate declared on the
     * decorator. Returns a disposable that tears down every subscription and
     * clears the duplicate-registration guard.
     *
     * @throws when called twice for the same (modelId, object) pair
     */
    private _observeEventsUsingDirectives(modelId: string, object: any) {
        if (this._decoratorObservationRegister.isRegistered(modelId, object)) {
            // tslint:disable-next-line:max-line-length
            throw new Error(`observeEventsOn has already been called for model with id '${modelId}' and the given object. Note you can observe the same model with different decorated objects, however you have called observeEventsOn twice with the same object.`);
        }
        this._decoratorObservationRegister.register(modelId, object);
        let compositeDisposable = new CompositeDisposable();
        let eventsDetails = EspDecoratorUtil.getAllEvents(object);
        for (let i = 0; i < eventsDetails.length; i++) {
            let details = eventsDetails[i];
            compositeDisposable.add(this.getEventObservable(modelId, details.eventType, details.observationStage).subscribe((eventEnvelope) => {
                // note if the code is uglifyied then details.functionName isn't going to mean much.
                // If you're packing your vendor bundles, or debug bundles separately then you can use the no-mangle-functions option to retain function names.
                let predicate = <ObserveEventPredicate>details.predicate;
                if (!predicate || predicate(object, eventEnvelope.event, eventEnvelope.context)) {
                    this._diagnosticMonitor.dispatchingViaDirective(details.functionName);
                    // observeEvent-style members receive (event, context, model);
                    // other decorator types receive the whole envelope.
                    if (details.decoratorType === DecoratorTypes.observeEvent) {
                        object[details.functionName](eventEnvelope.event, eventEnvelope.context, eventEnvelope.model);
                    } else {
                        object[details.functionName](eventEnvelope);
                    }
                }
            }));
        }
        compositeDisposable.add(() => {
            this._decoratorObservationRegister.removeRegistration(modelId, object);
        });
        return compositeDisposable;
    }
private _tryDefaultObservationStage(stage?: ObservationStage) {
if (stage) {
Guard.isString(stage, 'The stage argument should be a string');
Guard.isTruthy(ObservationStage.isObservationStage(stage), 'The stage argument value of [' + stage + '] is incorrect. It should be ObservationStage.preview, ObservationStage.normal, ObservationStage.committed or ObservationStage.all.');
return stage;
} else {
return ObservationStage.normal;
}
}
private _throwIfHaltedOrDisposed() {
if (this._state.currentStatus === Status.Halted) {
throw new Error(`ESP router halted due to previous unhandled error [${this._haltingException}]`);
}
if (this.isDisposed) {
throw new Error(`ESP router has been disposed`);
}
}
    /**
     * Moves the router to the Halted state, records the halting exception,
     * notifies diagnostics and — only on the first halting error — the
     * registered onError handlers, then rethrows the original error.
     */
    private _halt(err) {
        let isInitialHaltingError = this._state.currentStatus !== Status.Halted;
        this._state.moveToHalted();
        let modelIds = [...this._models.keys()];
        this._diagnosticMonitor.halted(modelIds, err);
        _log.error('The ESP router has caught an unhandled error and will halt', err);
        this._haltingException = err;
        // We run the onErrorHandlers after the
        // router has had time to set its own state
        if (isInitialHaltingError) {
            this._onErrorHandlers.forEach(handler => {
                try {
                    handler(err);
                } catch (handlerError) {
                    // A throwing handler must not mask the original halt reason.
                    _log.info(`Error handler errored. Ignoring and continuing, Error = ${handlerError}`, handlerError);
                }
            });
        }
        throw err;
    }
} | the_stack |
import { EventHandler, dom, h } from "../../src/index";
import { expect, sinon } from "../test-utilities";
// Pass-through interceptor: invokes the original handler unchanged so these
// specs can install event handlers without any projector-specific wrapping.
let noopEventHandlerInterceptor = (
  propertyName: string,
  functionPropertyArgument: EventHandler
) => {
  return function (this: Node, ...handlerArgs: any[]) {
    return functionPropertyArgument.apply(this, handlerArgs as any);
  };
};
describe("dom", () => {
describe("properties", () => {
    // `classes` (keyed booleans) and `class` (a plain string, useful from JSX)
    // are two ways to drive className; these specs cover each and their mix.
    describe("classes", () => {
      it("adds and removes classes", () => {
        let projection = dom.create(h("div", { classes: { a: true, b: false } }));
        let div = projection.domNode as HTMLDivElement;
        expect(div.className).to.equal("a");
        projection.update(h("div", { classes: { a: true, b: true } }));
        expect(div.className).to.equal("a b");
        projection.update(h("div", { classes: { a: false, b: true } }));
        expect(div.className).to.equal("b");
      });
      it("allows a constant class to be applied to make JSX workable", () => {
        let projection = dom.create(h("div", { class: "extra special" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="extra special"></div>');
        projection.update(h("div", { class: "super special" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="super special"></div>');
        projection.update(h("div", { class: undefined }));
        expect(projection.domNode.outerHTML).to.equal('<div class=""></div>');
        projection.update(h("div", { class: "me too" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="me too"></div>');
      });
      it("is lenient towards extra spaces in class attribute", () => {
        let projection = dom.create(h("div", { class: "extra special " }));
        expect(projection.domNode.outerHTML).to.equal('<div class="extra special"></div>');
        projection.update(h("div", { class: "super special" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="super special"></div>');
        projection.update(h("div", { class: undefined }));
        expect(projection.domNode.outerHTML).to.equal('<div class=""></div>');
        projection.update(h("div", { class: " me too" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="me too"></div>');
      });
      it("allows classes and class to be combined", () => {
        let projection = dom.create(h("div", { classes: { extra: true }, class: "special" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="extra special"></div>');
        projection.update(h("div", { classes: { extra: true }, class: "good" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="extra good"></div>');
        projection.update(h("div", { classes: { extra: false }, class: "good" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="good"></div>');
      });
      it("can update class, even when class was initially empty", () => {
        let projection = dom.create(h("div", { class: "" }));
        expect(projection.domNode.outerHTML).to.equal("<div></div>");
        projection.update(h("div", { class: "good" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="good"></div>');
        projection.update(h("div", { class: undefined }));
        expect(projection.domNode.outerHTML).to.equal('<div class=""></div>');
      });
      it("can update class, even when class was initially undefined", () => {
        let projection = dom.create(h("div", { class: undefined }));
        expect(projection.domNode.outerHTML).to.equal("<div></div>");
        projection.update(h("div", { class: "good" }));
        expect(projection.domNode.outerHTML).to.equal('<div class="good"></div>');
      });
      it("helps to prevent mistakes when using className", () => {
        expect(() => {
          dom.create(h("div", { className: "special" } as any));
        }).to.throw(Error);
      });
    });
it("updates attributes", () => {
let projection = dom.create(h("a", { href: "#1" }));
let link = projection.domNode as HTMLLinkElement;
expect(link.getAttribute("href")).to.equal("#1");
projection.update(h("a", { href: "#2" }));
expect(link.getAttribute("href")).to.equal("#2");
projection.update(h("a", { href: undefined }));
expect(link.getAttribute("href")).to.equal("");
});
it("can add an attribute that was initially undefined", () => {
let projection = dom.create(h("a", { href: undefined }));
let link = projection.domNode as HTMLLinkElement;
expect(link.getAttribute("href")).to.be.null;
projection.update(h("a", { href: "#2" }));
expect(link.getAttribute("href")).to.equal("#2");
});
    // NOTE(review): the final assertion is JSDom-specific; a real browser
    // would coerce the property back to false (see inline comments below).
    it("can remove disabled property when set to null or undefined", () => {
      let projection = dom.create(h("a", { disabled: true }));
      let link = projection.domNode as HTMLLinkElement;
      expect(link.disabled).to.equal(true);
      // Unfortunately JSDom does not map the property value to the attribute as real browsers do
      // expect(link.getAttribute('disabled')).to.equal('');
      projection.update(h("a", <any>{ disabled: null }));
      // What Chrome would do:
      // expect(link.disabled).to.equal(false);
      // expect(link.getAttribute('disabled')).to.be.null;
      // What JSDom does:
      expect(link.disabled).to.be.null;
    });
it("updates properties", () => {
let projection = dom.create(h("a", { href: "#1", tabIndex: 1 }));
let link = projection.domNode as HTMLLinkElement;
expect(link.tabIndex).to.equal(1);
projection.update(h("a", { href: "#1", tabIndex: 2 }));
expect(link.tabIndex).to.equal(2);
projection.update(h("a", { href: "#1", tabIndex: undefined }));
expect(link.tabIndex).to.equal(0);
});
it("updates innerHTML", () => {
let projection = dom.create(h("p", { innerHTML: "<span>INNER</span>" }));
let paragraph = projection.domNode as HTMLElement;
expect(paragraph.childNodes).to.have.length(1);
expect(paragraph.firstChild.textContent).to.equal("INNER");
projection.update(h("p", { innerHTML: "<span>UPDATED</span>" }));
expect(paragraph.childNodes).to.have.length(1);
expect(paragraph.firstChild.textContent).to.equal("UPDATED");
});
    it("does not mess up scrolling in Edge", () => {
      let projection = dom.create(h("div", { scrollTop: 0 }));
      let div = projection.domNode as HTMLDivElement;
      // Redefine scrollTop so any write throws: the projector must read the
      // current value (getter returns 1) and skip the redundant assignment.
      Object.defineProperty(div, "scrollTop", {
        get: () => 1,
        set: sinon.stub().throws("Setting scrollTop would mess up scrolling"),
      }); // meaning: div.scrollTop = 1;
      projection.update(h("div", { scrollTop: 1 }));
    });
    it("sets HTMLInputElement.type before the element is added to the DOM for IE8 and earlier", () => {
      // Fake parent whose appendChild asserts the `type` attribute is already
      // present at insertion time (per the spec title, old IE can't change it
      // after the element is in the DOM).
      let parentNode = {
        appendChild: sinon.spy((child: HTMLElement) => {
          expect(child.getAttribute("type")).to.equal("file");
        }),
        ownerDocument: {
          createElement: sinon.spy((tag: string) => {
            return document.createElement(tag);
          }),
        },
      };
      dom.append(<any>parentNode, h("input", { type: "file" }));
      expect(parentNode.appendChild).to.have.been.called;
      expect(parentNode.ownerDocument.createElement).to.have.been.called;
    });
    // These specs simulate typing by assigning input.value directly and then
    // invoking oninput by hand; the exact order of assignments, oninput calls
    // and projection.update calls is what is under test.
    describe("event handlers", () => {
      it("allows one to correct the value while being typed", () => {
        // Here we are trying to trim the value to 2 characters
        let typedKeys = "";
        let handleInput = (evt: Event) => {
          typedKeys = (evt.target as HTMLInputElement).value.substr(0, 2);
        };
        let renderFunction = () => h("input", { value: typedKeys, oninput: handleInput });
        let projection = dom.create(renderFunction(), {
          eventHandlerInterceptor: noopEventHandlerInterceptor,
        });
        let inputElement = projection.domNode as HTMLInputElement;
        expect(inputElement.value).to.equal(typedKeys);
        // No correction
        inputElement.value = "ab";
        inputElement.oninput({ target: inputElement } as any);
        expect(typedKeys).to.equal("ab");
        projection.update(renderFunction());
        expect(inputElement.value).to.equal("ab");
        // Correction kicking in
        inputElement.value = "abc";
        inputElement.oninput({ target: inputElement } as any);
        expect(typedKeys).to.equal("ab");
        projection.update(renderFunction());
        expect(inputElement.value).to.equal("ab");
      });
      it("does not undo keystrokes, even if a browser runs an animationFrame between changing the value property and running oninput", () => {
        // Crazy internet explorer behavior
        let typedKeys = "";
        let handleInput = (evt: Event) => {
          typedKeys = (evt.target as HTMLInputElement).value;
        };
        let renderFunction = () => h("input", { value: typedKeys, oninput: handleInput });
        let projection = dom.create(renderFunction(), {
          eventHandlerInterceptor: noopEventHandlerInterceptor,
        });
        let inputElement = projection.domNode as HTMLInputElement;
        expect(inputElement.value).to.equal(typedKeys);
        // Normal behavior
        inputElement.value = "a";
        inputElement.oninput({ target: inputElement } as any);
        expect(typedKeys).to.equal("a");
        projection.update(renderFunction());
        // Crazy behavior
        inputElement.value = "ab";
        projection.update(renderFunction()); // renderFunction still produces value:'a'
        expect(typedKeys).to.equal("a");
        expect(inputElement.value).to.equal("ab");
        inputElement.oninput({ target: inputElement } as any);
        expect(typedKeys).to.equal("ab");
        projection.update(renderFunction());
      });
    });
it("allows passing functions to props", () => {
let someMethod = () => {
/* noop */
};
let renderFunction = () => h("div", { nonEventFunctionProp: someMethod });
let projection = dom.create(renderFunction(), {
eventHandlerInterceptor: noopEventHandlerInterceptor,
});
interface FakeCustomElement extends HTMLElement {
nonEventFunctionProp: () => void;
}
let fakeCustomElement = projection.domNode as FakeCustomElement;
expect(fakeCustomElement.nonEventFunctionProp).to.equal(someMethod);
});
it("updates the value property", () => {
let typedKeys = "";
let handleInput = (evt: Event) => {
typedKeys = (evt.target as HTMLInputElement).value;
};
let renderFunction = () => h("input", { value: typedKeys, oninput: handleInput });
let projection = dom.create(renderFunction(), {
eventHandlerInterceptor: noopEventHandlerInterceptor,
});
let inputElement = projection.domNode as HTMLInputElement;
expect(inputElement.value).to.equal(typedKeys);
typedKeys = "value1";
projection.update(renderFunction());
expect(inputElement.value).to.equal(typedKeys);
});
    it("does not clear a value that was set by a testing tool (like Ranorex) which manipulates input.value directly", () => {
      let typedKeys = "";
      let handleInput = (evt: Event) => {
        typedKeys = (evt.target as HTMLInputElement).value;
      };
      let renderFunction = () => h("input", { value: typedKeys, oninput: handleInput });
      let projection = dom.create(renderFunction(), {
        eventHandlerInterceptor: noopEventHandlerInterceptor,
      });
      let inputElement = projection.domNode as HTMLInputElement;
      expect(inputElement.value).to.equal(typedKeys);
      // Simulates a tool writing the DOM value without firing the input event;
      // the projector must not reset it to the (stale) rendered value.
      inputElement.value = "value written by a testing tool without invoking the input event";
      projection.update(renderFunction());
      expect(inputElement.value).not.to.equal(typedKeys); // no resetting should have taken place
    });
    it("Can handle oninput event handlers which pro-actively change element.value to correct user input when typing faster than 60 keys per second", () => {
      let model = "";
      // The handler itself rewrites element.value (commas become dots) before
      // the next render happens, which the projector must tolerate.
      let handleInput = (evt: Event) => {
        let element = evt.target as HTMLInputElement;
        model = element.value;
        if (model.indexOf(",") > 0) {
          model = model.replace(/,/g, ".");
          element.value = model; // To allow a user to type faster than 60 keys per second
          // in reality, selectionStart would now also be reset
        }
      };
      let renderFunction = () => h("input", { value: model, oninput: handleInput });
      let projection = dom.create(renderFunction(), {
        eventHandlerInterceptor: noopEventHandlerInterceptor,
      });
      let inputElement = projection.domNode as HTMLInputElement;
      expect(inputElement.value).to.equal(model);
      inputElement.value = "4";
      inputElement.oninput({ target: inputElement } as any as Event);
      projection.update(renderFunction());
      inputElement.value = "4,";
      inputElement.oninput({ target: inputElement } as any as Event);
      projection.update(renderFunction());
      expect(inputElement.value).to.equal("4.");
      // Clearing the model must still propagate to the element.
      model = "";
      projection.update(renderFunction());
      expect(inputElement.value).to.equal("");
    });
it("removes the attribute when a role property is set to undefined", () => {
let role: string | undefined = "button";
let renderFunction = () => h("div", { role: role });
let projection = dom.create(renderFunction(), {
eventHandlerInterceptor: noopEventHandlerInterceptor,
});
let element = projection.domNode;
expect(element.attributes).to.have.property("role");
expect(element.getAttribute("role")).to.equal(role);
role = undefined;
projection.update(renderFunction());
expect(element.attributes).to.not.have.property("role");
});
});
}); | the_stack |
import { TreeGrid } from '../../src/treegrid/base/treegrid';
import { createGrid, destroy } from '../base/treegridutil.spec';
import { projectData, sampleData } from '../base/datasource.spec';
import { ToolbarItem } from '../../src/treegrid/enum';
import { Toolbar } from '../../src/treegrid/actions/toolbar';
import { Edit } from '../../src/treegrid/actions/edit';
import { profile, inMB, getMemoryProfile } from '../common.spec';
import { select } from '@syncfusion/ej2-base';
/**
 * Grid Toolbar spec
 */
// Register the Toolbar and Edit feature modules so they are available to
// every TreeGrid instance created in these suites.
TreeGrid.Inject(Toolbar,Edit);
describe('TreeGrid Toolbar module', () => {
beforeAll(() => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
this.skip(); //Skips test (in Chai)
return;
}
});
describe('Toolbar string', () => {
let gridObj: TreeGrid;
let actionComplete: () => void;
beforeAll((done: Function) => {
gridObj = createGrid(
{
dataSource: projectData,
idMapping: 'TaskID',
parentIdMapping: 'parentID',
treeColumnIndex: 1,
toolbar: ['Search', 'ExpandAll', 'CollapseAll'],
columns: ['TaskID', 'TaskName', 'StartDate', 'EndDate']
},
done
);
});
it('Toolbars string', () => {
let toolbarElements: Element = gridObj.toolbarModule.getToolbar().firstElementChild;
expect(toolbarElements.querySelectorAll('.e-toolbar-item')[0].getAttribute('title')).toBe('Expand All');
expect(toolbarElements.querySelectorAll('.e-toolbar-item')[1].getAttribute('title')).toBe('Collapse All');
expect(toolbarElements.querySelectorAll('.e-toolbar-item')[2].getAttribute('title')).toBe('Search');
});
afterAll(() => {
destroy(gridObj);
});
});
    // Exercises inline add followed by update; per the suite name this
    // presumably guards against a past script-error regression — confirm
    // against the original issue if this ever needs changing.
    describe('Script error throws in inline editing', () => {
        let gridObj: TreeGrid;
        let actionComplete: () => void;
        beforeAll((done: Function) => {
            gridObj = createGrid(
                {
                    dataSource: sampleData,
                    childMapping: 'subtasks',
                    treeColumnIndex: 1,
                    height: 400,
                    editSettings: {
                        allowAdding: true,
                        allowEditing: true,
                        allowDeleting: true,
                    },
                    toolbar: ['Add', 'Delete', 'Update', 'Cancel'],
                    columns: [
                        {
                            field: 'taskID', headerText: 'Task ID', isPrimaryKey: true, textAlign: 'Right',
                            validationRules: { required: true, number: true}, width: 90
                        },
                        { field: 'taskName', headerText: 'Task Name', editType: 'stringedit', width: 220, validationRules: {required: true}, showCheckbox: true },
                        { field: 'startDate', headerText: 'Start Date', textAlign: 'Right', width: 130, editType: 'datepickeredit',
                            format: 'yMd', validationRules: { date: true} },
                        {
                            field: 'duration', headerText: 'Duration', textAlign: 'Right', width: 100, editType: 'numericedit',
                            validationRules: { number: true, min: 0}, edit: { params: { format: 'n'}}
                        }
                    ]
                },
                done
            );
        });
        it('Script Error', (done: Function) => {
            // Completes the test only once the 'add' action commits with the
            // task name typed into the form below.
            actionComplete = (args?: any): void => {
                if (args.action === 'add') {
                    expect(args.data.taskName).toBe('fourth');
                    done();
                }
            };
            gridObj.actionComplete = actionComplete;
            // Click add, click update once before filling the form (NOTE(review):
            // presumably to hit the problematic code path — confirm), then fill
            // the fields and click update again to commit.
            (<any>gridObj.grid.toolbarModule).toolbarClickHandler({ item: { id: gridObj.grid.element.id + '_add' } });
            (<any>gridObj.grid.toolbarModule).toolbarClickHandler({ item: { id: gridObj.grid.element.id + '_update' } });
            let formEle: HTMLFormElement = gridObj.grid.editModule.formObj.element;
            (select('#' + gridObj.grid.element.id + 'taskID', formEle) as any).value = '124';
            (select('#' + gridObj.grid.element.id + 'taskName', formEle) as any).value = 'fourth';
            (select('#' + gridObj.grid.element.id + 'startDate', formEle) as any).value = '2/3/2017';
            (<any>gridObj.grid.toolbarModule).toolbarClickHandler({ item: { id: gridObj.grid.element.id + '_update' } });
        });
        afterAll(() => {
            destroy(gridObj);
        });
    });
// Verifies a toolbar configured through the ToolbarItem enum renders the
// expected items and that the expand/collapse buttons toggle row visibility.
// Fix: removed the unused local `actionComplete` declaration.
describe('Toolbar enum', () => {
    let gridObj: TreeGrid;
    beforeAll((done: Function) => {
        gridObj = createGrid(
            {
                dataSource: projectData,
                idMapping: 'TaskID',
                parentIdMapping: 'parentID',
                treeColumnIndex: 1,
                toolbar: [ToolbarItem.Search, ToolbarItem.ExpandAll, ToolbarItem.CollapseAll],
                columns: ['TaskID', 'TaskName', 'StartDate', 'EndDate']
            },
            done
        );
    });
    it('enum', () => {
        let toolbarElements: Element = gridObj.toolbarModule.getToolbar().firstElementChild;
        // Rendered order is ExpandAll, CollapseAll, then Search.
        expect(toolbarElements.querySelectorAll('.e-toolbar-item')[0].getAttribute('title')).toBe('Expand All');
        expect(toolbarElements.querySelectorAll('.e-toolbar-item')[1].getAttribute('title')).toBe('Collapse All');
        expect(toolbarElements.querySelectorAll('.e-toolbar-item')[2].getAttribute('title')).toBe('Search');
    });
    it('click events', () => {
        // Collapse hides the second (child) row; expand restores it.
        (<HTMLElement>select('#' + gridObj.grid.element.id + '_collapseall', gridObj.toolbarModule.getToolbar())).click();
        expect((<HTMLTableRowElement>gridObj.getRows()[1]).style.display).toBe('none');
        (<HTMLElement>select('#' + gridObj.grid.element.id + '_expandall', gridObj.toolbarModule.getToolbar())).click();
        expect((<HTMLTableRowElement>gridObj.getRows()[1]).style.display).toBe('table-row');
    });
    afterAll(() => {
        destroy(gridObj);
    });
});
// Verifies that reassigning `toolbar` after initialization (setModel path)
// re-renders the toolbar with the new custom item.
// Fix: removed the unused local `actionComplete` declaration.
describe('Toolbar setmodel', () => {
    let gridObj: TreeGrid;
    beforeAll((done: Function) => {
        gridObj = createGrid(
            {
                dataSource: projectData,
                idMapping: 'TaskID',
                parentIdMapping: 'parentID',
                treeColumnIndex: 1,
                toolbar: [ToolbarItem.Search, ToolbarItem.ExpandAll, ToolbarItem.CollapseAll],
                columns: ['TaskID', 'TaskName', 'StartDate', 'EndDate']
            },
            done
        );
    });
    it('setmodel', () => {
        gridObj.toolbar = [{text: 'testToolbar'}];
        gridObj.dataBind();
        expect(gridObj.toolbarModule.getToolbar().firstElementChild.querySelector('.e-tbar-btn-text').textContent).toBe('testToolbar');
    });
    afterAll(() => {
        destroy(gridObj);
    });
});
// Verifies the toolbar module's public API: enableItems(..., false) must
// overlay (disable) the targeted toolbar item.
// Fix: removed the unused local `actionComplete` declaration.
describe('Toolbar Module test', () => {
    let gridObj: TreeGrid;
    beforeAll((done: Function) => {
        gridObj = createGrid(
            {
                dataSource: projectData,
                idMapping: 'TaskID',
                parentIdMapping: 'parentID',
                treeColumnIndex: 1,
                toolbar: [ToolbarItem.ExpandAll, ToolbarItem.CollapseAll],
                columns: ['TaskID', 'TaskName', 'StartDate', 'EndDate']
            },
            done
        );
    });
    it('methods', () => {
        let tool: Element = gridObj.toolbarModule.getToolbar();
        // Disabling adds the 'e-overlay' class to the item element.
        gridObj.toolbarModule.enableItems([gridObj.element.id + '_gridcontrol_expandall'], false);
        expect(tool.firstElementChild.firstElementChild.classList.contains('e-overlay')).toBeTruthy();
    });
    afterAll(() => {
        destroy(gridObj);
    });
});
// Verifies that CollapseAll from a non-first page navigates back to page 1
// and shows only top-level (parent) records.
// Fix: removed the unused local `actionComplete` declaration.
describe('CollapseAll records ', () => {
    let gridObj: TreeGrid;
    beforeAll((done: Function) => {
        gridObj = createGrid(
            {
                dataSource: sampleData,
                childMapping: 'subtasks',
                treeColumnIndex: 1,
                allowSorting: true,
                allowPaging: true,
                allowFiltering: true,
                allowExcelExport: true,
                pageSettings: {pageSize: 11},
                toolbar: ['ExpandAll', 'CollapseAll'],
                columns: [
                    { field: 'taskID', headerText: 'Task ID', isPrimaryKey: true, width: 150},
                    { field: 'taskName', headerText: 'Task Name', width: 150 },
                    { field: 'priority', headerText: 'priority' ,width: 150},
                    { field: 'approved', headerText: 'approved',width: 150 }
                ]
            },
            done
        );
    });
    it('CollapseAll records', () => {
        // Move away from page 1 first so the reset is observable.
        gridObj.pagerModule.goToPage(4);
        (<HTMLElement>gridObj.element.querySelector('.e-collapse')).click();
        expect(gridObj.pageSettings.currentPage == 1).toBe(true);
        // After collapsing, only root records remain visible on page 1.
        expect(gridObj.getRows()[0].getElementsByClassName('e-rowcell')[1].querySelector("div>.e-treecell").innerHTML == "Planning").toBe(true);
        expect(gridObj.getRows()[1].getElementsByClassName('e-rowcell')[1].querySelector("div>.e-treecell").innerHTML == "Design").toBe(true);
        expect(gridObj.getRows()[2].getElementsByClassName('e-rowcell')[1].querySelector("div>.e-treecell").innerHTML == "Implementation Phase").toBe(true);
    });
    afterAll(() => {
        destroy(gridObj);
    });
});
// Standard memory-profiling spec shared across suites.
// Fix: added missing semicolons; `let` -> `const` for never-reassigned locals.
it('memory leak', () => {
    profile.sample();
    // Check average change in memory samples to not be over 10MB
    const average: any = inMB(profile.averageChange);
    expect(average).toBeLessThan(10);
    // Check the final memory usage against the first usage, there should be little change if everything was properly deallocated
    const memory: any = inMB(getMemoryProfile());
    expect(memory).toBeLessThan(profile.samples[0] + 0.25);
});
});
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
import * as msRest from "@azure/ms-rest-js";
export { BaseResource, CloudError };
/**
 * Role Definitions filter
 */
export interface RoleDefinitionFilter {
  /**
   * Returns role definition with the specific name.
   */
  roleName?: string;
}
/**
 * Role definition permissions.
 * NOTE(review): per Azure RBAC semantics, notActions are subtracted from
 * actions rather than forming an independent deny list — confirm against the
 * service docs if this matters to callers.
 */
export interface Permission {
  /**
   * Allowed actions.
   */
  actions?: string[];
  /**
   * Denied actions.
   */
  notActions?: string[];
}
/**
 * Role definition.
 */
export interface RoleDefinition {
  /**
   * The role definition ID.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly id?: string;
  /**
   * The role definition name.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly name?: string;
  /**
   * The role definition type.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly type?: string;
  /**
   * The role name.
   */
  roleName?: string;
  /**
   * The role definition description.
   */
  description?: string;
  /**
   * The role type.
   */
  roleType?: string;
  /**
   * Role definition permissions.
   */
  permissions?: Permission[];
  /**
   * Role definition assignable scopes.
   */
  assignableScopes?: string[];
}
/**
 * Operation
 */
export interface ProviderOperation {
  /**
   * The operation name.
   */
  name?: string;
  /**
   * The operation display name.
   */
  displayName?: string;
  /**
   * The operation description.
   */
  description?: string;
  /**
   * The operation origin.
   */
  origin?: string;
  /**
   * The operation properties.
   * NOTE(review): opaque service-defined bag; typed `any` by the generator.
   * `unknown` would be safer if this file is ever regenerated/hand-edited.
   */
  properties?: any;
}
/**
 * Resource Type
 */
export interface ResourceType {
  /**
   * The resource type name.
   */
  name?: string;
  /**
   * The resource type display name.
   */
  displayName?: string;
  /**
   * The resource type operations.
   */
  operations?: ProviderOperation[];
}
/**
 * Provider Operations metadata
 */
export interface ProviderOperationsMetadata {
  /**
   * The provider id.
   */
  id?: string;
  /**
   * The provider name.
   */
  name?: string;
  /**
   * The provider type.
   */
  type?: string;
  /**
   * The provider display name.
   */
  displayName?: string;
  /**
   * The provider resource types
   */
  resourceTypes?: ResourceType[];
  /**
   * The provider operations.
   */
  operations?: ProviderOperation[];
}
/**
 * Role Assignments filter
 */
export interface RoleAssignmentFilter {
  /**
   * Returns role assignment of the specific principal.
   */
  principalId?: string;
}
/**
 * Role assignment properties with scope.
 * Shape returned by the service (all fields optional); contrast with
 * RoleAssignmentProperties below, which is the required create payload.
 */
export interface RoleAssignmentPropertiesWithScope {
  /**
   * The role assignment scope.
   */
  scope?: string;
  /**
   * The role definition ID.
   */
  roleDefinitionId?: string;
  /**
   * The principal ID.
   */
  principalId?: string;
}
/**
 * Role Assignments
 */
export interface RoleAssignment {
  /**
   * The role assignment ID.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly id?: string;
  /**
   * The role assignment name.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly name?: string;
  /**
   * The role assignment type.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly type?: string;
  /**
   * Role assignment properties.
   */
  properties?: RoleAssignmentPropertiesWithScope;
}
/**
 * Role assignment properties.
 */
export interface RoleAssignmentProperties {
  /**
   * The role definition ID used in the role assignment.
   */
  roleDefinitionId: string;
  /**
   * The principal ID assigned to the role. This maps to the ID inside the Active Directory. It can
   * point to a user, service principal, or security group.
   */
  principalId: string;
}
/**
 * Role assignment create parameters.
 */
export interface RoleAssignmentCreateParameters {
  /**
   * Role assignment properties.
   */
  properties: RoleAssignmentProperties;
}
/**
 * Optional Parameters.
 */
export interface RoleDefinitionsListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * The filter to apply on the operation. Use atScopeAndBelow filter to search below the given
   * scope as well.
   */
  filter?: string;
}
/**
 * Optional Parameters.
 */
export interface ProviderOperationsMetadataGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specifies whether to expand the values. Default value: 'resourceTypes'.
   */
  expand?: string;
}
/**
 * Optional Parameters.
 */
export interface ProviderOperationsMetadataListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specifies whether to expand the values. Default value: 'resourceTypes'.
   */
  expand?: string;
}
/**
 * Optional Parameters.
 * NOTE: the four RoleAssignments*OptionalParams interfaces below are
 * intentionally identical; the generator emits one per operation.
 */
export interface RoleAssignmentsListForResourceOptionalParams extends msRest.RequestOptionsBase {
  /**
   * The filter to apply on the operation. Use $filter=atScope() to return all role assignments at
   * or above the scope. Use $filter=principalId eq {id} to return all role assignments at, above
   * or below the scope for the specified principal.
   */
  filter?: string;
}
/**
 * Optional Parameters.
 */
export interface RoleAssignmentsListForResourceGroupOptionalParams extends msRest.RequestOptionsBase {
  /**
   * The filter to apply on the operation. Use $filter=atScope() to return all role assignments at
   * or above the scope. Use $filter=principalId eq {id} to return all role assignments at, above
   * or below the scope for the specified principal.
   */
  filter?: string;
}
/**
 * Optional Parameters.
 */
export interface RoleAssignmentsListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * The filter to apply on the operation. Use $filter=atScope() to return all role assignments at
   * or above the scope. Use $filter=principalId eq {id} to return all role assignments at, above
   * or below the scope for the specified principal.
   */
  filter?: string;
}
/**
 * Optional Parameters.
 */
export interface RoleAssignmentsListForScopeOptionalParams extends msRest.RequestOptionsBase {
  /**
   * The filter to apply on the operation. Use $filter=atScope() to return all role assignments at
   * or above the scope. Use $filter=principalId eq {id} to return all role assignments at, above
   * or below the scope for the specified principal.
   */
  filter?: string;
}
/**
 * An interface representing AuthorizationManagementClientOptions.
 */
export interface AuthorizationManagementClientOptions extends AzureServiceClientOptions {
  /**
   * Overrides the default service endpoint base URI.
   */
  baseUri?: string;
}
/**
 * @interface
 * Permissions information.
 * Paged collection: an array of results plus an optional continuation link.
 * @extends Array<Permission>
 */
export interface PermissionGetResult extends Array<Permission> {
  /**
   * The URL to use for getting the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * Role definition list operation result.
 * @extends Array<RoleDefinition>
 */
export interface RoleDefinitionListResult extends Array<RoleDefinition> {
  /**
   * The URL to use for getting the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * Provider operations metadata list
 * @extends Array<ProviderOperationsMetadata>
 */
export interface ProviderOperationsMetadataListResult extends Array<ProviderOperationsMetadata> {
  /**
   * The URL to use for getting the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * Role assignment list operation result.
 * @extends Array<RoleAssignment>
 */
export interface RoleAssignmentListResult extends Array<RoleAssignment> {
  /**
   * The URL to use for getting the next set of results.
   */
  nextLink?: string;
}
/**
* Contains response data for the listForResourceGroup operation.
*/
export type PermissionsListForResourceGroupResponse = PermissionGetResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PermissionGetResult;
};
};
/**
* Contains response data for the listForResource operation.
*/
export type PermissionsListForResourceResponse = PermissionGetResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PermissionGetResult;
};
};
/**
* Contains response data for the listForResourceGroupNext operation.
*/
export type PermissionsListForResourceGroupNextResponse = PermissionGetResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PermissionGetResult;
};
};
/**
* Contains response data for the listForResourceNext operation.
*/
export type PermissionsListForResourceNextResponse = PermissionGetResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PermissionGetResult;
};
};
/**
* Contains response data for the deleteMethod operation.
*/
export type RoleDefinitionsDeleteMethodResponse = RoleDefinition & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleDefinition;
};
};
/**
* Contains response data for the get operation.
*/
export type RoleDefinitionsGetResponse = RoleDefinition & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleDefinition;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type RoleDefinitionsCreateOrUpdateResponse = RoleDefinition & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleDefinition;
};
};
/**
* Contains response data for the list operation.
*/
export type RoleDefinitionsListResponse = RoleDefinitionListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleDefinitionListResult;
};
};
/**
* Contains response data for the getById operation.
*/
export type RoleDefinitionsGetByIdResponse = RoleDefinition & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleDefinition;
};
};
/**
* Contains response data for the listNext operation.
*/
export type RoleDefinitionsListNextResponse = RoleDefinitionListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleDefinitionListResult;
};
};
/**
* Contains response data for the get operation.
*/
export type ProviderOperationsMetadataGetResponse = ProviderOperationsMetadata & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ProviderOperationsMetadata;
};
};
/**
* Contains response data for the list operation.
*/
export type ProviderOperationsMetadataListResponse = ProviderOperationsMetadataListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ProviderOperationsMetadataListResult;
};
};
/**
* Contains response data for the listNext operation.
*/
export type ProviderOperationsMetadataListNextResponse = ProviderOperationsMetadataListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ProviderOperationsMetadataListResult;
};
};
/**
* Contains response data for the listForResource operation.
*/
export type RoleAssignmentsListForResourceResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the listForResourceGroup operation.
*/
export type RoleAssignmentsListForResourceGroupResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the deleteMethod operation.
*/
export type RoleAssignmentsDeleteMethodResponse = RoleAssignment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignment;
};
};
/**
* Contains response data for the create operation.
*/
export type RoleAssignmentsCreateResponse = RoleAssignment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignment;
};
};
/**
* Contains response data for the get operation.
*/
export type RoleAssignmentsGetResponse = RoleAssignment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignment;
};
};
/**
* Contains response data for the deleteById operation.
*/
export type RoleAssignmentsDeleteByIdResponse = RoleAssignment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignment;
};
};
/**
* Contains response data for the createById operation.
*/
export type RoleAssignmentsCreateByIdResponse = RoleAssignment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignment;
};
};
/**
* Contains response data for the getById operation.
*/
export type RoleAssignmentsGetByIdResponse = RoleAssignment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignment;
};
};
/**
* Contains response data for the list operation.
*/
export type RoleAssignmentsListResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the listForScope operation.
*/
export type RoleAssignmentsListForScopeResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the listForResourceNext operation.
*/
export type RoleAssignmentsListForResourceNextResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the listForResourceGroupNext operation.
*/
export type RoleAssignmentsListForResourceGroupNextResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the listNext operation.
*/
export type RoleAssignmentsListNextResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
};
/**
* Contains response data for the listForScopeNext operation.
*/
export type RoleAssignmentsListForScopeNextResponse = RoleAssignmentListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: RoleAssignmentListResult;
};
}; | the_stack |
import {EventEmitter} from "eventemitter3";
import PriorityQueue from "typescript-collections/PriorityQueue";
/**
 * Description of a single resource to load through the generic PIXI loader.
 */
export interface Asset {
    /** Unique identifier; used as the loader resource name and the key in the loading map. */
    id: string;
    /** URL the asset is fetched from. */
    url: string;
    /** Priority group this asset belongs to; assets are loaded one priority group at a time. */
    priority: number;
    /** Optional free-form type tag (not interpreted by the loader itself). */
    type?: any;
}
/**
 * Sound asset loaded through Howler instead of the PIXI loader.
 * The presence of `autoplay` (required here, absent on plain Asset) is what
 * routes an asset to the sound-loading path.
 */
export interface SoundAsset extends Asset {
    /** Whether the sound starts playing as soon as it is loaded (Howl `autoplay`). */
    autoplay: boolean;
    /** Loop playback (Howl `loop`, defaults to false). */
    loop?: boolean;
    /** Initial volume (Howl `volume`, defaults to 1). */
    volume?: number;
    /** Start muted (Howl `mute`, defaults to false). */
    mute?: boolean;
    /** Playback rate (Howl `rate`, defaults to 1). */
    rate?: number;
    /** Force HTML5 Audio instead of Web Audio (Howl `html5`, defaults to false). */
    html5?: boolean;
    /** The Howl instance; populated by the loader when the asset is processed. */
    howl?: Howl;
}
/**
 * Per-asset loading state tracked while a priority group is in flight.
 */
export interface LoadAsset {
    /** The asset being loaded. */
    asset: Asset;
    /** True once the asset loaded successfully. */
    loaded: boolean;
    /** The load error, or null if none (yet). */
    error: Error | null;
}
/**
 * Asset loader that batches queued assets by priority and loads each batch
 * through two back ends: PIXI's loader for generic assets and Howler for
 * sound assets (detected by the presence of `autoplay`). Emits per-asset,
 * per-group and all-done events; progress is reported as a combined
 * percentage across both back ends.
 */
export class PixiAssetsLoader extends EventEmitter {
    /**
     * Event fired when an asset has been loaded.
     * @type {string}
     */
    public static readonly ASSET_LOADED: string = "AssetsLoader.ASSET_LOADED";
    /**
     * Event fired when an assets throws an error on load.
     * @type {string}
     */
    public static readonly ASSET_ERROR: string = "AssetsLoader.ASSET_ERROR";
    /**
     * Event fired when a group of assets of same priority have been loaded (with or without errors).
     * @type {string}
     */
    public static readonly PRIORITY_GROUP_LOADED: string = "AssetsLoader.PRIORITY_GROUP_LOADED";
    /**
     * Event fired with the progress on a group of assets of same priority.
     * @type {string}
     */
    public static readonly PRIORITY_GROUP_PROGRESS: string = "AssetsLoader.PRIORITY_GROUP_PROGRESS";
    /**
     * Event fired when all queued assets have been loaded (with or without errors).
     * @type {string}
     */
    public static readonly ALL_ASSETS_LOADED: string = "AssetsLoader.ALL_ASSETS_LOADED";
    /**
     * Multi-purpose loader used to load everything except sounds.
     */
    private loader: PIXI.loaders.Loader;
    /**
     * Prioritized assets queue to be loaded.
     */
    private assetsQueue: PriorityQueue<Asset>;
    /**
     * Next group of assets to load, keyed by asset id.
     */
    private assetsLoading: { [key: string]: LoadAsset };
    /**
     * Current priority of the assets being loaded.
     */
    private currentPriorityLoading: number | null;
    /**
     * Generic assets to load.
     */
    private genericAssetsToLoad: number;
    /**
     * Generic assets to load remaining.
     */
    private genericAssetsRemaining: number;
    /**
     * Sound assets to load.
     */
    private soundAssetsToLoad: number;
    /**
     * Sound assets to load remaining.
     */
    private soundAssetsRemaining: number;
    /**
     * Progress percentages (0-100) for each back end and the weighted total.
     */
    private progressPercents: { generic: number, sound: number, total: number };
    /**
     * Custom asset loader that makes use of PIXI.loader and Howler to load.
     * @param {PIXI.loaders.Loader} pixiLoader
     *      Provided pixi loader instance to use. Uses PIXI.loader if none is specified.
     */
    constructor(pixiLoader?: PIXI.loaders.Loader) {
        super();
        this.loader = pixiLoader ? pixiLoader : PIXI.loader;
        this.loader.onProgress.add(this.onGenericAssetProgress.bind(this)); // called once per loaded/errored file
        this.loader.onError.add(this.onGenericAssetError.bind(this)); // called once per errored file
        this.loader.onLoad.add(this.onGenericAssetLoad.bind(this)); // called once per loaded file
        this.assetsQueue = new PriorityQueue<Asset>((a, b) => a.priority - b.priority);
        this.initLoadingQueue();
    }
    /**
     * Determines if there is sth loading right now.
     * @returns {boolean}
     */
    public isLoading(): boolean {
        return Object.keys(this.assetsLoading).length > 0;
    }
    /**
     * Add provided asset to the asset queue.
     * @param {Asset} asset
     */
    public addAsset(asset: Asset): this {
        this.assetsQueue.enqueue(asset);
        return this;
    }
    /**
     * Adds provided assets to the asset queue.
     * @param {[Asset]} assets
     */
    public addAssets(assets: Asset[]): this {
        assets.forEach(asset => this.addAsset(asset));
        return this;
    }
    /**
     * Loads all assets on the asset queue batching them by priority.
     * No-op if a load is already in progress (the queue drains group by group).
     */
    public load(): this {
        if (!this.isLoading()) {
            this.loadNextPriorityGroup();
        }
        return this;
    }
    /**
     * Resets queued and loading asset + PIXI.loader.
     */
    public reset(): this {
        this.assetsQueue.clear();
        this.initLoadingQueue();
        this.loader.reset();
        return this;
    }
    /**
     * Clears all per-group bookkeeping (loading map, counters, percentages).
     * Called before each priority group starts.
     */
    private initLoadingQueue(): void {
        this.assetsLoading = {};
        this.currentPriorityLoading = null;
        this.genericAssetsToLoad = 0;
        this.genericAssetsRemaining = 0;
        this.soundAssetsToLoad = 0;
        this.soundAssetsRemaining = 0;
        this.progressPercents = {generic: 0, sound: 0, total: 0};
    }
    /**
     * PIXI loader progress callback. Converts the loader's own 0-100 progress
     * into an increment on the weighted total (generic assets' share of the
     * whole group) and re-emits it.
     * NOTE(review): loader.progress is not reset between priority groups here
     * (only reset() touches the PIXI loader) — verify progress stays correct
     * across consecutive groups.
     */
    private onGenericAssetProgress(loader: PIXI.loaders.Loader, resource: PIXI.loaders.Resource): void {
        // Calculate real percentage (including other loaders)
        const innerIncrement = loader.progress - this.progressPercents.generic;
        this.progressPercents.generic = loader.progress;
        const totalIncrement = innerIncrement * this.genericAssetsToLoad / Object.keys(this.assetsLoading).length;
        this.progressPercents.total += totalIncrement;
        this.emit(PixiAssetsLoader.PRIORITY_GROUP_PROGRESS, {
            priority: this.currentPriorityLoading,
            progress: this.progressPercents.total,
        });
    }
    /**
     * PIXI loader error callback: marks the matching LoadAsset failed and
     * emits ASSET_ERROR.
     */
    private onGenericAssetError(error: Error, loader: PIXI.loaders.Loader, resource: PIXI.loaders.Resource): void {
        const loadAsset = this.assetsLoading[resource.name];
        // Some generic resources add other resources to load we don't know about
        if (loadAsset) {
            loadAsset.loaded = false;
            loadAsset.error = error; // resource.error holds the error too
            this.emit(PixiAssetsLoader.ASSET_ERROR, loadAsset);
        }
    }
    /**
     * PIXI loader per-file success callback: marks the matching LoadAsset
     * loaded and emits ASSET_LOADED.
     */
    private onGenericAssetLoad(loader: PIXI.loaders.Loader, resource: PIXI.loaders.Resource): void {
        const loadAsset = this.assetsLoading[resource.name];
        // Some generic resources add other resources to load we don't know about
        if (loadAsset) {
            loadAsset.loaded = true;
            loadAsset.error = null;
            this.emit(PixiAssetsLoader.ASSET_LOADED, loadAsset);
        }
    }
    private onAllGenericAssetsComplete(): void {
        // We don't really know how many resources are being loaded because middlewares can add some more to the queue,
        // so we go down from n resources-to-load to 0 in one step.
        // See for instance the spine atlas parser middleware: pixi-spine/src/loaders.ts
        this.genericAssetsRemaining = 0;
        this.checkAllAssetsLoaded();
    }
    /**
     * Accounts one finished (loaded or errored) sound asset and emits the
     * updated weighted group progress.
     */
    private onSoundAssetProgress(): void {
        this.soundAssetsRemaining--;
        // Calculate real percentage (including other loaders)
        const innerPercent = (this.soundAssetsToLoad - this.soundAssetsRemaining) * 100 / this.soundAssetsToLoad;
        const innerIncrement = innerPercent - this.progressPercents.sound;
        this.progressPercents.sound = innerPercent;
        const totalIncrement = innerIncrement * this.soundAssetsToLoad / Object.keys(this.assetsLoading).length;
        this.progressPercents.total += totalIncrement;
        this.emit(PixiAssetsLoader.PRIORITY_GROUP_PROGRESS, {
            priority: this.currentPriorityLoading,
            progress: this.progressPercents.total,
        });
    }
    /**
     * Howler error callback: records the failure, emits ASSET_ERROR, and
     * closes out the group once no sound assets remain.
     */
    private onSoundAssetError(asset: Asset, error: Error): void {
        this.onSoundAssetProgress();
        const loadAsset = this.assetsLoading[asset.id];
        if (loadAsset) {
            loadAsset.loaded = false;
            loadAsset.error = error;
            this.emit(PixiAssetsLoader.ASSET_ERROR, loadAsset);
        }
        if (this.soundAssetsRemaining <= 0) {
            this.checkAllAssetsLoaded();
        }
    }
    /**
     * Howler success callback: records the success, emits ASSET_LOADED, and
     * closes out the group once no sound assets remain.
     */
    private onSoundAssetLoad(asset: Asset): void {
        this.onSoundAssetProgress();
        const loadAsset = this.assetsLoading[asset.id];
        if (loadAsset) {
            loadAsset.loaded = true;
            loadAsset.error = null;
            this.emit(PixiAssetsLoader.ASSET_LOADED, loadAsset);
        }
        if (this.soundAssetsRemaining <= 0) {
            this.checkAllAssetsLoaded();
        }
    }
    /**
     * Loads all assets with next prioriy on the queue.
     * Emits ALL_ASSETS_LOADED when the queue is empty.
     */
    private loadNextPriorityGroup(): void {
        this.initLoadingQueue();
        const asset = this.assetsQueue.peek();
        if (asset) {
            this.currentPriorityLoading = asset.priority;
            this.loadPriorityGroup();
        } else {
            // No more assets in the queue, we are done.
            this.emit(PixiAssetsLoader.ALL_ASSETS_LOADED);
        }
    }
    /**
     * Add all assets with same priority to the loading list and start loading them.
     * (The non-null assertions are safe: peek() was just checked in the loop condition.)
     */
    private loadPriorityGroup(): void {
        while (this.assetsQueue.peek() && this.assetsQueue.peek()!.priority === this.currentPriorityLoading) {
            const asset = this.assetsQueue.dequeue()!;
            this.assetsLoading[asset.id] = {
                asset,
                loaded: false,
                error: null,
            };
        }
        this.startLoadingAssets();
    }
    /**
     * Partitions the pending group into sound vs generic assets (a SoundAsset
     * is recognized by a defined `autoplay`) and kicks off both back ends.
     */
    private startLoadingAssets(): void {
        const loadAssets = Object.keys(this.assetsLoading).map(key => this.assetsLoading[key]);
        loadAssets.forEach(loadAsset => {
            if ((loadAsset.asset as SoundAsset).autoplay !== undefined) {
                this.soundAssetsToLoad++;
                this.soundAssetsRemaining++;
                this.loadSoundAsset(loadAsset.asset as SoundAsset);
            } else {
                this.genericAssetsToLoad++;
                this.addGenericAsset(loadAsset.asset);
            }
        });
        // Load generic assets through the loader
        this.genericAssetsRemaining = this.genericAssetsToLoad;
        this.loadGenericAssets();
    }
    /**
     * Creates the Howl for a sound asset; loading starts immediately and
     * completion is reported through the onload/onloaderror callbacks.
     */
    private loadSoundAsset(asset: SoundAsset): void {
        asset.howl = new Howl({
            src: [asset.url],
            autoplay: asset.autoplay,
            loop: asset.loop || false,
            volume: asset.volume || 1,
            mute: asset.mute || false,
            rate: asset.rate || 1,
            html5: asset.html5 || false,
            onload: this.onSoundAssetLoad.bind(this, asset),
            onloaderror: (soundId: number, error: any) => {
                // Howler may report a non-Error reason; normalize it.
                const loadError = error instanceof Error ? error : new Error(`Error loading sound ${asset.id}`);
                this.onSoundAssetError(asset, loadError);
            },
        });
    }
    /**
     * Registers a generic asset with the PIXI loader (loading starts later in
     * loadGenericAssets).
     */
    private addGenericAsset(asset: Asset): void {
        this.loader.add(asset.id, asset.url);
    }
    /**
     * Starts the PIXI loader for the queued generic assets; if there are
     * none, goes straight to the completion check.
     */
    private loadGenericAssets(): void {
        if (this.genericAssetsToLoad > 0) {
            this.loader.load(this.onAllGenericAssetsComplete.bind(this));
        } else {
            this.checkAllAssetsLoaded();
        }
    }
    /**
     * When both back ends report zero remaining assets, emits
     * PRIORITY_GROUP_LOADED and starts the next priority group.
     * NOTE(review): the emitted `assets` list is filtered to successfully
     * loaded assets only, even though the event doc says "with or without
     * errors" — confirm which is intended.
     */
    private checkAllAssetsLoaded(): void {
        if (this.genericAssetsRemaining + this.soundAssetsRemaining <= 0) {
            // Notify priority group loaded
            this.emit(PixiAssetsLoader.PRIORITY_GROUP_LOADED, {
                priority: this.currentPriorityLoading,
                assets: Object.keys(this.assetsLoading).map(key => this.assetsLoading[key]).filter(loadAsset => loadAsset.loaded),
            });
            // Load next priority group
            this.loadNextPriorityGroup();
        }
    }
}
import {Component, ComponentSet} from './component';
import {EmbeddedComponent} from './embedded-component';
import {attribute, primaryIdentifier, secondaryIdentifier, provide} from './decorators';
import {serialize} from './serialization';
describe('Serialization', () => {
test('Component classes', async () => {
    class BaseMovie extends Component {}
    // A class with no attributes serializes to just its component tag.
    expect(BaseMovie.serialize()).toStrictEqual({__component: 'typeof BaseMovie'});
    class Movie extends BaseMovie {
      @attribute() static limit = 100;
      @attribute() static offset: number;
    }
    // Unset attributes serialize as the explicit {__undefined: true} marker.
    expect(Movie.serialize()).toStrictEqual({
      __component: 'typeof Movie',
      limit: 100,
      offset: {__undefined: true}
    });
    // attributeSelector restricts which attributes appear in the output.
    expect(Movie.serialize({attributeSelector: {limit: true}})).toStrictEqual({
      __component: 'typeof Movie',
      limit: 100
    });
    // returnComponentReferences collapses the component to its tag only.
    expect(Movie.serialize({returnComponentReferences: true})).toStrictEqual({
      __component: 'typeof Movie'
    });
    // - Value sourcing -
    // Attributes whose value source matches the serialization target are
    // omitted (the target already has them).
    Movie.getAttribute('limit').setValueSource('client');
    expect(Movie.serialize()).toStrictEqual({
      __component: 'typeof Movie',
      limit: 100,
      offset: {__undefined: true}
    });
    expect(Movie.serialize({target: 'client'})).toStrictEqual({
      __component: 'typeof Movie',
      offset: {__undefined: true}
    });
    Movie.getAttribute('offset').setValueSource('client');
    expect(Movie.serialize({target: 'client'})).toStrictEqual({
      __component: 'typeof Movie'
    });
    // --- With referenced components ---
    class Cinema extends Component {
      @attribute() static limit = 100;
      @attribute() static MovieClass = Movie;
    }
    // A component-valued attribute serializes as a reference tag.
    expect(Cinema.serialize()).toStrictEqual({
      __component: 'typeof Cinema',
      limit: 100,
      MovieClass: {__component: 'typeof Movie'}
    });
    let componentDependencies: ComponentSet = new Set();
    expect(Cinema.serialize({componentDependencies})).toStrictEqual({
      __component: 'typeof Cinema',
      limit: 100,
      MovieClass: {__component: 'typeof Movie'}
    });
    // In both modes below no dependencies are collected for this graph.
    expect(Array.from(componentDependencies)).toStrictEqual([]);
    componentDependencies = new Set();
    expect(
      Cinema.serialize({returnComponentReferences: true, componentDependencies})
    ).toStrictEqual({
      __component: 'typeof Cinema'
    });
    expect(Array.from(componentDependencies)).toStrictEqual([]);
  });
  // Serialization of component *instances*: new-instance marks, attribute
  // selection, embedded components (single and array-valued), attribute
  // filters (sync and async), and per-target value sourcing.
  test('Component instances', async () => {
    class Person extends EmbeddedComponent {
      @attribute() name?: string;
      @attribute() country?: string;
    }
    class Director extends Person {}
    class Actor extends Person {}
    class Movie extends Component {
      @provide() static Director = Director;
      @provide() static Actor = Actor;
      @attribute() title = '';
      @attribute('Director?') director?: Director;
      @attribute('Actor[]') actors = new Array<Actor>();
    }
    let movie = new Movie();
    // A freshly constructed instance carries the __new mark and all defaults.
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie',
      __new: true,
      title: '',
      director: {__undefined: true},
      actors: []
    });
    expect(movie.serialize({attributeSelector: {title: true}})).toStrictEqual({
      __component: 'Movie',
      __new: true,
      title: ''
    });
    // includeIsNewMarks: false suppresses the __new mark.
    expect(movie.serialize({includeIsNewMarks: false})).toStrictEqual({
      __component: 'Movie',
      title: '',
      director: {__undefined: true},
      actors: []
    });
    // An instantiated (not new) instance only serializes set attributes.
    movie = Movie.instantiate();
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie'
    });
    expect(movie.serialize({includeComponentTypes: false})).toStrictEqual({});
    movie.title = 'Inception';
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie',
      title: 'Inception'
    });
    expect(movie.serialize({includeComponentTypes: false})).toStrictEqual({
      title: 'Inception'
    });
    // - Value sourcing -
    // Values sourced from the target are omitted when serializing to it.
    movie = Movie.instantiate({title: 'Inception'}, {source: 'client'});
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie',
      title: 'Inception'
    });
    expect(movie.serialize({target: 'client'})).toStrictEqual({
      __component: 'Movie'
    });
    // --- With an embedded component ---
    // Embedded components are serialized inline (full copy, not a reference).
    movie.director = new Director({name: 'Christopher Nolan'});
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie',
      title: 'Inception',
      director: {
        __component: 'Director',
        __new: true,
        name: 'Christopher Nolan',
        country: {__undefined: true}
      }
    });
    // Nested attribute selectors apply inside the embedded component.
    expect(
      movie.serialize({attributeSelector: {title: true, director: {name: true}}})
    ).toStrictEqual({
      __component: 'Movie',
      title: 'Inception',
      director: {__component: 'Director', __new: true, name: 'Christopher Nolan'}
    });
    expect(movie.serialize({attributeSelector: {title: true, director: {}}})).toStrictEqual({
      __component: 'Movie',
      title: 'Inception',
      director: {__component: 'Director', __new: true}
    });
    expect(movie.serialize({includeIsNewMarks: false})).toStrictEqual({
      __component: 'Movie',
      title: 'Inception',
      director: {__component: 'Director', name: 'Christopher Nolan', country: {__undefined: true}}
    });
    // attributeFilter is invoked with the parent instance as `this`.
    expect(
      movie.serialize({
        attributeFilter(attribute) {
          expect(this).toBe(movie);
          expect(attribute.getParent()).toBe(movie);
          return attribute.getName() === 'title';
        }
      })
    ).toStrictEqual({
      __component: 'Movie',
      title: 'Inception'
    });
    // An async attributeFilter makes serialize() return a promise.
    expect(
      await movie.serialize({
        async attributeFilter(attribute) {
          expect(this).toBe(movie);
          expect(attribute.getParent()).toBe(movie);
          return attribute.getName() === 'title';
        }
      })
    ).toStrictEqual({
      __component: 'Movie',
      title: 'Inception'
    });
    // - Value sourcing -
    movie
      .getAttribute('director')
      .setValue(
        Director.instantiate({name: 'Christopher Nolan', country: 'USA'}, {source: 'client'}),
        {
          source: 'client'
        }
      );
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie',
      title: 'Inception',
      director: {__component: 'Director', name: 'Christopher Nolan', country: 'USA'}
    });
    expect(movie.serialize({target: 'client'})).toStrictEqual({__component: 'Movie'});
    // Locally mutating a nested attribute makes it serializable again.
    movie.director.country = 'US';
    expect(movie.serialize({target: 'client'})).toStrictEqual({
      __component: 'Movie',
      director: {__component: 'Director', country: 'US'}
    });
    // --- With an array of embedded components ---
    movie.actors = [new Actor({name: 'Leonardo DiCaprio'})];
    expect(movie.serialize({attributeSelector: {actors: true}})).toStrictEqual({
      __component: 'Movie',
      actors: [
        {__component: 'Actor', __new: true, name: 'Leonardo DiCaprio', country: {__undefined: true}}
      ]
    });
    // - Value sourcing -
    movie
      .getAttribute('actors')
      .setValue(
        [Actor.instantiate({name: 'Leonardo DiCaprio', country: 'USA'}, {source: 'client'})],
        {
          source: 'client'
        }
      );
    expect(movie.serialize({attributeSelector: {actors: true}})).toStrictEqual({
      __component: 'Movie',
      actors: [{__component: 'Actor', name: 'Leonardo DiCaprio', country: 'USA'}]
    });
    expect(movie.serialize({attributeSelector: {actors: true}, target: 'client'})).toStrictEqual({
      __component: 'Movie'
    });
    movie.actors[0].country = 'US';
    expect(movie.serialize({attributeSelector: {actors: true}, target: 'client'})).toStrictEqual({
      __component: 'Movie',
      actors: [{__component: 'Actor', name: 'Leonardo DiCaprio', country: 'US'}]
    });
  });
  // Serialization of instances carrying primary/secondary identifiers:
  // reference serialization by identifier, dependency tracking, arrays of
  // components (first occurrence full, later occurrences by reference), and
  // serialization of plain objects containing components.
  test('Identifiable component instances', async () => {
    class Movie extends Component {
      @primaryIdentifier() id!: string;
      @secondaryIdentifier() slug!: string;
      @attribute('string') title = '';
    }
    // No identifier set: full serialization works, references cannot.
    let movie = Movie.fork().instantiate({title: 'Inception'});
    expect(movie.serialize()).toEqual({
      __component: 'Movie',
      title: 'Inception'
    });
    expect(() => movie.serialize({returnComponentReferences: true})).toThrow(
      "Cannot get an identifier descriptor from a component that has no set identifier (component: 'Movie')"
    );
    // Primary identifier set: the reference uses it.
    movie = Movie.fork().instantiate({id: 'abc123', title: 'Inception'});
    expect(movie.serialize()).toEqual({
      __component: 'Movie',
      id: 'abc123',
      title: 'Inception'
    });
    expect(movie.serialize({returnComponentReferences: true})).toEqual({
      __component: 'Movie',
      id: 'abc123'
    });
    // Only a secondary identifier set: the reference falls back to it.
    movie = Movie.fork().instantiate({slug: 'inception', title: 'Inception'});
    expect(movie.serialize()).toEqual({
      __component: 'Movie',
      slug: 'inception',
      title: 'Inception'
    });
    expect(movie.serialize({returnComponentReferences: true})).toEqual({
      __component: 'Movie',
      slug: 'inception'
    });
    // Both identifiers set: the primary identifier wins for references.
    movie = Movie.fork().instantiate({id: 'abc123', slug: 'inception', title: 'Inception'});
    expect(movie.serialize()).toEqual({
      __component: 'Movie',
      id: 'abc123',
      slug: 'inception',
      title: 'Inception'
    });
    expect(movie.serialize({returnComponentReferences: true})).toEqual({
      __component: 'Movie',
      id: 'abc123'
    });
    // - Value sourcing -
    // Identifiers are always serialized, even back to their source target.
    movie = Movie.fork().instantiate({id: 'abc123', title: 'Inception'}, {source: 'client'});
    expect(movie.serialize()).toStrictEqual({
      __component: 'Movie',
      id: 'abc123',
      title: 'Inception'
    });
    expect(movie.serialize({target: 'client'})).toStrictEqual({
      __component: 'Movie',
      id: 'abc123'
    });
    // --- With referenced identifiable component instances ---
    class Cinema extends Component {
      @provide() static Movie = Movie;
      @primaryIdentifier() id!: string;
      @attribute('string') name = '';
      @attribute('Movie[]') movies!: Movie[];
    }
    movie = Movie.instantiate({id: 'abc123', title: 'Inception'});
    const cinema = Cinema.instantiate({
      id: 'xyz456',
      name: 'Paradiso',
      movies: [movie]
    });
    // Referenced identifiable instances serialize as identifier references.
    expect(cinema.serialize()).toEqual({
      __component: 'Cinema',
      id: 'xyz456',
      name: 'Paradiso',
      movies: [{__component: 'Movie', id: 'abc123'}]
    });
    // Instance-level serialization records the classes it depends on.
    let componentDependencies: ComponentSet = new Set();
    expect(cinema.serialize({componentDependencies})).toEqual({
      __component: 'Cinema',
      id: 'xyz456',
      name: 'Paradiso',
      movies: [{__component: 'Movie', id: 'abc123'}]
    });
    expect(Array.from(componentDependencies)).toEqual([Cinema, Movie]);
    componentDependencies = new Set();
    expect(cinema.serialize({returnComponentReferences: true, componentDependencies})).toEqual({
      __component: 'Cinema',
      id: 'xyz456'
    });
    expect(Array.from(componentDependencies)).toEqual([Cinema, Movie]);
    // - With an array of components -
    // The first occurrence of each component is fully serialized; repeats
    // collapse to references via the serializedComponents set.
    const serializedComponents: ComponentSet = new Set();
    componentDependencies = new Set();
    expect(
      serialize([cinema, movie, movie], {serializedComponents, componentDependencies})
    ).toEqual([
      {
        __component: 'Cinema',
        id: 'xyz456',
        name: 'Paradiso',
        movies: [{__component: 'Movie', id: 'abc123'}]
      },
      {__component: 'Movie', id: 'abc123', title: 'Inception'},
      {__component: 'Movie', id: 'abc123'}
    ]);
    expect(Array.from(serializedComponents)).toEqual([cinema, movie]);
    expect(Array.from(componentDependencies)).toEqual([Cinema, Movie]);
    // - Using 'returnComponentReferences' option -
    // Components nested inside plain objects are replaced by references.
    expect(
      serialize(
        {
          '<=': cinema,
          'play=>': {'()': [movie]}
        },
        {returnComponentReferences: true}
      )
    ).toEqual({
      '<=': {__component: 'Cinema', id: 'xyz456'},
      'play=>': {'()': [{__component: 'Movie', id: 'abc123'}]}
    });
  });
  // Serialization of plain functions: own enumerable properties are always
  // serialized; the source code itself only with serializeFunctions: true.
  test('Functions', async () => {
    function sum(a: number, b: number) {
      return a + b;
    }
    // Without serializeFunctions, a property-less function serializes to {}.
    expect(serialize(sum)).toStrictEqual({});
    expect(trimSerializedFunction(serialize(sum, {serializeFunctions: true}))).toStrictEqual({
      __function: 'function sum(a, b) {\nreturn a + b;\n}'
    });
    // Own properties attached to the function are serialized alongside it.
    sum.displayName = 'sum';
    expect(serialize(sum)).toStrictEqual({displayName: 'sum'});
    expect(trimSerializedFunction(serialize(sum, {serializeFunctions: true}))).toStrictEqual({
      __function: 'function sum(a, b) {\nreturn a + b;\n}',
      displayName: 'sum'
    });
    // Normalizes indentation in the captured source so the expected string
    // is independent of how this test file itself is indented.
    function trimSerializedFunction(serializedFunction: any) {
      return {
        ...serializedFunction,
        __function: serializedFunction.__function.replace(/\n +/g, '\n')
      };
    }
  });
}); | the_stack |
import { stripIndent } from "common-tags";
import { fromFixture } from "eslint-etc";
import rule = require("../../source/rules/prefer-takeuntil");
import { ruleTester } from "../utils";
ruleTester({ types: true }).run("prefer-takeuntil", rule, {
valid: [
{
code: stripIndent`
// correct component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "correct-component"
})
class CorrectComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
},
{
code: stripIndent`
// correct component, not last
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { map, switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "correct-component"
})
class CorrectComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy),
map(value => value)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
},
{
code: stripIndent`
// destructured component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "destructured-component"
})
class DestructuredComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
const { destroy } = this;
o.pipe(
switchMap(_ => o),
takeUntil(destroy)
).subscribe();
}
ngOnDestroy() {
const { destroy } = this;
destroy.next();
destroy.complete();
}
}
`,
},
{
code: stripIndent`
// secondary takeuntil component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "secondary-takeuntil-component"
})
class SecondaryTakeUntilComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
takeUntil(o),
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
},
{
code: stripIndent`
// not components
import { of } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
class SomeClass {
someMethod() {
o.pipe(switchMap(_ => o)).subscribe();
o.pipe(switchMap(_ => o), takeUntil(o)).subscribe();
}
}
function someFunction() {
o.pipe(switchMap(_ => o)).subscribe();
o.pipe(switchMap(_ => o), takeUntil(o)).subscribe();
}
@Injectable()
class NoTakeUntilService {
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
}
}
@Pipe({
name: 'some-pipe',
})
class NoTakeUntilPipe {
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
}
}
@Directive({
selector: 'some-directive'
})
class NoTakeUntilDirective {
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
}
}
`,
},
{
code: stripIndent`
// no destroy only takeuntil
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "only-takeuntil"
})
class CorrectComponent {
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(NEVER)
).subscribe();
}
}
`,
options: [{ checkDestroy: false }],
},
{
code: stripIndent`
// with alias
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
const someAlias = takeUntil;
@Component({
selector: "component-with-alias"
})
class CorrectComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
someAlias(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
options: [{ alias: ["someAlias"] }],
},
{
code: stripIndent`
// decorators with takeuntil
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "correct-component"
})
class CorrectComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
@Injectable()
class CorrectService implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
@Pipe({
name: 'controlByName',
})
class CorrectPipe implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
@Directive({
selector: 'my-directive'
})
class CorrectDirective implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
options: [
{
checkDecorators: ["Component", "Pipe", "Injectable", "Directive"],
},
],
},
{
code: stripIndent`
// https://github.com/cartant/rxjs-tslint-rules/issues/115
import { Component } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
const someAlias = (cmp) => takeUntil(cmp.destroy);
@Component({
selector: "component-with-alias"
})
class CorrectComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
someAlias(this)
).subscribe();
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
options: [
{
alias: ["someAlias"],
checkDestroy: false,
},
],
},
{
code: stripIndent`
// https://github.com/cartant/eslint-plugin-rxjs-angular/issues/5
import { Component } from "@angular/core";
import { of } from "rxjs";
import { switchMap, take } from "rxjs/operators";
const o = of("o");
@Component({
selector: "component-with-alias"
})
class CorrectComponent implements OnDestroy {
someMethod() {
o.pipe(
switchMap(_ => o),
take(1)
).subscribe();
}
}
`,
options: [
{
alias: ["take"],
checkDestroy: false,
},
],
},
],
invalid: [
fromFixture(
stripIndent`
// no pipe component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-pipe-component"
})
class NoPipeComponent {
private destroy = new Subject<void>();
someMethod() {
const { destroy } = this;
o.subscribe();
~~~~~~~~~ [noTakeUntil]
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// no takeuntil component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-takeuntil-component"
})
class NoTakeUntilComponent {
private destroy = new Subject<void>();
someMethod() {
const { destroy } = this;
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// no subject component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-subject-component"
})
class NoSubjectComponent implements OnDestroy {
~~~~~~~~~~~~~~~~~~ [notDeclared { "name": "o" }]
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(o)
).subscribe();
}
ngOnDestroy() {
~~~~~~~~~~~ [notCalled { "method": "next", "name": "o" }]
~~~~~~~~~~~ [notCalled { "method": "complete", "name": "o" }]
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// no destroy component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-destroy-component"
})
class NoDestroyComponent {
~~~~~~~~~~~~~~~~~~ [noDestroy]
private destroy = new Subject<void>();
someMethod() {
const { destroy } = this;
o.pipe(
switchMap(_ => o),
takeUntil(destroy)
).subscribe();
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// no next component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-next-component"
})
class NoNextComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
~~~~~~~~~~~ [notCalled { "method": "next", "name": "destroy" }]
this.destroy.complete();
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// no complete component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-complete-component"
})
class NoCompleteComponent implements OnDestroy {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o),
takeUntil(this.destroy)
).subscribe();
}
ngOnDestroy() {
~~~~~~~~~~~ [notCalled { "method": "complete", "name": "destroy" }]
this.destroy.next();
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// no destroy and no takeuntil component
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-takeuntil-component"
})
class NoTakeUntilComponent {
~~~~~~~~~~~~~~~~~~~~ [noDestroy]
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
}
`,
{ options: [{ checkComplete: true }] }
),
fromFixture(
stripIndent`
// without alias
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
const someAlias = takeUntil;
@Component({
selector: "component-without-alias"
})
class NoTakeUntilComponent {
private destroy = new Subject<void>();
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
`,
{ options: [{ alias: ["someAlias"] }] }
),
fromFixture(
stripIndent`
// decorators without takeuntil
import { Component, OnDestroy } from "@angular/core";
import { of, Subject } from "rxjs";
import { switchMap, takeUntil } from "rxjs/operators";
const o = of("o");
@Component({
selector: "no-next-component"
})
class NoTakeUntilComponent {
~~~~~~~~~~~~~~~~~~~~ [noDestroy]
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
}
@Injectable()
class NoTakeUntilService {
~~~~~~~~~~~~~~~~~~ [noDestroy]
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
}
@Pipe({
name: 'controlByName',
})
class NoTakeUntilPipe {
~~~~~~~~~~~~~~~ [noDestroy]
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
}
@Directive({
selector: 'my-directive'
})
class NoTakeUntilDirective {
~~~~~~~~~~~~~~~~~~~~ [noDestroy]
someMethod() {
o.pipe(
switchMap(_ => o)
).subscribe();
~~~~~~~~~ [noTakeUntil]
}
}
`,
{
options: [
{
checkDecorators: ["Component", "Pipe", "Injectable", "Directive"],
},
],
}
),
],
}); | the_stack |
import { Component, OnInit, OnDestroy, Input } from '@angular/core';
import { FetchResourcesService } from './../../services/fetch-resources.service';
import { ActivatedRoute } from '@angular/router';
import 'rxjs/add/operator/switchMap';
import { Subscription } from 'rxjs/Subscription';
import { AssetGroupObservableService } from '../../../core/services/asset-group-observable.service';
import { AwsResourceTypeSelectionService } from './../../services/aws-resource-type-selection.service';
import { ErrorHandlingService } from '../../../shared/services/error-handling.service';
import { ICONS } from './../../../shared/constants/icons-mapping';
import { DomainTypeObservableService } from '../../../core/services/domain-type-observable.service';
import { LoggerService } from '../../../shared/services/logger.service';
import { CONFIGURATIONS } from './../../../../config/configurations';
@Component({
selector: 'app-aws-resource-details',
templateUrl: './aws-resource-details.component.html',
styleUrls: ['./aws-resource-details.component.css'],
providers: [ FetchResourcesService ]
})
export class AwsResourceDetailsComponent implements OnInit, OnDestroy {
selectedResource: any = {
type: undefined
};
dataLoaded: boolean;
loading: boolean;
errorMessage: string;
error: boolean;
private selectedResourceTypeFromUrl: string;
private awsResourceDetails: any = [];
private selectedResourceRecommendation: any = [];
private allAvailableCategories: any = [];
private selectedAssetGroup: any;
awsResourcesCache: any = [];
private showViewMore = false;
private assetGroupSubscription: Subscription;
private routeSubscription: Subscription;
private resourceSelectionSubscription: Subscription;
private dataSubscription: Subscription;
subscriptionDomain: Subscription;
selectedDomain: any;
@Input() pageLevel: number;
public config;
public oss;
constructor(private fetchResourcesService: FetchResourcesService,
private route: ActivatedRoute,
private assetGroupObservableService: AssetGroupObservableService,
private awsResourceTypeSelectionService: AwsResourceTypeSelectionService,
private errorHandling: ErrorHandlingService,
private domainObservableService: DomainTypeObservableService,
private logger: LoggerService) {
this.config = CONFIGURATIONS;
this.oss = this.config && this.config.optional && this.config.optional.general && this.config.optional.general.OSS;
this.assetGroupSubscription = this.assetGroupObservableService.getAssetGroup()
.subscribe(
assetGroupName => {
this.selectedAssetGroup = assetGroupName;
});
this.subscriptionDomain = this.domainObservableService.getDomainType().subscribe(domain => {
this.selectedDomain = domain;
this.init();
});
this.routeSubscription = this.route.queryParams.subscribe(params => {
if (params['type']) {
this.selectedResourceTypeFromUrl = params['type'];
}
});
}
ngOnInit() {
// Reset all variables
this.loading = false;
this.dataLoaded = false;
this.error = false;
this.errorMessage = 'apiResponseError';
this.awsResourcesCache = [];
this.selectedResource['recommendations'] = [];
this.getData();
this.viewAllSetup();
}
init() {
this.getData();
}
setDataLoading() {
this.loading = true;
this.dataLoaded = false;
this.error = false;
}
setDataLoaded() {
this.loading = false;
this.dataLoaded = true;
this.error = false;
}
setError(error) {
this.loading = false;
this.dataLoaded = false;
this.error = true;
this.logger.log('error', error);
}
viewAllSetup() {
try {
if (this.resourceSelectionSubscription) {
this.resourceSelectionSubscription.unsubscribe();
}
// SeelctedResource selected from viewAll.
this.resourceSelectionSubscription = this.awsResourceTypeSelectionService.getSelectedResource()
.subscribe(
selectedResourceInViewAll => {
if (this.awsResourceDetails !== undefined) {
this.awsResourceDetails.forEach(element => {
if (element.type === selectedResourceInViewAll) {
// If the selected element from 'view-all' is already present in 'awsResourcesCache', then remove it
for (let i = 0; i < this.awsResourcesCache.length; i++) {
if (this.awsResourcesCache[i].type === selectedResourceInViewAll) {
this.awsResourcesCache.splice(i, 1);
}
}
// Add the freshly selected resource to the first index of array 'awsResourcesCache
this.awsResourcesCache.unshift(element);
// Limit the items in 'awsResourcesCache' to maximum of 7
if (this.awsResourcesCache.length > 7) {
this.awsResourcesCache = this.awsResourcesCache.slice(0, 7);
}
this.selectedResource = Object.assign(element);
this.selectResourceTile(this.selectedResource.type);
}
});
}
});
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
this.setError(error);
this.logger.log('error', error);
}
}
getData() {
this.setDataLoading();
this.getResourceTypeAndCountAndRecommendation();
}
getResourceTypeAndCountAndRecommendation() {
try {
if (this.dataSubscription) {
this.dataSubscription.unsubscribe();
}
const queryParams = {
'ag' : this.selectedAssetGroup,
'domain': this.selectedDomain
};
const output = this.fetchResourcesService.getResourceTypesAndCount(queryParams);
this.dataSubscription = output.subscribe(results => {
try {
const resourceTypes = results[0]['targettypes'];
let resourceTypeCount = results[1];
let recommendations = results[2];
this.setDataLoaded();
this.awsResourceDetails = resourceTypes.map(function(resourceType){
if (resourceTypeCount !== undefined && resourceTypeCount !== null) {
resourceTypeCount = results[1].assetcount;
const countObj = resourceTypeCount.find(obj => obj.type === resourceType.type);
resourceType.count = countObj ? countObj.count : 0;
}
if (recommendations !== undefined && recommendations !== null) {
recommendations = results[2]['response'];
let recommendationArray = [];
recommendationArray = recommendations.filter((value) => {
return value.targetType === resourceType.type;
});
resourceType.recommendations = recommendationArray;
resourceType.recommendationAvailable = recommendationArray.length > 0 ? true : false;
}
return resourceType;
});
this.awsResourceDetails = this.removeTargetTypesOfCategoryOthers(this.awsResourceDetails, 'Other');
this.sortAwsResources();
// Update the aws resources in the common shared service
this.awsResourceTypeSelectionService.allAwsResourcesForAssetGroup(this.awsResourceDetails);
this.assignIconsToResources();
this.setDataLoaded();
this.setupMainPageResourceTypes();
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
this.setError(error);
this.logger.log('error', error);
}
},
error => {
this.setError(error);
this.errorMessage = error;
this.logger.log('error', error);
}
);
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
this.setError(error);
this.logger.log('error', error);
}
}
assignIconsToResources() {
const categoriesObj = [];
// Temporary array to store unique categories
this.allAvailableCategories = [];
this.awsResourceDetails.forEach(resources => {
const category = ICONS.categories[resources.category] === undefined ? 'Extra' : resources['category'];
resources.category = category; // Update the category of current resource depending on the result of the above line
if (this.allAvailableCategories.indexOf(category) === -1) {
this.allAvailableCategories.push(category);
const obj = {
'name' : category,
'color' : ICONS.categories[category]
};
categoriesObj.push(obj);
}
});
// If extra categories are present, push them to the end
if (this.allAvailableCategories.indexOf('Extra') > -1) {
const extraCategory = categoriesObj.splice(this.allAvailableCategories.indexOf('Extra'), 1);
categoriesObj.push(extraCategory);
}
this.awsResourceDetails.forEach(resources => {
resources['iconPath'] = ICONS.awsResources[resources.type];
});
}
sortAwsResources() {
this.awsResourceDetails.sort(function(a, b) {
return b.count - a.count; // For descending order
// return a.count - b.count; // For ascending order
});
let computeCategoryPresent = false;
let firstComputeIndex = 0;
// Check if 'compute' category assets are there. If present, sort them with Compute category resources being first.
const allResources = this.awsResourceDetails.slice();
this.awsResourceDetails.forEach((element, index) => {
if (element.category.toLowerCase() === 'compute') {
computeCategoryPresent = true;
firstComputeIndex += 1;
allResources.splice(index, 1);
allResources.unshift(element);
}
});
// Sort compute category items
const computeCategoryResources = allResources.slice(0, firstComputeIndex);
computeCategoryResources.sort(function(a, b) {
return b.count - a.count; // For descending order
// return a.count - b.count; // For ascending order
});
for ( let i = 0; i < computeCategoryResources.length; i++) {
allResources[i] = computeCategoryResources[i];
}
if (computeCategoryPresent) {
this.awsResourceDetails = allResources.slice();
}
}
setupMainPageResourceTypes () {
if (this.awsResourceDetails.length > 7) {
this.awsResourcesCache = this.awsResourceDetails.slice(0, 7);
this.showViewMore = true;
} else {
this.awsResourcesCache = this.awsResourceDetails.slice();
this.showViewMore = false;
}
if (!this.selectedResourceTypeFromUrl) {
this.selectResourceTile(this.awsResourceDetails[0].type);
} else {
this.selectResourceTile(this.selectedResourceTypeFromUrl);
}
}
awsTileClicked(resources, index) {
this.selectedResource = this.awsResourcesCache[index];
this.selectedResourceRecommendation = this.selectedResource['recommendations'];
}
removeTargetTypesOfCategoryOthers(resourceTypes, categoryType) {
const updatedResourceTypes = resourceTypes.filter((value) => {
return value.category.toLowerCase() !== categoryType.toLowerCase();
});
return updatedResourceTypes;
}
selectResourceTile(resources) {
const tileIndex = this.awsResourcesCache.findIndex((value) => {
return value.type.toLowerCase() === resources.toLowerCase();
});
this.awsTileClicked(resources, tileIndex);
}
getResourceTypeObjectFromType(resources) {
const tileIndex = this.awsResourceDetails.findIndex((value) => {
return value.type.toLowerCase() === resources.toLowerCase();
});
return this.awsResourceDetails[tileIndex];
}
ngOnDestroy() {
try {
this.dataSubscription.unsubscribe();
this.routeSubscription.unsubscribe();
this.assetGroupSubscription.unsubscribe();
this.resourceSelectionSubscription.unsubscribe();
this.subscriptionDomain.unsubscribe();
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
}
}
} | the_stack |
import chalk from 'chalk'
import { EventData } from 'web3-eth-contract'
import { EventEmitter } from 'events'
import { toBN, toHex } from 'web3-utils'
import { toBuffer, PrefixedHexString, BN } from 'ethereumjs-util'
import { IRelayHubInstance } from '@opengsn/contracts/types/truffle-contracts'
import { ContractInteractor, RelayCallABI } from '@opengsn/common/dist/ContractInteractor'
import { TransactionRejectedByPaymaster, TransactionRelayed } from '@opengsn/common/dist/types/GSNContractsDataTypes'
import { GasPriceFetcher } from './GasPriceFetcher'
import { Address, IntString } from '@opengsn/common/dist/types/Aliases'
import { RelayTransactionRequest } from '@opengsn/common/dist/types/RelayTransactionRequest'
import { ReadinessInfo, StatsResponse } from '@opengsn/common/dist/StatsResponse'
import { PingResponse } from '@opengsn/common/dist/PingResponse'
import { VersionsManager } from '@opengsn/common/dist/VersionsManager'
import { AmountRequired } from '@opengsn/common/dist/AmountRequired'
import { LoggerInterface } from '@opengsn/common/dist/LoggerInterface'
import { Environment } from '@opengsn/common/dist/Environments'
import { gsnRequiredVersion, gsnRuntimeVersion } from '@opengsn/common/dist/Version'
import {
address2topic,
decodeRevertReason,
getLatestEventData,
PaymasterGasAndDataLimits,
randomInRange,
sleep
} from '@opengsn/common/dist/Utils'
import { RegistrationManager } from './RegistrationManager'
import { PaymasterStatus, ReputationManager } from './ReputationManager'
import { SendTransactionDetails, SignedTransactionDetails, TransactionManager } from './TransactionManager'
import { ServerAction } from './StoredTransaction'
import { TxStoreManager } from './TxStoreManager'
import { configureServer, ServerConfigParams, ServerDependencies } from './ServerConfigParams'
import { TransactionType } from '@opengsn/common/dist/types/TransactionType'
import { isSameAddress, toNumber } from '@opengsn/common'
/**
 * After EIP-150, every time the call stack depth is increased without explicit call gas limit set,
 * the 63/64th rule is applied to gas limit.
 * As we have to pass enough gas to a transaction to pass 'relayRequest.request.gas' to the recipient,
 * and this check is at stack depth of 3, we have to oversupply gas to an outermost ('relayCall') transaction
 * by approximately 1/(63/64)^3 times.
 * Note: (64/63)^3 is roughly 1.05; 1.1 is used here, leaving extra margin.
 */
const GAS_FACTOR = 1.1
/**
 * A constant oversupply of gas to each 'relayCall' transaction.
 * Added on top of the GAS_FACTOR-scaled estimate in validatePaymasterGasAndDataLimits.
 */
const GAS_RESERVE = 100000
export class RelayServer extends EventEmitter {
  readonly logger: LoggerInterface
  // Highest block number already processed by the worker loop.
  lastScannedBlock: number
  // Block at which server state (fees, balances, registration) was last refreshed.
  lastRefreshBlock = 0
  // True when the relay is registered, funded and accepting relay requests.
  ready = false
  readonly managerAddress: PrefixedHexString
  readonly workerAddress: PrefixedHexString
  // Lowest maxPriorityFeePerGas this relay will accept; refreshed from the node.
  minMaxPriorityFeePerGas: number = 0
  // True while the polling loop keeps re-scheduling itself.
  running = false
  // Set when an on-chain paymaster rejection is observed; slows traffic down.
  alerted = false
  alertedByTransactionBlockTimestamp: number = 0
  initialized: boolean = false
  readonly contractInteractor: ContractInteractor
  readonly gasPriceFetcher: GasPriceFetcher
  private readonly versionManager: VersionsManager
  config: ServerConfigParams
  transactionManager: TransactionManager
  txStoreManager: TxStoreManager
  readinessInfo: ReadinessInfo
  // Hard cap on a single relayCall gas limit (set to 75% of latest block gasLimit in init()).
  maxGasLimit: number = 0
  transactionType = TransactionType.LEGACY
  lastMinedActiveTransaction?: EventData
  // Assigned in the constructor only when config.runPaymasterReputations is set.
  reputationManager!: ReputationManager
  // The following definite-assignment fields are populated during init().
  registrationManager!: RegistrationManager
  chainId!: number
  networkId!: number
  relayHubContract!: IRelayHubInstance
  // Gas/data limits cache for trusted paymasters, keyed by lowercased address.
  trustedPaymastersGasAndDataLimits: Map<String | undefined, PaymasterGasAndDataLimits> = new Map<String | undefined, PaymasterGasAndDataLimits>()
  workerBalanceRequired: AmountRequired
  environment: Environment
constructor (
config: Partial<ServerConfigParams>,
transactionManager: TransactionManager,
dependencies: ServerDependencies) {
super()
this.logger = dependencies.logger
this.lastScannedBlock = config.coldRestartLogsFromBlock ?? 0
this.versionManager = new VersionsManager(gsnRuntimeVersion, gsnRequiredVersion)
this.config = configureServer(config)
this.contractInteractor = dependencies.contractInteractor
this.environment = this.contractInteractor.environment
this.gasPriceFetcher = dependencies.gasPriceFetcher
this.txStoreManager = dependencies.txStoreManager
this.transactionManager = transactionManager
this.managerAddress = this.transactionManager.managerKeyManager.getAddress(0)
this.workerAddress = this.transactionManager.workersKeyManager.getAddress(0)
this.workerBalanceRequired = new AmountRequired('Worker Balance', toBN(this.config.workerMinBalance), this.logger)
if (this.config.runPaymasterReputations) {
if (dependencies.reputationManager == null) {
throw new Error('ReputationManager is not initialized')
}
this.reputationManager = dependencies.reputationManager
}
const now = Date.now()
this.readinessInfo = {
runningSince: now,
currentStateTimestamp: now,
totalReadyTime: 0,
totalNotReadyTime: 0,
totalReadinessChanges: 0
}
this.printServerAddresses()
this.logger.warn(`RelayServer version', ${gsnRuntimeVersion}`)
this.logger.info(`Using server configuration:\n ${JSON.stringify(this.config)}`)
}
printServerAddresses (): void {
this.logger.info(`Server manager address | ${this.managerAddress}`)
this.logger.info(`Server worker address | ${this.workerAddress}`)
}
getMinMaxPriorityFeePerGas (): number {
return this.minMaxPriorityFeePerGas
}
  /**
   * Handle a ping request: report this relay's addresses, fee floor and readiness.
   * @param paymaster optional paymaster address the client intends to use
   * @throws when reputation tracking is enabled and the paymaster is not in good standing
   */
  async pingHandler (paymaster?: string): Promise<PingResponse> {
    if (this.config.runPaymasterReputations && paymaster != null) {
      await this.validatePaymasterReputation(paymaster, this.lastScannedBlock)
    }
    return {
      relayWorkerAddress: this.workerAddress,
      relayManagerAddress: this.managerAddress,
      // the hub instance may not be resolved yet before init() completes
      relayHubAddress: this.relayHubContract?.address ?? '',
      ownerAddress: this.config.ownerAddress,
      minMaxPriorityFeePerGas: this.getMinMaxPriorityFeePerGas().toString(),
      maxAcceptanceBudget: this._getPaymasterMaxAcceptanceBudget(paymaster),
      chainId: this.chainId.toString(),
      networkId: this.networkId.toString(),
      ready: this.isReady() ?? false,
      version: gsnRuntimeVersion
    }
  }
statsHandler (): StatsResponse {
// First updating latest saved state up to the time of this 'stats' http request, since it might not be up to date.
const now = Date.now()
const statsResponse: StatsResponse = { ...this.readinessInfo, totalUptime: now - this.readinessInfo.runningSince }
if (this.isReady()) {
statsResponse.totalReadyTime = this.readinessInfo.totalReadyTime + now - this.readinessInfo.currentStateTimestamp
} else {
statsResponse.totalNotReadyTime = this.readinessInfo.totalNotReadyTime + now - this.readinessInfo.currentStateTimestamp
}
return statsResponse
}
validateRequestTxType (req: RelayTransactionRequest): void {
if (this.transactionType === TransactionType.LEGACY && req.relayRequest.relayData.maxFeePerGas !== req.relayRequest.relayData.maxPriorityFeePerGas) {
throw new Error(`Network ${this.contractInteractor.getNetworkType()} doesn't support eip1559`)
}
}
validateInput (req: RelayTransactionRequest, currentBlockNumber: number): void {
// Check that the relayHub is the correct one
if (req.metadata.relayHubAddress !== this.relayHubContract.address) {
throw new Error(
`Wrong hub address.\nRelay server's hub address: ${this.relayHubContract.address}, request's hub address: ${req.metadata.relayHubAddress}\n`)
}
// Check the relayWorker (todo: once migrated to multiple relays, check if exists)
if (!isSameAddress(req.relayRequest.relayData.relayWorker.toLowerCase(), this.workerAddress.toLowerCase())) {
throw new Error(
`Wrong worker address: ${req.relayRequest.relayData.relayWorker}\n`)
}
this.validateGasFees(req)
if (this._isBlacklistedPaymaster(req.relayRequest.relayData.paymaster)) {
throw new Error(`Paymaster ${req.relayRequest.relayData.paymaster} is blacklisted!`)
}
// validate the validUntil is not too close
const secondsNow = Math.round(Date.now() / 1000)
const expiredInSeconds = parseInt(req.relayRequest.request.validUntilTime) - secondsNow
if (expiredInSeconds < this.config.requestMinValidSeconds) {
const expirationDate = new Date(parseInt(req.relayRequest.request.validUntilTime) * 1000)
throw new Error(
`Request expired (or too close): expired at (${expirationDate.toUTCString()}), we expect it to be valid until ${new Date(secondsNow + this.config.requestMinValidSeconds).toUTCString()} `)
}
}
validateGasFees (req: RelayTransactionRequest): void {
const requestPriorityFee = parseInt(req.relayRequest.relayData.maxPriorityFeePerGas)
const requestMaxFee = parseInt(req.relayRequest.relayData.maxFeePerGas)
if (this.minMaxPriorityFeePerGas > requestPriorityFee) {
throw new Error(
`priorityFee given ${requestPriorityFee} too low. Minimum maxPriorityFee server accepts: ${this.minMaxPriorityFeePerGas}`)
}
if (parseInt(this.config.maxGasPrice) < requestMaxFee) {
throw new Error(
`maxFee given ${requestMaxFee} too high : ${this.config.maxGasPrice}`)
}
if (requestMaxFee < requestPriorityFee) {
throw new Error(
`maxFee ${requestMaxFee} cannot be lower than priorityFee ${requestPriorityFee}`)
}
}
validateRelayFees (req: RelayTransactionRequest): void {
// if trusted paymaster, we trust it to handle fees
if (this._isTrustedPaymaster(req.relayRequest.relayData.paymaster)) {
return
}
// Check that the fee is acceptable
if (parseInt(req.relayRequest.relayData.pctRelayFee) < this.config.pctRelayFee) {
throw new Error(
`Unacceptable pctRelayFee: ${req.relayRequest.relayData.pctRelayFee} relayServer's pctRelayFee: ${this.config.pctRelayFee}`)
}
if (toBN(req.relayRequest.relayData.baseRelayFee).lt(toBN(this.config.baseRelayFee))) {
throw new Error(
`Unacceptable baseRelayFee: ${req.relayRequest.relayData.baseRelayFee} relayServer's baseRelayFee: ${this.config.baseRelayFee}`)
}
}
async validateMaxNonce (relayMaxNonce: number): Promise<void> {
// Check that max nonce is valid
const nonce = await this.transactionManager.pollNonce(this.workerAddress)
if (nonce > relayMaxNonce) {
throw new Error(`Unacceptable relayMaxNonce: ${relayMaxNonce}. current nonce: ${nonce}`)
}
}
async validatePaymasterReputation (paymaster: Address, currentBlockNumber: number): Promise<void> {
if (this._isTrustedPaymaster(paymaster)) {
return
}
const status = await this.reputationManager.getPaymasterStatus(paymaster, currentBlockNumber)
if (status === PaymasterStatus.GOOD) {
return
}
let message: string
switch (status) {
case PaymasterStatus.ABUSED:
message = 'This paymaster has failed a lot of transactions recently is temporarily blocked by this relay'
break
case PaymasterStatus.THROTTLED:
message = 'This paymaster only had a small number of successful transactions and is therefore throttled by this relay'
break
case PaymasterStatus.BLOCKED:
message = 'This paymaster had too many unsuccessful transactions and is now permanently blocked by this relay'
break
}
throw new Error(`Refusing to serve transactions for paymaster at ${paymaster}: ${message}`)
}
async validatePaymasterGasAndDataLimits (req: RelayTransactionRequest): Promise<{
maxPossibleGas: number
acceptanceBudget: number
}> {
const paymaster = req.relayRequest.relayData.paymaster
let gasAndDataLimits = this.trustedPaymastersGasAndDataLimits.get(paymaster)
let acceptanceBudget: number
acceptanceBudget = this.config.maxAcceptanceBudget
const relayCallAbiInput: RelayCallABI = {
maxAcceptanceBudget: acceptanceBudget.toString(),
relayRequest: req.relayRequest,
signature: req.metadata.signature,
approvalData: req.metadata.approvalData
}
const msgData = this.contractInteractor.encodeABI(relayCallAbiInput)
const relayTransactionCalldataGasUsedCalculation = this.contractInteractor.calculateCalldataCost(msgData)
const message =
`Client signed transactionCalldataGasUsed: ${req.relayRequest.relayData.transactionCalldataGasUsed}` +
`Server estimate of its transactionCalldata gas expenses: ${relayTransactionCalldataGasUsedCalculation}`
this.logger.info(message)
if (toBN(relayTransactionCalldataGasUsedCalculation).gt(toBN(req.relayRequest.relayData.transactionCalldataGasUsed))) {
throw new Error(`Refusing to relay a transaction due to calldata cost. ${message}`)
}
const msgDataLength = toBuffer(msgData).length
// estimated cost of transferring the TX between GSN functions (innerRelayCall, preRelayedCall, forwarder, etc)
// const msgDataGasCostInsideTransaction = (await this.relayHubContract.calldataGasCost(msgDataLength)).toNumber()
if (gasAndDataLimits == null) {
try {
const paymasterContract = await this.contractInteractor._createPaymaster(paymaster)
gasAndDataLimits = await paymasterContract.getGasAndDataLimits()
} catch (e) {
const error = e as Error
let message = `unknown paymaster error: ${error.message}`
if (error.message.includes('Returned values aren\'t valid, did it run Out of Gas?')) {
message = `not a valid paymaster contract: ${paymaster}`
} else if (error.message.includes('no code at address')) {
message = `'non-existent paymaster contract: ${paymaster}`
}
throw new Error(message)
}
const msgDataGasCostInsideTransaction = msgDataLength * this.environment.dataOnChainHandlingGasCostPerByte
const paymasterAcceptanceBudget = toNumber(gasAndDataLimits.acceptanceBudget)
if (paymasterAcceptanceBudget + msgDataGasCostInsideTransaction > acceptanceBudget) {
if (!this._isTrustedPaymaster(paymaster)) {
throw new Error(
`paymaster acceptance budget + msg.data gas cost too high. given: ${paymasterAcceptanceBudget + msgDataGasCostInsideTransaction} max allowed: ${this.config.maxAcceptanceBudget}`)
}
this.logger.debug(`Using trusted paymaster's higher than max acceptance budget: ${paymasterAcceptanceBudget}`)
acceptanceBudget = paymasterAcceptanceBudget
}
} else {
// its a trusted paymaster. just use its acceptance budget as-is
acceptanceBudget = toNumber(gasAndDataLimits.acceptanceBudget)
}
// TODO: this is not a good way to calculate gas limit for relay call
const tmpMaxPossibleGas = this.contractInteractor.calculateTransactionMaxPossibleGas({
msgData,
gasAndDataLimits,
relayCallGasLimit: req.relayRequest.request.gas
})
const maxPossibleGas = GAS_RESERVE + Math.floor(tmpMaxPossibleGas * GAS_FACTOR)
if (maxPossibleGas > this.maxGasLimit) {
throw new Error(`maxPossibleGas (${maxPossibleGas}) exceeds maxGasLimit (${this.maxGasLimit})`)
}
const maxCharge =
await this.relayHubContract.calculateCharge(maxPossibleGas, req.relayRequest.relayData,
{ gasPrice: req.relayRequest.relayData.maxFeePerGas })
const paymasterBalance = await this.relayHubContract.balanceOf(paymaster)
if (paymasterBalance.lt(maxCharge)) {
throw new Error(`paymaster balance too low: ${paymasterBalance.toString()}, maxCharge: ${maxCharge.toString()}`)
}
this.logger.debug(`paymaster balance: ${paymasterBalance.toString()}, maxCharge: ${maxCharge.toString()}`)
this.logger.debug(`Estimated max charge of relayed tx: ${maxCharge.toString()}, GasLimit of relayed tx: ${maxPossibleGas}`)
return {
acceptanceBudget,
maxPossibleGas
}
}
  /**
   * Execute relayCall as a view call (against the 'pending' block) to verify
   * the paymaster accepts this request and the call does not revert, before
   * spending worker gas on the real transaction.
   * @throws when the view call reverts or the paymaster rejects the request
   */
  async validateViewCallSucceeds (req: RelayTransactionRequest, maxAcceptanceBudget: number, maxPossibleGas: number): Promise<void> {
    this.logger.debug(`validateViewCallSucceeds: ${JSON.stringify(arguments)}`)
    const method = this.relayHubContract.contract.methods.relayCall(
      maxAcceptanceBudget, req.relayRequest, req.metadata.signature, req.metadata.approvalData)
    let viewRelayCallRet: { paymasterAccepted: boolean, returnValue: string }
    try {
      // Fee fields must match the transaction type this server will send.
      if (this.transactionType === TransactionType.TYPE_TWO) {
        viewRelayCallRet =
          await method.call({
            from: this.workerAddress,
            maxFeePerGas: req.relayRequest.relayData.maxFeePerGas,
            maxPriorityFeePerGas: req.relayRequest.relayData.maxPriorityFeePerGas,
            gasLimit: maxPossibleGas
          }, 'pending')
      } else {
        viewRelayCallRet =
          await method.call({
            from: this.workerAddress,
            gasPrice: req.relayRequest.relayData.maxFeePerGas,
            gasLimit: maxPossibleGas
          }, 'pending')
      }
    } catch (e) {
      throw new Error(`relayCall reverted in server: ${(e as Error).message}`)
    }
    this.logger.debug(`Result for view-only relay call (on pending block):
paymasterAccepted  | ${viewRelayCallRet.paymasterAccepted ? chalk.green('true') : chalk.red('false')}
returnValue        | ${viewRelayCallRet.returnValue}
`)
    if (!viewRelayCallRet.paymasterAccepted) {
      throw new Error(
        `Paymaster rejected in server: ${decodeRevertReason(viewRelayCallRet.returnValue)} req=${JSON.stringify(req, null, 2)}`)
    }
  }
  /**
   * Validate and relay a client's transaction request.
   * Runs all input/fee/reputation/gas validations, simulates the call, then
   * signs and broadcasts the real relayCall transaction via the worker key.
   * @param req the signed relay transaction request from the client
   * @returns the signed raw transaction hex string
   * @throws when the relay is not ready or any validation step fails
   */
  async createRelayTransaction (req: RelayTransactionRequest): Promise<PrefixedHexString> {
    this.logger.debug(`dump request params: ${JSON.stringify(req)}`)
    if (!this.isReady()) {
      throw new Error('relay not ready')
    }
    this.validateRequestTxType(req)
    if (this.alerted) {
      // in alerted state, random delay throttles traffic through this relay
      this.logger.error('Alerted state: slowing down traffic')
      await sleep(randomInRange(this.config.minAlertedDelayMS, this.config.maxAlertedDelayMS))
    }
    const currentBlockNumber = await this.contractInteractor.getBlockNumber()
    const block = await this.contractInteractor.getBlock(currentBlockNumber)
    const currentBlockTimestamp = toNumber(block.timestamp)
    this.validateInput(req, currentBlockNumber)
    this.validateRelayFees(req)
    await this.validateMaxNonce(req.metadata.relayMaxNonce)
    if (this.config.runPaymasterReputations) {
      await this.validatePaymasterReputation(req.relayRequest.relayData.paymaster, this.lastScannedBlock)
    }
    // Call relayCall as a view function to see if we'll get paid for relaying this tx
    const {
      acceptanceBudget,
      maxPossibleGas
    } = await this.validatePaymasterGasAndDataLimits(req)
    await this.validateViewCallSucceeds(req, acceptanceBudget, maxPossibleGas)
    if (this.config.runPaymasterReputations) {
      await this.reputationManager.onRelayRequestAccepted(req.relayRequest.relayData.paymaster)
    }
    // Send relayed transaction
    this.logger.debug(`maxPossibleGas is: ${maxPossibleGas}`)
    const method = this.relayHubContract.contract.methods.relayCall(
      acceptanceBudget, req.relayRequest, req.metadata.signature, req.metadata.approvalData)
    const details: SendTransactionDetails =
      {
        signer: this.workerAddress,
        serverAction: ServerAction.RELAY_CALL,
        method,
        destination: req.metadata.relayHubAddress,
        gasLimit: maxPossibleGas,
        creationBlockNumber: currentBlockNumber,
        creationBlockTimestamp: currentBlockTimestamp,
        maxFeePerGas: req.relayRequest.relayData.maxFeePerGas,
        maxPriorityFeePerGas: req.relayRequest.relayData.maxPriorityFeePerGas
      }
    const { signedTx } = await this.transactionManager.sendTransaction(details)
    // after sending a transaction is a good time to check the worker's balance, and replenish it.
    await this.replenishServer(0, currentBlockNumber, currentBlockTimestamp)
    return signedTx
  }
start (): void {
this.logger.info(`Started polling for new blocks every ${this.config.checkInterval}ms`)
this.running = true
// eslint-disable-next-line @typescript-eslint/no-misused-promises
setTimeout(this.intervalHandler.bind(this), this.config.checkInterval)
}
stop (): void {
if (!this.running) {
throw new Error('Server not started')
}
this.running = false
this.logger.info('Stopping server')
}
fatal (message: string): void {
this.logger.error('FATAL: ' + message)
process.exit(1)
}
/***
* initialize data from trusted paymasters.
* "Trusted" paymasters means that:
* - we trust their code not to alter the gas limits (getGasAndDataLimits returns constants)
* - we trust preRelayedCall to be consistent: off-chain call and on-chain calls should either both succeed
* or both revert.
* - given that, we agree to give the requested acceptanceBudget (since breaking one of the above two "invariants"
* is the only cases where the relayer will have to pay for this budget)
*
* @param paymasters list of trusted paymaster addresses
*/
async _initTrustedPaymasters (paymasters: string[] = []): Promise<void> {
this.trustedPaymastersGasAndDataLimits.clear()
for (const paymasterAddress of paymasters) {
const paymaster = await this.contractInteractor._createPaymaster(paymasterAddress)
const gasAndDataLimits = await paymaster.getGasAndDataLimits().catch((e: Error) => {
throw new Error(`not a valid paymaster address in trustedPaymasters list: ${paymasterAddress}: ${e.message}`)
})
this.trustedPaymastersGasAndDataLimits.set(paymasterAddress.toLowerCase(), gasAndDataLimits)
}
}
_getPaymasterMaxAcceptanceBudget (paymaster?: string): IntString {
const limits = this.trustedPaymastersGasAndDataLimits.get(paymaster?.toLowerCase())
if (limits != null) {
return limits.acceptanceBudget.toString()
} else {
// todo fix
return this.config.maxAcceptanceBudget.toString()
}
}
  /**
   * One-time server initialization: detect transaction type, load trusted
   * paymasters, resolve the RelayHub, and create the RegistrationManager.
   * Must be called exactly once before the worker loop starts.
   * @throws if called twice, or if the chain state contradicts the config
   */
  async init (): Promise<void> {
    const initStartTimestamp = Date.now()
    this.logger.debug('server init start')
    if (this.initialized) {
      throw new Error('_init was already called')
    }
    const latestBlock = await this.contractInteractor.getBlock('latest')
    if (this.config.coldRestartLogsFromBlock == null || latestBlock.number < this.config.coldRestartLogsFromBlock) {
      throw new Error(
        `Cannot start relay worker with coldRestartLogsFromBlock=${this.config.coldRestartLogsFromBlock} when "latest" block returned is ${latestBlock.number}`)
    }
    // presence of baseFeePerGas indicates an EIP-1559 (type 2) network
    if (latestBlock.baseFeePerGas != null) {
      this.transactionType = TransactionType.TYPE_TWO
    }
    await this.transactionManager._init(this.transactionType)
    await this._initTrustedPaymasters(this.config.trustedPaymasters)
    this.relayHubContract = await this.contractInteractor.relayHubInstance
    const relayHubAddress = this.relayHubContract.address
    // minimal sanity check that a contract is actually deployed at the hub address
    const code = await this.contractInteractor.getCode(relayHubAddress)
    if (code.length < 10) {
      this.fatal(`No RelayHub deployed at address ${relayHubAddress}.`)
    }
    this.registrationManager = new RegistrationManager(
      this.contractInteractor,
      this.transactionManager,
      this.txStoreManager,
      this,
      this.logger,
      this.config,
      this.managerAddress,
      this.workerAddress
    )
    await this.registrationManager.init()
    this.chainId = this.contractInteractor.chainId
    this.networkId = this.contractInteractor.getNetworkId()
    // low chain/network ids usually denote a real public network
    if (this.config.devMode && (this.chainId < 1000 || this.networkId < 1000)) {
      this.logger.error('Don\'t use real network\'s chainId & networkId while in devMode.')
      process.exit(-1)
    }
    this.logger.info(`Current network info:
chainId                 | ${this.chainId}
networkId               | ${this.networkId}
latestBlock             | ${latestBlock.number}
latestBlock timestamp   | ${latestBlock.timestamp}
`)
    // cap a single relayCall at 75% of the current block gas limit
    this.maxGasLimit = Math.floor(0.75 * latestBlock.gasLimit)
    this.initialized = true
    // Assume started server is not registered until _worker figures stuff out
    this.registrationManager.printNotRegisteredMessage()
    this.logger.debug(`server init finished in ${Date.now() - initStartTimestamp} ms`)
  }
  /**
   * Keep the manager and worker accounts funded:
   * 1. if the manager ETH balance is below target and the hub deposit is large
   *    enough, withdraw the hub deposit to the manager;
   * 2. if the worker balance is below its required minimum, top it up from the
   *    manager's ETH balance.
   * Pending/recent identical actions are skipped to avoid duplicate transactions.
   * @returns hashes of any transactions sent
   */
  async replenishServer (
    workerIndex: number,
    currentBlockNumber: number,
    currentBlockTimestamp: number
  ): Promise<PrefixedHexString[]> {
    const transactionHashes: PrefixedHexString[] = []
    let managerEthBalance = await this.getManagerBalance()
    const managerHubBalance = await this.relayHubContract.balanceOf(this.managerAddress)
    this.workerBalanceRequired.currentValue = await this.getWorkerBalance(workerIndex)
    if (managerEthBalance.gte(toBN(this.config.managerTargetBalance.toString())) && this.workerBalanceRequired.isSatisfied) {
      // all filled, nothing to do
      return transactionHashes
    }
    const mustWithdrawHubDeposit = managerEthBalance.lt(toBN(this.config.managerTargetBalance.toString())) && managerHubBalance.gte(
      toBN(this.config.minHubWithdrawalBalance))
    const isWithdrawalPending = await this.txStoreManager.isActionPendingOrRecentlyMined(ServerAction.DEPOSIT_WITHDRAWAL, currentBlockNumber, this.config.recentActionAvoidRepeatDistanceBlocks)
    if (mustWithdrawHubDeposit && !isWithdrawalPending) {
      this.logger.info(`withdrawing manager hub balance (${managerHubBalance.toString()}) to manager`)
      // Refill manager eth balance from hub balance
      const method = this.relayHubContract?.contract.methods.withdraw(this.managerAddress, toHex(managerHubBalance))
      const details: SendTransactionDetails = {
        signer: this.managerAddress,
        serverAction: ServerAction.DEPOSIT_WITHDRAWAL,
        destination: this.relayHubContract.address,
        creationBlockNumber: currentBlockNumber,
        creationBlockTimestamp: currentBlockTimestamp,
        method
      }
      const { transactionHash } = await this.transactionManager.sendTransaction(details)
      transactionHashes.push(transactionHash)
    }
    // re-read: the withdrawal above may have changed the manager balance
    managerEthBalance = await this.getManagerBalance()
    const mustReplenishWorker = !this.workerBalanceRequired.isSatisfied
    const isReplenishPendingForWorker = await this.txStoreManager.isActionPendingOrRecentlyMined(ServerAction.VALUE_TRANSFER, currentBlockNumber, this.config.recentActionAvoidRepeatDistanceBlocks, this.workerAddress)
    if (mustReplenishWorker && !isReplenishPendingForWorker) {
      const refill = toBN(this.config.workerTargetBalance.toString()).sub(this.workerBalanceRequired.currentValue)
      this.logger.debug(
        `== replenishServer: mgr balance=${managerEthBalance.toString()}  manager hub balance=${managerHubBalance.toString()}
        \n${this.workerBalanceRequired.description}\n refill=${refill.toString()}`)
      // only refill if the manager retains at least its own minimum afterwards
      if (refill.lt(managerEthBalance.sub(toBN(this.config.managerMinBalance)))) {
        this.logger.debug('Replenishing worker balance by manager eth balance')
        const details: SendTransactionDetails = {
          signer: this.managerAddress,
          serverAction: ServerAction.VALUE_TRANSFER,
          destination: this.workerAddress,
          value: toHex(refill),
          creationBlockNumber: currentBlockNumber,
          creationBlockTimestamp: currentBlockTimestamp
        }
        const { transactionHash } = await this.transactionManager.sendTransaction(details)
        transactionHashes.push(transactionHash)
      } else {
        const message = `== replenishServer: can't replenish: mgr balance too low ${managerEthBalance.toString()} refill=${refill.toString()}`
        this.emit('fundingNeeded', message)
        this.logger.error(message)
      }
    }
    return transactionHashes
  }
async intervalHandler (): Promise<void> {
try {
const blockNumber = await this.contractInteractor.getBlockNumber()
if (blockNumber > this.lastScannedBlock) {
await this._worker(blockNumber)
.then((transactions) => {
if (transactions.length !== 0) {
this.logger.debug(`Done handling block #${blockNumber}. Created ${transactions.length} transactions.`)
}
})
}
} catch (e) {
this.emit('error', e)
const error = e as Error
this.logger.error(`error in worker: ${error.message}`)
this.setReadyState(false)
} finally {
if (this.running) {
// eslint-disable-next-line @typescript-eslint/no-misused-promises
setTimeout(this.intervalHandler.bind(this), this.config.checkInterval)
} else {
this.logger.info('Shutting down worker task')
}
}
}
  /**
   * Process a newly observed block: refresh fees and balances, handle owner
   * withdrawals, and delegate event handling to _handleChanges().
   * @param blockNumber the new chain head; must be newer than lastScannedBlock
   * @returns hashes of any transactions created during this pass
   * @throws if init() has not completed or the block is not newer than the last scan
   */
  async _worker (blockNumber: number): Promise<PrefixedHexString[]> {
    if (!this.initialized || this.registrationManager.balanceRequired == null) {
      throw new Error('Please run init() first')
    }
    if (blockNumber <= this.lastScannedBlock) {
      throw new Error('Attempt to scan older block, aborting')
    }
    // throttle full state refreshes to every refreshStateTimeoutBlocks blocks
    if (!this._shouldRefreshState(blockNumber)) {
      return []
    }
    const block = await this.contractInteractor.getBlock(blockNumber)
    const currentBlockTimestamp = toNumber(block.timestamp)
    await this.withdrawToOwnerIfNeeded(blockNumber, currentBlockTimestamp)
    this.lastRefreshBlock = blockNumber
    await this._refreshPriorityFee()
    await this.registrationManager.refreshBalance()
    // an under-funded manager cannot register or replenish: mark not ready
    if (!this.registrationManager.balanceRequired.isSatisfied) {
      this.setReadyState(false)
      return []
    }
    return await this._handleChanges(blockNumber, currentBlockTimestamp)
  }
async _refreshPriorityFee (): Promise<void> {
const minMaxPriorityFeePerGas = parseInt(await this.contractInteractor.getMaxPriorityFee())
this.minMaxPriorityFeePerGas = Math.floor(minMaxPriorityFeePerGas * this.config.gasPriceFactor)
if (this.minMaxPriorityFeePerGas === 0) {
this.logger.debug(`Priority fee received from node is 0. Setting priority fee to ${this.config.defaultPriorityFee}`)
this.minMaxPriorityFeePerGas = parseInt(this.config.defaultPriorityFee)
}
if (this.minMaxPriorityFeePerGas > parseInt(this.config.maxGasPrice)) {
throw new Error(`network maxPriorityFeePerGas ${this.minMaxPriorityFeePerGas} is higher than config.maxGasPrice ${this.config.maxGasPrice}`)
}
}
  /**
   * Main per-block state machine: scan new hub/registrar events, handle
   * (re-)registration, boost stuck transactions, replenish balances and
   * decide the overall readiness state.
   * @returns hashes of any transactions created during this pass
   */
  async _handleChanges (currentBlockNumber: number, currentBlockTimestamp: number): Promise<PrefixedHexString[]> {
    let transactionHashes: PrefixedHexString[] = []
    const hubEventsSinceLastScan = await this.getAllHubEventsSinceLastScan()
    await this._updateLatestTxBlockNumber(hubEventsSinceLastScan)
    await this.registrationManager.updateLatestRegistrationTxs(hubEventsSinceLastScan)
    const shouldRegisterAgain =
      await this._shouldRegisterAgain(currentBlockNumber, currentBlockTimestamp, hubEventsSinceLastScan)
    transactionHashes = transactionHashes.concat(
      await this.registrationManager.handlePastEvents(
        hubEventsSinceLastScan, this.lastScannedBlock, currentBlockNumber, currentBlockTimestamp, shouldRegisterAgain))
    await this.transactionManager.removeConfirmedTransactions(currentBlockNumber)
    await this._boostStuckPendingTransactions(currentBlockNumber, currentBlockTimestamp)
    this.lastScannedBlock = currentBlockNumber
    const isRegistered = await this.registrationManager.isRegistered()
    // an unregistered relay cannot serve requests yet
    if (!isRegistered) {
      this.logger.debug('Not registered yet')
      this.setReadyState(false)
      return transactionHashes
    }
    await this.handlePastHubEvents(currentBlockNumber, hubEventsSinceLastScan)
    const workerIndex = 0
    transactionHashes = transactionHashes.concat(await this.replenishServer(workerIndex, currentBlockNumber, currentBlockTimestamp))
    const workerBalance = await this.getWorkerBalance(workerIndex)
    if (workerBalance.lt(toBN(this.config.workerMinBalance))) {
      this.logger.debug('Worker balance too low')
      this.setReadyState(false)
      return transactionHashes
    }
    this.setReadyState(true)
    // leave alerted state once the configured delay has elapsed
    if (this.alerted && this.alertedByTransactionBlockTimestamp + this.config.alertedDelaySeconds < currentBlockTimestamp) {
      this.logger.warn(`Relay exited alerted state. Alerted transaction timestamp: ${this.alertedByTransactionBlockTimestamp}. Current block timestamp: ${currentBlockTimestamp}`)
      this.alerted = false
    }
    return transactionHashes
  }
async getManagerBalance (): Promise<BN> {
return toBN(await this.contractInteractor.getBalance(this.managerAddress, 'pending'))
}
async getWorkerBalance (workerIndex: number): Promise<BN> {
return toBN(await this.contractInteractor.getBalance(this.workerAddress, 'pending'))
}
async _shouldRegisterAgain (currentBlock: number, currentBlockTimestamp: number, hubEventsSinceLastScan: EventData[]): Promise<boolean> {
if (this.config.registrationRateSeconds === 0) {
// this.logger.debug(`_shouldRegisterAgain returns false isPendingActivityTransaction=${isPendingActivityTransaction} registrationBlockRate=${this.config.registrationBlockRate}`)
return false
}
const latestRegisterTxBlockTimestamp = this._getLatestRegisterTxBlockTimestamp()
const isPendingRegistration = await this.txStoreManager.isActionPendingOrRecentlyMined(ServerAction.REGISTER_SERVER, currentBlock, this.config.recentActionAvoidRepeatDistanceBlocks)
const registrationExpired =
this.config.registrationRateSeconds !== 0 &&
(currentBlockTimestamp - latestRegisterTxBlockTimestamp >= this.config.registrationRateSeconds) &&
!isPendingRegistration
const shouldRegister = registrationExpired
if (!registrationExpired) {
this.logger.debug(
`_shouldRegisterAgain registrationExpired=${registrationExpired} currentBlock=${currentBlock} latestTxBlockNumber=${latestRegisterTxBlockTimestamp} registrationBlockRate=${this.config.registrationRateSeconds}`)
}
return shouldRegister
}
_shouldRefreshState (currentBlock: number): boolean {
return currentBlock - this.lastRefreshBlock >= this.config.refreshStateTimeoutBlocks || !this.isReady()
}
/**
 * Dispatches hub events collected since the last scan to their handlers.
 * Only TransactionRejectedByPaymaster and TransactionRelayed are handled;
 * all other event types are ignored.
 */
async handlePastHubEvents (currentBlockNumber: number, hubEventsSinceLastScan: EventData[]): Promise<void> {
  for (const event of hubEventsSinceLastScan) {
    if (event.event === TransactionRejectedByPaymaster) {
      this.logger.debug(`handle TransactionRejectedByPaymaster event: ${JSON.stringify(event)}`)
      await this._handleTransactionRejectedByPaymasterEvent(event.returnValues.paymaster, currentBlockNumber, event.blockNumber)
    } else if (event.event === TransactionRelayed) {
      this.logger.debug(`handle TransactionRelayed event: ${JSON.stringify(event)}`)
      await this._handleTransactionRelayedEvent(event.returnValues.paymaster, event.blockNumber)
    }
  }
}
/**
 * Fetches all hub and registrar events addressed to this manager since the
 * last scanned block (inclusive of the next block after it, up to 'latest').
 */
async getAllHubEventsSinceLastScan (): Promise<EventData[]> {
  const topics = [address2topic(this.managerAddress)]
  const options = {
    fromBlock: this.lastScannedBlock + 1,
    toBlock: 'latest'
  }
  const hubEvents = await this.contractInteractor.getPastEventsForHub(topics, options)
  const regEvents = await this.contractInteractor.getPastEventsForRegistrar(topics, options)
  const events = hubEvents.concat(regEvents)
  if (events.length > 0) {
    this.logger.debug(`Found ${events.length} events since last scan`)
  }
  return events
}
async _handleTransactionRelayedEvent (paymaster: Address, eventBlockNumber: number): Promise<void> {
if (this.config.runPaymasterReputations) {
await this.reputationManager.updatePaymasterStatus(paymaster, true, eventBlockNumber)
}
}
// TODO: do not call this method when events are processed already (stateful server thing)
/**
 * On TransactionRejectedByPaymaster: enter the alerted state, recording the
 * rejecting transaction's block timestamp (the alerted state is left once
 * alertedDelaySeconds have elapsed past it), and penalize the paymaster's
 * reputation when reputations are enabled.
 */
async _handleTransactionRejectedByPaymasterEvent (paymaster: Address, currentBlockNumber: number, eventBlockNumber: number): Promise<void> {
  this.alerted = true
  // Anchor the alert window to the block timestamp of the offending event.
  const block = await this.contractInteractor.getBlock(eventBlockNumber)
  const eventBlockTimestamp = toNumber(block.timestamp)
  this.alertedByTransactionBlockTimestamp = eventBlockTimestamp
  const alertedUntil = this.alertedByTransactionBlockTimestamp + this.config.alertedDelaySeconds
  this.logger.error(`Relay entered alerted state. Block number: ${eventBlockNumber} Block timestamp: ${eventBlockTimestamp}.
Alerted for ${this.config.alertedDelaySeconds} seconds until ${alertedUntil}`)
  if (this.config.runPaymasterReputations) {
    await this.reputationManager.updatePaymasterStatus(paymaster, false, eventBlockNumber)
  }
}
/** Block number of the last mined active transaction, or -1 when none is known. */
_getLatestTxBlockNumber (): number {
  const lastTx = this.lastMinedActiveTransaction
  return lastTx == null ? -1 : lastTx.blockNumber
}
/** Block timestamp of the last mined registration transaction, or -1 when none is known. */
_getLatestRegisterTxBlockTimestamp (): number {
  const lastRegisterTx = this.registrationManager.lastMinedRegisterTransaction
  return lastRegisterTx == null ? -1 : lastRegisterTx.blockTimestamp
}
/**
 * Updates the cached last mined active transaction: prefer the newest event
 * from the recent scan; when still unknown, fall back to querying the node
 * for historical events.
 */
async _updateLatestTxBlockNumber (eventsSinceLastScan: EventData[]): Promise<void> {
  const newestEvent = getLatestEventData(eventsSinceLastScan)
  if (newestEvent != null) {
    this.lastMinedActiveTransaction = newestEvent
    this.logger.debug(`found newer block ${newestEvent.blockNumber}`)
  }
  if (this.lastMinedActiveTransaction == null) {
    this.lastMinedActiveTransaction = await this._queryLatestActiveEvent()
    this.logger.debug(`queried node for last active server event, found in block ${this.lastMinedActiveTransaction?.blockNumber}`)
  }
}
/**
 * Queries the node for all hub events addressed to this manager starting at
 * the configured cold-restart block and returns the most recent one, if any.
 */
async _queryLatestActiveEvent (): Promise<EventData | undefined> {
  const topics = [address2topic(this.managerAddress)]
  const pastEvents: EventData[] = await this.contractInteractor.getPastEventsForHub(topics, {
    fromBlock: this.config.coldRestartLogsFromBlock
  })
  return getLatestEventData(pastEvents)
}
/**
 * Withdraws the manager's excess hub balance to the owner when the relay is
 * ready, withdraw-on-balance is configured, and the hub balance exceeds the
 * configured threshold plus the manager/worker target reserve.
 * Returns the hashes of any sent transactions; errors are logged and yield [].
 */
async withdrawToOwnerIfNeeded (currentBlockNumber: number, currentBlockTimestamp: number): Promise<PrefixedHexString[]> {
  try {
    if (!this.isReady() || this.config.withdrawToOwnerOnBalance == null) {
      return []
    }
    // todo multiply workerTargetBalance by workerCount when adding multiple workers
    const reserveBalance = toBN(this.config.managerTargetBalance).add(toBN(this.config.workerTargetBalance))
    const effectiveWithdrawOnBalance = toBN(this.config.withdrawToOwnerOnBalance).add(reserveBalance)
    const managerHubBalance = await this.relayHubContract.balanceOf(this.managerAddress)
    if (managerHubBalance.lt(effectiveWithdrawOnBalance)) {
      return []
    }
    // Withdraw everything above the reserve.
    const withdrawalAmount = managerHubBalance.sub(reserveBalance)
    const txHashes = await this.registrationManager._sendManagerHubBalanceToOwner(currentBlockNumber, currentBlockTimestamp, withdrawalAmount)
    this.logger.info(`Withdrew ${withdrawalAmount.toString()} to owner`)
    return txHashes
  } catch (e) {
    this.logger.error(`withdrawToOwnerIfNeeded: ${(e as Error).message}`)
    return []
  }
}
/**
 * Resend all outgoing pending transactions with insufficient gas price by all signers (manager, workers)
 * @return the mapping of the previous transaction hash to details of a new boosted transaction
 */
async _boostStuckPendingTransactions (currentBlockNumber: number, currentBlockTimestamp: number): Promise<Map<PrefixedHexString, SignedTransactionDetails>> {
  const boosted = new Map<PrefixedHexString, SignedTransactionDetails>()
  const mergeInto = (source: Map<PrefixedHexString, SignedTransactionDetails>): void => {
    source.forEach((details, txHash) => boosted.set(txHash, details))
  }
  // Boost separately for each signer: the manager first, then every worker.
  mergeInto(await this._boostStuckTransactionsForManager(currentBlockNumber, currentBlockTimestamp))
  for (const workerIndex of [0]) {
    mergeInto(await this._boostStuckTransactionsForWorker(currentBlockNumber, currentBlockTimestamp, workerIndex))
  }
  return boosted
}
/** Boosts underpriced pending transactions signed by the manager address. */
async _boostStuckTransactionsForManager (currentBlockNumber: number, currentBlockTimestamp: number): Promise<Map<PrefixedHexString, SignedTransactionDetails>> {
  return await this.transactionManager.boostUnderpricedPendingTransactionsForSigner(
    this.managerAddress, currentBlockNumber, currentBlockTimestamp, this.minMaxPriorityFeePerGas)
}
/**
 * Boosts underpriced pending transactions signed by the worker address.
 * @param workerIndex currently ignored — the single configured worker address
 *   is always used (only one worker is supported at this time).
 */
async _boostStuckTransactionsForWorker (currentBlockNumber: number, currentBlockTimestamp: number, workerIndex: number): Promise<Map<PrefixedHexString, SignedTransactionDetails>> {
  const signer = this.workerAddress
  return await this.transactionManager.boostUnderpricedPendingTransactionsForSigner(signer, currentBlockNumber, currentBlockTimestamp, this.minMaxPriorityFeePerGas)
}
/** True when the paymaster address (case-insensitive) is in the trusted-paymasters map. */
_isTrustedPaymaster (paymaster: string): boolean {
  const limits = this.trustedPaymastersGasAndDataLimits.get(paymaster.toLowerCase())
  return limits != null
}
/**
 * True when the paymaster address (case-insensitive) is in the configured
 * blacklist. Uses `some` instead of `map(...).includes(...)` to avoid building
 * a fresh lowercased array on every call.
 */
_isBlacklistedPaymaster (paymaster: string): boolean {
  const needle = paymaster.toLowerCase()
  return this.config.blacklistedPaymasters.some(it => it.toLowerCase() === needle)
}
/** Current readiness flag; toggled via setReadyState(). */
isReady (): boolean {
  return this.ready
}
/**
 * Sets the relay's readiness flag. On an actual transition, logs the new
 * state and updates the readiness statistics (cumulative time spent in each
 * state, transition count, and the timestamp of the current state).
 */
setReadyState (isReady: boolean): void {
  if (this.isReady() === isReady) {
    // No transition — just keep the flag in sync.
    this.ready = isReady
    return
  }
  const now = Date.now()
  const timeInPreviousState = now - this.readinessInfo.currentStateTimestamp
  if (isReady) {
    this.logger.warn(chalk.greenBright('Relayer state: READY'))
    this.readinessInfo.totalNotReadyTime += timeInPreviousState
  } else {
    this.readinessInfo.totalReadyTime += timeInPreviousState
    this.logger.warn(chalk.redBright('Relayer state: NOT-READY'))
  }
  this.readinessInfo.currentStateTimestamp = now
  this.readinessInfo.totalReadinessChanges++
  this.ready = isReady
}
}
* @deprecated `kaitian` was deprecated, Please use `sumi` instead.
*/
declare module 'kaitian' {
  // Legacy alias: re-exports the entire 'sumi' API under the old module name.
  export * from 'sumi';
}
declare module 'sumi' {
export * from 'vscode';
import {
ExtensionContext as VSCodeExtensionContext,
Disposable,
TextEditor,
TextEditorEdit,
ExtensionKind,
} from 'vscode';
/**
 * Represents an extension.
 *
 * To get an instance of an `Extension` use [getExtension](#extensions.getExtension).
 */
export interface Extension<T> {
  /**
   * The canonical extension identifier in the form of: `publisher.name`.
   */
  readonly id: string;
  /**
   * The absolute file path of the directory containing this extension.
   */
  readonly extensionPath: string;
  /**
   * `true` if the extension has been activated.
   */
  readonly isActive: boolean;
  /**
   * The parsed contents of the extension's package.json.
   */
  readonly packageJSON: any;
  /**
   * The extension kind describes if an extension runs where the UI runs
   * or if an extension runs where the remote extension host runs. The extension kind
   * is defined in the `package.json` file of extensions but can also be refined
   * via the `remote.extensionKind`-setting. When no remote extension host exists,
   * the value is [`ExtensionKind.UI`](#ExtensionKind.UI).
   */
  extensionKind: ExtensionKind;
  /**
   * The public API exported by this extension. It is an invalid action
   * to access this field before this extension has been activated.
   */
  readonly exports: T;
  /**
   * The public API exported by this extension's node entry. It is an invalid action
   * to access this field before this extension has been activated.
   */
  readonly extendExports: T;
  /**
   * Activates this extension and returns its public API.
   *
   * @return A promise that will resolve when this extension has been activated.
   */
  activate(): Thenable<T>;
}
export namespace extensions {
  /**
   * Get an extension by its full identifier in the form of: `publisher.name`.
   *
   * @param extensionId An extension identifier.
   * @return An extension or `undefined`.
   */
  export function getExtension(extensionId: string): Extension<any> | undefined;
  /**
   * Get an extension by its full identifier in the form of: `publisher.name`.
   *
   * @param extensionId An extension identifier.
   * @return An extension or `undefined`.
   */
  export function getExtension<T>(extensionId: string): Extension<T> | undefined;
  /**
   * All extensions currently known to the system.
   */
  export const all: ReadonlyArray<Extension<any>>;
  /**
   * An event which fires when `extensions.all` changes. This can happen when extensions are
   * installed, uninstalled, enabled or disabled.
   */
  export const onDidChange: Event<void>;
}
export namespace event {
  /**
   * Per-listener result returned to the side that fired the event.
   */
  interface IEventResult<R> {
    /**
     * Present when this listener's invocation failed; describes the error.
     */
    err?: string;
    /**
     * The listener's return value.
     */
    result?: R;
  }
  /**
   * Subscribes to an event.
   * @param eventId event id
   * @param callback listener invoked when the event fires
   */
  export function subscribe(eventId: string, callback: (...args: any[]) => any): IDisposable;
  /**
   * Fires an event.
   * @param eventId event id
   * @param args event arguments
   * @returns Promise resolving to the return values of the listeners that handled the event
   */
  export function fire<R = any>(eventId: string, ...args: any[]): Promise<IEventResult<R>[]>;
}
export namespace layout {
  /**
   * Toggles visibility of the bottom panel.
   */
  export function toggleBottomPanel(): Promise<void>;
  /**
   * Gets the handle of a tab.
   * @param id tab id
   */
  export function getTabbarHandler(id: string): ITabbarHandle;
  /**
   * Gets the handle of a tab. Not limited to handles registered by this
   * extension — the caller assembles the id string itself.
   * @param id tab id
   * @param extensionId owning extension id
   */
  export function getExtensionTabbarHandler(id: string, extensionId?: string): ITabbarHandle;
  /**
   * Toggles visibility of the left panel.
   */
  export function toggleLeftPanel(): Promise<void>;
  /**
   * Toggles visibility of the right panel.
   */
  export function toggleRightPanel(): Promise<void>;
  /**
   * Shows the right panel.
   */
  export function showRightPanel(): Promise<void>;
  /**
   * Hides the right panel.
   */
  export function hideRightPanel(): Promise<void>;
  /**
   * Activates the panel with the given id; `activateKeyBinding` must have
   * been specified when the panel was registered.
   * @param id
   */
  export function activatePanel(id: string): Promise<void>;
  /**
   * Returns whether the bottom panel is visible.
   */
  export function isBottomPanelVisible(): Promise<boolean>;
  /**
   * Returns whether the left panel is visible.
   */
  export function isLeftPanelVisible(): Promise<boolean>;
  /**
   * Returns whether the right panel is visible.
   */
  export function isRightPanelVisible(): Promise<boolean>;
}
export interface IIDEWindowWebviewOptions {
  /**
   * Window width in pixels; defaults to `800`.
   */
  width?: number;
  /**
   * Window height in pixels; defaults to `600`.
   */
  height?: number;
  [key: string]: any;
}
export interface IIDEWindowWebviewEnv {
  /**
   * Environment variables injected into the webview.
   */
  [key: string]: any;
}
export interface IIDEWebviewWindow extends Disposable {
  /**
   * Loads a resource URL inside the webview window.
   * @param url
   */
  loadUrl(url: string): Promise<void>;
  /**
   * Hides the webview window.
   */
  hide(): Promise<void>;
  /**
   * Shows the webview window.
   */
  show(): Promise<void>;
  /**
   * Sets the size of the webview window.
   * @param size
   */
  setSize(size: { width: number; height: number }): Promise<void>;
  /**
   * Sets whether the webview window stays always on top.
   * @param flag
   */
  setAlwaysOnTop(flag: boolean): Promise<void>;
  /**
   * Posts a message to the webview window.
   * @param message
   */
  postMessage(message: any): Promise<void>;
  /**
   * Fired when the webview window posts a message back.
   */
  onMessage: Event<any>;
  /**
   * Fired when the webview window is closed.
   */
  onClosed: Event<void>;
  /**
   * The Electron window's windowId.
   */
  windowId: number;
  /**
   * The Electron window's webContentsId.
   */
  webContentsId: number;
}
export namespace ideWindow {
  /**
   * Reloads the current IDE window.
   */
  export function reloadWindow(): void;
  /**
   * Opens a new window. Only supported in the Electron environment.
   */
  export function createWebviewWindow(
    webviewId: string,
    options?: IIDEWindowWebviewOptions,
    env?: IIDEWindowWebviewEnv,
  ): Promise<IIDEWebviewWindow>;
}
export namespace lifecycle {
  /**
   * Sets the extension directory loaded by the IDE. Electron only; the
   * current window must be reloaded afterwards for the change to take effect.
   * @param extensionDir extension directory
   */
  export function setExtensionDir(extensionDir: string): Promise<void>;
  /**
   * Sets additional extensions for the IDE to load, given as concrete
   * extension paths.
   * @param extensionCandidate extension list
   *
   * @example
   * ```typescript
   * lifecycle.setExtensionCandidate([
   *   { path: '/path/to/ext-1.0', isBuintin: true }
   * ]);
   * ```
   */
  export function setExtensionCandidate(extensionCandidate: ExtensionCandidate[]): Promise<void>;
}
/**
 * Theme-related APIs.
 */
export namespace theme {
  /**
   * Fired when the theme is changed.
   */
  export const onThemeChanged: Event<void>;
  /**
   * Gets the current theme's color values.
   * Format: 'dash-separated color name': 'color value (rgb, rgba or hex)'
   * @example
   * ```json
   * {
   *  'editor-background':'#000000',
   * }
   * ```
   */
  export function getThemeColors(): Promise<{ [key: string]: string }>;
}
/** Which host process an extension entry runs in. */
export enum ExtensionHostKind {
  NODE_HOST = 1,
  WORKER_HOST = 2,
}
export interface ExtensionCandidate {
  // Absolute path of the extension.
  path: string;
  // Whether the extension is built-in.
  // NOTE(review): 'isBuintin' looks like a typo of 'isBuiltin', but it is part
  // of the public API surface and must not be renamed here.
  isBuintin: boolean;
}
export interface IPlainWebviewHandle {
  /**
   * Posts a message into the webview.
   * @param message
   */
  postMessage(message: any): Promise<boolean>;
  /**
   * Receives messages sent from inside the webview.
   * @example
   * ```typescript
   * const handle = getPlainWebviewHandle('id');
   * handle.onMessage((e: any) => {
   *   // your code
   * })
   * ```
   */
  onMessage: Event<any>;
  /**
   * Loads a URL.
   */
  loadUrl(url: string): Promise<void>;
}
/** An object whose lifetime can be ended by calling dispose(). */
export interface IDisposable {
  /**
   * Dispose this object.
   */
  dispose(): void;
}
/** A subscription function: registers a listener and returns a disposable that unsubscribes it. */
export type Event<T> = (listener: (e: T) => any, thisArgs?: any) => IDisposable;
/** A plain webview owned by the extension host that can be revealed in an editor group. */
export interface IExtHostPlainWebview extends IPlainWebviewHandle, IDisposable {
  /** Reveals the webview in the editor group with the given index. */
  reveal(groupIndex: number): Promise<void>;
}
export namespace webview {
  /**
   * Gets the handle of a plain webview created with the <Webview id='xxx'> component.
   * @param id
   */
  export function getPlainWebviewHandle(id: string): IPlainWebviewHandle;
  /**
   * Creates a plain webview component for display in the editor area.
   * @param title
   * @param iconPath
   */
  export function createPlainWebview(title: string, iconPath?: string): IExtHostPlainWebview;
}
/** A proxy object whose members forward method calls. */
interface IProxy {
  [methodName: string]: any; // Function;
}
/** Maps component ids to their method proxies. */
interface IComponentProxy {
  [componentIds: string]: IProxy;
}
export interface ExtensionContext<T = IComponentProxy> extends VSCodeExtensionContext {
  // NOTE(review): presumably exposes the service object's methods to the
  // extension's other entries — confirm against the sumi documentation.
  registerExtendModuleService<S>(service: S): void;
  // Proxies for invoking the extension's registered components.
  componentProxy: T;
}
/** Timer returned by reporter.time(); call timeEnd() to report the elapsed duration. */
export interface IReporterTimer {
  timeEnd(msg?: string, extra?: any): number;
}
export namespace reporter {
  /** Starts a named timer for performance reporting. */
  export function time(name: string): IReporterTimer;
  /** Reports a named point-in-time measurement. */
  export function point(name: string, msg?: string, extra?: any): void;
}
export interface ITabbarHandle {
  setSize(size: number): void;
  /**
   * Changes the tabbar title.
   * @param title the title
   *
   * @example
   * ```ts
   * const tabbar = kaitian.layout.getTabbarHandler('TabbarIconTest');
   * tabbar.setTitle('New Title');
   * ```
   */
  setTitle(title: string): void;
  /**
   * Changes the tabbar icon.
   * @param iconPath icon path
   *
   * @example
   * ```ts
   * const tabbar = kaitian.layout.getTabbarHandler('TabbarIconTest');
   * tabbar.setIcon('http://path/to/icon.svg');
   * ```
   */
  setIcon(iconPath: string): void;
  /**
   * Changes the tabbar badge text.
   * @param badge badge text
   *
   * 
   *
   * @example
   * ```ts
   * const tabbar = kaitian.layout.getTabbarHandler('TabbarIconTest');
   * tabbar.setBadge('12');
   * ```
   */
  setBadge(badge: string): void;
  activate(): void;
  deactivate(): void;
  onActivate: Event<void>;
  onInActivate: Event<void>;
  setVisible(visible: boolean): void;
}
interface IExtensionInfo {
  /**
   * `publisher.name` from package.json.
   * Used for invocations between extensions.
   */
  readonly id: string;
  /**
   * Extension marketplace id.
   */
  readonly extensionId: string;
  /**
   * Whether the extension is built-in.
   */
  readonly isBuiltin: boolean;
}
/** Decides whether the calling extension may execute a permission-guarded command. */
export type PermittedHandler = (extensionInfo: IExtensionInfo, ...args: any[]) => boolean;
export namespace commands {
  /**
   * Registers a command that can be invoked via a keyboard shortcut,
   * a menu item, an action, or directly.
   *
   * Registering a command with an existing command identifier twice
   * will cause an error.
   *
   * @param command A unique identifier for the command.
   * @param callback A command handler function.
   * @param thisArg The `this` context used when invoking the handler function.
   * @return Disposable which unregisters this command on disposal.
   */
  export function registerCommand(command: string, callback: (...args: any[]) => any, thisArg?: any): Disposable;
  /**
   * Executes the command denoted by the given command identifier.
   *
   * * *Note 1:* When executing an editor command not all types are allowed to
   * be passed as arguments. Allowed are the primitive types `string`, `boolean`,
   * `number`, `undefined`, and `null`, as well as [`Position`](#Position), [`Range`](#Range), [`Uri`](#Uri) and [`Location`](#Location).
   * * *Note 2:* There are no restrictions when executing commands that have been contributed
   * by extensions.
   *
   * @param command Identifier of the command to execute.
   * @param rest Parameters passed to the command function.
   * @return A thenable that resolves to the returned value of the given command. `undefined` when
   * the command handler function doesn't return anything.
   */
  export function executeCommand<T>(command: string, ...rest: any[]): Thenable<T | undefined>;
  /**
   * Retrieve the list of all available commands. Commands starting with an underscore are
   * treated as internal commands.
   *
   * @param filterInternal Set `true` to not see internal commands (starting with an underscore)
   * @return Thenable that resolves to a list of command ids.
   */
  export function getCommands(filterInternal?: boolean): Thenable<string[]>;
  /**
   * Registers a text editor command that can be invoked via a keyboard shortcut,
   * a menu item, an action, or directly.
   *
   * Text editor commands are different from ordinary [commands](#commands.registerCommand) as
   * they only execute when there is an active editor when the command is called. Also, the
   * command handler of an editor command has access to the active editor and to an
   * [edit](#TextEditorEdit)-builder.
   *
   * @param command A unique identifier for the command.
   * @param callback A command handler function with access to an [editor](#TextEditor) and an [edit](#TextEditorEdit).
   * @param thisArg The `this` context used when invoking the handler function.
   * @return Disposable which unregisters this command on disposal.
   */
  export function registerTextEditorCommand(
    command: string,
    callback: (textEditor: TextEditor, edit: TextEditorEdit, ...args: any[]) => void,
    thisArg?: any,
  ): Disposable;
  /**
   * Register a command that requires authentication.
   * This command is only registered in the extension host.
   * It does not appear in the menu or the command palette.
   *
   * @param command A unique identifier for the command.
   * @param callback A command handler function.
   * @param isPermitted Checks whether the caller may execute the command. Its first argument is the calling extension's information to help you decide.
   * @return Disposable which unregisters this command on disposal.
   */
  export function registerCommandWithPermit(
    id: string,
    command: <T>(...args: any[]) => T | Promise<T>,
    isPermitted: PermittedHandler,
  ): Disposable;
}
export namespace toolbar {
export interface IToolbarButtonActionHandle {
  /**
   * Fired when the button is clicked.
   */
  onClick: Event<void>;
  /**
   * Sets the button's state.
   * The state must have a matching entry in kaitianContributes.
   * @param state
   * @param title optional new title applied together with the state
   */
  setState(state: string, title?: string): Promise<void>;
  /**
   * When a custom popover component is provided, sets the context state that
   * the popover component receives through its props.
   *
   * @example
   * ```ts
   * // extension
   * const action = await kaitian.toolbar.getToolbarButtonHandle(<action-id>);
   * action.setContext({ name: 'World' });
   *
   * // CustomPopOverComponent
   *
   * const PopOver = (props) => {
   *   return (
   *     <div>Hello {props?.name}</div>
   *   );
   * };
   * ```
   * @param context {any}
   */
  setContext(context: any): void;
  /**
   * Fired when the state changes.
   */
  onStateChanged: Event<{ from: string; to: string }>;
  /**
   * Shows the popover element attached to the button; must be configured in
   * kaitianContributes.
   */
  showPopover(): Promise<void>;
  hidePopover(): Promise<void>;
}
export interface IToolbarSelectActionHandle<T> {
  /**
   * Sets the select's state.
   * The state must have a matching entry in kaitianContributes.
   * @param state
   */
  setState(state: string): Promise<void>;
  /**
   * Replaces the available options.
   * Note: if the previous selection is absent from the new options, the
   * selection changes (the first option is used by default) and onSelect
   * fires as a result.
   * @param options
   */
  setOptions(
    options: {
      iconPath?: string;
      iconMaskMode?: boolean;
      label?: string;
      value: T;
    }[],
  ): void;
  /**
   * Fired when the selected value changes.
   */
  onSelect: Event<T>;
  /**
   * Fired when the state changes.
   */
  onStateChanged: Event<{ from: string; to: string }>;
  /**
   * Changes the selection programmatically.
   * @param value
   */
  setSelect(value: T): Promise<void>;
  /**
   * Returns the currently selected value.
   */
  getValue(): T;
}
/** Fields shared by all toolbar action contributions. */
export interface IToolbarActionBasicContribution {
  id: string;
  // Preferred placement (both fields optional).
  preferredPosition?: {
    location?: string;
    group?: string;
  };
  // Fixed placement (location and group are both required).
  strictPosition?: {
    location: string;
    group: string;
  };
  description: string;
}
export interface IToolbarSelectStyle {
  // background color
  backgroundColor?: string;
  // foreground color of the dropdown label
  labelForegroundColor?: string;
  // icon foreground color
  iconForegroundColor?: string;
  // width
  width?: number;
  // minimum width
  minWidth?: number;
}
export interface IToolbarActionBtnStyle {
  // Explicit button width.
  // When omitted, the default left/right padding of about 8px applies.
  width?: number;
  // Explicit button height.
  // Defaults to 22.
  height?: number;
  // Whether to show the title.
  showTitle?: boolean;
  // icon foreground color
  iconForeground?: string;
  // icon background color
  iconBackground?: string;
  // title foreground color
  titleForeground?: string;
  // title background color
  titleBackground?: string;
  // title font size
  titleSize?: string;
  // icon size
  iconSize?: string;
  // overall background color
  background?: string;
  // Style variant:
  // 'inline' renders without an outer border,
  // 'button' renders as a button.
  btnStyle?: 'inline' | 'button';
  // Text layout for the 'button' variant:
  // vertical: icon on top, text below
  // horizontal: icon on the left, text on the right
  btnTitleStyle?: 'vertical' | 'horizontal';
}
export interface IToolbarPopoverStyle {
  /**
   * Whether the popover appears above or below; defaults to below.
   * // TODO: only 'bottom' is supported for now;
   */
  position?: 'top' | 'bottom';
  /**
   * ```text
   * Offset from the right edge in px; defaults to 30.
   *                   [ button ]
   *                       /\ |<-offset->|
   *    [------   -------------]
   *    [                      ]
   *    [       popover        ]
   *    [                      ]
   *    [______________________]
   * ```
   */
  horizontalOffset?: number;
  /**
   * Hide automatically when clicking outside the component; defaults to true.
   */
  hideOnClickOutside?: boolean;
  /**
   * Omit the default container styling (arrow, shadow, background color, etc.).
   */
  noContainerStyle?: boolean;
  /**
   * Minimum width of the popover.
   */
  minWidth?: number;
  /**
   * Minimum height of the popover.
   */
  minHeight?: number;
}
/** Declares a button-type toolbar action. */
export interface IToolbarButtonContribution extends IToolbarActionBasicContribution {
  type: 'button';
  command?: string;
  title: string;
  iconPath: string;
  iconMaskMode?: boolean;
  popoverComponent?: string;
  popoverStyle?: IToolbarPopoverStyle;
  // Per-state title/icon/style overrides, keyed by state name.
  states?: {
    [key: string]: {
      title?: string;
      iconPath?: string;
      iconMaskMode?: boolean;
    } & IToolbarActionBtnStyle;
  };
  defaultState?: string;
}
/** Declares a select-type toolbar action. */
export interface IToolbarSelectContribution<T = any> extends IToolbarActionBasicContribution {
  type: 'select';
  command?: string;
  options: {
    iconPath?: string;
    iconMaskMode?: boolean;
    label?: string;
    value: T;
  }[];
  defaultValue: T;
  // Property name used when comparing option values for equality.
  optionEqualityKey?: string;
  // Per-state style overrides, keyed by state name.
  states?: {
    [key: string]: IToolbarSelectStyle;
  };
  defaultState?: string;
}
/**
 * Registers a select-type toolbar action.
 * @param contribution IToolbarSelectContribution
 * @returns a handle for driving and reacting to the corresponding select control
 */
export function registerToolbarAction<T>(
  contribution: IToolbarSelectContribution<T>,
): Promise<IToolbarSelectActionHandle<T>>;
/**
 * Registers a button-type toolbar action.
 * @param contribution IToolbarButtonContribution
 * @returns a handle for driving and reacting to the corresponding button control
 */
export function registerToolbarAction(
  contribution: IToolbarButtonContribution,
): Promise<IToolbarButtonActionHandle>;
/**
 * Gets a toolbar action handle for driving and reacting to a toolbar button.
 * @param id
 */
export function getToolbarActionButtonHandle(id: string): Promise<IToolbarButtonActionHandle>;
/**
 * Gets a toolbar action handle for driving and reacting to a toolbar select.
 * @param id
 */
export function getToolbarActionSelectHandle<T = any>(id: string): Promise<IToolbarSelectActionHandle<T>>;
}
}
import * as React from 'react';
import { connect } from 'react-redux';
import { Dispatch } from 'redux';
import { removeActivity } from '../../redux/Actions';
import {
cadLog,
getSetting,
returnOptionalCookieAPIAttributes,
siteDataToBrowser,
throwErrorNotification,
} from '../../services/Libs';
import { FilterOptions } from '../../typings/Enums';
import { ReduxAction } from '../../typings/ReduxConstants';
import IconButton from './IconButton';
const createSummary = (cleanupObj: ActivityLog) => {
const domainSet = new Set<string>();
Object.values(cleanupObj.storeIds).forEach((value) => {
value.forEach((deletedLog) => domainSet.add(deletedLog.cookie.hostname));
});
if (cleanupObj.browsingDataCleanup) {
Object.values(cleanupObj.browsingDataCleanup).forEach((sd) => {
sd && sd.forEach((domain) => domainSet.add(domain));
});
}
return {
total: domainSet.size.toString(),
domains: Array.from(domainSet).slice(0, 5).join(', '),
};
};
// Groups the clean-reason objects by cookie hostname and renders one danger
// alert per domain, listing that domain's deleted cookie names and the
// cleanup reason of its first entry.
const createDetailedSummary = (cleanReasonObjects: CleanReasonObject[]) => {
  const mapDomainToCookieNames: { [domain: string]: CleanReasonObject[] } = {};
  cleanReasonObjects.forEach((obj) => {
    if (mapDomainToCookieNames[obj.cookie.hostname]) {
      mapDomainToCookieNames[obj.cookie.hostname].push(obj);
    } else {
      mapDomainToCookieNames[obj.cookie.hostname] = [obj];
    }
  });
  return Object.entries(mapDomainToCookieNames).map(
    ([domain, cleanReasonObj]) => {
      return (
        <div
          style={{
            marginLeft: '10px',
          }}
          className={`alert alert-danger`}
          key={`${domain}`}
          role="alert"
        >
          {`${domain} (${cleanReasonObj
            .map((obj) => obj.cookie.name)
            .join(', ')}): ${returnReasonMessages(cleanReasonObj[0])}`}
        </div>
      );
    },
  );
};
// Produces the localized explanation for why a cookie was cleaned or kept,
// substituting hostname / main domain / matched expression into the i18n
// message that shares the reason's name. Unknown reasons render as ''.
const returnReasonMessages = (cleanReasonObject: CleanReasonObject) => {
  const { reason } = cleanReasonObject;
  const { hostname, mainDomain } = cleanReasonObject.cookie;
  const matchedExpression = cleanReasonObject.expression;
  switch (reason) {
    // Reasons parameterized by the cookie's hostname.
    case ReasonClean.CADSiteDataCookie:
    case ReasonClean.ExpiredCookie: {
      return browser.i18n.getMessage(reason, [hostname]);
    }
    // Kept because a tab for the cookie's main domain is still open.
    case ReasonKeep.OpenTabs: {
      return browser.i18n.getMessage(reason, [mainDomain]);
    }
    case ReasonClean.NoMatchedExpression:
    case ReasonClean.StartupNoMatchedExpression: {
      return browser.i18n.getMessage(reason, [hostname]);
    }
    // Reasons parameterized by the matched expression text.
    case ReasonClean.StartupCleanupAndGreyList: {
      return browser.i18n.getMessage(reason, [
        matchedExpression ? matchedExpression.expression : '',
      ]);
    }
    // Also reports whether the expression came from the grey or white list.
    case ReasonClean.MatchedExpressionButNoCookieName:
    case ReasonKeep.MatchedExpression: {
      return browser.i18n.getMessage(reason, [
        matchedExpression ? matchedExpression.expression : '',
        matchedExpression && matchedExpression.listType === ListType.GREY
          ? browser.i18n.getMessage('greyListWordText')
          : browser.i18n.getMessage('whiteListWordText'),
      ]);
    }
    default:
      return '';
  }
};
// Callback invoked with the activity-log entry to act upon (restore/remove).
type ActivityAction = (log: ActivityLog) => void;
// Props derived from the redux store.
interface StateProps {
  activityLog: ReadonlyArray<ActivityLog>;
  cache: CacheMap;
  state: State;
}
// Props bound to dispatch.
interface DispatchProps {
  onRemoveActivity: ActivityAction;
}
// Props supplied by the parent component.
// NOTE(review): decisionFilter is not referenced anywhere in this file —
// confirm it is still used by callers.
interface OwnProps {
  decisionFilter: FilterOptions;
  numberToShow?: number;
}
type ActivityTableProps = OwnProps & StateProps & DispatchProps;
/**
 * Re-creates every cookie recorded in an activity-log entry via
 * browser.cookies.set, then removes the entry from the log.
 *
 * Cookies from file:// URLs and cookies with an empty domain cannot be set
 * through the API and are skipped with a debug warning. If any set() call
 * rejects, the remaining sets are abandoned and the log entry is kept.
 */
const restoreCookies = async (
  state: State,
  log: ActivityLog,
  onRemoveActivity: ActivityAction,
) => {
  const debug = getSetting(state, SettingID.DEBUG_MODE) as boolean;
  const cleanReasonObjsArrays = Object.values(log.storeIds);
  const promiseArr = [];
  cadLog(
    {
      msg: `ActivityTable.restoreCookies: Restoring Cookies for triggered ActivityLog entry`,
      x: log,
    },
    debug,
  );
  for (const cleanReasonObjs of cleanReasonObjsArrays) {
    for (const obj of cleanReasonObjs) {
      // Cannot set cookies from file:// protocols
      if (obj.cookie.preparedCookieDomain.startsWith('file:')) {
        cadLog(
          {
            msg:
              'Cookie appears to come from a local file.  Cannot be restored normally.',
            type: 'warn',
            x: obj.cookie,
          },
          debug,
        );
        continue;
      }
      // Silently ignore cookies with no domain
      if (obj.cookie.preparedCookieDomain.trim() === '') {
        cadLog(
          {
            msg: 'Cookie appears to have no domain.  Cannot restore.',
            type: 'warn',
            x: obj.cookie,
          },
          debug,
        );
        continue;
      }
      const {
        domain,
        expirationDate,
        firstPartyDomain,
        hostOnly,
        httpOnly,
        name,
        sameSite,
        secure,
        storeId,
        value,
      } = obj.cookie;
      // Prefix fun:  https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#Cookie_prefixes
      // Since the cookies returned through web-extension API should already be validated,
      // we are not doing any validations for __Secure- cookies.
      // For cookies starting with __Secure-, secure attribute should already be true,
      // and url should already start with https://
      // Only modify cookie names starting with __Host- as it shouldn't have domain.
      const cookieProperties = {
        ...returnOptionalCookieAPIAttributes(state, {
          firstPartyDomain,
        }),
        domain: name.startsWith('__Host-') || hostOnly ? undefined : domain,
        expirationDate,
        httpOnly,
        name,
        sameSite,
        secure,
        storeId,
        url: obj.cookie.preparedCookieDomain,
        value,
      };
      promiseArr.push(browser.cookies.set(cookieProperties));
    }
  }
  try {
    // If any error/rejection was thrown, the rest of the promises are not processed.
    // FUTURE:  Use https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled to process all regardless of rejection.  ** Perhaps too early to implement at this time 2020-May-03 **
    await Promise.all(promiseArr).catch((e) => {
      throwErrorNotification(
        e,
        getSetting(state, SettingID.NOTIFY_DURATION) as number,
      );
      cadLog(
        {
          msg:
            'An Error occurred while trying to restore cookie(s).  The rest of the cookies to restore are not processed.',
          type: 'error',
          x: e,
        },
        debug,
      );
      throw e;
    });
  } catch (e) {
    // eslint-disable-next-line no-console
    console.error(e);
    return;
  }
  // Restore didn't fail
  onRemoveActivity(log);
};
/**
 * Renders the cleanup activity log as a Bootstrap accordion. Each entry shows
 * a summary line with restore/remove buttons and expands to per-container
 * cookie details plus any site-data cleanup domains. Shows a placeholder
 * alert when the log is empty.
 */
const ActivityTable: React.FunctionComponent<ActivityTableProps> = (props) => {
  const { activityLog, cache, numberToShow, state, onRemoveActivity } = props;
  if (props.activityLog.length === 0) {
    return (
      <div className="alert alert-primary" role="alert">
        <i>
          {browser.i18n.getMessage('noCleanupLogText')}
          <br /> {browser.i18n.getMessage('noPrivateLogging')}
        </i>
      </div>
    );
  }
  // Show only the most recent entries (default 10).
  const filtered = activityLog.slice(0, numberToShow || 10);
  return (
    <div
      className="accordion"
      id="accordion"
      style={{
        marginBottom: '10px',
      }}
    >
      {filtered.map((log, index) => {
        const summary = createSummary(log);
        const message = browser.i18n.getMessage('notificationContent', [
          log.recentlyCleaned.toString(),
          summary.total,
          summary.domains !== '' ? summary.domains : '(Private)',
        ]);
        const browsingDataEntries = Object.entries(
          log.browsingDataCleanup || {},
        );
        const storeIdEntries = Object.entries(log.storeIds);
        return (
          <div key={index} className="card">
            <div
              style={{ display: 'flex' }}
              className="card-header"
              id={`heading${index}`}
            >
              {(log.recentlyCleaned > 0 && (
                <IconButton
                  className={'btn-primary mr-auto'}
                  iconName={'undo'}
                  onClick={() => restoreCookies(state, log, onRemoveActivity)}
                  title={browser.i18n.getMessage('restoreText')}
                />
              )) || <div className={'mr-auto'} style={{ minWidth: '42px' }} />}
              <h5
                className="mb-0"
                style={{
                  overflowX: 'hidden',
                }}
              >
                <button
                  className="btn btn-link collapsed"
                  type="button"
                  data-toggle="collapse"
                  data-target={`#collapse${index}`}
                  aria-expanded="false"
                  aria-controls={`collapse${index}`}
                >
                  {`${new Date(log.dateTime).toLocaleString([], {
                    timeZoneName: 'short',
                  })} - ${message} ...`}
                </button>
              </h5>
              <IconButton
                className={'btn-outline-danger ml-auto'}
                iconName={'trash'}
                onClick={() => onRemoveActivity(log)}
                title={browser.i18n.getMessage('removeActivityLogEntryText')}
              />
            </div>
            <div
              id={`collapse${index}`}
              className="collapse"
              aria-labelledby={`heading${index}`}
              data-parent="#accordion"
            >
              <div className="card-body">
                {browsingDataEntries.map(([siteData, domains]) => {
                  // Skip site-data types that had no domains cleaned.
                  if (!domains || domains.length === 0) return '';
                  return (
                    <div
                      key={`${siteData}-${log.dateTime}`}
                      style={{
                        marginLeft: '10px',
                      }}
                      className={`alert alert-info`}
                      role="alert"
                    >
                      {browser.i18n.getMessage(
                        'activityLogSiteDataDomainsText',
                        [
                          browser.i18n.getMessage(
                            `${siteDataToBrowser(
                              siteData as SiteDataType,
                            )}Text`,
                          ),
                          domains.join(', '),
                        ],
                      )}
                    </div>
                  );
                })}
                {storeIdEntries.map(([storeId, cleanReasonObjects]) => {
                  return (
                    <div key={`${storeId}-${log.dateTime}`}>
                      {(storeIdEntries.length > 1 ||
                        getSetting(state, SettingID.CONTEXTUAL_IDENTITIES)) && (
                        <h6>
                          {cache[storeId] !== undefined
                            ? `${cache[storeId]} `
                            : ''}
                          ({storeId})
                        </h6>
                      )}
                      {createDetailedSummary(cleanReasonObjects)}
                    </div>
                  );
                })}
              </div>
            </div>
          </div>
        );
      })}
    </div>
  );
};
// Maps the Redux store onto ActivityTable's props: the activity log, the
// container-name cache, and the full state object (some callbacks need it).
const mapStateToProps = (state: State) => ({
  activityLog: state.activityLog,
  cache: state.cache,
  state,
});
// Maps dispatch to the single callback ActivityTable needs: removing one
// activity-log entry via the removeActivity action creator.
const mapDispatchToProps = (dispatch: Dispatch<ReduxAction>) => ({
  onRemoveActivity(activity: ActivityLog) {
    dispatch(removeActivity(activity));
  },
});
// Connect the presentational ActivityTable component to the Redux store.
export default connect(mapStateToProps, mapDispatchToProps)(ActivityTable);
namespace ts {
describe("unittests:: createSetShim", () => {
// Keys used to exercise iteration order. Deliberately NOT sorted, so the
// assertions observe insertion order rather than accidental key ordering.
const stringKeys = [
    "1",
    "3",
    "2",
    "4",
    "0",
    "999",
    "A",
    "B",
    "C",
    "Z",
    "X",
    "X1",
    "X2",
    "Y"
];
// Mirrors stringKeys positionally, but with mixed-type values (booleans,
// numbers, objects with custom toString, null, undefined) to exercise
// non-string keys in the shim.
const mixedKeys = [
    true,
    3,
    { toString() { return "2"; } },
    "4",
    false,
    null, // eslint-disable-line no-null/no-null
    undefined,
    "B",
    { toString() { return "C"; } },
    "Z",
    "X",
    { toString() { return "X1"; } },
    "X2",
    "Y"
];
/**
 * Drives a Set (native or shim) through a scripted sequence of mutations
 * performed *during* iteration, and records every visited value.
 * Returns the visited values joined by ";" so callers can compare the
 * iteration order of the shim against a native Set.
 */
function testSetIterationAddedValues<K>(keys: K[], set: Set<K>, useForEach: boolean): string {
    let output = "";
    // Seed the set with the first four keys before iterating.
    for (const seed of [keys[0], keys[1], keys[2], keys[3]]) {
        set.add(seed);
    }
    let reAddedSecondKey = false;
    const visit = (key: K) => {
        output += `${key};`;
        if (key === keys[0]) {
            // Re-add an existing key, add a brand-new key ("0"), and re-add
            // the current tail; the new key must be yielded later on.
            set.add(keys[0]);
            set.add(keys[4]);
            set.add(keys[3]);
        }
        else if (key === keys[1]) {
            if (!reAddedSecondKey) {
                reAddedSecondKey = true;
                // Delete + re-insert this key so it moves to the end of the
                // iteration order, and re-add an existing key (no-op move).
                set.delete(keys[1]);
                set.add(keys[1]);
                set.add(keys[2]);
            }
            else {
                // An entry appended while visiting the current last entry
                // must still be visited.
                set.add(keys[5]);
            }
        }
        else if (key === keys[5]) {
            // Grow the set so the subsequent clear() has something to wipe.
            set.add(keys[6]);
            set.add(keys[7]);
            set.add(keys[8]);
        }
        else if (key === keys[6]) {
            // clear() must behave exactly like deleting every key; entries
            // added afterwards are still visited.
            set.clear();
            set.add(keys[9]);
        }
        else if (key === keys[9]) {
            // Entries added and immediately removed must be skipped.
            set.add(keys[10]);
            set.add(keys[11]);
            set.add(keys[12]);
            set.delete(keys[11]);
            set.delete(keys[12]);
            set.add(keys[13]);
        }
    };
    if (useForEach) {
        set.forEach(visit);
    }
    else {
        // Drain the iterator manually (the shim's values() result may not
        // itself be iterable in this environment).
        const iterator = set.values();
        for (let step = iterator.next(); !step.done; step = iterator.next()) {
            visit(step.value);
        }
    }
    return output;
}
// The shim constructor under test; (re)created before each test so tests
// cannot leak state into one another.
let SetShim!: SetConstructor;
beforeEach(() => {
    function getIterator<I extends readonly any[] | ReadonlySet<any> | ReadonlyESMap<any, any> | undefined>(iterable: I): Iterator<
        I extends ReadonlyESMap<infer K, infer V> ? [K, V] :
        I extends ReadonlySet<infer T> ? T :
        I extends readonly (infer T)[] ? T :
        I extends undefined ? undefined :
        never>;
    function getIterator(iterable: readonly any[] | ReadonlySet<any> | ReadonlyESMap<any, any> | undefined): Iterator<any> | undefined {
        // override `ts.getIterator` with a version that allows us to iterate over a `SetShim` in an environment with a native `Set`.
        if (iterable instanceof SetShim) return iterable.values();
        return ts.getIterator(iterable);
    }
    SetShim = ShimCollections.createSetShim(getIterator);
});
afterEach(() => {
    SetShim = undefined!;
});
it("iterates values in insertion order and handles changes with string keys", () => {
    const expectedResult = "1;3;2;4;0;3;999;A;Z;X;Y;";
    // First, ensure the test actually has the same behavior as a native Set.
    let nativeSet = new Set<string>();
    const nativeSetForEachResult = testSetIterationAddedValues(stringKeys, nativeSet, /* useForEach */ true);
    assert.equal(nativeSetForEachResult, expectedResult, "nativeSet-forEach");
    nativeSet = new Set<string>();
    const nativeSetIteratorResult = testSetIterationAddedValues(stringKeys, nativeSet, /* useForEach */ false);
    assert.equal(nativeSetIteratorResult, expectedResult, "nativeSet-iterator");
    // Then, test the set shim.
    let localShimSet = new SetShim<string>();
    const shimSetForEachResult = testSetIterationAddedValues(stringKeys, localShimSet, /* useForEach */ true);
    assert.equal(shimSetForEachResult, expectedResult, "shimSet-forEach");
    localShimSet = new SetShim<string>();
    const shimSetIteratorResult = testSetIterationAddedValues(stringKeys, localShimSet, /* useForEach */ false);
    assert.equal(shimSetIteratorResult, expectedResult, "shimSet-iterator");
});
it("iterates values in insertion order and handles changes with mixed-type keys", () => {
    // Expected string differs from the string-key case because mixed keys
    // stringify differently (true/false/null/undefined, toString results).
    const expectedResult = "true;3;2;4;false;3;null;undefined;Z;X;Y;";
    // First, ensure the test actually has the same behavior as a native Set.
    let nativeSet = new Set<any>();
    const nativeSetForEachResult = testSetIterationAddedValues(mixedKeys, nativeSet, /* useForEach */ true);
    assert.equal(nativeSetForEachResult, expectedResult, "nativeSet-forEach");
    nativeSet = new Set<any>();
    const nativeSetIteratorResult = testSetIterationAddedValues(mixedKeys, nativeSet, /* useForEach */ false);
    assert.equal(nativeSetIteratorResult, expectedResult, "nativeSet-iterator");
    // Then, test the set shim.
    let localshimSet = new SetShim<any>();
    const shimSetForEachResult = testSetIterationAddedValues(mixedKeys, localshimSet, /* useForEach */ true);
    assert.equal(shimSetForEachResult, expectedResult, "shimSet-forEach");
    localshimSet = new SetShim<any>();
    const shimSetIteratorResult = testSetIterationAddedValues(mixedKeys, localshimSet, /* useForEach */ false);
    assert.equal(shimSetIteratorResult, expectedResult, "shimSet-iterator");
});
// The remaining cases cover the individual Set API members of the shim.
it("create from Array", () => {
    const set = new SetShim(["a"]);
    assert.equal(set.size, 1);
    assert.isTrue(set.has("a"));
});
it("create from set", () => {
    const set1 = new SetShim(["a"]);
    const set2 = new SetShim(set1);
    assert.equal(set1.size, 1);
    assert.equal(set2.size, 1);
    assert.isTrue(set2.has("a"));
});
it("add when not present", () => {
    const set = new SetShim<string>();
    const result = set.add("a");
    assert.equal(set.size, 1);
    // add() must return the set itself (chaining contract).
    assert.strictEqual(result, set);
    assert.isTrue(set.has("a"));
});
it("add when present", () => {
    const set = new SetShim<string>();
    set.add("a");
    const result = set.add("a");
    // Re-adding an existing value must not grow the set.
    assert.equal(set.size, 1);
    assert.strictEqual(result, set);
    assert.isTrue(set.has("a"));
});
it("has when not present", () => {
    const set = new SetShim<string>();
    assert.isFalse(set.has("a"));
});
it("has when present", () => {
    const set = new SetShim<string>();
    set.add("a");
    assert.isTrue(set.has("a"));
});
it("delete when not present", () => {
    const set = new SetShim<string>();
    assert.isFalse(set.delete("a"));
});
it("delete when present", () => {
    const set = new SetShim<string>();
    set.add("a");
    assert.isTrue(set.delete("a"));
});
it("delete twice when present", () => {
    const set = new SetShim<string>();
    set.add("a");
    assert.isTrue(set.delete("a"));
    // Second delete must report the value is already gone.
    assert.isFalse(set.delete("a"));
});
it("remove only item and iterate", () => {
    const set = new SetShim<string>();
    set.add("a");
    set.delete("a");
    const actual = arrayFrom(set.keys());
    assert.deepEqual(actual, []);
});
it("remove first item and iterate", () => {
    const set = new SetShim<string>();
    set.add("a");
    set.add("c");
    set.delete("a");
    assert.deepEqual(arrayFrom(set.keys()), ["c"]);
    assert.deepEqual(arrayFrom(set.values()), ["c"]);
    assert.deepEqual(arrayFrom(set.entries()), [["c", "c"]]);
});
it("remove last item and iterate", () => {
    const set = new SetShim<string>();
    set.add("a");
    set.add("c");
    set.delete("c");
    assert.deepEqual(arrayFrom(set.keys()), ["a"]);
    assert.deepEqual(arrayFrom(set.values()), ["a"]);
    assert.deepEqual(arrayFrom(set.entries()), [["a", "a"]]);
});
it("remove middle item and iterate", () => {
    const set = new SetShim<string>();
    set.add("a");
    set.add("c");
    set.add("e");
    set.delete("c");
    assert.deepEqual(arrayFrom(set.keys()), ["a", "e"]);
    assert.deepEqual(arrayFrom(set.values()), ["a", "e"]);
    assert.deepEqual(arrayFrom(set.entries()), [["a", "a"], ["e", "e"]]);
});
it("keys", () => {
    const set = new SetShim<string>();
    set.add("c");
    set.add("a");
    assert.deepEqual(arrayFrom(set.keys()), ["c", "a"]);
});
it("values", () => {
    const set = new SetShim<string>();
    set.add("c");
    set.add("a");
    assert.deepEqual(arrayFrom(set.values()), ["c", "a"]);
});
it("entries", () => {
    const set = new SetShim<string>();
    set.add("c");
    set.add("a");
    // For a Set, entries() yields [value, value] pairs like the native API.
    assert.deepEqual(arrayFrom(set.entries()), [["c", "c"], ["a", "a"]]);
});
it("forEach", () => {
    const set = new SetShim<string>();
    set.add("c");
    set.add("a");
    const actual: [string, string][] = [];
    // forEach passes (value, key) with value === key for a Set.
    set.forEach((value, key) => actual.push([key, value]));
    assert.deepEqual(actual, [["c", "c"], ["a", "a"]]);
});
});
} | the_stack |
import {
AlgorithmSuite,
AlgorithmSuiteIdentifier,
WebCryptoEncryption,
WebCryptoHash,
WebCryptoECDHCurve,
AlgorithmSuiteTypeWebCrypto,
MessageFormat,
Commitment,
AesGcm,
AlgBasic,
AlgKdf,
AlgKdfSigned,
AlgCommitted,
AlgCommittedSigned,
} from './algorithm_suites'
import { needs } from './needs'
/* WebCrypto refinements of the generic algorithm-suite shapes: each narrows
 * the relevant members (encryption, hashes, curves) to the WebCrypto
 * string-literal types so suites cannot reference unsupported primitives.
 */
interface WebCryptoAesGcm extends AesGcm {
  encryption: WebCryptoEncryption
}
interface WebCryptoAlgBasic extends AlgBasic {
  encryption: WebCryptoEncryption
}
interface WebCryptoAlgKdf extends AlgKdf {
  encryption: WebCryptoEncryption
  kdfHash: WebCryptoHash
}
interface WebCryptoAlgKdfSigned extends AlgKdfSigned {
  encryption: WebCryptoEncryption
  kdfHash: WebCryptoHash
  signatureCurve: WebCryptoECDHCurve
  signatureHash: WebCryptoHash
}
interface WebCryptoAlgCommitted extends AlgCommitted {
  encryption: WebCryptoEncryption
  kdfHash: WebCryptoHash
  commitmentHash: WebCryptoHash
}
interface WebCryptoAlgCommittedSigned extends AlgCommittedSigned {
  encryption: WebCryptoEncryption
  kdfHash: WebCryptoHash
  signatureCurve: WebCryptoECDHCurve
  signatureHash: WebCryptoHash
}
/* Union of every WebCrypto suite shape; all members are deeply readonly. */
type WebCryptoAlgUnion = Readonly<
  | WebCryptoAlgBasic
  | WebCryptoAlgKdf
  | WebCryptoAlgKdfSigned
  | WebCryptoAlgCommitted
  | WebCryptoAlgCommittedSigned
>
/* Intersection used by the class below: the common AES-GCM members are
 * required, every variant-specific member is optional.
 */
type WebCryptoAlgorithmSuiteValues = WebCryptoAesGcm &
  Partial<Omit<AlgBasic, keyof WebCryptoAesGcm>> &
  Partial<Omit<AlgKdf, keyof WebCryptoAesGcm>> &
  Partial<Omit<AlgKdfSigned, keyof WebCryptoAesGcm>> &
  Partial<Omit<AlgCommitted, keyof WebCryptoAesGcm>> &
  Partial<Omit<AlgCommittedSigned, keyof WebCryptoAesGcm>>
/* References to https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/algorithms-reference.html
 * These are the composed parameters for each algorithm suite specification for
 * for the WebCrypto environment.
 */
const webCryptoAlgAes128GcmIv12Tag16: WebCryptoAlgBasic = {
  id: AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16,
  messageFormat: MessageFormat.V1,
  encryption: 'AES-GCM',
  keyLength: 128,
  ivLength: 12,
  tagLength: 128,
  cacheSafe: false,
  commitment: 'NONE',
}
/* Web browsers do not support 192 bit key lengths at this time. */
const webCryptoAlgAes192GcmIv12Tag16: WebCryptoAlgBasic = {
  id: AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16,
  messageFormat: MessageFormat.V1,
  encryption: 'AES-GCM',
  keyLength: 192,
  ivLength: 12,
  tagLength: 128,
  cacheSafe: false,
  commitment: 'NONE',
}
const webCryptoAlgAes256GcmIv12Tag16: WebCryptoAlgBasic = {
  id: AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16,
  messageFormat: MessageFormat.V1,
  encryption: 'AES-GCM',
  keyLength: 256,
  ivLength: 12,
  tagLength: 128,
  cacheSafe: false,
  commitment: 'NONE',
}
/* Suites with an HKDF key derivation step are cache safe: the derived data
 * key differs per message even when the data key is reused.
 */
const webCryptoAlgAes128GcmIv12Tag16HkdfSha256: WebCryptoAlgKdf = {
  id: AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16_HKDF_SHA256,
  messageFormat: MessageFormat.V1,
  encryption: 'AES-GCM',
  keyLength: 128,
  ivLength: 12,
  tagLength: 128,
  kdf: 'HKDF',
  kdfHash: 'SHA-256',
  cacheSafe: true,
  commitment: 'NONE',
}
/* Web browsers do not support 192 bit key lengths at this time. */
const webCryptoAlgAes192GcmIv12Tag16HkdfSha256: WebCryptoAlgKdf = {
  id: AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA256,
  messageFormat: MessageFormat.V1,
  encryption: 'AES-GCM',
  keyLength: 192,
  ivLength: 12,
  tagLength: 128,
  kdf: 'HKDF',
  kdfHash: 'SHA-256',
  cacheSafe: true,
  commitment: 'NONE',
}
const webCryptoAlgAes256GcmIv12Tag16HkdfSha256: WebCryptoAlgKdf = {
  id: AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA256,
  messageFormat: MessageFormat.V1,
  encryption: 'AES-GCM',
  keyLength: 256,
  ivLength: 12,
  tagLength: 128,
  kdf: 'HKDF',
  kdfHash: 'SHA-256',
  cacheSafe: true,
  commitment: 'NONE',
}
/* Signed suites add an ECDSA signature over the message. */
const webCryptoAlgAes128GcmIv12Tag16HkdfSha256EcdsaP256: WebCryptoAlgKdfSigned =
  {
    id: AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256,
    messageFormat: MessageFormat.V1,
    encryption: 'AES-GCM',
    keyLength: 128,
    ivLength: 12,
    tagLength: 128,
    kdf: 'HKDF',
    kdfHash: 'SHA-256',
    cacheSafe: true,
    signatureCurve: 'P-256',
    signatureHash: 'SHA-256',
    commitment: 'NONE',
  }
/* Web browsers do not support 192 bit key lengths at this time. */
const webCryptoAlgAes192GcmIv12Tag16HkdfSha384EcdsaP384: WebCryptoAlgKdfSigned =
  {
    id: AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384,
    messageFormat: MessageFormat.V1,
    encryption: 'AES-GCM',
    keyLength: 192,
    ivLength: 12,
    tagLength: 128,
    kdf: 'HKDF',
    kdfHash: 'SHA-384',
    cacheSafe: true,
    signatureCurve: 'P-384',
    signatureHash: 'SHA-384',
    commitment: 'NONE',
  }
const webCryptoAlgAes256GcmIv12Tag16HkdfSha384EcdsaP384: WebCryptoAlgKdfSigned =
  {
    id: AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384,
    messageFormat: MessageFormat.V1,
    encryption: 'AES-GCM',
    keyLength: 256,
    ivLength: 12,
    tagLength: 128,
    kdf: 'HKDF',
    kdfHash: 'SHA-384',
    cacheSafe: true,
    signatureCurve: 'P-384',
    signatureHash: 'SHA-384',
    commitment: 'NONE',
  }
/* Key-committing suites use the V2 message format and bind the data key to
 * the message via a commitment hash.
 */
const webCryptoAlgAes256GcmHkdfSha512Committing: WebCryptoAlgCommitted = {
  id: AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA512_COMMIT_KEY,
  messageFormat: MessageFormat.V2,
  encryption: 'AES-GCM',
  keyLength: 256,
  ivLength: 12,
  tagLength: 128,
  kdf: 'HKDF',
  kdfHash: 'SHA-512',
  cacheSafe: true,
  commitment: 'KEY',
  commitmentHash: 'SHA-512',
  suiteDataLength: 32,
  commitmentLength: 256,
  saltLengthBytes: 32,
}
const webCryptoAlgAes256GcmHkdfSha512CommittingEcdsaP384: WebCryptoAlgCommittedSigned =
  {
    id: AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA512_COMMIT_KEY_ECDSA_P384,
    messageFormat: MessageFormat.V2,
    encryption: 'AES-GCM',
    keyLength: 256,
    ivLength: 12,
    tagLength: 128,
    kdf: 'HKDF',
    kdfHash: 'SHA-512',
    cacheSafe: true,
    signatureCurve: 'P-384',
    signatureHash: 'SHA-384',
    commitment: 'KEY',
    commitmentHash: 'SHA-512',
    suiteDataLength: 32,
    commitmentLength: 256,
    saltLengthBytes: 32,
  }
/* Complete lookup table: every AlgorithmSuiteIdentifier maps to its frozen
 * WebCrypto suite definition, including the 192-bit suites (which are
 * rejected later, in the WebCryptoAlgorithmSuite constructor).
 */
type WebCryptoAlgorithms = Readonly<{
  [id in AlgorithmSuiteIdentifier]: WebCryptoAlgUnion
}>
const webCryptoAlgorithms: WebCryptoAlgorithms = Object.freeze({
  [AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16]: Object.freeze(
    webCryptoAlgAes128GcmIv12Tag16
  ),
  [AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16]: Object.freeze(
    webCryptoAlgAes192GcmIv12Tag16
  ),
  [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16]: Object.freeze(
    webCryptoAlgAes256GcmIv12Tag16
  ),
  [AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16_HKDF_SHA256]:
    Object.freeze(webCryptoAlgAes128GcmIv12Tag16HkdfSha256),
  [AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA256]:
    Object.freeze(webCryptoAlgAes192GcmIv12Tag16HkdfSha256),
  [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA256]:
    Object.freeze(webCryptoAlgAes256GcmIv12Tag16HkdfSha256),
  [AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256]:
    Object.freeze(webCryptoAlgAes128GcmIv12Tag16HkdfSha256EcdsaP256),
  [AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384]:
    Object.freeze(webCryptoAlgAes192GcmIv12Tag16HkdfSha384EcdsaP384),
  [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384]:
    Object.freeze(webCryptoAlgAes256GcmIv12Tag16HkdfSha384EcdsaP384),
  [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA512_COMMIT_KEY]:
    Object.freeze(webCryptoAlgAes256GcmHkdfSha512Committing),
  [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA512_COMMIT_KEY_ECDSA_P384]:
    Object.freeze(webCryptoAlgAes256GcmHkdfSha512CommittingEcdsaP384),
})
/* Web browsers do not support 192 bit key lengths at this time.
 * To maintain type compatibility and TypeScript happiness between Algorithm Suites
 * I need to have the same list of AlgorithmSuiteIdentifier.
 * This list is maintained here to make sure that the error message is helpful.
 */
/* Exclude distributes over union members, so a single Exclude with a union of
 * the three 192-bit identifiers is equivalent to (and clearer than) nesting
 * three Exclude applications.
 */
type WebCryptoAlgorithmSuiteIdentifier = Exclude<
  AlgorithmSuiteIdentifier,
  | AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16
  | AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA256
  | AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384
>
type SupportedWebCryptoAlgorithms = Readonly<{
  [id in WebCryptoAlgorithmSuiteIdentifier]: WebCryptoAlgUnion
}>
/* Runtime allow-list used by the constructor below; the 192-bit entries are
 * intentionally commented out, not forgotten.
 */
const supportedWebCryptoAlgorithms: SupportedWebCryptoAlgorithms =
  Object.freeze({
    [AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16]: Object.freeze(
      webCryptoAlgAes128GcmIv12Tag16
    ),
    // [AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16]: Object.freeze(webCryptoAlgAes192GcmIv12Tag16),
    [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16]: Object.freeze(
      webCryptoAlgAes256GcmIv12Tag16
    ),
    [AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16_HKDF_SHA256]:
      Object.freeze(webCryptoAlgAes128GcmIv12Tag16HkdfSha256),
    // [AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA256]: Object.freeze(webCryptoAlgAes192GcmIv12Tag16HkdfSha256),
    [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA256]:
      Object.freeze(webCryptoAlgAes256GcmIv12Tag16HkdfSha256),
    [AlgorithmSuiteIdentifier.ALG_AES128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256]:
      Object.freeze(webCryptoAlgAes128GcmIv12Tag16HkdfSha256EcdsaP256),
    // [AlgorithmSuiteIdentifier.ALG_AES192_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384]: Object.freeze(webCryptoAlgAes192GcmIv12Tag16HkdfSha384EcdsaP384),
    [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384]:
      Object.freeze(webCryptoAlgAes256GcmIv12Tag16HkdfSha384EcdsaP384),
    [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA512_COMMIT_KEY]:
      Object.freeze(webCryptoAlgAes256GcmHkdfSha512Committing),
    [AlgorithmSuiteIdentifier.ALG_AES256_GCM_IV12_TAG16_HKDF_SHA512_COMMIT_KEY_ECDSA_P384]:
      Object.freeze(webCryptoAlgAes256GcmHkdfSha512CommittingEcdsaP384),
  })
/* Concrete AlgorithmSuite for the WebCrypto environment. Instances are frozen
 * value objects: construct with an AlgorithmSuiteIdentifier and read the
 * composed suite parameters.
 */
export class WebCryptoAlgorithmSuite
  extends AlgorithmSuite
  implements WebCryptoAlgorithmSuiteValues
{
  declare messageFormat: MessageFormat
  declare encryption: WebCryptoEncryption
  declare commitment: Commitment
  declare kdfHash?: WebCryptoHash
  declare signatureCurve?: WebCryptoECDHCurve
  declare signatureHash?: WebCryptoHash
  type: AlgorithmSuiteTypeWebCrypto = 'webCrypto'
  declare commitmentHash?: WebCryptoHash
  constructor(id: AlgorithmSuiteIdentifier) {
    // Look up the full table (including 192-bit suites) so the base class
    // can populate members; the support check below then rejects 192-bit ids.
    super(webCryptoAlgorithms[id])
    /* Precondition: Browsers do not support 192 bit keys so the AlgorithmSuiteIdentifier is removed.
     * This is primarily an error in decrypt but this make it clear.
     * The error can manifest deep in the decrypt loop making it hard to debug.
     */
    needs(
      Object.prototype.hasOwnProperty.call(supportedWebCryptoAlgorithms, id),
      '192-bit AES keys are not supported'
    )
    // Re-pin the prototype (guards against subclassing games) and freeze the
    // instance so suite parameters are immutable.
    Object.setPrototypeOf(this, WebCryptoAlgorithmSuite.prototype)
    Object.freeze(this)
  }
}
Object.freeze(WebCryptoAlgorithmSuite.prototype)
Object.freeze(WebCryptoAlgorithmSuite)
namespace evaluator {
declare let Symbol: SymbolConstructor;
// Canonical virtual paths for single-file TS and JS evaluation sources.
const sourceFile = vpath.combine(vfs.srcFolder, "source.ts");
const sourceFileJs = vpath.combine(vfs.srcFolder, "source.js");
// Define a custom "Symbol" constructor to attach missing built-in symbols without
// modifying the global "Symbol" constructor
const FakeSymbol: SymbolConstructor = ((description?: string) => Symbol(description)) as any;
(FakeSymbol as any).prototype = Symbol.prototype;
// Copy every own property (for, iterator, etc.) from the real Symbol.
for (const key of Object.getOwnPropertyNames(Symbol)) {
    Object.defineProperty(FakeSymbol, key, Object.getOwnPropertyDescriptor(Symbol, key)!);
}
// Add "asyncIterator" if missing
if (!ts.hasProperty(FakeSymbol, "asyncIterator")) Object.defineProperty(FakeSymbol, "asyncIterator", { value: Symbol.for("Symbol.asyncIterator"), configurable: true });
/**
 * Compiles TypeScript source in a virtual file system and evaluates the
 * emitted JavaScript, returning the exports of the `main` module.
 * A bare string is treated as a single-file program at `sourceFile`.
 * Fails the current test (via assert) on any compile diagnostic.
 */
export function evaluateTypeScript(source: string | { files: vfs.FileSet, rootFiles: string[], main: string }, options?: ts.CompilerOptions, globals?: Record<string, any>) {
    if (typeof source === "string") source = { files: { [sourceFile]: source }, rootFiles: [sourceFile], main: sourceFile };
    const fs = vfs.createFromFileSystem(Harness.IO, /*ignoreCase*/ false, { files: source.files });
    const compilerOptions: ts.CompilerOptions = {
        target: ts.ScriptTarget.ES5,
        module: ts.ModuleKind.CommonJS,
        lib: ["lib.esnext.d.ts", "lib.dom.d.ts"],
        ...options
    };
    const host = new fakes.CompilerHost(fs, compilerOptions);
    const result = compiler.compileFiles(host, source.rootFiles, compilerOptions);
    if (ts.some(result.diagnostics)) {
        assert.ok(/*value*/ false, "Syntax error in evaluation source text:\n" + ts.formatDiagnostics(result.diagnostics, {
            getCanonicalFileName: file => file,
            getCurrentDirectory: () => "",
            getNewLine: () => "\n"
        }));
    }
    const output = result.getOutput(source.main, "js")!;
    assert.isDefined(output);
    // Always expose FakeSymbol as "Symbol" so emitted helpers find well-known symbols.
    globals = { Symbol: FakeSymbol, ...globals };
    const loader = getLoader(compilerOptions, fs, globals);
    return loader.import(output.file);
}
/**
 * Evaluates raw JavaScript text as a CommonJS module in a fresh virtual file
 * system and returns its exports.
 */
export function evaluateJavaScript(sourceText: string, globals?: Record<string, any>, sourceFile = sourceFileJs) {
    globals = { Symbol: FakeSymbol, ...globals };
    const fs = new vfs.FileSystem(/*ignoreCase*/ false, { files: { [sourceFile]: sourceText } });
    return new CommonJsLoader(fs, globals).import(sourceFile);
}
/**
 * Picks a module loader matching the emit module kind of `compilerOptions`.
 * CommonJS/UMD use the CommonJS loader; System uses the System loader;
 * every other kind (AMD, None, ...) is unsupported and throws.
 */
function getLoader(compilerOptions: ts.CompilerOptions, fs: vfs.FileSystem, globals: Record<string, any>): Loader<unknown> {
    const moduleKind = ts.getEmitModuleKind(compilerOptions);
    if (moduleKind === ts.ModuleKind.UMD || moduleKind === ts.ModuleKind.CommonJS) {
        return new CommonJsLoader(fs, globals);
    }
    if (moduleKind === ts.ModuleKind.System) {
        return new SystemLoader(fs, globals);
    }
    throw new Error(`ModuleKind '${ts.ModuleKind[moduleKind]}' not supported by evaluator.`);
}
/**
 * Base class for module loaders over a virtual file system.
 * Handles module caching and relative-id resolution; subclasses define the
 * module representation and how source text is evaluated into it.
 */
abstract class Loader<TModule> {
    protected readonly fs: vfs.FileSystem;
    protected readonly globals: Record<string, any>;
    // Cache keyed by resolved file path so each module evaluates once.
    private moduleCache = new ts.Map<string, TModule>();
    constructor(fs: vfs.FileSystem, globals: Record<string, any>) {
        this.fs = fs;
        this.globals = globals;
    }
    // True when `file` exists and is a regular file (not a directory).
    protected isFile(file: string) {
        return this.fs.existsSync(file) && this.fs.statSync(file).isFile();
    }
    protected abstract evaluate(text: string, file: string, module: TModule): void;
    protected abstract createModule(file: string): TModule;
    protected abstract getExports(module: TModule): any;
    // Loads (or returns the cached) module for an already-resolved file path.
    protected load(file: string): TModule {
        if (!ts.isExternalModuleNameRelative(file)) throw new Error(`Module '${file}' could not be found.`);
        let module = this.moduleCache.get(file);
        if (module) return module;
        // Cache before evaluating so circular imports see the partial module;
        // evict on failure so a retry can re-evaluate.
        this.moduleCache.set(file, module = this.createModule(file));
        try {
            const sourceText = this.fs.readFileSync(file, "utf8");
            this.evaluate(sourceText, file, module);
            return module;
        }
        catch (e) {
            this.moduleCache.delete(file);
            throw e;
        }
    }
    protected resolve(id: string, base: string) {
        return vpath.resolve(base, id);
    }
    // Public entry point: resolve a relative id against `base` and return the
    // loaded module's exports.
    import(id: string, base = this.fs.cwd()) {
        if (!ts.isExternalModuleNameRelative(id)) throw new Error(`Module '${id}' could not be found.`);
        const file = this.resolve(id, base);
        const module = this.load(file);
        if (!module) throw new Error(`Module '${id}' could not be found.`);
        return this.getExports(module);
    }
}
interface CommonJSModule {
    exports: any;
}
/**
 * CommonJS-style loader: resolves ids per a simplified Node algorithm
 * (exact file, `<file>.js`, `package.json` "main", `index.js`) and evaluates
 * module text inside a `(module, exports, require, __dirname, __filename)`
 * wrapper, with the configured globals appended as extra parameters.
 */
class CommonJsLoader extends Loader<CommonJSModule> {
    /** Resolves `file` as a concrete file: exact match first, then `<file>.js`. */
    private resolveAsFile(file: string) {
        if (this.isFile(file)) return file;
        if (this.isFile(file + ".js")) return file + ".js";
        return undefined;
    }
    /** Resolves a directory to its `index.js`, if present. */
    private resolveIndex(dir: string) {
        const indexFile = vpath.resolve(dir, "index.js");
        if (this.isFile(indexFile)) return indexFile;
        return undefined;
    }
    /**
     * Resolves a directory per the CommonJS algorithm: a `package.json`
     * "main" entry wins; otherwise fall back to the directory's `index.js`.
     */
    private resolveAsDirectory(dir: string) {
        const packageFile = vpath.resolve(dir, "package.json");
        if (this.isFile(packageFile)) {
            const text = this.fs.readFileSync(packageFile, "utf8");
            const json = JSON.parse(text);
            if (json.main) {
                const main = vpath.resolve(dir, json.main);
                const result = this.resolveAsFile(main) || this.resolveIndex(main);
                if (result === undefined) throw new Error("Module not found");
                // BUG FIX: `result` was previously computed and validated but
                // never returned, so a valid package.json "main" entry was
                // silently ignored in favor of the directory's index.js.
                return result;
            }
        }
        return this.resolveIndex(dir);
    }
    protected resolve(id: string, base: string) {
        const file = vpath.resolve(base, id);
        const resolved = this.resolveAsFile(file) || this.resolveAsDirectory(file);
        if (!resolved) throw new Error(`Module '${id}' could not be found.`);
        return resolved;
    }
    protected createModule(): CommonJSModule {
        return { exports: {} };
    }
    protected getExports(module: CommonJSModule) {
        return module.exports;
    }
    /**
     * Evaluates module text via indirect eval in a function wrapper that
     * supplies the CommonJS pseudo-globals plus the configured globals.
     */
    protected evaluate(text: string, file: string, module: CommonJSModule): void {
        const globalNames: string[] = [];
        const globalArgs: any[] = [];
        for (const name in this.globals) {
            if (ts.hasProperty(this.globals, name)) {
                globalNames.push(name);
                globalArgs.push(this.globals[name]);
            }
        }
        const base = vpath.dirname(file);
        // `require` inside the module resolves relative to the module's directory.
        const localRequire = (id: string) => this.import(id, base);
        const evaluateText = `(function (module, exports, require, __dirname, __filename, ${globalNames.join(", ")}) { ${text} })`;
        // eslint-disable-next-line no-eval
        const evaluateThunk = (void 0, eval)(evaluateText) as (module: any, exports: any, require: (id: string) => any, dirname: string, filename: string, ...globalArgs: any[]) => void;
        evaluateThunk.call(this.globals, module, module.exports, localRequire, vpath.dirname(file), file, ...globalArgs);
    }
}
/* Internal record for one System-format module: its exports object, linker
 * bookkeeping (dependencies/dependers/setters), the registered declaration,
 * and any error captured during instantiation, linking, or evaluation.
 */
interface SystemModule {
    file: string;
    exports: any;
    hasExports: boolean;
    state: SystemModuleState;
    dependencies: SystemModule[];
    dependers: SystemModule[];
    setters: SystemModuleDependencySetter[];
    requestedDependencies?: string[];
    declaration?: SystemModuleDeclaration;
    hasError?: boolean;
    error?: any;
}
// Ordered lifecycle states; comparisons like `state < Linked` rely on this order.
const enum SystemModuleState {
    // Instantiation phases:
    Uninstantiated,
    Instantiated,
    // Linker phases:
    AddingDependencies,
    AllDependenciesAdded,
    AllDependenciesInstantiated,
    WiringSetters,
    Linked,
    // Evaluation phases:
    Evaluating,
    Ready,
}
// The `exports` function passed to a registered module: either (name, value)
// or a bag of named values.
interface SystemModuleExporter {
    <T>(name: string, value: T): T;
    <T extends object>(values: T): T;
}
// The `context` argument passed to a registered module (dynamic import + meta).
interface SystemModuleContext {
    import: (id: string) => Promise<any>;
    meta: any;
}
type SystemModuleRegisterCallback = (exporter: SystemModuleExporter, context: SystemModuleContext) => SystemModuleDeclaration;
type SystemModuleDependencySetter = (dependency: any) => void;
interface SystemModuleDeclaration {
    setters: SystemModuleDependencySetter[];
    execute: () => void;
}
// The shape of the `System` global handed to evaluated module text.
interface SystemGlobal {
    register(dependencies: string[], declare: SystemModuleRegisterCallback): void;
}
class SystemLoader extends Loader<SystemModule> {
protected createModule(file: string): SystemModule {
return {
file,
// eslint-disable-next-line no-null/no-null
exports: Object.create(/*o*/ null),
dependencies: [],
dependers: [],
setters: [],
hasExports: false,
state: SystemModuleState.Uninstantiated
};
}
protected getExports(module: SystemModule) {
if (module.state < SystemModuleState.Ready) {
this.resetDependers(module, []);
this.evaluateModule(module, []);
if (module.state < SystemModuleState.Ready) {
const error = new Error("Module graph could not be loaded");
this.handleError(module, error);
throw error;
}
}
if (module.hasError) {
throw module.error;
}
return module.exports;
}
private handleError(module: SystemModule, error: any) {
if (!module.hasError) {
module.hasError = true;
module.error = error;
module.state = SystemModuleState.Ready;
}
}
protected evaluate(text: string, _file: string, module: SystemModule): void {
const globalNames: string[] = [];
const globalArgs: any[] = [];
for (const name in this.globals) {
if (ts.hasProperty(this.globals, name)) {
globalNames.push(name);
globalArgs.push(this.globals[name]);
}
}
const localSystem: SystemGlobal = {
register: (dependencies, declare) => this.instantiateModule(module, dependencies, declare)
};
const evaluateText = `(function (System, ${globalNames.join(", ")}) { ${text} })`;
try {
// eslint-disable-next-line no-eval
const evaluateThunk = (void 0, eval)(evaluateText) as (System: any, ...globalArgs: any[]) => void;
evaluateThunk.call(this.globals, localSystem, ...globalArgs);
}
catch (e) {
this.handleError(module, e);
throw e;
}
}
private instantiateModule(module: SystemModule, dependencies: string[], registration?: SystemModuleRegisterCallback) {
function exporter<T>(name: string, value: T): T;
function exporter<T>(value: T): T;
function exporter<T>(...args: [string, T] | [T]) {
module.hasExports = true;
const name = args.length === 1 ? undefined : args[0];
const value = args.length === 1 ? args[0] : args[1];
if (name !== undefined) {
module.exports[name] = value;
}
else {
for (const name in value) {
module.exports[name] = value[name];
}
}
for (const setter of module.setters) {
setter(module.exports);
}
return value;
}
const context: SystemModuleContext = {
import: (_id) => { throw new Error("Dynamic import not implemented."); },
meta: {
url: ts.isUrl(module.file) ? module.file : `file:///${ts.normalizeSlashes(module.file).replace(/^\//, "").split("/").map(encodeURIComponent).join("/")}`
}
};
module.requestedDependencies = dependencies;
try {
module.declaration = registration?.(exporter, context);
module.state = SystemModuleState.Instantiated;
for (const depender of module.dependers) {
this.linkModule(depender);
}
this.linkModule(module);
}
catch (e) {
this.handleError(module, e);
throw e;
}
}
private linkModule(module: SystemModule) {
try {
for (;;) {
switch (module.state) {
case SystemModuleState.Uninstantiated: {
throw new Error("Module not yet instantiated");
}
case SystemModuleState.Instantiated: {
// Module has been instantiated, start requesting dependencies.
// Set state so that re-entry while adding dependencies does nothing.
module.state = SystemModuleState.AddingDependencies;
const base = vpath.dirname(module.file);
const dependencies = module.requestedDependencies || [];
for (const dependencyId of dependencies) {
const dependency = this.load(this.resolve(dependencyId, base));
module.dependencies.push(dependency);
dependency.dependers.push(module);
}
// All dependencies have been added, switch state
// to check whether all dependencies are instantiated
module.state = SystemModuleState.AllDependenciesAdded;
continue;
}
case SystemModuleState.AddingDependencies: {
// in the middle of adding dependencies for this module, do nothing
return;
}
case SystemModuleState.AllDependenciesAdded: {
// all dependencies have been added, advance state if all dependencies are instantiated.
for (const dependency of module.dependencies) {
if (dependency.state === SystemModuleState.Uninstantiated) {
return;
}
}
// indicate all dependencies are instantiated for this module.
module.state = SystemModuleState.AllDependenciesInstantiated;
// trigger links for dependers of this module.
for (const depender of module.dependers) {
this.linkModule(depender);
}
continue;
}
case SystemModuleState.AllDependenciesInstantiated: {
// all dependencies have been instantiated, start wiring setters
module.state = SystemModuleState.WiringSetters;
for (let i = 0; i < module.dependencies.length; i++) {
const dependency = module.dependencies[i];
const setter = module.declaration?.setters[i];
if (setter) {
dependency.setters.push(setter);
if (dependency.hasExports || dependency.state === SystemModuleState.Ready) {
// wire hoisted exports or ready dependencies.
setter(dependency.exports);
}
}
}
module.state = SystemModuleState.Linked;
// ensure graph is fully linked
for (const depender of module.dependers) {
this.linkModule(depender);
}
continue;
}
case SystemModuleState.WiringSetters: // in the middle of wiring setters for this module, nothing to do
case SystemModuleState.Linked: // module has already been linked, nothing to do
case SystemModuleState.Evaluating: // module is currently evaluating, nothing to do
case SystemModuleState.Ready: // module is done evaluating, nothing to do
return;
}
}
}
catch (e) {
this.handleError(module, e);
throw e;
}
}
private resetDependers(module: SystemModule, stack: SystemModule[]) {
if (stack.lastIndexOf(module) !== -1) {
return;
}
stack.push(module);
module.dependers.length = 0;
for (const dependency of module.dependencies) {
this.resetDependers(dependency, stack);
}
stack.pop();
}
/**
 * Executes a linked module after first evaluating all of its dependencies
 * (depth-first). `stack` guards against re-entering a module that is
 * already being evaluated higher up the same call chain. Errors are
 * reported through `handleError` and then re-thrown to the caller.
 */
private evaluateModule(module: SystemModule, stack: SystemModule[]) {
    // Linking must have completed before evaluation may begin.
    if (module.state < SystemModuleState.Linked) {
        throw new Error("Invalid state for evaluation.");
    }
    // Anything past Linked (Evaluating / Ready) needs no further work.
    if (module.state !== SystemModuleState.Linked) {
        return;
    }
    // Re-entrancy guard: this module is already being evaluated.
    if (stack.includes(module)) {
        return;
    }
    stack.push(module);
    module.state = SystemModuleState.Evaluating;
    try {
        // Depth-first: dependencies run before the module's own body.
        for (const dep of module.dependencies) {
            this.evaluateModule(dep, stack);
        }
        module.declaration?.execute?.();
        module.state = SystemModuleState.Ready;
    }
    catch (err) {
        this.handleError(module, err);
        throw err;
    }
}
}
}
import * as chai from "chai";
import { Configuration, SourceFile, createProject } from "../src/index";
import { JSDOM } from "jsdom";
import { buildSimpleIFrameDom, getEditorIFrame, buildMultiIFrameDom, getEditorIFrames, buildDoubleIFrameDom } from "./domUtilities";
import * as chaiAsPromised from "chai-as-promised";
import { registerForRegionFromFile, registerForRequestIdGeneration, registerForEditorMessages, notifyEditorReady, EditorState, trackSetWorkspaceRequests, raiseTextChange } from "./messagingMocks";
import { wait } from "./wait";
import { createReadySession, createReadySessionWithMultipleEditors } from "./sessionFactory";
import { ApiMessage, SET_WORKSPACE_REQUEST } from "../src/internals/apiMessages";
import { IWorkspace } from "../src/internals/workspace";
import { expect } from "chai";
// Register the promise-assertion plugin and enable the `should` interface
// used throughout these tests.
chai.use(chaiAsPromised);
chai.should();
// Integration-style tests for the trydotnet session/document API, driven
// through mocked editor iframes (see ./messagingMocks and ./domUtilities).
describe("A user", () => {
  let configuration: Configuration;
  let dom: JSDOM;
  let editorIFrame: HTMLIFrameElement;

  // Fresh single-iframe DOM and host configuration before every test.
  beforeEach(() => {
    configuration = { hostOrigin: "https://docs.microsoft.com" };
    dom = buildSimpleIFrameDom(configuration);
    editorIFrame = getEditorIFrame(dom);
  });

  describe("with a trydotnet session", () => {
    it("can open a document", async () => {
      let session = await createReadySession(configuration, editorIFrame, dom.window);
      let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }] });
      await session.openProject(project);
      let document = await session.openDocument({ fileName: "program.cs" });
      document.id().should.equal("program.cs");
      document.getContent().should.equal("file content");
    });

    // Opening a file name that is not in the project yields an empty document.
    it("creates a empty document when the project does not have a matching file", async () => {
      let session = await createReadySession(configuration, editorIFrame, dom.window);
      let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }] });
      await session.openProject(project);
      let document = await session.openDocument({ fileName: "program_two.cs" });
      expect(document).to.not.be.null;
      document.id().should.equal("program_two.cs");
      document.getContent().should.equal("");
    });

    it("creates a empty document when using region and the project does not have a matching file", async () => {
      let session = await createReadySession(configuration, editorIFrame, dom.window);
      let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }] });
      await session.openProject(project);
      let document = await session.openDocument({ fileName: "program_two.cs", region: "controller" });
      expect(document).to.not.be.null;
      // Region-scoped documents get a `file@region` compound id.
      document.id().should.equal("program_two.cs@controller");
      document.getContent().should.equal("");
    });

    it("can open a document with region as identifier", async () => {
      let session = await createReadySession(configuration, editorIFrame, dom.window);
      let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "//pre\n#region controller\n//content\n e#endregion\n//post/n" }] });
      await session.openProject(project);
      registerForRequestIdGeneration(configuration, editorIFrame, dom.window, (_rid) => "TestRun");
      // Mock the region-extraction round trip: return only the region body.
      registerForRegionFromFile(configuration, editorIFrame, dom.window, (files: SourceFile[]) => {
        if (files) {
          return [{ id: "program.cs@controller", content: "//content" }]
        }
        return null;
      })
      let document = await session.openDocument({ fileName: "program.cs", region: "controller" });
      document.id().should.equal("program.cs@controller");
      document.getContent().should.equal("//content");
    });

    it("can open a document and bind it immediately to an editor", async () => {
      // `editorState` is mutated by the mock as editor messages arrive.
      let editorState = { content: "", documentId: "" };
      let session = await createReadySession(configuration, editorIFrame, dom.window);
      let defaultEditor = session.getTextEditor();
      let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }] });
      registerForRequestIdGeneration(configuration, editorIFrame, dom.window, _r => "TestRun0");
      await session.openProject(project);
      registerForEditorMessages(configuration, editorIFrame, dom.window, editorState);
      await session.openDocument({ fileName: "program.cs", editorId: defaultEditor.id() });
      // Allow the async messaging round trip to settle.
      await wait(1000);
      editorState.content.should.equal("file content");
      editorState.documentId.should.equal("program.cs");
    });

    it("can return the open documents", async() => {
      let session = await createReadySession(configuration, editorIFrame, dom.window);
      let defaultEditor = session.getTextEditor();
      let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }]});
      await session.openProject(project);
      // Explicit `content` overrides the project file's content.
      await session.openDocument({ fileName: "program.cs", editorId: defaultEditor.id(), content:"i am a document" });
      let documents = session.getOpenDocuments();
      documents.should.have.length(1);
      documents[0].getContent().should.equal("i am a document");
    });

    describe("and with a document", () => {
      it("can set the content and affect editor", async () => {
        let editorState = { content: "", documentId: "" };
        let session = await createReadySession(configuration, editorIFrame, dom.window);
        let defaultEditor = session.getTextEditor();
        let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }] });
        await session.openProject(project);
        registerForRequestIdGeneration(configuration, editorIFrame, dom.window, _r => "TestRun1");
        await session.openProject(project);
        let document = await session.openDocument({ fileName: "program.cs", editorId: defaultEditor.id() });
        registerForEditorMessages(configuration, editorIFrame, dom.window, editorState);
        await document.setContent("new content");
        document.getContent().should.equal("new content");
        // Document -> editor propagation is asynchronous.
        await wait(1000);
        editorState.content.should.equal("new content");
      });

      it("can track editor changes", async () => {
        let editorState = { content: "", documentId: "" };
        let session = await createReadySession(configuration, editorIFrame, dom.window);
        let defaultEditor = session.getTextEditor();
        let project = await createProject({ packageName: "console", files: [{ name: "program.cs", content: "file content" }] });
        await session.openProject(project);
        registerForRequestIdGeneration(configuration, editorIFrame, dom.window, _r => "TestRun2");
        registerForEditorMessages(configuration, editorIFrame, dom.window, editorState);
        let document = await session.openDocument({ fileName: "program.cs", editorId: defaultEditor.id() });
        let editor = session.getTextEditor();
        // Editor -> document propagation, the reverse of the previous test.
        await editor.setContent("new editor content");
        await wait(1000);
        document.getContent().should.equal("new editor content");
      });

      it("can track editor changes with multiple editors", async () => {
        // Rebuild the DOM with several editor iframes for this test.
        dom = buildMultiIFrameDom(configuration);
        let editorIFrames = getEditorIFrames(dom);
        let editorStates: { [key: string]: EditorState } = {};
        let session = await createReadySessionWithMultipleEditors(configuration, editorIFrames, dom.window);
        let project = await createProject(
          {
            packageName: "console", files: [
              { name: "program.cs", content: "the program" },
              { name: "otherFile.cs", content: "other file content" }
            ]
          });
        await session.openProject(project);
        // Wire one mock editor-state tracker per iframe, keyed by editor id.
        for (let iframe of editorIFrames) {
          editorStates[iframe.dataset.trydotnetEditorId] = { content: "", documentId: "" };
          registerForRequestIdGeneration(configuration, iframe, dom.window, _r => "TestRun2");
          registerForEditorMessages(configuration, iframe, dom.window, editorStates[iframe.dataset.trydotnetEditorId]);
        }
        let editorIds = Object.getOwnPropertyNames(editorStates);
        let lastIndex = editorIds.length - 1;
        // Bind each document to a different editor.
        let programDocument = await session.openDocument({ editorId: editorIds[lastIndex], fileName: "program.cs" });
        let otherFileDocument = await session.openDocument({ editorId: editorIds[0], fileName: "otherFile.cs" });
        raiseTextChange(configuration, dom.window, "new editor content", programDocument.id());
        raiseTextChange(configuration, dom.window, "new content in the other file!", otherFileDocument.id());
        await wait(1000);
        programDocument.getContent().should.equal("new editor content");
        otherFileDocument.getContent().should.equal("new content in the other file!");
      });

      it("can dispatch editor change messages with multiple editors", async () => {
        dom = buildDoubleIFrameDom(configuration);
        let editorIFrames = getEditorIFrames(dom);
        // Capture every SET_WORKSPACE_REQUEST sent to each editor.
        let editorMessageStacks: { [key: string]: ApiMessage[] } = {};
        let session = await createReadySessionWithMultipleEditors(configuration, editorIFrames, dom.window);
        let project = await createProject(
          {
            packageName: "console", files: [
              { name: "program.cs", content: "the program" },
              { name: "otherFile.cs", content: "other file content" }
            ]
          });
        await session.openProject(project);
        for (let iframe of editorIFrames) {
          editorMessageStacks[iframe.dataset.trydotnetEditorId] = [];
          registerForRequestIdGeneration(configuration, iframe, dom.window, _r => "TestRun2");
          trackSetWorkspaceRequests(configuration, iframe, dom.window, editorMessageStacks[iframe.dataset.trydotnetEditorId]);
        }
        let editorIds = Object.getOwnPropertyNames(editorMessageStacks);
        let lastIndex = editorIds.length - 1;
        let programDocument = await session.openDocument({ editorId: editorIds[lastIndex], fileName: "program.cs" });
        let otherFileDocument = await session.openDocument({ editorId: editorIds[0], fileName: "otherFile.cs" });
        raiseTextChange(configuration, dom.window, "new content in program!", programDocument.id());
        raiseTextChange(configuration, dom.window, "new content in the other file!", otherFileDocument.id());
        await wait(1100);
        let programEditorMessages = editorMessageStacks[editorIds[lastIndex]];
        let otherFileEditorMessages = editorMessageStacks[editorIds[0]];
        programEditorMessages.length.should.be.greaterThan(0);
        otherFileEditorMessages.length.should.be.greaterThan(0);
        // Inspect the most recent workspace request sent to each editor.
        let lastProgramEditorMessage = <{
          type: typeof SET_WORKSPACE_REQUEST,
          workspace: any,
          bufferId: string,
          requestId: string
        }>programEditorMessages[programEditorMessages.length - 1];
        let lastOtherFileEditorMessage = <{
          type: typeof SET_WORKSPACE_REQUEST,
          workspace: any,
          bufferId: string,
          requestId: string
        }>otherFileEditorMessages[otherFileEditorMessages.length - 1];
        lastProgramEditorMessage.type.should.equal(SET_WORKSPACE_REQUEST);
        lastProgramEditorMessage.type.should.equal(lastOtherFileEditorMessage.type);
        // Both editors must converge on the same workspace state, but each
        // message targets its own buffer (document) id.
        lastProgramEditorMessage.workspace.should.deep.equal(lastOtherFileEditorMessage.workspace);
        lastProgramEditorMessage.bufferId.should.be.equal(programDocument.id());
        lastOtherFileEditorMessage.bufferId.should.be.equal(otherFileDocument.id());
        lastProgramEditorMessage.workspace.buffers.should.deep.equal(lastOtherFileEditorMessage.workspace.buffers);
        (<IWorkspace>(lastProgramEditorMessage.workspace)).buffers.find(b => b.id === "program.cs").content.should.be.equal("new content in program!");
        (<IWorkspace>(lastProgramEditorMessage.workspace)).buffers.find(b => b.id === "otherFile.cs").content.should.be.equal("new content in the other file!");
      });
    });
  });
});
import '@material/mwc-button';
import '@material/mwc-icon';
import { deepEqual } from 'fast-equals';
import { css, customElement, html } from 'lit-element';
import { classMap } from 'lit-html/directives/class-map';
import { repeat } from 'lit-html/directives/repeat';
import { styleMap } from 'lit-html/directives/style-map';
import { computed, observable, reaction } from 'mobx';
import '../dot_spinner';
import './tvt_column_header';
import './test_variant_entry';
import { AppState, consumeAppState } from '../../context/app_state';
import { consumeInvocationState, InvocationState } from '../../context/invocation_state';
import { consumeConfigsStore, UserConfigsStore } from '../../context/user_configs';
import { VARIANT_STATUS_CLASS_MAP } from '../../libs/constants';
import { consumer } from '../../libs/context';
import { reportErrorAsync } from '../../libs/error_handler';
import { getPropKeyLabel, TestVariant, TestVariantStatus } from '../../services/resultdb';
import colorClasses from '../../styles/color_classes.css';
import commonStyle from '../../styles/common_style.css';
import { MiloBaseElement } from '../milo_base';
import { TestVariantEntryElement } from './test_variant_entry';
/**
* Displays test variants in a table.
*/
@customElement('milo-test-variants-table')
@consumer
export class TestVariantsTableElement extends MiloBaseElement {
  // Context-injected state; @observable.ref makes mobx track reassignment only.
  @observable.ref @consumeAppState() appState!: AppState;
  @observable.ref @consumeConfigsStore() configsStore!: UserConfigsStore;
  @observable.ref @consumeInvocationState() invocationState!: InvocationState;

  // True when the user configured grouping keys other than the default ['status'].
  @computed private get hasCustomGroupingKey() {
    return !deepEqual(this.invocationState.groupingKeys, ['status']);
  }

  /** Expands or collapses every rendered test-variant entry at once. */
  toggleAllVariants(expand: boolean) {
    this.shadowRoot!.querySelectorAll<TestVariantEntryElement>('milo-test-variant-entry').forEach(
      (e) => (e.expanded = expand)
    );
  }

  connectedCallback() {
    super.connectedCallback();
    // When a new test loader is received, load the first page.
    this.addDisposer(
      reaction(
        () => this.invocationState.testLoader,
        (testLoader) => reportErrorAsync(this, async () => testLoader?.loadFirstPageOfTestVariants())(),
        { fireImmediately: true }
      )
    );
  }

  // Loads the next page of variants; failures are surfaced via reportErrorAsync.
  private loadMore = reportErrorAsync(this, async () => this.invocationState.testLoader?.loadNextTestVariants());

  // Renders every variant group plus the "Showing N / M" tail and load-more link.
  private renderAllVariants() {
    const testLoader = this.invocationState.testLoader;
    const groupers = this.invocationState.groupers;
    return html`
      ${
        // Indicates that there are no unexpected test variants.
        testLoader?.loadedAllUnexpectedVariants && testLoader.unexpectedTestVariants.length === 0
          ? this.renderVariantGroup([['status', TestVariantStatus.UNEXPECTED]], [])
          : ''
      }
      ${(testLoader?.groupedNonExpectedVariants || []).map((group) =>
        this.renderVariantGroup(
          groupers.map(([key, getter]) => [key, getter(group[0])]),
          group
        )
      )}
      ${this.renderVariantGroup(
        [['status', TestVariantStatus.EXPECTED]],
        testLoader?.expectedTestVariants || [],
        this.hasCustomGroupingKey ? html`<b>note: custom grouping doesn't apply to expected tests</b>` : ''
      )}
      <div id="variant-list-tail">
        ${testLoader?.testVariantCount === testLoader?.unfilteredTestVariantCount
          ? html`
            Showing ${testLoader?.testVariantCount || 0} /
            ${testLoader?.unfilteredTestVariantCount || 0}${testLoader?.loadedAllVariants ? '' : '+'} tests.
          `
          : html`
            Showing
            <i>${testLoader?.testVariantCount || 0}</i>
            test${testLoader?.testVariantCount === 1 ? '' : 's'} that
            <i>match${testLoader?.testVariantCount === 1 ? 'es' : ''} the filter</i>, out of
            <i>${testLoader?.unfilteredTestVariantCount || 0}${testLoader?.loadedAllVariants ? '' : '+'}</i> tests.
          `}
        <span
          class="active-text"
          style=${styleMap({ display: this.invocationState.testLoader?.loadedAllVariants ?? true ? 'none' : '' })}
          >${this.renderLoadMore()}</span
        >
      </div>
    `;
  }

  // Ids (JSON-encoded group definitions) of the groups the user collapsed.
  @observable private collapsedVariantGroups = new Set<string>();

  // Renders one collapsible group header followed by its variant entries.
  // `groupDef` is the list of [propKey, value] pairs identifying the group.
  private renderVariantGroup(groupDef: [string, unknown][], variants: TestVariant[], note: unknown = null) {
    const groupId = JSON.stringify(groupDef);
    const expanded = !this.collapsedVariantGroups.has(groupId);
    return html`
      <div
        class=${classMap({
          expanded,
          empty: variants.length === 0,
          'group-header': true,
        })}
        @click=${() => {
          // Clicking the header toggles the collapsed state of this group.
          if (expanded) {
            this.collapsedVariantGroups.add(groupId);
          } else {
            this.collapsedVariantGroups.delete(groupId);
          }
        }}
      >
        <mwc-icon class="group-icon">${expanded ? 'expand_more' : 'chevron_right'}</mwc-icon>
        <div>
          <b>${variants.length} test variant${variants.length === 1 ? '' : 's'}:</b>
          ${groupDef.map(
            ([k, v]) =>
              html`<span class="group-kv"
                ><span>${getPropKeyLabel(k)}=</span
                ><span class=${k === 'status' ? VARIANT_STATUS_CLASS_MAP[v as TestVariantStatus] : ''}>${v}</span></span
              >`
          )}
          ${note}
        </div>
      </div>
      ${repeat(
        expanded ? variants : [],
        (v) => `${v.testId} ${v.variantHash}`,
        (v) => html`
          <milo-test-variant-entry
            .variant=${v}
            .columnGetters=${this.invocationState.displayedColumnGetters}
            .expanded=${this.invocationState.testLoader?.testVariantCount === 1}
          ></milo-test-variant-entry>
        `
      )}
    `;
  }

  // "[load more]" link and the loading spinner; exactly one is visible at a time.
  private renderLoadMore() {
    const state = this.invocationState;
    return html`
      <span
        style=${styleMap({ display: state.testLoader?.isLoading ?? true ? 'none' : '' })}
        @click=${() => this.loadMore()}
      >
        [load more]
      </span>
      <span
        style=${styleMap({
          display: state.testLoader?.isLoading ?? true ? '' : 'none',
          cursor: 'initial',
        })}
      >
        loading <milo-dot-spinner></milo-dot-spinner>
      </span>
    `;
  }

  // Cached after each render; used for live column-resize feedback below.
  private tableHeaderEle?: HTMLElement;
  protected updated() {
    this.tableHeaderEle = this.shadowRoot!.getElementById('table-header')!;
  }

  protected render() {
    return html`
      <div id="table-header">
        <div><!-- Expand toggle --></div>
        <milo-tvt-column-header
          .propKey=${'status'}
          .label=${/* invis char */ '\u2002' + 'S'}
          .canHide=${false}
        ></milo-tvt-column-header>
        ${this.invocationState.displayedColumns.map(
          (col, i) => html`<milo-tvt-column-header
            .colIndex=${i}
            .resizeTo=${(newWidth: number, finalized: boolean) => {
              if (!finalized) {
                const newColWidths = this.invocationState.columnWidths.slice();
                newColWidths[i] = newWidth;
                // Update the style directly so lit-element doesn't need to
                // re-render the component frequently.
                // Live updating the width of the entire column can cause a bit
                // of lag when there are many rows. Live updating just the
                // column header is good enough.
                this.tableHeaderEle?.style.setProperty('--columns', newColWidths.map((w) => w + 'px').join(' '));
                return;
              }
              this.tableHeaderEle?.style.removeProperty('--columns');
              this.configsStore.userConfigs.testResults.columnWidths[col] = newWidth;
            }}
            .propKey=${col}
            .label=${getPropKeyLabel(col)}
          ></milo-tvt-column-header>`
        )}
        <milo-tvt-column-header .propKey=${'name'} .label=${'Name'} .canHide=${false} .canGroup=${false}>
        </milo-tvt-column-header>
      </div>
      <div id="test-variant-list" tabindex="0">${this.renderAllVariants()}</div>
    `;
  }

  static styles = [
    commonStyle,
    colorClasses,
    css`
      #table-header {
        display: grid;
        grid-template-columns: 24px 24px var(--columns) 1fr;
        grid-gap: 5px;
        line-height: 24px;
        padding: 2px 2px 2px 10px;
        font-weight: bold;
        position: sticky;
        top: 39px;
        border-bottom: 1px solid var(--divider-color);
        background-color: var(--block-background-color);
        z-index: 2;
      }
      #no-invocation {
        padding: 10px;
      }
      #test-variant-list > * {
        padding-left: 10px;
      }
      milo-test-variant-entry {
        margin: 2px 0px;
      }
      #integration-hint {
        border-bottom: 1px solid var(--divider-color);
        padding: 0 0 5px 15px;
      }
      .group-header {
        display: grid;
        grid-template-columns: auto auto 1fr;
        grid-gap: 5px;
        padding: 2px 2px 2px 10px;
        position: sticky;
        top: 67px;
        font-size: 14px;
        background-color: var(--block-background-color);
        border-top: 1px solid var(--divider-color);
        cursor: pointer;
        line-height: 24px;
        z-index: 1;
      }
      .group-header:first-child {
        top: 68px;
        border-top: none;
      }
      .group-header.expanded:not(.empty) {
        border-bottom: 1px solid var(--divider-color);
      }
      .group-kv:not(:last-child)::after {
        content: ', ';
      }
      .group-kv > span:first-child {
        color: var(--light-text-color);
      }
      .group-kv > span:nth-child(2) {
        font-weight: 500;
        font-style: italic;
      }
      .inline-icon {
        --mdc-icon-size: 1.2em;
        vertical-align: bottom;
      }
      #variant-list-tail {
        padding: 5px 0 5px 15px;
      }
      #variant-list-tail:not(:first-child) {
        border-top: 1px solid var(--divider-color);
      }
      #load {
        color: var(--active-text-color);
      }
    `,
  ];
}
import { Observable, of, BehaviorSubject } from 'rxjs';
import { tap, finalize, take, filter, map } from 'rxjs/operators';
import { PblDataSourceTriggerChangedEvent, DataSourceOf, PblDataSourceConfigurableTriggers } from '@pebula/ngrid/core';
import { PblInfiniteScrollFactoryOptions, PblInfiniteScrollDsOptions, PblInfiniteScrollTriggerChangedEvent } from './infinite-scroll-datasource.types';
import { PblInfiniteScrollDataSourceCache } from './infinite-scroll-datasource.cache';
import { normalizeOptions, shouldTriggerInvisibleScroll, tryAddVirtualRowsBlock, updateCacheAndDataSource, upgradeChangeEventToInfinite } from './utils';
import { PblInfiniteScrollDataSource } from './infinite-scroll-datasource';
import { PblInfiniteScrollDataSourceAdapter } from './infinite-scroll-datasource-adapter';
import { TriggerExecutionQueue } from './trigger-execution-queue';
import { CacheBlock } from './caching';
import { EventState } from './event-state';
// const LOG = msg => console.log(msg) ;
// Module augmentation: extend ngrid's trigger-event source map so a
// changed-event can declare it originated from the infinite-scroll logic.
declare module '@pebula/ngrid/core/lib/data-source/adapter/types' {
  interface PblDataSourceTriggerChangedEventSource {
    /**
     * The source of the event was from a scroll that reached into a group of rows that the grid needs to fetch.
     */
    infiniteScroll: true;
  }
}
/**
 * Shared context that wires together the infinite-scroll datasource, its
 * adapter and its cache. It owns the trigger life-cycle: deciding, per
 * changed-event, whether to serve cached rows, add virtual (placeholder)
 * rows, or run the user-supplied `onTrigger` to fetch a new block.
 */
export class PblInfiniteScrollDSContext<T, TData = any> {
  options: PblInfiniteScrollDsOptions;
  // Virtual total row count reported to the grid (may exceed loaded rows).
  totalLength: number;
  cache: PblInfiniteScrollDataSourceCache<T, TData>;

  private ds: PblInfiniteScrollDataSource<T, TData>;
  private adapter: PblInfiniteScrollDataSourceAdapter<T, TData>;
  // Token identifying the in-flight internal refresh session (if any).
  private currentSessionToken: any;
  private queue: TriggerExecutionQueue<T, TData>;
  // Emits true while at least one virtual-row loading session is active.
  private onVirtualLoading = new BehaviorSubject<boolean>(false);
  private virtualLoadingSessions = 0;
  // Result stream of a trigger that was started but not yet consumed by onTrigger.
  private pendingTrigger$: Observable<T[]>;
  private customTriggers: false | Partial<Record<keyof PblDataSourceConfigurableTriggers, boolean>>;
  // Pending setTimeout handles, cleared on dispose.
  private timeoutCancelTokens = new Set<number>();
  // Guards against stacking multiple scroll-end subscriptions.
  private ignoreScrolling: boolean = false;
  private lastEventState = new EventState<T>();

  constructor(private factoryOptions: PblInfiniteScrollFactoryOptions<T, TData>) {
    this.options = normalizeOptions(factoryOptions.infiniteOptions);
    if (this.options.initialVirtualSize > 0) {
      this.totalLength = this.options.initialVirtualSize;
    }
    this.queue = new TriggerExecutionQueue<T, TData>(this.factoryOptions.onTrigger);
  }

  /**
   * Entry point for every datasource changed-event. Decides between:
   * initial load, consuming a previously started (pending) trigger result,
   * continuing an internal refresh session, or a full reset when the data
   * or a custom trigger changed. Returning `false` skips the event.
   */
  onTrigger(rawEvent: PblDataSourceTriggerChangedEvent<TData>): false | DataSourceOf<T> {
    if (rawEvent.isInitial) {
      return this.invokeInitialOnTrigger(rawEvent);
    }
    if (this.pendingTrigger$) {
      // LOG(`HAS pendingTrigger$`);
      const pendingTrigger$ = this.pendingTrigger$;
      this.pendingTrigger$ = undefined;
      // The refresh was issued with the pending stream itself as the payload;
      // if it matches, hand that stream back to the adapter.
      if (rawEvent.data.changed && (rawEvent.data.curr as any) === pendingTrigger$) {
        // LOG(`PENDING - MATCHED!`);
        this.currentSessionToken = undefined;
        return pendingTrigger$
          .pipe(
            finalize(() => {
              // LOG(`PENDING - RESULT DONE`);
              this.deferSyncRows(16, () => this.tickVirtualLoading(-1));
            }));
      }
    }
    // A refresh that carries the current session token: an internally
    // initiated fetch (from onRenderedDataChanged).
    if (this.currentSessionToken && rawEvent.data.changed && rawEvent.data.curr === this.currentSessionToken) {
      if (this.ds.hostGrid.viewport.isScrolling) {
        // Defer the real fetch until scrolling settles; show current rows.
        this.handleScrolling(rawEvent);
        return of(this.ds.source);
      }
      const { result, event } = this.invokeRuntimeOnTrigger(rawEvent);
      if (!result || !event) { // !event for type gate, because if we have "result: then "event" is always set
        // LOG('NO SCROLL - FALSE TRIGGER!');
        this.currentSessionToken = undefined;
        return false;
      } else {
        const { source } = this.ds;
        // Show placeholder rows immediately; the real rows arrive via refresh.
        if (tryAddVirtualRowsBlock(source, event, this.options.blockSize)) {
          this.pendingTrigger$ = result;
          this.tickVirtualLoading(1);
          // LOG('NO SCROLL - VIRTUAL ROWS ADDED');
          return of(source)
            .pipe(
              finalize(() => {
                this.deferSyncRows();
                // LOG('NO SCROLL - VIRTUAL ROWS RENDERED');
                this.currentSessionToken = undefined;
                this.ds.refresh(result as any);
              }));
        } else {
          // LOG('NO SCROLL - NO VIRTUAL ROWS ADDED');
          return result
            .pipe(
              finalize(() => {
                // LOG(`NO SCROLL - RESULT DONE`);
                this.deferSyncRows(16);
                this.currentSessionToken = undefined;
              }));
        }
      }
    }
    // Any other data change (or configured custom trigger) means the world
    // changed under us: drop the cache and restart as if initial.
    if (rawEvent.data.changed || (this.customTriggers && PblInfiniteScrollDataSourceAdapter.isCustomBehaviorEvent(rawEvent, this.customTriggers))) {
      this.cache.clear();
      rawEvent.isInitial = true;
      return this.invokeInitialOnTrigger(rawEvent);
    }
    return false;
    // throw new Error('Invalid');
  }

  /** Lazily creates the adapter (single instance per context). */
  getAdapter(): PblInfiniteScrollDataSourceAdapter<T, TData> {
    if (!this.adapter) {
      this.customTriggers = this.factoryOptions.customTriggers || false;
      // we can't allow any internal trigger handlers to run
      // It will throw the entire datasource out of sync, infinite ds can't do that
      this.adapter = new PblInfiniteScrollDataSourceAdapter<T, TData>(this, { filter: true, sort: true, pagination: true }, this.onVirtualLoading);
    }
    return this.adapter;
  }

  /** Lazily creates the datasource and cache (single instance per context). */
  getDataSource(): PblInfiniteScrollDataSource<T, TData> {
    if (!this.ds) {
      this.ds = new PblInfiniteScrollDataSource<T, TData>(this, this.factoryOptions.dsOptions)
      this.cache = new PblInfiniteScrollDataSourceCache<T, TData>(this, this.factoryOptions.cacheOptions);
      this.ds.onRenderedDataChanged.subscribe(() => this.onRenderedDataChanged() );
      if (this.factoryOptions.onCreated) {
        this.factoryOptions.onCreated(this.ds);
      }
    }
    return this.ds;
  }

  /** Completes the loading stream and cancels any pending timeouts. */
  dispose() {
    this.onVirtualLoading.complete();
    for (const t of this.timeoutCancelTokens.values()) {
      clearTimeout(t);
    }
  }

  /**
   * This is where we detect if we need to internally invoke a trigger because we've reached an area
   * in the grid where row's does not exists but we show the dummy row, hence we need to fetch them.
   * The grid will never trigger an event here since from the grid's perspective a row is showing...
   * This detection also handle's scrolling and session so we don't invoke the trigger to much.
   */
  private onRenderedDataChanged() {
    if (this.lastEventState.skipNextRender()) {
      // if the current event returned items that did not occupy the whole range of the event
      // stop, we don't want to check anything cause we already know we are missing items.
      // since we know we're missing items, we also know we're going to call the same range again which
      // did not return anyway, so it is useless and in the worst case might cause infinite loop
      // LOG(`RENDER DATA SKIPPING DUE TO SKIP NEXT RENDER!`);
      return;
    }
    if (!this.currentSessionToken) {
      if (shouldTriggerInvisibleScroll(this)) {
        // LOG(`RENDER DATA CHANGED FROM ROW ${this.ds.renderStart}`);
        // Start a new session; the async check guards against a newer
        // session having replaced this one before the timeout fires.
        const t = this.currentSessionToken = {};
        this.safeAsyncOp(() => {
          if (this.currentSessionToken === t) {
            this.ds.refresh(t as any);
          }
        }, 0);
      }
    } else {
      // LOG(`RENDER DATA WITH SESSION FROM ROW ${this.ds.renderStart}`);
      if (!this.ds.hostGrid.viewport.isScrolling) {
        // LOG(`SESSION OVERRIDE`);
        this.ds.refresh(this.currentSessionToken = {} as any);
      } else {
        // Wait for scrolling to stop, then re-check once (guarded so only
        // one scroll-end subscription exists at a time).
        if (!this.ignoreScrolling) {
          this.ignoreScrolling = true;
          this.ds.hostGrid.viewport.scrolling
            .pipe(
              filter( d => d === 0),
              take(1),
            )
            .subscribe(d => {
              this.ignoreScrolling = false;
              if (shouldTriggerInvisibleScroll(this)) {
                // LOG(`OVERRIDING AFTER SCROLL SESSION`);
                this.currentSessionToken = undefined;
                this.onRenderedDataChanged();
              }
            });
        }
      }
    }
  }

  /**
   * Create a new event state for the given event, store it in the lastEventState property
   * and returns a pipe that will sync the state of the event as the call progress.
   * @param event
   */
  private wrapEventState(event: PblInfiniteScrollTriggerChangedEvent<TData>) {
    return (this.lastEventState = new EventState<T>(event)).pipe();
  }

  // Schedules a row re-sync on the host grid after `ms` milliseconds,
  // optionally running hooks immediately before/after the sync.
  private deferSyncRows(ms = 0, runBefore?: () => void, runAfter?: () => void) {
    this.safeAsyncOp(() => {
      runBefore && runBefore();
      this.ds.hostGrid.rowsApi.syncRows('data', true);
      runAfter && runAfter();
    }, ms);
  }

  // setTimeout wrapper that tracks the handle so dispose() can cancel it.
  private safeAsyncOp(fn: () => void, delay: number) {
    const cancelToken = setTimeout(() => {
      this.timeoutCancelTokens.delete(cancelToken);
      fn();
    }, delay) as unknown as number;
    this.timeoutCancelTokens.add(cancelToken);
  }

  // Reference-counts virtual-loading sessions and emits the boolean
  // loading state only on 0 <-> 1 transitions (clamped at zero).
  private tickVirtualLoading(value: -1 | 1) {
    this.virtualLoadingSessions = this.virtualLoadingSessions + value;
    const inVirtualLoad = this.onVirtualLoading.value;
    switch (this.virtualLoadingSessions) {
      case 0:
        inVirtualLoad && this.onVirtualLoading.next(false);
        break;
      case 1:
        !inVirtualLoad && this.onVirtualLoading.next(true);
        break;
      default:
        if (this.virtualLoadingSessions < 0) {
          this.virtualLoadingSessions = 0;
        }
        break;
    }
  }

  // While scrolling: show virtual rows right away, then run the real
  // trigger once the viewport reports scrolling has stopped.
  private handleScrolling(rawEvent: PblDataSourceTriggerChangedEvent<TData>) {
    this.tickVirtualLoading(1);
    const newBlock = this.cache.matchNewBlock();
    const event = newBlock ? this.tryGetInfiniteEvent(rawEvent, newBlock) : false as const;
    if (event !== false) {
      if (tryAddVirtualRowsBlock(this.ds.source, event, this.options.blockSize)) {
        // LOG('SCROLL - VIRTUAL ROWS ADDED');
      }
    }
    this.ds.hostGrid.viewport.scrolling
      .pipe(
        filter( d => d === 0),
        take(1),
      )
      .subscribe(d => {
        const { result } = this.invokeRuntimeOnTrigger(rawEvent);
        if (!!result) {
          if (this.pendingTrigger$) {
            this.tickVirtualLoading(-1);
          }
          // LOG('SCROLL DONE - HAS RESULT - HAS PENDING');
          this.ds.refresh(this.pendingTrigger$ = result as any);
        } else if (!this.pendingTrigger$) {
          // LOG('SCROLL DONE = NO RESULT - NOT HAS PENDING');
          this.ds.refresh(this.pendingTrigger$ = of(this.ds.source) as any);
        } else {
          // LOG('SCROLL DONE = NO RESULT - HAS PENDING');
          this.tickVirtualLoading(-1);
        }
      });
  }

  // Runs the user trigger for an initial (or reset-as-initial) load and
  // primes the cache with the returned values.
  private invokeInitialOnTrigger(rawEvent: PblDataSourceTriggerChangedEvent<TData>): false | DataSourceOf<T> {
    // NOTE(review): both ternary branches call createInitialBlock() — the
    // conditional looks redundant; confirm whether a different block was intended.
    const event = this.tryGetInfiniteEvent(rawEvent, rawEvent.isInitial ? this.cache.createInitialBlock() : this.cache.createInitialBlock());
    const result = this.queue.execute(event);
    return result && result.pipe(
      this.wrapEventState(event),
      tap( values => {
        this.cache.clear();
        // NOTE(review): cache is only updated when more than one value
        // arrived — confirm single-item results are intentionally skipped.
        if(values.length > 1) {
          this.cache.update(0, values.length - 1, 1);
        }
        PblInfiniteScrollDataSource.updateVirtualSize(this.options.initialVirtualSize, values);
        if (!rawEvent.isInitial) {
          this.ds.hostGrid.viewport.scrollToOffset(0);
        }
      }),
    );
  }

  // Runs the user trigger for a runtime (scroll-driven) block fetch.
  // Returns `event: false` when no fetch is needed (cache hit or repeat).
  private invokeRuntimeOnTrigger(rawEvent: PblDataSourceTriggerChangedEvent<TData>): { result?: Observable<T[]>; event: false | PblInfiniteScrollTriggerChangedEvent<TData> } {
    const newBlock = this.cache.matchNewBlock();
    const event = newBlock ? this.tryGetInfiniteEvent(rawEvent, newBlock) : false as const;
    if(event !== false) {
      // Skip a fetch that would repeat the exact range of the last
      // completed event.
      if (this.lastEventState.isDone() && this.lastEventState.rangeEquals(event)) {
        return { event: false };
      }
      event.eventSource = 'infiniteScroll';
      const triggerResult = this.queue.execute(event, true);
      if (triggerResult !== false) {
        return {
          event,
          result: triggerResult
            .pipe(
              // tap( () => LOG(`TRIGGER[${event.id}]: ${event.fromRow} - ${event.toRow}`)),
              this.wrapEventState(event),
              map( values => updateCacheAndDataSource(this, event, values) ),
            ),
        };
      }
    }
    return { event };
  }

  // Upgrades a plain changed-event into an infinite-scroll event carrying
  // the current virtual total length and a totalLength update hook.
  private tryGetInfiniteEvent(rawEvent: PblDataSourceTriggerChangedEvent<TData>, block: CacheBlock) {
    const totalLength = this.totalLength || 0;
    rawEvent.updateTotalLength = (totalLength: number) => { this.totalLength = totalLength; };
    (rawEvent as PblInfiniteScrollTriggerChangedEvent).totalLength = totalLength;
    return upgradeChangeEventToInfinite<T, TData>(totalLength, rawEvent, block);
  }
}
import { Interface } from '@ethersproject/abi';
import { Contract } from '@ethersproject/contracts';
import { getLuminance } from '@openpalette/color';
import { CHAIN_ID } from '@openpalette/contract';
import {
CheckIcon,
ExternalLinkIcon,
Pencil1Icon,
} from '@radix-ui/react-icons';
import {
Blockquote,
Body,
ConnectionDisplay,
Divider,
FormRow,
Heading2,
Heading3,
HStack,
Small,
SpacerHorizontal,
SpacerVertical,
VStack,
} from 'components';
import { useAddress, useChainId, useWeb3API, useWeb3Data } from 'contexts';
import { useReadOnlyContractData } from 'contract-data';
import { parseCSSColor } from 'csscolorparser-ts';
import { Button, InputField } from 'designsystem';
import { useFetch } from 'hooks';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import {
createInitialMintStyle,
mintStyleReducer,
MintStyleState,
} from 'state';
import { getEtherActorBaseURL, getEtherscanAddressUrl } from 'web3-utils';
import delegatedAbi from '../assets/slim-gwei-abi.json';
import { BackgroundFill } from '../components/mint/BackgroundFill';
import { ColorPicker } from '../components/mint/ColorPicker';
import { MintingCard } from '../components/mint/MintingCard';
import { SimplePrimaryButton } from '../components/mint/MintingCardDetails';
import {
decodeConfigParameter,
useUrlConfigReducer,
} from '../hooks/useUrlConfigReducer';
// Props computed in getServerSideProps and passed to the Mint page.
interface ServerProps {
  isAddressLocked: boolean; // true when the contract address came from the shared URL config (not user-editable)
  isCreatorLocked: boolean; // true when the creator address came from the shared URL config
  abi: any; // contract ABI fetched server-side from ether.actor; may be absent, in which case the client fetches it
}
// If this is a delegated contract, we need to add in the delegated
// ABI to be able to call those functions. This primitive check for
// `implementation` could be improved, but is probably fine for now
//
// Currently it seems ether.actor doesn't support calling the delegated
// functions, so this will only work if there's a connected wallet
// Augments a contract ABI with the delegated (proxy) ABI when the contract
// exposes an `implementation` entry; otherwise returns the ABI untouched.
function addDelegatedImplementation(abi: any) {
  const hasImplementationEntry = abi.some(
    (entry: any) => entry.name === 'implementation',
  );
  return hasImplementationEntry ? [...abi, ...delegatedAbi] : abi;
}
// Mint page: renders a contract-driven NFT minting card whose visual
// configuration (title, colors, cover asset, …) is encoded in the page URL,
// so the configured page can be shared as a plain link.
export default function Mint({
  isAddressLocked,
  isCreatorLocked,
  abi: serverAbi,
}: ServerProps) {
  const { connect } = useWeb3API();
  const address = useAddress();
  const provider = useWeb3Data()?.provider;
  const chainId = useChainId() ?? CHAIN_ID.MAINNET;
  // Style state is persisted in the URL via a reducer-backed hook.
  const [style, dispatch] = useUrlConfigReducer({
    reducer: mintStyleReducer,
    createInitialState: createInitialMintStyle,
  });
  const [showStyleEditor, setShowStyleEditor] = useState(false);
  // Client-side ABI fetch via ether.actor; skipped (undefined url) until a
  // contract address has been entered.
  const etherActorAbi = useFetch<{ abi: any }>(
    style.contractAddress
      ? `${getEtherActorBaseURL(style.chainId || chainId)}/${
          style.contractAddress
        }.json`
      : undefined,
  );
  // Persist the chain id into the URL config once an ABI is available.
  useEffect(() => {
    if (!serverAbi && etherActorAbi.type !== 'success') return;
    // If there's already a chainId, don't overwrite
    if (style.chainId) return;
    // Note that this also updates the url in the case where we have a serverAbi
    // but no style.chainId. It will default to mainnet.
    dispatch({ type: 'setChainId', value: chainId });
  }, [serverAbi, etherActorAbi, style.chainId, chainId, dispatch]);
  // Default the creator address to the connected wallet unless locked.
  useEffect(() => {
    if (isCreatorLocked || !address) return;
    dispatch({ type: 'setCreatorAddress', value: address });
  }, [isCreatorLocked, address, dispatch]);
  // Prefer the server-provided ABI; fall back to the client-side fetch.
  // Either way the delegated (proxy) ABI is merged in when applicable.
  const abi = useMemo(() => {
    if (serverAbi) {
      return new Interface(addDelegatedImplementation(serverAbi));
    }
    if (etherActorAbi.type !== 'success') return;
    return new Interface(addDelegatedImplementation(etherActorAbi.value.abi));
  }, [etherActorAbi, serverAbi]);
  const contract = useMemo(() => {
    if (!abi) return;
    return new Contract(style.contractAddress, abi, provider);
  }, [style.contractAddress, abi, provider]);
  const inputRef = useRef<HTMLInputElement>(null);
  // Focus the contract-address input on first mount.
  useEffect(() => {
    inputRef.current?.focus();
  }, []);
  // Contrast handling: pick the card background when it is opaque
  // (alpha > 0), otherwise the page background, and invert foreground
  // content when that background is light (luminance > 0.5).
  const parsedBackground = parseCSSColor(style.background);
  const parsedCardBackground = parseCSSColor(style.cardBackground);
  const actualBackground =
    parsedCardBackground && parsedCardBackground[3] > 0
      ? parsedCardBackground
      : parsedBackground;
  const invertForeground = actualBackground
    ? getLuminance({
        r: actualBackground[0] / 255,
        g: actualBackground[1] / 255,
        b: actualBackground[2] / 255,
      }) > 0.5
    : false;
  // The contract's optional `name()` view is used as the card's intrinsic title.
  const nameFunction = abi ? abi.functions['name()'] : undefined;
  const nameResult = useReadOnlyContractData<string>({
    fragment: nameFunction,
    contract,
    chainId: style.chainId,
  });
  // TODO: When abi is fetched and we know contract address is OK, put
  // chainname in config and use that to fetch from server. Show error if
  // user's wallet is connected to the wrong network
  return (
    <VStack flex={'1 1 auto'} position="relative">
      {!abi && <BackgroundFill background={style.background} />}
      {!serverAbi && etherActorAbi.type !== 'success' && (
        <VStack flex="1" alignItems="center" justifyContent="center">
          <VStack width={450} gap={20}>
            <VStack>
              <Heading2 textAlign="center">Contract Address</Heading2>
              <SpacerVertical size={20} />
              <HStack>
                <InputField.Root id="input-contract-address" flex="1">
                  <InputField.Input
                    ref={inputRef}
                    disabled={isAddressLocked}
                    value={style.contractAddress}
                    placeholder={'Enter Contract Address'}
                    type="text"
                    style={{
                      textAlign: 'center',
                      padding: '8px 8px',
                      fontSize: '16px',
                      fontFamily: 'monospace',
                    }}
                    onChange={(value) => {
                      dispatch({ type: 'setContractAddress', value });
                    }}
                  />
                </InputField.Root>
              </HStack>
              <SpacerVertical size={10} />
              {style.contractAddress && etherActorAbi.type === 'pending' && (
                <>
                  <Body
                    textAlign="center"
                    fontFamily="monospace"
                    className="flickerAnimation"
                  >
                    Loading...
                  </Body>
                </>
              )}
              {etherActorAbi.type === 'failure' && (
                <>
                  <Body textAlign="center" color="red" fontFamily="monospace">
                    {etherActorAbi.value.message}
                  </Body>
                  {!provider && (
                    <>
                      <SpacerVertical size={10} />
                      <Blockquote>
                        If this contract is on a network other than Ethereum
                        mainnet, please connect your wallet first.
                      </Blockquote>
                    </>
                  )}
                </>
              )}
            </VStack>
            <Divider variant="light" />
            <VStack>
              <HStack alignItems="center">
                <Heading3>Connected Wallet</Heading3>
                <SpacerHorizontal />
                {provider ? (
                  <ConnectionDisplay />
                ) : (
                  <SimplePrimaryButton onClick={connect}>
                    Connect wallet
                  </SimplePrimaryButton>
                )}
              </HStack>
            </VStack>
          </VStack>
        </VStack>
      )}
      {abi && style.chainId && (
        <HStack flex="1 1 auto" position="relative">
          <BackgroundFill background={style.background} />
          <VStack
            flex="2"
            padding={40}
            alignItems="center"
            justifyContent="center"
            position="relative"
            breakpoints={{
              [600]: {
                padding: 20,
              },
            }}
          >
            <VStack position="relative" gap={20}>
              <MintingCard
                dispatch={dispatch}
                editing={showStyleEditor}
                intrinsicName={nameResult}
                title={style.title}
                description={style.description || undefined}
                abi={abi}
                contract={contract}
                background={style.cardBackground}
                invertForeground={invertForeground}
                coverAsset={style.coverAsset}
                contractChainId={style.chainId}
                dataSources={style.dataSources}
              />
              {(!isAddressLocked || style.creatorAddress === address) && (
                <VStack
                  position="absolute"
                  left={'calc(100% + 20px)'}
                  top={0}
                  alignItems="flex-start"
                  gap={20}
                  breakpoints={{
                    [800]: {
                      position: 'unset',
                    },
                  }}
                >
                  <HStack background="#222" borderRadius={4}>
                    <Button
                      onClick={() => setShowStyleEditor(!showStyleEditor)}
                    >
                      {showStyleEditor ? <CheckIcon /> : <Pencil1Icon />}
                      <SpacerHorizontal size={8} inline />
                      {showStyleEditor ? 'Confirm' : 'Edit'}
                    </Button>
                  </HStack>
                  {showStyleEditor && (
                    <VStack
                      minWidth={300}
                      gap={8}
                      background="#222"
                      borderRadius={4}
                      padding="4px 6px"
                    >
                      <FormRow
                        variant="small"
                        title="Background"
                        tooltip={
                          <VStack gap={20}>
                            <Small>
                              This field supports any CSS background value. You
                              can use solid colors, gradients, images, multiple
                              backgrounds, and more.
                            </Small>
                            <Small>
                              Example gradient:{' '}
                              <code
                                style={{
                                  background: '#222',
                                  padding: '2px 4px',
                                  borderRadius: '2px',
                                }}
                              >
                                linear-gradient(pink, black)
                              </code>
                            </Small>
                            <Small>
                              Example Image:{' '}
                              <code
                                style={{
                                  background: '#222',
                                  padding: '2px 4px',
                                  borderRadius: '2px',
                                }}
                              >
                                center/cover
                                url("https://picsum.photos/id/237/500/500")
                              </code>
                            </Small>
                          </VStack>
                        }
                      >
                        <HStack flex="1" gap={6}>
                          <ColorPicker
                            color={style.background}
                            onChange={(value) => {
                              dispatch({ type: 'setBackground', value });
                            }}
                          />
                          <InputField.Root>
                            <InputField.Input
                              value={style.background}
                              onChange={(value) => {
                                dispatch({ type: 'setBackground', value });
                              }}
                            />
                          </InputField.Root>
                        </HStack>
                      </FormRow>
                      <FormRow variant="small" title="Card Color">
                        <HStack flex="1" gap={6}>
                          <ColorPicker
                            color={style.cardBackground}
                            onChange={(value) => {
                              dispatch({ type: 'setCardBackground', value });
                            }}
                          />
                          <InputField.Root>
                            <InputField.Input
                              value={style.cardBackground}
                              onChange={(value) => {
                                dispatch({
                                  type: 'setCardBackground',
                                  value,
                                });
                              }}
                            />
                          </InputField.Root>
                        </HStack>
                      </FormRow>
                    </VStack>
                  )}
                  {showStyleEditor && (
                    <VStack
                      minWidth={300}
                      gap={8}
                      background="#222"
                      borderRadius={4}
                      padding="4px 6px"
                      alignSelf="start"
                    >
                      <Heading3>How it works</Heading3>
                      <Small>
                        The configuration for this page is stored in your
                        browser's URL (address bar). When you're done editing,
                        you can share the URL and collectors can mint with it.
                      </Small>
                      <Small>
                        We recommend using a URL shortener like{' '}
                        <a
                          href="https://bitly.com/"
                          target="_blank"
                          rel="noreferrer"
                        >
                          bitly →
                        </a>
                        , both so the URL looks nicer, and so you can make
                        updates to it after sharing.
                      </Small>
                      <Small>
                        You can edit your page if you connect with the same
                        wallet. If you make edits, you'll need to re-share the
                        new URL and/or update any shortened URL links.
                      </Small>
                    </VStack>
                  )}
                </VStack>
              )}
            </VStack>
          </VStack>
        </HStack>
      )}
      {abi && (
        <HStack
          position="relative"
          padding={'20px 40px'}
          gap={20}
          breakpoints={{
            [1000]: {
              flexDirection: 'column',
              padding: '20px',
              order: 0,
            },
          }}
        >
          <BackgroundFill
            background={style.cardBackground || style.background}
          />
          <HStack
            flex="1"
            alignItems="center"
            gap={20}
            filter={invertForeground ? 'invert()' : undefined}
          >
            <Heading3>NFT Contract Address</Heading3>
            <HStack alignSelf="stretch" gap={10} flex="1" maxWidth={470}>
              <InputField.Root id="input-multi-mint" flex="1">
                <InputField.Input
                  value={style.contractAddress}
                  // disabled
                  type="text"
                  style={{
                    textAlign: 'center',
                    padding: '8px 8px',
                    fontSize: '16px',
                    fontFamily: 'monospace',
                  }}
                  // read-only display: edits are intentionally ignored
                  onChange={(value) => {}}
                />
              </InputField.Root>
              <Button
                as="a"
                {...{
                  href: getEtherscanAddressUrl(chainId, style.contractAddress),
                  target: '_blank',
                  rel: 'noreferrer',
                }}
              >
                <ExternalLinkIcon />
              </Button>
            </HStack>
          </HStack>
          {address && (
            <HStack
              alignItems="center"
              gap={20}
              filter={invertForeground ? 'invert()' : undefined}
            >
              <Heading3>Wallet</Heading3>
              <ConnectionDisplay />
            </HStack>
          )}
        </HStack>
      )}
    </VStack>
  );
}
export async function getServerSideProps(
context: any,
): Promise<{ props: ServerProps }> {
const parsedConfig: Partial<MintStyleState> = decodeConfigParameter(
context.query.config,
);
let abi: any;
try {
if (parsedConfig.contractAddress) {
const response = await fetch(
`${getEtherActorBaseURL(parsedConfig.chainId ?? CHAIN_ID.MAINNET)}/${
parsedConfig.contractAddress
}.json`,
);
const data = await response.json();
abi = data.abi;
}
} catch {
//
}
return {
props: {
isAddressLocked: !!parsedConfig.contractAddress,
isCreatorLocked: !!parsedConfig.creatorAddress,
...(abi && { abi }),
},
};
} | the_stack |
import { Scope } from '@sentry/browser';
import { fireEvent, render, screen } from '@testing-library/react';
import * as React from 'react';
import { useState } from 'react';
import {
ErrorBoundary,
ErrorBoundaryProps,
isAtLeastReact17,
UNKNOWN_COMPONENT,
withErrorBoundary,
} from '../src/errorboundary';
// Spies for the Sentry SDK entry points plus a fixed event id so tests can
// assert exact call arguments and return values.
const mockCaptureException = jest.fn();
const mockShowReportDialog = jest.fn();
const EVENT_ID = 'test-id-123';
// Replace only captureException/showReportDialog on '@sentry/browser',
// keeping the rest of the real module (e.g. Scope) intact.
jest.mock('@sentry/browser', () => {
  const actual = jest.requireActual('@sentry/browser');
  return {
    ...actual,
    captureException: (...args: unknown[]) => {
      mockCaptureException(...args);
      return EVENT_ID;
    },
    showReportDialog: (options: any) => {
      mockShowReportDialog(options);
    },
  };
});
// Fixture component that always throws during render, using its prop as the
// error message.
function Boo({ title }: { title: string }): JSX.Element {
  throw new Error(title);
}
// Fixture that renders Boo with a state-derived prop, so the throw happens
// inside a realistic (hooks-using) component render.
function Bam(): JSX.Element {
  const [title] = useState('boom');
  return <Boo title={title} />;
}
// Test harness: wraps children in ErrorBoundary and exposes an "errorBtn"
// that swaps the healthy children for the throwing <Bam /> fixture.
// onReset is chained so resetting the boundary also restores the children.
const TestApp: React.FC<ErrorBoundaryProps> = ({ children, ...props }) => {
  const [isError, setError] = React.useState(false);
  return (
    <ErrorBoundary
      {...props}
      onReset={(...args) => {
        setError(false);
        if (props.onReset) {
          props.onReset(...args);
        }
      }}
    >
      {isError ? <Bam /> : children}
      <button
        data-testid="errorBtn"
        onClick={() => {
          setError(true);
        }}
      />
    </ErrorBoundary>
  );
};
// HOC wrapper: verifies the wrapped component's displayName is derived from
// the inner component (or falls back to UNKNOWN_COMPONENT for anonymous ones).
describe('withErrorBoundary', () => {
  it('sets displayName properly', () => {
    const TestComponent = () => <h1>Hello World</h1>;
    const Component = withErrorBoundary(TestComponent, { fallback: <h1>fallback</h1> });
    expect(Component.displayName).toBe('errorBoundary(TestComponent)');
  });
  it('defaults to an unknown displayName', () => {
    const Component = withErrorBoundary(() => <h1>Hello World</h1>, { fallback: <h1>fallback</h1> });
    expect(Component.displayName).toBe(`errorBoundary(${UNKNOWN_COMPONENT})`);
  });
});
describe('ErrorBoundary', () => {
  // Silence React's console.error noise from the intentionally thrown errors.
  jest.spyOn(console, 'error').mockImplementation();
  afterEach(() => {
    mockCaptureException.mockClear();
    mockShowReportDialog.mockClear();
  });
  it('renders null if not given a valid `fallback` prop', () => {
    const { container } = render(
      // @ts-ignore Passing wrong type on purpose
      <ErrorBoundary fallback="Not a ReactElement">
        <Bam />
      </ErrorBoundary>,
    );
    expect(container.innerHTML).toBe('');
  });
  it('renders null if not given a valid `fallback` prop function', () => {
    const { container } = render(
      // @ts-ignore Passing wrong type on purpose
      <ErrorBoundary fallback={() => 'Not a ReactElement'}>
        <Bam />
      </ErrorBoundary>,
    );
    expect(container.innerHTML).toBe('');
  });
  it('renders a fallback on error', () => {
    const { container } = render(
      <ErrorBoundary fallback={<h1>Error Component</h1>}>
        <Bam />
      </ErrorBoundary>,
    );
    expect(container.innerHTML).toBe('<h1>Error Component</h1>');
  });
  it('calls `onMount` when mounted', () => {
    const mockOnMount = jest.fn();
    render(
      <ErrorBoundary fallback={<h1>Error Component</h1>} onMount={mockOnMount}>
        <h1>children</h1>
      </ErrorBoundary>,
    );
    expect(mockOnMount).toHaveBeenCalledTimes(1);
  });
  it('calls `onUnmount` when unmounted', () => {
    const mockOnUnmount = jest.fn();
    const { unmount } = render(
      <ErrorBoundary fallback={<h1>Error Component</h1>} onUnmount={mockOnUnmount}>
        <h1>children</h1>
      </ErrorBoundary>,
    );
    expect(mockOnUnmount).toHaveBeenCalledTimes(0);
    unmount();
    expect(mockOnUnmount).toHaveBeenCalledTimes(1);
    // No error occurred, so error/componentStack/eventId are all null.
    expect(mockOnUnmount).toHaveBeenCalledWith(null, null, null);
  });
  it('renders children correctly when there is no error', () => {
    const { container } = render(
      <ErrorBoundary fallback={<h1>Error Component</h1>}>
        <h1>children</h1>
      </ErrorBoundary>,
    );
    expect(container.innerHTML).toBe('<h1>children</h1>');
  });
  it('supports rendering children as a function', () => {
    const { container } = render(
      <ErrorBoundary fallback={<h1>Error Component</h1>}>{() => <h1>children</h1>}</ErrorBoundary>,
    );
    expect(container.innerHTML).toBe('<h1>children</h1>');
  });
  describe('fallback', () => {
    it('renders a fallback component', async () => {
      const { container } = render(
        <TestApp fallback={<p>You have hit an error</p>}>
          <h1>children</h1>
        </TestApp>,
      );
      expect(container.innerHTML).toContain('<h1>children</h1>');
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(container.innerHTML).not.toContain('<h1>children</h1>');
      expect(container.innerHTML).toBe('<p>You have hit an error</p>');
    });
    it('renders a render props component', async () => {
      let errorString = '';
      let compStack = '';
      let eventIdString = '';
      const { container } = render(
        <TestApp
          fallback={({ error, componentStack, eventId }) => {
            if (error && componentStack && eventId) {
              errorString = error.toString();
              compStack = componentStack;
              eventIdString = eventId;
            }
            return <div>Fallback here</div>;
          }}
        >
          <h1>children</h1>
        </TestApp>,
      );
      expect(container.innerHTML).toContain('<h1>children</h1>');
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(container.innerHTML).not.toContain('<h1>children</h1');
      expect(container.innerHTML).toBe('<div>Fallback here</div>');
      expect(errorString).toBe('Error: boom');
      /*
        at Boo (/path/to/sentry-javascript/packages/react/test/errorboundary.test.tsx:23:20)
        at Bam (/path/to/sentry-javascript/packages/react/test/errorboundary.test.tsx:40:11)
        at ErrorBoundary (/path/to/sentry-javascript/packages/react/src/errorboundary.tsx:2026:39)
        at TestApp (/path/to/sentry-javascript/packages/react/test/errorboundary.test.tsx:22:23)
      */
      expect(compStack).toMatch(
        /\s+(at Boo) \(.*?\)\s+(at Bam) \(.*?\)\s+(at ErrorBoundary) \(.*?\)\s+(at TestApp) \(.*?\)/g,
      );
      expect(eventIdString).toBe(EVENT_ID);
    });
  });
  describe('error', () => {
    it('calls `componentDidCatch() when an error occurs`', () => {
      const mockOnError = jest.fn();
      render(
        <TestApp fallback={<p>You have hit an error</p>} onError={mockOnError}>
          <h1>children</h1>
        </TestApp>,
      );
      expect(mockOnError).toHaveBeenCalledTimes(0);
      expect(mockCaptureException).toHaveBeenCalledTimes(0);
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(mockOnError).toHaveBeenCalledTimes(1);
      expect(mockOnError).toHaveBeenCalledWith(expect.any(Error), expect.any(String), expect.any(String));
      expect(mockCaptureException).toHaveBeenCalledTimes(1);
      expect(mockCaptureException).toHaveBeenLastCalledWith(expect.any(Error), {
        contexts: { react: { componentStack: expect.any(String) } },
      });
      expect(mockOnError.mock.calls[0][0]).toEqual(mockCaptureException.mock.calls[0][0]);
      // Check if error.cause -> react component stack
      const error = mockCaptureException.mock.calls[0][0];
      const cause = error.cause;
      expect(cause.stack).toEqual(mockCaptureException.mock.calls[0][1].contexts.react.componentStack);
      expect(cause.name).toContain('React ErrorBoundary');
      expect(cause.message).toEqual(error.message);
    });
    it('calls `beforeCapture()` when an error occurs', () => {
      const mockBeforeCapture = jest.fn();
      // beforeCapture must run before captureException; assert ordering here.
      const testBeforeCapture = (...args: any[]) => {
        expect(mockCaptureException).toHaveBeenCalledTimes(0);
        mockBeforeCapture(...args);
      };
      render(
        <TestApp fallback={<p>You have hit an error</p>} beforeCapture={testBeforeCapture}>
          <h1>children</h1>
        </TestApp>,
      );
      expect(mockBeforeCapture).toHaveBeenCalledTimes(0);
      expect(mockCaptureException).toHaveBeenCalledTimes(0);
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(mockBeforeCapture).toHaveBeenCalledTimes(1);
      expect(mockBeforeCapture).toHaveBeenLastCalledWith(expect.any(Scope), expect.any(Error), expect.any(String));
      expect(mockCaptureException).toHaveBeenCalledTimes(1);
    });
    it('shows a Sentry Report Dialog with correct options', () => {
      const options = { title: 'custom title' };
      render(
        <TestApp fallback={<p>You have hit an error</p>} showDialog dialogOptions={options}>
          <h1>children</h1>
        </TestApp>,
      );
      expect(mockShowReportDialog).toHaveBeenCalledTimes(0);
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(mockShowReportDialog).toHaveBeenCalledTimes(1);
      expect(mockShowReportDialog).toHaveBeenCalledWith({ ...options, eventId: EVENT_ID });
    });
    it('resets to initial state when reset', async () => {
      const { container } = render(
        <TestApp fallback={({ resetError }) => <button data-testid="reset" onClick={resetError} />}>
          <h1>children</h1>
        </TestApp>,
      );
      expect(container.innerHTML).toContain('<h1>children</h1>');
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(container.innerHTML).toContain('<button data-testid="reset">');
      const reset = screen.getByTestId('reset');
      fireEvent.click(reset);
      expect(container.innerHTML).toContain('<h1>children</h1>');
    });
    it('calls `onReset()` when reset', () => {
      const mockOnReset = jest.fn();
      render(
        <TestApp
          onReset={mockOnReset}
          fallback={({ resetError }) => <button data-testid="reset" onClick={resetError} />}
        >
          <h1>children</h1>
        </TestApp>,
      );
      expect(mockOnReset).toHaveBeenCalledTimes(0);
      const btn = screen.getByTestId('errorBtn');
      fireEvent.click(btn);
      expect(mockOnReset).toHaveBeenCalledTimes(0);
      const reset = screen.getByTestId('reset');
      fireEvent.click(reset);
      expect(mockOnReset).toHaveBeenCalledTimes(1);
      expect(mockOnReset).toHaveBeenCalledWith(expect.any(Error), expect.any(String), expect.any(String));
    });
  });
});
// Table-driven checks for the React >= 17 version-string detection helper,
// covering partial version strings (missing patch and/or minor components).
describe('isAtLeastReact17', () => {
  test.each([
    ['React 15 with no patch', '15.0', false],
    ['React 15 with no patch and no minor', '15.5', false],
    ['React 16', '16.0.4', false],
    ['React 17', '17.0.0', true],
    ['React 17 with no patch', '17.4', true],
    ['React 17 with no patch and no minor', '17', true],
    ['React 18', '18.1.0', true],
    ['React 19', '19.0.0', true],
  ])('%s', (_: string, input: string, output: ReturnType<typeof isAtLeastReact17>) => {
    expect(isAtLeastReact17(input)).toBe(output);
  });
});
import * as React from 'react';
import { DefaultButton, CommandButton } from 'office-ui-fabric-react';
import { IWebPartContext } from '@microsoft/sp-webpart-base';
import { Logger, LogLevel } from "@pnp/logging";
import styles from './BoxButtonWebPart.module.scss';
import * as strings from 'boxButtonWebPartStrings';
import { IBoxButton } from '../BoxButtonWebPart';
import LinkPickerPanel from '../../../components/LinkPickerPanel/LinkPickerPanel';
import { LinkType } from '../../../components/LinkPickerPanel/ILinkPickerPanelProps';
import ElemUtil from "../../../utilities/element/elemUtil";
import { DisplayMode } from '@microsoft/sp-core-library';
import WebPartTitle from "../../../components/WebPartTitle/WebPartTitle";
// Display names of the fields expected on the backing SharePoint list when
// the web part runs in "advanced" (list-driven) mode.
const urlField = "URL";
const iconField = "Font Awesome Icon";
const isThemedField = "Has Blue Background";
const openNewTabField = "Open Link in New Tab";
// Props for the BoxButton component. Callbacks are supplied by the hosting
// web part, which owns the persisted link data.
export interface IBoxButtonProps {
  name: string;
  fontAwesomeIcon: string; // Font Awesome class for the button icon
  url: string;
  isThemed: boolean; // true => blue (themed) background variant
  newTab: boolean; // true => link opens in a new tab
  data: IBoxButton[]; // links rendered in "basic" mode
  isEdit: boolean; // true when the page is in edit mode
  title: string;
  usesListMode: boolean; // true => "advanced" list-driven rendering
  advancedCamlQuery: string;
  advancedCamlData: string;
  links: any[]; // list items rendered in "advanced" mode
  setTitle: (title: string) => void;
  setUrl: Function;
  editItem: Function; // called with the item index (-1 to add)
  deleteItem: Function;
  rearrangeItems: Function; // called with the new index order after drag/drop
  context: IWebPartContext;
  displayMode: DisplayMode;
}
// Intentionally empty: BoxButton keeps no React state of its own.
export interface IBoxButtonState {
}
/**
 * BoxButton: renders a set of tile-style link buttons.
 *
 * Two modes:
 *  - "basic": links stored in web part properties, editable inline with
 *    drag-and-drop re-ordering;
 *  - "advanced": links driven by a SharePoint list (field names are the
 *    module-level *Field constants).
 */
export default class BoxButton extends React.Component<IBoxButtonProps, IBoxButtonState> {
  private LOG_SOURCE = "BoxButton";
  private linkPickerPanel: LinkPickerPanel;

  // Element currently being dragged during re-ordering.
  private _dragElement: any;
  public get dragElement(): any {
    return this._dragElement;
  }
  public set dragElement(v: any) {
    this._dragElement = v;
  }

  // Element under the mouse when the drag gesture started; used to restrict
  // drag initiation to the drag handle.
  private _mouseTarget: any;
  public get mouseTarget(): any {
    return this._mouseTarget;
  }
  public set mouseTarget(v: any) {
    this._mouseTarget = v;
  }

  // One-shot flag consumed by checkEventDone.
  private _eventDone: boolean;
  public get eventDone(): boolean {
    return this._eventDone;
  }
  public set eventDone(v: boolean) {
    this._eventDone = v;
  }

  public setTitle(event) {
    this.props.setTitle(event.target.value);
  }

  // ** Event handlers for link picker **
  // Open the link picker - called from onClick of Change (link) button
  public openLinkPicker = (event) => {
    if (this.linkPickerPanel) {
      this.linkPickerPanel.pickLink()
        .then(({ name, url }) => {
          this.props.setUrl(name, url);
        });
    }
  }

  // ** Event handlers for buttons **/
  // User clicks + button to add a link (-1 signals a new item)
  public addBox = (event) => {
    this.props.editItem(-1);
  }

  // User clicks edit button on a link
  public editBox = (event) => {
    try {
      event.stopPropagation();
      event.preventDefault();
      this.props.editItem(ElemUtil.closest(event.target, '[data-index]').getAttribute("data-index"));
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (editBox)`, LogLevel.Error);
    }
    return false;
  }

  // User clicks delete button on a link (confirmed via browser dialog)
  public deleteBox = (event) => {
    try {
      event.stopPropagation();
      event.preventDefault();
      if (confirm(strings.DeleteItemConfirmMessage))
        this.props.deleteItem(ElemUtil.closest(event.target, '[data-index]').getAttribute("data-index"));
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (deleteBox)`, LogLevel.Error);
    }
    return false;
  }

  // Consumes the one-shot eventDone flag; returns false to cancel when set.
  public checkEventDone = (event) => {
    if (this.eventDone) {
      this.eventDone = false;
      return false;
    }
  }

  // Event handlers for drag and drop
  public mouseDragDown = (event) => {
    this.mouseTarget = event.target;
  }

  public startDrag = (event) => {
    try {
      event.stopPropagation();
      // Only start a drag when the gesture began on the drag handle.
      if (event.currentTarget.querySelector('#drag-handle').contains(this.mouseTarget)) {
        this.dragElement = event.currentTarget;
        // Fix: the DataTransfer property is `effectAllowed`; the previous
        // `eventAllowed` assignment was a silent no-op.
        event.dataTransfer.effectAllowed = "move";
        // Fix: MIME type was misspelled 'text/plan'. setData must be called
        // for Firefox to initiate the drag at all.
        event.dataTransfer.setData('text/plain', 'drag-handle');
      }
      else {
        event.preventDefault();
      }
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (startDrag)`, LogLevel.Error);
    }
  }

  // True when `b` precedes `a` among children of the same parent node.
  public isbefore(a, b) {
    if (a.parentNode == b.parentNode) {
      for (var cur = a; cur; cur = cur.previousSibling) {
        if (cur === b) {
          return true;
        }
      }
    }
    return false;
  }

  // Drag finished: read the re-ordered DOM and persist the new index order.
  public endDrag = (event) => {
    try {
      const indexArr: number[] = [];
      const currentElements = ElemUtil.closest(event.currentTarget, '[data-reactroot]').querySelectorAll('[data-index]');
      currentElements.forEach((element) => { indexArr.push(parseInt(element.getAttribute('data-index'))); });
      this.props.rearrangeItems(indexArr);
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (endDrag)`, LogLevel.Error);
    }
  }

  // While dragging over another tile, move the dragged element before or
  // after it depending on their relative DOM order.
  public moveItem = (event) => {
    try {
      if (this.isbefore(this.dragElement, ElemUtil.closest(event.target, '[data-index]'))) {
        ElemUtil.closest(event.target, '[data-index]').parentNode.insertBefore(this.dragElement, ElemUtil.closest(event.target, '[data-index]'));
      }
      else {
        if (!this.dragElement.contains(ElemUtil.closest(event.target, '[data-index]')))
          ElemUtil.closest(event.target, '[data-index]').parentNode.insertBefore(this.dragElement, ElemUtil.closest(event.target, '[data-index]').nextSibling);
      }
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (moveItem)`, LogLevel.Error);
    }
  }

  // ** Render functions **
  public render(): React.ReactElement<IBoxButtonProps> {
    let body = (this.props.usesListMode) ? this.renderAdvancedWebPart() : this.renderBasicWebPart();
    // Insert retired web part message
    return (
      <>
        {(this.props.displayMode == DisplayMode.Edit) &&
          <div className={styles.editMode}>{strings.RetiredMessage}</div>
        }
        {body}
      </>
    );
  }

  // Render the "basic" web part with editable links
  public renderBasicWebPart(): JSX.Element {
    try {
      return (
        <div data-component="BoxButton-Basic">
          <WebPartTitle editMode={this.props.isEdit} title={this.props.title} updateTitle={this.props.setTitle} />
          {this.props.isEdit &&
            <CommandButton className={styles["new-item"]} iconProps={{ iconName: 'Add' }} onClick={this.addBox.bind(this)}>{strings.AddNewButtonText}</CommandButton>
          }
          {this.props.data.length > 0 && this.props.data.map((item) => {
            return this.renderBasicDefaultLayout(item);
          })
          }
          {this.props.data.length === 0 &&
            <div className={styles["box-link"]}>
              <div className={styles["empty-box"]}>
                <div role="button" onClick={this.openLinkPicker.bind(this)}>{strings.PlaceholderButtonText}</div>
              </div>
            </div>
          }
          {this.props.isEdit &&
            <LinkPickerPanel
              webPartContext={this.props.context}
              className={styles["link-picker"]}
              webAbsUrl={this.props.context.pageContext.web.absoluteUrl}
              linkType={LinkType.any}
              ref={(ref) => { this.linkPickerPanel = ref; }} />
          }
        </div>
      );
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (renderBasicWebPart)`, LogLevel.Error);
      return null;
    }
  }

  // Render a single editable link tile ("basic" mode).
  public renderBasicDefaultLayout(item: IBoxButton): JSX.Element {
    try {
      return (
        <div className={styles["box-link"]} role="link" id={"item-" + this.props.data.indexOf(item)} key={"item-" + this.props.data.indexOf(item)} draggable={this.props.isEdit} onDragStart={this.startDrag.bind(this)} onMouseDown={this.mouseDragDown.bind(this)} onDragEnter={this.moveItem.bind(this)} onDragEnd={this.endDrag.bind(this)} data-index={this.props.data.indexOf(item)}>
          {item.openNew &&
            // Fix: target was "blank" (a *named* window that every link would
            // reuse); "_blank" opens a new tab. rel guards window.opener.
            <a href={item.url} target="_blank" rel="noopener noreferrer" data-interception="off">
              <div className={styles["box-button"] + " " + (item.isBlue ? styles["themed"] : "") + " " + (this.props.isEdit ? styles["edit"] : "")}>
                <i className={item.icon ? "fa " + item.icon : ""}></i>
                {item.name}
              </div>
            </a>
          }
          {!item.openNew &&
            <a href={item.url}>
              <div className={styles["box-button"] + " " + (item.isBlue ? styles["themed"] : "") + " " + (this.props.isEdit ? styles["edit"] : "")}>
                <i className={item.icon ? "fa " + item.icon : ""}></i>
                {item.name}
              </div>
            </a>
          }
          {this.props.isEdit &&
            <div className={styles["edit-controls"]}>
              <DefaultButton iconProps={{ iconName: "Clear" }} onClick={this.deleteBox.bind(this)} />
              <DefaultButton iconProps={{ iconName: "Edit" }} onClick={this.editBox.bind(this)} />
              <i className="ms-Icon ms-Icon--Move" id="drag-handle" aria-hidden="true"></i>
            </div>
          }
        </div>);
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (renderBasicDefaultLayout)`, LogLevel.Error);
      return null;
    }
  }

  // Render the "advanced" web part, which is list-driven
  public renderAdvancedWebPart(): JSX.Element {
    try {
      return (
        <div data-component="BoxButton-Advanced">
          <WebPartTitle editMode={this.props.isEdit} title={this.props.title} updateTitle={this.props.setTitle} />
          {this.props.links.length > 0 && this.props.links.map((item) => {
            return this.renderAdvancedDefaultLayout(item);
          })}
        </div>
      );
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (renderAdvancedWebPart)`, LogLevel.Error);
      return null;
    }
  }

  // Render a single list-driven link tile ("advanced" mode).
  public renderAdvancedDefaultLayout(item: any): JSX.Element {
    try {
      return (
        <div className={styles["box-link"]} role="link" key={"item-" + this.props.links.indexOf(item)}>
          {item[openNewTabField] &&
            // Fix: target="_blank" (was the named window "blank"); rel guards
            // window.opener for list-supplied URLs.
            <a href={item[urlField]} target="_blank" rel="noopener noreferrer" data-interception="off">
              <div className={styles["box-button"] + " " + (item[isThemedField] ? styles["themed"] : "")}>
                <i className={item[iconField] ? "fa " + item[iconField] : ""}></i>
                {item[urlField + "_text"]}
              </div>
            </a>
          }
          {!item[openNewTabField] &&
            <a href={item[urlField]}>
              <div className={styles["box-button"] + " " + (item[isThemedField] ? styles["themed"] : "")}>
                <i className={item[iconField] ? "fa " + item[iconField] : ""}></i>
                {item[urlField + "_text"]}
              </div>
            </a>
          }
        </div>
      );
    } catch (err) {
      Logger.write(`${err} - ${this.LOG_SOURCE} (renderAdvancedDefaultLayout)`, LogLevel.Error);
      return null;
    }
  }
}
import { Client } from '../../api/client';
import { SessionKeepAlive } from '../session-keep-alive';
import { Bounds, GenericEvent, ActiveMapTool, ImageFormat, RefreshMode, SelectionVariant, ClientKind, LayerTransparencySet, Size, BLANK_SIZE, IMapGuideViewerSupport, Dictionary } from '../../api/common';
import { IQueryMapFeaturesOptions } from '../../api/request-builder';
import { QueryMapFeaturesResponse, FeatureSet } from '../../api/contracts/query';
import WKTFormat from "ol/format/WKT";
import Polygon, { fromExtent } from 'ol/geom/Polygon';
import Geometry from 'ol/geom/Geometry';
import { queryMapFeatures, setMouseCoordinates, setFeatureTooltipsEnabled } from '../../actions/map';
import View from 'ol/View';
import debounce from 'lodash.debounce';
import { layerTransparencyChanged, areViewsCloseToEqual } from '../../utils/viewer-state';
import { areArraysDifferent } from '../../utils/array';
import { MgLayerSetGroup } from "../../api/mg-layer-set-group";
import { FeatureQueryTooltip } from '../tooltips/feature';
import { RuntimeMap } from '../../api/contracts/runtime-map';
import { debug, warn } from '../../utils/logger';
import { getSiteVersion, canUseQueryMapFeaturesV4 } from '../../utils/site-version';
import { BLANK_GIF_DATA_URI } from '../../constants';
import { isSessionExpiredError } from '../../api/error';
import { BaseMapProviderContext, IMapProviderState, IViewerComponent, IMapProviderStateExtras, recursiveFindLayer } from './base';
import { assertIsDefined } from '../../utils/assert';
import { strIsNullOrEmpty, STR_EMPTY } from '../../utils/string';
import { ensureParameters } from '../../utils/url';
import { ActionType } from '../../constants/actions';
import { buildSelectionXml, getActiveSelectedFeatureXml } from '../../api/builders/deArrayify';
import { MapGuideMockMode } from '../mapguide-debug-context';
import { useViewerImageFormat, useConfiguredAgentUri, useConfiguredAgentKind, useViewerPointSelectionBuffer, useViewerFeatureTooltipsEnabled, useConfiguredManualFeatureTooltips, useViewerSelectionColor, useViewerSelectionImageFormat, useViewerActiveFeatureSelectionColor, useActiveMapSelectionSet, useConfiguredLoadIndicatorPositioning, useConfiguredLoadIndicatorColor, useViewerActiveTool, useActiveMapView, useViewerViewRotation, useViewerViewRotationEnabled, useActiveMapName, useViewerLocale, useActiveMapExternalBaseLayers, useConfiguredCancelDigitizationKey, useConfiguredUndoLastPointKey, useActiveMapLayers, useActiveMapInitialExternalLayers, useViewerIsStateless, useCustomAppSettings, useViewerBusyCount } from '../../containers/hooks';
import { useActiveMapState, useActiveMapSessionId, useActiveMapSelectableLayerNames, useActiveMapLayerTransparency, useActiveMapShowGroups, useActiveMapHideGroups, useActiveMapShowLayers, useActiveMapHideLayers, useActiveMapActiveSelectedFeature } from '../../containers/hooks-mapguide';
import { useReduxDispatch } from './context';
import { UTFGridTrackingTooltip } from '../tooltips/utfgrid';
import olTileLayer from "ol/layer/Tile";
import olUtfGridSource from "ol/source/UTFGrid";
import TileSource from 'ol/source/Tile';
import { useActiveMapSubjectLayer } from '../../containers/hooks-generic';
import { IGenericSubjectMapLayer } from '../../actions/defs';
import { isRuntimeMap } from '../../utils/type-guards';
/**
 * Type guard: returns true when `arg` carries the minimum MapGuide-specific
 * configuration (a string agent URI and agent kind) required to treat it as
 * an {@link IMapGuideProviderState}.
 *
 * @param arg - candidate state object (any shape)
 * @returns true if `arg` looks like a MapGuide provider state
 */
export function isMapGuideProviderState(arg: any): arg is IMapGuideProviderState {
    // typeof always evaluates to a string, so strict equality is the idiomatic comparison
    return typeof (arg.agentUri) === 'string'
        && typeof (arg.agentKind) === 'string';
}
/**
 * React hook that assembles the complete MapGuide provider state from the
 * various viewer/container hooks. The returned object is fed to
 * MapGuideMapProviderContext.setProviderState() on each render.
 *
 * NOTE: per the Rules of Hooks, every hook below must be called
 * unconditionally and in a stable order on every render — do not wrap any of
 * these calls in conditionals or reorder them casually.
 */
function useMapGuideViewerState() {
    const activeTool = useViewerActiveTool();
    const view = useActiveMapView();
    const viewRotation = useViewerViewRotation();
    const viewRotationEnabled = useViewerViewRotationEnabled();
    const mapName = useActiveMapName();
    const locale = useViewerLocale();
    const externalBaseLayers = useActiveMapExternalBaseLayers(true);
    const cancelDigitizationKey = useConfiguredCancelDigitizationKey();
    const undoLastPointKey = useConfiguredUndoLastPointKey();
    const layers = useActiveMapLayers();
    const initialExternalLayers = useActiveMapInitialExternalLayers();
    // NOTE(review): dispatch is obtained but not used in this hook body or the
    // returned state — confirm whether it can be removed.
    const dispatch = useReduxDispatch();
    const busyWorkers = useViewerBusyCount();
    const appSettings = useCustomAppSettings();
    // ============== Generic ============== //
    const subject = useActiveMapSubjectLayer();
    // ============== MapGuide-specific ================== //
    const stateless = useViewerIsStateless();
    const imageFormat = useViewerImageFormat();
    const agentUri = useConfiguredAgentUri();
    const agentKind = useConfiguredAgentKind();
    const map = useActiveMapState();
    const pointSelectionBuffer = useViewerPointSelectionBuffer();
    const featureTooltipsEnabled = useViewerFeatureTooltipsEnabled();
    const manualFeatureTooltips = useConfiguredManualFeatureTooltips();
    const sessionId = useActiveMapSessionId();
    const selectionColor = useViewerSelectionColor();
    const selectionImageFormat = useViewerSelectionImageFormat();
    const selectableLayerNames = useActiveMapSelectableLayerNames();
    const layerTransparency = useActiveMapLayerTransparency();
    const showGroups = useActiveMapShowGroups();
    const hideGroups = useActiveMapHideGroups();
    const showLayers = useActiveMapShowLayers();
    const hideLayers = useActiveMapHideLayers();
    const activeSelectedFeature = useActiveMapActiveSelectedFeature();
    const activeSelectedFeatureColor = useViewerActiveFeatureSelectionColor();
    const selection = useActiveMapSelectionSet();
    let bgColor: string | undefined;
    if (map) {
        // Skips the first 2 hex digits of BackgroundColor before prefixing
        // with '#' — presumably dropping an alpha component from an AARRGGBB
        // value; TODO confirm against the RuntimeMap schema.
        bgColor = `#${map.BackgroundColor.substring(2)}`;
    }
    // Only compute selection XML for the active feature when there is both an
    // active feature and a selection with a feature set to derive it from.
    let activeSelectedFeatureXml;
    if (activeSelectedFeature && selection && selection.FeatureSet) {
        activeSelectedFeatureXml = getActiveSelectedFeatureXml(selection.FeatureSet, activeSelectedFeature);
    }
    // Prefer the MapGuide runtime map; fall back to a generic subject layer.
    let theMap = map ?? subject;
    let isReady = false;
    // Regardless of inferred readiness, the map/subject must be set
    if (!theMap) {
        isReady = false;
        theMap = {} as IGenericSubjectMapLayer;
    } else {
        if (subject && layerTransparency) {
            // Generic subject layer: ready once transparency state exists.
            isReady = true;
        } else if (agentUri && theMap && layerTransparency) {
            if (!stateless) {
                // Stateful MapGuide: additionally requires an established session.
                if (isRuntimeMap(theMap) && sessionId) {
                    isReady = true;
                }
            } else {
                isReady = true;
            }
        }
    }
    const nextState: IMapGuideProviderState & IMapProviderStateExtras = {
        stateless,
        activeTool,
        busyWorkers,
        view,
        viewRotation,
        viewRotationEnabled,
        mapName,
        locale,
        externalBaseLayers,
        cancelDigitizationKey,
        undoLastPointKey,
        initialExternalLayers,
        appSettings: appSettings ?? {},
        // ========== IMapProviderStateExtras ========== //
        isReady,
        bgColor,
        layers,
        // =========== MapGuide-specific ============== //
        imageFormat,
        agentUri,
        agentKind,
        map: theMap,
        pointSelectionBuffer,
        featureTooltipsEnabled,
        manualFeatureTooltips,
        sessionId,
        selectionColor,
        selectionImageFormat,
        selectableLayerNames,
        layerTransparency,
        showGroups: showGroups ?? [],
        hideGroups: hideGroups ?? [],
        showLayers: showLayers ?? [],
        hideLayers: hideLayers ?? [],
        activeSelectedFeatureXml: activeSelectedFeatureXml ?? STR_EMPTY,
        activeSelectedFeatureColor,
        selection
    };
    return nextState;
}
/**
 * MapGuide-specific provider state, extending the common map provider state
 * with agent/session configuration and selection-related settings.
 */
export interface IMapGuideProviderState extends IMapProviderState {
    /**
     * @since 0.14
     */
    stateless: boolean;
    /** Image format for map rendering */
    imageFormat: ImageFormat;
    /**
     * @since 0.14
     */
    appSettings: Dictionary<string>;
    /** mapagent endpoint URI (undefined until configured) */
    agentUri: string | undefined;
    agentKind: ClientKind;
    /** The active runtime map, or a generic subject layer when not a MapGuide map */
    map: RuntimeMap | IGenericSubjectMapLayer;
    /** Pixel buffer applied around point-click selections */
    pointSelectionBuffer: number;
    manualFeatureTooltips: boolean;
    featureTooltipsEnabled: boolean;
    /** MapGuide session id (undefined until a session is established) */
    sessionId: string | undefined;
    selectionColor: string;
    selectionImageFormat: ImageFormat;
    /** Names of layers that participate in selection queries */
    selectableLayerNames: string[];
    layerTransparency: LayerTransparencySet | undefined;
    showGroups: string[];
    hideGroups: string[];
    showLayers: string[];
    hideLayers: string[];
    /** Selection XML for the active selected feature (empty string when none) */
    activeSelectedFeatureXml: string;
    activeSelectedFeatureColor: string;
    /** Current selection set, or null when nothing is selected */
    selection: QueryMapFeaturesResponse | null;
}
export class MapGuideMapProviderContext extends BaseMapProviderContext<IMapGuideProviderState, MgLayerSetGroup> implements IMapGuideViewerSupport {
/**
* This is a throttled version of _refreshOnStateChange(). Call this on any
* modifications to pendingStateChanges
*
* @private
*/
private refreshOnStateChange: (mapName: string,
showGroups: string[] | undefined,
showLayers: string[] | undefined,
hideGroups: string[] | undefined,
hideLayers: string[] | undefined) => void;
// ============= MapGuide-specific private state ============== //
private _client: Client;
private _keepAlive: SessionKeepAlive | undefined;
private _featureTooltip: FeatureQueryTooltip | undefined;
private _utfGridTooltip: UTFGridTrackingTooltip | undefined;
private _wktFormat: WKTFormat;
// ============================================================= //
/**
 * @param mockMode - optional mock mode for MapGuide image/tile requests (debugging aid)
 */
constructor(public mockMode: MapGuideMockMode | undefined = undefined) {
    super();
    this._wktFormat = new WKTFormat();
    // Debounced (500ms) so bursts of show/hide group/layer changes collapse
    // into a single _refreshOnStateChange() call.
    this.refreshOnStateChange = debounce(this._refreshOnStateChange.bind(this), 500);
}
/**
 * Returns the React hook that assembles this provider's state
 * (see useMapGuideViewerState).
 * @override
 */
public getHookFunction(): () => IMapProviderState & IMapProviderStateExtras {
    return useMapGuideViewerState;
}
/** Sets the MapGuide mock mode (used by the layer set to fake image/tile requests). */
public setMockMode(mode: MapGuideMockMode | undefined): void {
    this.mockMode = mode;
}
/**
 * Hides every popup managed by the base provider and additionally disables
 * the MapGuide feature tooltip.
 * @override
 */
public hideAllPopups() {
    super.hideAllPopups();
    const tooltip = this._featureTooltip;
    if (tooltip) {
        tooltip.setEnabled(false);
    }
}
/**
 * Returns the current MapGuide mock mode (if any).
 * @override
 * @protected
 * @returns {(MapGuideMockMode | undefined)}
 * @memberof MapGuideMapProviderContext
 */
protected getMockMode(): MapGuideMockMode | undefined { return this.mockMode; }
/**
 * Default values for the MapGuide-specific portion of the provider state,
 * used before the first setProviderState() call.
 */
protected getInitialProviderState(): Omit<IMapGuideProviderState, keyof IMapProviderState> {
    return {
        stateless: false,
        imageFormat: "PNG8",
        agentUri: undefined,
        agentKind: "mapagent",
        map: {} as IGenericSubjectMapLayer,
        pointSelectionBuffer: 2,
        featureTooltipsEnabled: true,
        manualFeatureTooltips: false,
        sessionId: undefined,
        // Colors are hex strings without a leading '#'
        selectionColor: "0000FF",
        selectionImageFormat: "PNG8",
        selectableLayerNames: [],
        layerTransparency: {},
        appSettings: {},
        showGroups: [],
        hideGroups: [],
        showLayers: [],
        hideLayers: [],
        activeSelectedFeatureXml: STR_EMPTY,
        activeSelectedFeatureColor: "FF0000",
        selection: null
    }
}
/** Display name of this map provider implementation. */
public getProviderName(): string { return "MapGuide"; }
/**
 * This provider *is* the MapGuide support surface, so it returns itself
 * (generic providers would return undefined here).
 * @override
 * @returns {(IMapGuideViewerSupport | undefined)}
 * @memberof MapGuideMapProviderContext
 */
mapguideSupport(): IMapGuideViewerSupport | undefined {
    return this;
}
//#region IMapGuideViewerSupport
/** Returns the current selection set, or null when nothing is selected. */
getSelection(): QueryMapFeaturesResponse | null {
    return this._state.selection;
}
/** Builds selection XML from the given feature set, optionally restricted to the given layer ids. */
getSelectionXml(selection: FeatureSet, layerIds?: string[] | undefined): string {
    return buildSelectionXml(selection, layerIds);
}
/**
 * Returns the current MapGuide session id.
 *
 * NOTE(review): non-null assertion — the state type declares sessionId as
 * possibly undefined, so callers are presumably expected to only invoke this
 * after a session is established; confirm before relying on it.
 */
getSessionId(): string {
    return this._state.sessionId!;
}
/** Toggles feature tooltips by dispatching the corresponding redux action. */
setFeatureTooltipEnabled(enabled: boolean): void {
    this._comp?.onDispatch(setFeatureTooltipsEnabled(enabled));
}
//#endregion
//#region IMapViewerContextCallback
// Intentionally a no-op stub: invoked when the MapGuide session expires
// (wired up via SessionKeepAlive in attachToComponent and via onImageError).
private onSessionExpired() {
}
protected onProviderMapClick(px: [number, number]): void {
if (this._state.mapName && this._state.sessionId) {
if (this._state.manualFeatureTooltips && this._state.featureTooltipsEnabled) {
this.queryFeatureTooltip(px);
} else if (this._state.activeTool === ActiveMapTool.Select) {
const ptBuffer = this._state.pointSelectionBuffer ?? 2;
const box = this.getPointSelectionBox(px, ptBuffer);
const geom = fromExtent(box);
const options = this.buildDefaultQueryOptions(geom);
options.maxfeatures = 1;
this.sendSelectionQuery(options);
}
}
}
//#endregion
//#region Map Context
/**
 * Mouse-move handler: updates hover highlighting, tooltips and the mouse
 * coordinate display. Order matters: the context-menu check suppresses all
 * tooltip/coordinate updates while a context menu is open.
 * @override
 * @protected
 * @param {GenericEvent} e
 * @returns
 * @memberof BaseMapProviderContext
 */
protected onMouseMove(e: GenericEvent) {
    if (this._comp) {
        this.handleMouseTooltipMouseMove(e);
        if (this._state.activeTool == ActiveMapTool.Select) {
            this.handleHighlightHover(e);
        }
        // Bail before any tooltip/coordinate updates while a context menu is open
        if (this._comp.isContextMenuOpen()) {
            return;
        }
        // In manual mode, feature tooltips are only raised on click (see onProviderMapClick)
        if (!this._state.manualFeatureTooltips) {
            this.handleFeatureTooltipMouseMove(e);
        }
        if (this._utfGridTooltip) {
            this._utfGridTooltip.onMouseMove(e);
        }
        if (this._state.mapName) {
            this._comp.onDispatch?.(setMouseCoordinates(this._state.mapName, e.coordinate));
        }
    }
}
/** Raises a feature tooltip query at the given pixel (stateful mode only, tooltip must be enabled). */
private queryFeatureTooltip(pixel: [number, number]) {
    const tooltip = this._featureTooltip;
    if (this._state.stateless || !tooltip || !tooltip.isEnabled()) {
        return;
    }
    tooltip.raiseQueryFromPoint(pixel);
}
/** Forwards mouse moves to the feature tooltip (stateful mode only, tooltip must be enabled). */
private handleFeatureTooltipMouseMove(e: GenericEvent) {
    const tooltip = this._featureTooltip;
    if (this._state.stateless || !tooltip || !tooltip.isEnabled()) {
        return;
    }
    tooltip.onMouseMove(e);
}
/** Enables or disables the feature tooltip, if one has been created. */
private enableFeatureTooltips(enabled: boolean): void {
    const tooltip = this._featureTooltip;
    if (tooltip) {
        tooltip.setEnabled(enabled);
    }
}
/** Refreshes the layer set group for the named map (no-op if the group does not exist). */
private refreshMapInternal(name: string, mode: RefreshMode = RefreshMode.LayersOnly | RefreshMode.SelectionOnly): void {
    this.getLayerSetGroup(name)?.refreshMap(mode);
}
/**
 * Renders the active selected feature as a transient selection overlay image
 * by issuing a v4 QUERYMAPFEATURES request with an inline selection image.
 * On any failure (or when no feature XML is given) a blank image is shown
 * instead, so the overlay is always cleared/updated.
 */
private async showSelectedFeature(mapExtent: Bounds, size: Size, map: RuntimeMap, selectionColor: string, featureXml: string | undefined) {
    const sv = getSiteVersion(map);
    // This operation requires v4.0.0 QUERYMAPFEATURES, so bail if this ain't the right version
    if (!canUseQueryMapFeaturesV4(sv)) {
        return;
    }
    const layerSet = this.getLayerSetGroup(map.Name);
    try {
        if (featureXml) {
            const r = await this._client.queryMapFeatures_v4({
                mapname: map.Name,
                session: map.SessionId,
                selectionformat: "PNG",
                featurefilter: featureXml,
                selectioncolor: selectionColor,
                requestdata: 2, //Inline selection
                layerattributefilter: 0,
                persist: 0 //IMPORTANT: This is a transient selection
            });
            if (r.InlineSelectionImage) {
                // Inline image comes back base64-encoded; wrap as a data URI
                const dataUri = `data:${r.InlineSelectionImage.MimeType};base64,${r.InlineSelectionImage.Content}`;
                layerSet?.showActiveSelectedFeature(mapExtent, size, dataUri);
            } else {
                layerSet?.showActiveSelectedFeature(mapExtent, BLANK_SIZE, BLANK_GIF_DATA_URI);
            }
        } else {
            layerSet?.showActiveSelectedFeature(mapExtent, BLANK_SIZE, BLANK_GIF_DATA_URI);
        }
    } catch (e) {
        // Best-effort: swallow the error and fall back to a blank overlay
        layerSet?.showActiveSelectedFeature(mapExtent, BLANK_SIZE, BLANK_GIF_DATA_URI);
    }
}
//#endregion
/**
 * Applies pending show/hide group and layer changes to the named map's layer
 * set group.
 *
 * DO NOT CALL DIRECTLY — call this.refreshOnStateChange() instead, which is
 * the debounced wrapper around this method.
 * @private
 */
private _refreshOnStateChange(mapName: string,
    showGroups: string[] | undefined,
    showLayers: string[] | undefined,
    hideGroups: string[] | undefined,
    hideLayers: string[] | undefined) {
    // Nothing to do when no visibility change was requested at all
    if (!showGroups && !showLayers && !hideGroups && !hideLayers) {
        return;
    }
    const group = this.getLayerSetGroup(mapName);
    if (group instanceof MgLayerSetGroup) {
        group.setMapGuideMocking(this.getMockMode());
        group.update(showGroups, showLayers, hideGroups, hideLayers);
    }
}
/**
 * Called when a map image fails to load: pings the session keep-alive and, if
 * the failure turns out to be an expired session, raises onSessionExpired().
 * @override
 * @protected
 * @param {GenericEvent} e
 * @memberof MapGuideMapProviderContext
 */
protected onImageError(e: GenericEvent) {
    const keepAlive = this._keepAlive;
    if (keepAlive) {
        keepAlive.lastTry().catch(err => {
            if (isSessionExpiredError(err)) {
                this.onSessionExpired();
            }
        });
    }
}
/** Names of the layers eligible for selection queries (empty when unset). */
private getSelectableLayers(): string[] {
    const names = this._state.selectableLayerNames;
    return names != null ? names : [];
}
/**
 * Builds the baseline QUERYMAPFEATURES options for a selection query against
 * the current map/session.
 *
 * @param geom - selection geometry, either as WKT or as an OL geometry (converted to WKT)
 * @param reqQueryFeatures - requestdata bitmask (defaults to 1 = attributes)
 */
private buildDefaultQueryOptions(geom: Geometry | string, reqQueryFeatures: number = 1 /* Attributes */): IQueryMapFeaturesOptions {
    assertIsDefined(this._state.sessionId);
    assertIsDefined(this._state.mapName);
    const layerNames = this.getSelectableLayers();
    const wkt = typeof geom === 'string'
        ? geom
        : this._wktFormat.writeGeometry(geom);
    return {
        mapname: this._state.mapName,
        session: this._state.sessionId,
        geometry: wkt,
        requestdata: reqQueryFeatures,
        // Omit layernames entirely when there is no selectable-layer restriction
        layernames: layerNames.length ? layerNames.join(",") : undefined,
        persist: 1
    };
}
/**
 * Issues a selection query for all features intersecting the given polygon
 * extent. Requires a mounted component, an active map/session and a
 * configured selection color; silently does nothing otherwise.
 * @virtual
 * @protected
 * @param {Polygon} geom
 * @memberof MapGuideMapProviderContext
 */
protected selectFeaturesByExtent(geom: Polygon) {
    const { mapName, sessionId, selectionColor } = this._state;
    if (mapName && this._comp && sessionId && selectionColor) {
        this.sendSelectionQuery(this.buildDefaultQueryOptions(geom));
    }
}
/**
 * Opens a link clicked inside a feature tooltip via the task pane, appending
 * the standard MAPNAME/SESSION/LOCALE parameters when a map session exists.
 */
private onOpenTooltipLink = (url: string) => {
    const { mapName, sessionId, locale } = this._state;
    const targetUrl = (mapName && sessionId)
        ? ensureParameters(url, mapName, sessionId, locale)
        : url;
    this._comp?.onDispatch({
        type: ActionType.TASK_INVOKE_URL,
        payload: {
            url: targetUrl
        }
    });
};
/**
 * Creates, registers and initializes the MgLayerSetGroup for the map named in
 * `nextState`, wiring all of its callbacks back into this provider.
 * @override
 * @protected
 */
protected initLayerSet(nextState: IMapGuideProviderState): MgLayerSetGroup {
    const { mapName } = nextState;
    assertIsDefined(mapName);
    assertIsDefined(this._state.map);
    const layerSet = new MgLayerSetGroup(nextState, {
        getImageLoaders: () => super.getImageSourceLoaders(mapName),
        getTileLoaders: () => super.getTileSourceLoaders(mapName),
        getBaseTileLoaders: () => super.getBaseTileSourceLoaders(mapName),
        getMockMode: () => this.getMockMode(),
        incrementBusyWorker: () => this.incrementBusyWorker(),
        decrementBusyWorker: () => this.decrementBusyWorker(),
        addImageLoading: () => this._comp?.addImageLoading(),
        addImageLoaded: () => this._comp?.addImageLoaded(),
        onImageError: (e) => this.onImageError(e),
        onSessionExpired: () => this.onSessionExpired(),
        getSelectableLayers: () => this.getSelectableLayers(),
        getClient: () => this._client,
        isContextMenuOpen: () => this._comp?.isContextMenuOpen() ?? false,
        // NOTE(review): non-null assertions — these accessors presumably only
        // fire once agentUri/mapName/sessionId are established; confirm.
        getAgentUri: () => this._state.agentUri!,
        getAgentKind: () => this._state.agentKind,
        getMapName: () => this._state.mapName!,
        getSessionId: () => this._state.sessionId!,
        getLocale: () => this._state.locale,
        isFeatureTooltipEnabled: () => this.isFeatureTooltipEnabled(),
        getPointSelectionBox: (pt) => this.getPointSelectionBox(pt, this._state.pointSelectionBuffer),
        openTooltipLink: (url) => this.onOpenTooltipLink(url),
        addFeatureToHighlight: (feat, bAppend) => this.addFeatureToHighlight(feat, bAppend)
    });
    // Register the group, then apply the initial group/layer visibility state
    this._layerSetGroups[mapName] = layerSet;
    layerSet.update(nextState.showGroups, nextState.showLayers, nextState.hideGroups, nextState.hideLayers);
    return layerSet;
}
/**
 * True while the mouse hovers over either the feature tooltip or the select
 * tooltip.
 * @override
 * @readonly
 * @memberof MapGuideMapProviderContext
 */
public isMouseOverTooltip() {
    if (this._featureTooltip?.isMouseOver == true) {
        return true;
    }
    return this._selectTooltip?.isMouseOver == true;
}
/**
 * Tears down MapGuide-specific resources (session keep-alive, feature
 * tooltip) before delegating to the base-class detach.
 *
 * NOTE(review): _utfGridTooltip (created in attachToComponent) is not cleaned
 * up here — confirm whether UTFGridTrackingTooltip holds resources that need
 * disposal on detach.
 * @override
 */
public detachFromComponent(): void {
    this._keepAlive?.dispose();
    this._keepAlive = undefined;
    this._featureTooltip?.dispose();
    this._featureTooltip = undefined;
    super.detachFromComponent();
}
/**
 * Attaches the provider to the viewer component, then sets up the
 * MapGuide-specific pieces: session keep-alive, a UTFGrid tracking tooltip
 * (when a UTFGrid tile layer is present) and, for runtime maps, the feature
 * query tooltip.
 * @override
 * @param {HTMLElement} el
 * @param {IViewerComponent} comp
 * @memberof MapGuideMapProviderContext
 */
public attachToComponent(el: HTMLElement, comp: IViewerComponent): void {
    super.attachToComponent(el, comp);
    // Only ping the session when we actually have a MapGuide runtime map
    const bCheckSession = (this._state.map && isRuntimeMap(this._state.map)) ?? false;
    this._keepAlive = new SessionKeepAlive(() => this._state.sessionId!, this._client, this.onSessionExpired.bind(this), bCheckSession);
    // Look for a tile layer backed by a UTFGrid source anywhere in the layer tree
    const utfGridLayer = recursiveFindLayer(this._map!.getLayers(), oll => {
        if (oll instanceof olTileLayer) {
            const source = oll.getSource();
            if (source instanceof olUtfGridSource) {
                return true;
            }
        }
        return false;
    });
    if (utfGridLayer) {
        const source = (utfGridLayer as olTileLayer<TileSource>).getSource() as olUtfGridSource;
        this._utfGridTooltip = new UTFGridTrackingTooltip(this._map!, source, this._comp?.isContextMenuOpen ?? (() => false));
    }
    // Feature query tooltips only apply to MapGuide runtime maps
    const bEnable = (this._state.map && isRuntimeMap(this._state.map)) ?? false;
    if (bEnable) {
        this._featureTooltip = new FeatureQueryTooltip(this._map!, {
            incrementBusyWorker: () => this.incrementBusyWorker(),
            decrementBusyWorker: () => this.decrementBusyWorker(),
            onSessionExpired: () => this.onSessionExpired(),
            getAgentUri: () => this._state.agentUri!,
            getAgentKind: () => this._state.agentKind,
            getMapName: () => this._state.mapName!,
            getSessionId: () => this._state.sessionId!,
            getLocale: () => this._state.locale,
            getPointSelectionBox: (pt) => this.getPointSelectionBox(pt, this._state.pointSelectionBuffer),
            openTooltipLink: (url) => this.onOpenTooltipLink(url)
        });
        this._featureTooltip.setEnabled(this._state.featureTooltipsEnabled);
    }
}
/**
 * Refreshes the active map's layer set group.
 * @override
 * @param {RefreshMode} [mode=RefreshMode.LayersOnly | RefreshMode.SelectionOnly]
 * @memberof MapGuideMapProviderContext
 */
public refreshMap(mode: RefreshMode = RefreshMode.LayersOnly | RefreshMode.SelectionOnly): void {
    const { mapName } = this._state;
    assertIsDefined(mapName);
    this.refreshMapInternal(mapName, mode);
}
/**
 * Propagates the current mock mode to a layer set group just before it is
 * attached to the map.
 * @override
 * @protected
 * @param {MgLayerSetGroup} layerSetGroup
 * @memberof MapGuideMapProviderContext
 */
protected onBeforeAttachingLayerSetGroup(layerSetGroup: MgLayerSetGroup): void {
    layerSetGroup.setMapGuideMocking(this.getMockMode());
}
/**
 * Accepts the next provider state and reacts to the differences against the
 * current state (map switch, selection color, tooltips, base layers, layer
 * transparency, group/layer visibility, view, rotation, active selected
 * feature). The comparisons below are order-sensitive; the new state is only
 * committed at the very end.
 * @virtual
 * @param {IMapGuideProviderState} nextState
 * @memberof MapGuideMapProviderContext
 */
public setProviderState(nextState: IMapGuideProviderState): void {
    // If viewer not mounted yet, just accept the next state and bail
    if (!this._comp || !this._map) {
        if (nextState.agentUri) {
            this._client = new Client(nextState.agentUri, nextState.agentKind);
        }
        this._state = nextState;
        return;
    }
    //
    // React (no pun intended) to prop changes
    //
    if (nextState.imageFormat != this._state.imageFormat) {
        warn(`Unsupported change of props: imageFormat`);
    }
    // agentUri/agentKind changes are unsupported mid-session, but the client
    // is still rebuilt so subsequent requests use the new endpoint
    if (nextState.agentUri && nextState.agentUri != this._state.agentUri) {
        warn(`Unsupported change of props: agentUri`);
        this._client = new Client(nextState.agentUri, nextState.agentKind);
    }
    if (nextState.agentUri && nextState.agentKind != this._state.agentKind) {
        warn(`Unsupported change of props: agentKind`);
        this._client = new Client(nextState.agentUri, nextState.agentKind);
    }
    let bChangedView = false;
    //map
    if (nextState.mapName != this._state.mapName && this._map && this._ovMap) {
        this.hideAllPopups();
        // Swap layer set groups: detach the old map's group, attach the new one
        const oldLayerSet = this.getLayerSetGroup(this._state.mapName);
        const newLayerSet = this.ensureAndGetLayerSetGroup(nextState);
        //Clear any stray hover highlighted features as part of switch
        oldLayerSet?.clearHighlightedFeatures();
        newLayerSet.clearHighlightedFeatures();
        oldLayerSet?.detach(this._map, this._ovMap);
        newLayerSet.setMapGuideMocking(this.getMockMode());
        newLayerSet.attach(this._map, this._ovMap);
        //This would happen if we switch to a map we haven't visited yet
        if (!nextState.view) {
            newLayerSet.fitViewToExtent();
            bChangedView = true;
        } else {
            const layerSet = this.getLayerSetGroup(nextState.mapName);
            if (layerSet) {
                this.applyView(layerSet, nextState.view);
            }
        }
    }
    //selectionColor
    if (nextState.selectionColor && nextState.selectionColor != this._state.selectionColor) {
        const layerSet = this.getLayerSetGroup(nextState.mapName);
        layerSet?.updateSelectionColor(nextState.selectionColor);
    }
    //featureTooltipsEnabled
    if (nextState.featureTooltipsEnabled != this._state.featureTooltipsEnabled) {
        this.enableFeatureTooltips(nextState.featureTooltipsEnabled);
    }
    //externalBaseLayers
    if (nextState.externalBaseLayers != null &&
        nextState.externalBaseLayers.length > 0) {
        const layerSet = this.getLayerSetGroup(nextState.mapName);
        layerSet?.updateExternalBaseLayers(nextState.externalBaseLayers);
    }
    //Layer transparency
    if (nextState.layerTransparency && layerTransparencyChanged(nextState.layerTransparency, this._state.layerTransparency)) {
        const layerSet = this.getLayerSetGroup(nextState.mapName);
        layerSet?.updateTransparency(nextState.layerTransparency);
    }
    //Layer/Group visibility
    if (nextState.mapName && (areArraysDifferent(nextState.showGroups, this._state.showGroups) ||
        areArraysDifferent(nextState.hideGroups, this._state.hideGroups) ||
        areArraysDifferent(nextState.showLayers, this._state.showLayers) ||
        areArraysDifferent(nextState.hideLayers, this._state.hideLayers))) {
        // Debounced: bursts of visibility changes collapse into one refresh
        this.refreshOnStateChange(nextState.mapName, nextState.showGroups, nextState.showLayers, nextState.hideGroups, nextState.hideLayers);
    }
    //view
    let bViewChanged = false;
    if (!areViewsCloseToEqual(nextState.view, this._state.view)) {
        const vw = nextState.view;
        if (vw != null && !bChangedView) {
            const layerSet = this.ensureAndGetLayerSetGroup(nextState);
            this.applyView(layerSet, vw);
            bViewChanged = true;
        } else {
            debug(`Skipping zoomToView as next/current views are close enough or target view is null`);
        }
    }
    //overviewMapElement
    if (nextState.overviewMapElementSelector) {
        this.updateOverviewMapElement(nextState.overviewMapElementSelector);
    }
    //viewRotation
    if (this._state.viewRotation != nextState.viewRotation) {
        this.getOLView().setRotation(nextState.viewRotation);
    }
    //viewRotationEnabled
    if (this._state.viewRotationEnabled != nextState.viewRotationEnabled) {
        if (this._map) {
            // OL does not allow toggling enableRotation on a live view, so
            // rebuild the view with the same parameters and swap it in
            const view = this.getOLView();
            const newView = new View({
                enableRotation: nextState.viewRotationEnabled,
                rotation: nextState.viewRotation,
                center: view.getCenter(),
                resolution: view.getResolution(),
                resolutions: view.getResolutions(),
                minResolution: view.getMinResolution(),
                maxResolution: view.getMaxResolution(),
                maxZoom: view.getMaxZoom(),
                minZoom: view.getMinZoom(),
                //constrainRotation: view.constrainRotation(),
                projection: view.getProjection(),
                zoom: view.getZoom()
            });
            this._map.setView(newView);
        }
    }
    //activeSelectedFeatureXml
    const bDiffSelectionXml = this._state.activeSelectedFeatureXml != nextState.activeSelectedFeatureXml;
    const bRefreshActiveFeatureSelection = !strIsNullOrEmpty(nextState.activeSelectedFeatureXml) && bViewChanged;
    if (bDiffSelectionXml || bRefreshActiveFeatureSelection) {
        if (this._map && nextState.map) {
            const ms = this._map.getSize();
            if (ms && isRuntimeMap(nextState.map)) {
                const nmap = nextState.map;
                // We don't want to request for an updated feature selection while there are still rendering operations in progress
                // otherwise we may request a feature selection for a map whose internal view has been changed by the in-progress rendering
                // operation
                const checkReady = () => {
                    if (this._busyWorkers == 0) {
                        //console.log("Ready to request updated feature selection");
                        const view = this.getOLView();
                        const me: any = view.calculateExtent(ms);
                        const size = { w: ms[0], h: ms[1] };
                        this.showSelectedFeature(me, size, nmap, nextState.activeSelectedFeatureColor, nextState.activeSelectedFeatureXml);
                    } else {
                        //console.log("Still busy. Hold off on request for updated feature selection");
                        // Poll every 100ms until all busy workers have drained
                        window.setTimeout(checkReady, 100);
                    }
                };
                checkReady();
            }
        }
    }
    // Commit the new state only after all diffing against the old state is done
    this._state = nextState;
}
/**
 * Applies the given selection XML as the current selection via a
 * QUERYMAPFEATURES dispatch. Busy-worker count is incremented before the
 * request and decremented in both the success and error callbacks.
 *
 * NOTE(review): the comment below says layerattributefilter must be set to
 * the full bit mask when passing selection XML, but the built request never
 * sets layerattributefilter (unless supplied via queryOpts) — confirm whether
 * this is an oversight or handled downstream.
 *
 * @param xml - selection XML (empty string clears the selection)
 * @param queryOpts - optional overrides merged over the defaults
 * @param success - invoked with the query response on success
 * @param failure - invoked with the error on failure
 */
public setSelectionXml(xml: string, queryOpts?: Partial<IQueryMapFeaturesOptions>, success?: (res: QueryMapFeaturesResponse) => void, failure?: (err: Error) => void): void {
    if (!this._state.mapName || !this._comp || !this._state.sessionId || !this._state.selectionColor) {
        return;
    }
    //NOTE: A quirk of QUERYMAPFEATURES is that when passing in selection XML (instead of geometry),
    //you must set the layerattributefilter to the full bit mask otherwise certain features in the
    //selection XML will not be rendered because they may not pass the layer attribute filter
    const reqQueryFeatures = 1; //Attributes
    this.incrementBusyWorker();
    const mapName = this._state.mapName;
    const qOrig = {
        mapname: mapName,
        session: this._state.sessionId,
        persist: 1,
        featurefilter: xml,
        selectioncolor: this._state.selectionColor,
        selectionformat: this._state.selectionImageFormat ?? "PNG8",
        maxfeatures: -1,
        requestdata: reqQueryFeatures
    };
    // Caller-supplied options win over the defaults
    const queryOptions = { ...qOrig, ...queryOpts };
    const action = queryMapFeatures(mapName, {
        options: queryOptions,
        callback: res => {
            this.decrementBusyWorker();
            if (success) {
                success(res);
            }
        },
        errBack: err => {
            this.decrementBusyWorker();
            if (failure) {
                failure(err);
            }
        }
    });
    this._comp.onDispatch(action);
}
/** Clears the current selection by submitting an empty selection XML. */
public clearSelection(): void {
    this.setSelectionXml("");
}
/**
 * Issues a selection query for the given geometry, optionally overriding the
 * spatial operator (selection variant).
 */
public selectByGeometry(geom: Geometry, selectionMethod?: SelectionVariant): void {
    const queryOptions = this.buildDefaultQueryOptions(geom);
    if (selectionMethod) {
        queryOptions.selectionvariant = selectionMethod;
    }
    this.sendSelectionQuery(queryOptions);
}
/** Public entry point for arbitrary QUERYMAPFEATURES requests; delegates to sendSelectionQuery. */
public queryMapFeatures(options: IQueryMapFeaturesOptions, success?: (res: QueryMapFeaturesResponse) => void, failure?: (err: Error) => void): void {
    this.sendSelectionQuery(options, success, failure);
}
/** True when a feature tooltip exists and is currently enabled. */
public isFeatureTooltipEnabled(): boolean {
    const tooltip = this._featureTooltip;
    return tooltip ? tooltip.isEnabled() == true : false;
}
// ================= MapGuide-specific =================== //
/**
 * Dispatches a selection QUERYMAPFEATURES request, merging caller options
 * over sensible defaults. Busy-worker count is incremented before the request
 * and decremented in both the success and error callbacks.
 *
 * NOTE(review): the guard bails whenever an options object is supplied whose
 * layernames resolves to an empty list — buildDefaultQueryOptions() yields
 * layernames: undefined when there are no selectable layers, so such queries
 * are dropped. Confirm this is intended for callers (e.g. queryMapFeatures)
 * that might mean "query all layers".
 */
private sendSelectionQuery(queryOpts?: IQueryMapFeaturesOptions, success?: (res: QueryMapFeaturesResponse) => void, failure?: (err: Error) => void) {
    if (!this._state.mapName || !this._comp || !this._state.sessionId || !this._state.selectionColor || (queryOpts != null && (queryOpts.layernames ?? []).length == 0)) {
        return;
    }
    this.incrementBusyWorker();
    const mapName = this._state.mapName;
    const qOrig: Partial<IQueryMapFeaturesOptions> = {
        mapname: mapName,
        session: this._state.sessionId,
        persist: 1,
        selectionvariant: "INTERSECTS",
        selectioncolor: this._state.selectionColor,
        selectionformat: this._state.selectionImageFormat ?? "PNG8",
        maxfeatures: -1
    };
    // Caller-supplied options win over the defaults
    const queryOptions: IQueryMapFeaturesOptions = { ...qOrig, ...queryOpts } as IQueryMapFeaturesOptions;
    const action = queryMapFeatures(mapName, {
        options: queryOptions,
        callback: res => {
            this.decrementBusyWorker();
            if (success) {
                success(res);
            }
        },
        errBack: err => {
            this.decrementBusyWorker();
            if (failure) {
                failure(err);
            }
        }
    });
    this._comp.onDispatch(action);
}
}
import { TextBlock, StackPanel, AdvancedDynamicTexture, Image, Button, Rectangle, Control, Grid } from "@babylonjs/gui";
import { Scene, Sound, ParticleSystem, PostProcess, Effect, SceneSerializer } from "@babylonjs/core";
export class Hud {
private _scene: Scene;
//Game Timer
public time: number; //keep track to signal end game REAL TIME
private _prevTime: number = 0;
private _clockTime: TextBlock = null; //GAME TIME
private _startTime: number;
private _stopTimer: boolean;
private _sString = "00";
private _mString = 11;
private _lanternCnt: TextBlock;
//Animated UI sprites
private _sparklerLife: Image;
private _spark: Image;
//Timer handlers
public stopSpark: boolean;
private _handle;
private _sparkhandle;
//Pause toggle
public gamePaused: boolean;
//Quit game
public quit: boolean;
public transition: boolean = false;
//UI Elements
public pauseBtn: Button;
public fadeLevel: number;
private _playerUI;
private _pauseMenu;
private _controls;
public tutorial;
public hint;
//Mobile
public isMobile: boolean;
public jumpBtn: Button;
public dashBtn: Button;
public leftBtn: Button;
public rightBtn: Button;
public upBtn: Button;
public downBtn: Button;
//Sounds
public quitSfx: Sound;
private _sfx: Sound;
private _pause: Sound;
private _sparkWarningSfx: Sound;
constructor(scene: Scene) {
this._scene = scene;
const playerUI = AdvancedDynamicTexture.CreateFullscreenUI("UI");
this._playerUI = playerUI;
this._playerUI.idealHeight = 720;
const lanternCnt = new TextBlock();
lanternCnt.name = "lantern count";
lanternCnt.textVerticalAlignment = TextBlock.VERTICAL_ALIGNMENT_CENTER;
lanternCnt.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
lanternCnt.verticalAlignment = Control.VERTICAL_ALIGNMENT_TOP;
lanternCnt.fontSize = "22px";
lanternCnt.color = "white";
lanternCnt.text = "Lanterns: 1 / 22";
lanternCnt.top = "32px";
lanternCnt.left = "-64px";
lanternCnt.width = "25%";
lanternCnt.fontFamily = "Viga";
lanternCnt.resizeToFit = true;
playerUI.addControl(lanternCnt);
this._lanternCnt = lanternCnt;
const stackPanel = new StackPanel();
stackPanel.height = "100%";
stackPanel.width = "100%";
stackPanel.top = "14px";
stackPanel.verticalAlignment = 0;
playerUI.addControl(stackPanel);
//Game timer text
const clockTime = new TextBlock();
clockTime.name = "clock";
clockTime.textHorizontalAlignment = TextBlock.HORIZONTAL_ALIGNMENT_CENTER;
clockTime.fontSize = "48px";
clockTime.color = "white";
clockTime.text = "11:00";
clockTime.resizeToFit = true;
clockTime.height = "96px";
clockTime.width = "220px";
clockTime.fontFamily = "Viga";
stackPanel.addControl(clockTime);
this._clockTime = clockTime;
//sparkler bar animation
const sparklerLife = new Image("sparkLife", "./sprites/sparkLife.png");
sparklerLife.width = "54px";
sparklerLife.height = "162px";
sparklerLife.cellId = 0;
sparklerLife.cellHeight = 108;
sparklerLife.cellWidth = 36;
sparklerLife.sourceWidth = 36;
sparklerLife.sourceHeight = 108;
sparklerLife.horizontalAlignment = 0;
sparklerLife.verticalAlignment = 0;
sparklerLife.left = "14px";
sparklerLife.top = "14px";
playerUI.addControl(sparklerLife);
this._sparklerLife = sparklerLife;
const spark = new Image("spark", "./sprites/spark.png");
spark.width = "40px";
spark.height = "40px";
spark.cellId = 0;
spark.cellHeight = 20;
spark.cellWidth = 20;
spark.sourceWidth = 20;
spark.sourceHeight = 20;
spark.horizontalAlignment = 0;
spark.verticalAlignment = 0;
spark.left = "21px";
spark.top = "20px";
playerUI.addControl(spark);
this._spark = spark;
const pauseBtn = Button.CreateImageOnlyButton("pauseBtn", "./sprites/pauseBtn.png");
pauseBtn.width = "48px";
pauseBtn.height = "86px";
pauseBtn.thickness = 0;
pauseBtn.verticalAlignment = 0;
pauseBtn.horizontalAlignment = 1;
pauseBtn.top = "-16px";
playerUI.addControl(pauseBtn);
pauseBtn.zIndex = 10;
this.pauseBtn = pauseBtn;
//when the button is down, make pause menu visable and add control to it
pauseBtn.onPointerDownObservable.add(() => {
this._pauseMenu.isVisible = true;
playerUI.addControl(this._pauseMenu);
this.pauseBtn.isHitTestVisible = false;
//when game is paused, make sure that the next start time is the time it was when paused
this.gamePaused = true;
this._prevTime = this.time;
//--SOUNDS--
this._scene.getSoundByName("gameSong").pause();
this._pause.play(); //play pause music
});
//popup tutorials + hint
const tutorial = new Rectangle();
tutorial.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
tutorial.verticalAlignment = Control.VERTICAL_ALIGNMENT_TOP;
tutorial.top = "12%";
tutorial.left = "-1%";
tutorial.height = 0.2;
tutorial.width = 0.2;
tutorial.thickness = 0;
tutorial.alpha = 0.6;
this._playerUI.addControl(tutorial);
this.tutorial = tutorial;
//movement image, will disappear once you attempt all of the moves
let movementPC = new Image("pause", "sprites/tutorial.jpeg");
tutorial.addControl(movementPC);
const hint = new Rectangle();
hint.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
hint.verticalAlignment = Control.VERTICAL_ALIGNMENT_TOP;
hint.top = "14%";
hint.left = "-4%";
hint.height = 0.08;
hint.width = 0.08;
hint.thickness = 0;
hint.alpha = 0.6;
hint.isVisible = false;
this._playerUI.addControl(hint);
this.hint = hint;
//hint to the first lantern, will disappear once you light it
const lanternHint = new Image("lantern1", "sprites/arrowBtn.png");
lanternHint.rotation = Math.PI / 2;
lanternHint.stretch = Image.STRETCH_UNIFORM;
lanternHint.height = 0.8;
lanternHint.verticalAlignment = Control.VERTICAL_ALIGNMENT_TOP;
hint.addControl(lanternHint);
const moveHint = new TextBlock("move", "Move Right");
moveHint.color = "white";
moveHint.fontSize = "12px";
moveHint.fontFamily = "Viga";
moveHint.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM;
moveHint.textWrapping = true;
moveHint.resizeToFit = true;
hint.addControl(moveHint);
this._createPauseMenu();
this._createControlsMenu();
this._loadSounds(scene);
//Check if Mobile, add button controls
if (/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent)) {
this.isMobile = true; // tells inputController to track mobile inputs
//tutorial image
movementPC.isVisible = false;
let movementMobile = new Image("pause", "sprites/tutorialMobile.jpeg");
tutorial.addControl(movementMobile);
//--ACTION BUTTONS--
// container for action buttons (right side of screen)
const actionContainer = new Rectangle();
actionContainer.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
actionContainer.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM;
actionContainer.height = 0.4;
actionContainer.width = 0.2;
actionContainer.left = "-2%";
actionContainer.top = "-2%";
actionContainer.thickness = 0;
playerUI.addControl(actionContainer);
//grid for action button placement
const actionGrid = new Grid();
actionGrid.addColumnDefinition(.5);
actionGrid.addColumnDefinition(.5);
actionGrid.addRowDefinition(.5);
actionGrid.addRowDefinition(.5);
actionContainer.addControl(actionGrid);
const dashBtn = Button.CreateImageOnlyButton("dash", "./sprites/aBtn.png");
dashBtn.thickness = 0;
dashBtn.alpha = 0.8;
dashBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
this.dashBtn = dashBtn;
const jumpBtn = Button.CreateImageOnlyButton("jump", "./sprites/bBtn.png");
jumpBtn.thickness = 0;
jumpBtn.alpha = 0.8;
jumpBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_LEFT;
this.jumpBtn = jumpBtn;
actionGrid.addControl(dashBtn, 0, 1);
actionGrid.addControl(jumpBtn, 1, 0);
//--MOVEMENT BUTTONS--
// container for movement buttons (section left side of screen)
const moveContainer = new Rectangle();
moveContainer.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_LEFT;
moveContainer.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM;
moveContainer.height = 0.4;
moveContainer.width = 0.4;
moveContainer.left = "2%";
moveContainer.top = "-2%";
moveContainer.thickness = 0;
playerUI.addControl(moveContainer);
//grid for placement of arrow keys
const grid = new Grid();
grid.addColumnDefinition(.4);
grid.addColumnDefinition(.4);
grid.addColumnDefinition(.4);
grid.addRowDefinition(.5);
grid.addRowDefinition(.5);
moveContainer.addControl(grid);
const leftBtn = Button.CreateImageOnlyButton("left", "./sprites/arrowBtn.png");
leftBtn.thickness = 0;
leftBtn.rotation = -Math.PI / 2;
leftBtn.color = "white";
leftBtn.alpha = 0.8;
leftBtn.width = 0.8;
leftBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_LEFT;
this.leftBtn = leftBtn;
const rightBtn = Button.CreateImageOnlyButton("right", "./sprites/arrowBtn.png");
rightBtn.rotation = Math.PI / 2;
rightBtn.thickness = 0;
rightBtn.color = "white";
rightBtn.alpha = 0.8;
rightBtn.width = 0.8;
rightBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
this.rightBtn = rightBtn;
const upBtn = Button.CreateImageOnlyButton("up", "./sprites/arrowBtn.png");
upBtn.thickness = 0;
upBtn.alpha = 0.8;
upBtn.color = "white";
this.upBtn = upBtn;
const downBtn = Button.CreateImageOnlyButton("down", "./sprites/arrowBtn.png");
downBtn.thickness = 0;
downBtn.rotation = Math.PI;
downBtn.color = "white";
downBtn.alpha = 0.8;
this.downBtn = downBtn;
//arrange the buttons in the grid
grid.addControl(leftBtn, 1, 0);
grid.addControl(rightBtn, 1, 2);
grid.addControl(upBtn, 0, 1);
grid.addControl(downBtn, 1, 1);
}
}
public updateHud(): void {
if (!this._stopTimer && this._startTime != null) {
let curTime = Math.floor((new Date().getTime() - this._startTime) / 1000) + this._prevTime; // divide by 1000 to get seconds
this.time = curTime; //keeps track of the total time elapsed in seconds
this._clockTime.text = this._formatTime(curTime);
}
}
/** Update the lantern counter label, e.g. "Lanterns: 3 / 22". */
public updateLanternCount(numLanterns: number): void {
  this._lanternCnt.text = `Lanterns: ${numLanterns} / 22`;
}
//---- Game Timer ----
/** Start (or resume) the game clock from the current wall-clock time. */
public startTimer(): void {
  this._stopTimer = false;
  this._startTime = new Date().getTime();
}
/** Halt the game clock; updateHud() becomes a no-op until startTimer() is called again. */
public stopTimer(): void {
  this._stopTimer = true;
}
// Format elapsed real time as an in-game clock string, starting at 11:00 PM.
// Conversion rate: 4 real minutes == 1 in-game hour, so 4 real seconds == 1 in-game minute.
private _formatTime(time: number): string {
let minsPassed = Math.floor(time / 60); // whole real minutes elapsed
let secPassed = time % 240; // wraps back to 0 every 240s (i.e. every in-game hour)
// Only advance the displayed clock on 4-second boundaries (one in-game minute per tick);
// between ticks the previously stored _mString/_sString values are reused.
if (secPassed % 4 == 0) {
this._mString = Math.floor(minsPassed / 4) + 11; // in-game hour, offset from 11
this._sString = (secPassed / 4 < 10 ? "0" : "") + secPassed / 4; // zero-padded in-game minutes
}
// 11 o'clock is the starting PM hour; every later hour is displayed as AM.
let day = (this._mString == 11 ? " PM" : " AM");
return (this._mString + ":" + this._sString + day);
}
//---- Sparkler Timers ----
// Start (or restart) the sparkler: resets the life-bar and flame textures to
// frame 0, cancels timers from a previous run, and drives two animations:
//  - life bar: one of 10 frames every 2 seconds (~20s total lifetime)
//  - flame: cycles frames 0..5 every 185ms while the sparkler is alive
public startSparklerTimer(sparkler: ParticleSystem): void {
//reset the sparkler timers & animation frames
this.stopSpark = false;
this._sparklerLife.cellId = 0;
this._spark.cellId = 0;
// Cancel any intervals left over from a previous sparkler run.
if (this._handle) {
clearInterval(this._handle);
}
if (this._sparkhandle) {
clearInterval(this._sparkhandle);
}
//--SOUNDS--
// If the sparkler is restarted while the warning loop is playing, cellId never
// reaches 10 on the old run, so the sound must be stopped explicitly here.
this._sparkWarningSfx.stop();
//reset the sparkler (particle system and light)
if (sparkler != null) {
sparkler.start();
this._scene.getLightByName("sparklight").intensity = 35;
}
//sparkler life animation: every 2 seconds advance one of the 10 life-bar frames
this._handle = setInterval(() => {
if (!this.gamePaused) {
if (this._sparklerLife.cellId < 10) {
this._sparklerLife.cellId++;
}
// One bar of life left: start the looping warning sound.
if (this._sparklerLife.cellId == 9) {
this._sparkWarningSfx.play();
}
// Sparkler burnt out: flag it and stop this interval.
if (this._sparklerLife.cellId == 10) {
this.stopSpark = true;
clearInterval(this._handle);
//sfx
this._sparkWarningSfx.stop();
}
} else { // if the game is paused, also pause the warning SFX
this._sparkWarningSfx.pause();
}
}, 2000);
// Flame animation: cycle frames 0..5 while alive; reset and stop once life runs out.
this._sparkhandle = setInterval(() => {
if (!this.gamePaused) {
if (this._sparklerLife.cellId < 10 && this._spark.cellId < 5) {
this._spark.cellId++;
} else if (this._sparklerLife.cellId < 10 && this._spark.cellId >= 5) {
this._spark.cellId = 0;
}
else {
this._spark.cellId = 0;
clearInterval(this._sparkhandle);
}
}
}, 185);
}
//stop the sparkler, resets the texture
/** Put out the sparkler: mark it stopped and kill its particles and light. */
public stopSparklerTimer(sparkler: ParticleSystem): void {
  this.stopSpark = true;
  if (sparkler == null) return;
  sparkler.stop();
  this._scene.getLightByName("sparklight").intensity = 0;
}
//---- Pause Menu Popup ----
/**
 * Build the (initially hidden) pause menu with RESUME / CONTROLS / QUIT
 * buttons and register the "fade" post-process shader used when quitting.
 */
private _createPauseMenu(): void {
this.gamePaused = false;
const pauseMenu = new Rectangle();
pauseMenu.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_CENTER;
pauseMenu.verticalAlignment = Control.VERTICAL_ALIGNMENT_CENTER;
pauseMenu.height = 0.8;
pauseMenu.width = 0.5;
pauseMenu.thickness = 0;
pauseMenu.isVisible = false;
//background image
const image = new Image("pause", "sprites/pause.jpeg");
pauseMenu.addControl(image);
//stack panel for the buttons
const stackPanel = new StackPanel();
stackPanel.width = .83;
pauseMenu.addControl(stackPanel);
const resumeBtn = Button.CreateSimpleButton("resume", "RESUME");
resumeBtn.width = 0.18;
resumeBtn.height = "44px";
resumeBtn.color = "white";
resumeBtn.fontFamily = "Viga";
resumeBtn.paddingBottom = "14px";
resumeBtn.cornerRadius = 14;
resumeBtn.fontSize = "12px";
resumeBtn.textBlock.resizeToFit = true;
resumeBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_LEFT;
resumeBtn.verticalAlignment = Control.VERTICAL_ALIGNMENT_CENTER;
stackPanel.addControl(resumeBtn);
this._pauseMenu = pauseMenu;
//when the button is pressed, hide the menu, resume the game clock and sounds
resumeBtn.onPointerDownObservable.add(() => {
this._pauseMenu.isVisible = false;
this._playerUI.removeControl(pauseMenu);
this.pauseBtn.isHitTestVisible = true;
//game unpaused, our time is now reset
this.gamePaused = false;
this._startTime = new Date().getTime();
//--SOUNDS--
this._scene.getSoundByName("gameSong").play();
this._pause.stop();
// Resume the sparkler warning loop if the pause interrupted it.
if(this._sparkWarningSfx.isPaused) {
this._sparkWarningSfx.play();
}
this._sfx.play(); //play transition sound
});
const controlsBtn = Button.CreateSimpleButton("controls", "CONTROLS");
controlsBtn.width = 0.18;
controlsBtn.height = "44px";
controlsBtn.color = "white";
controlsBtn.fontFamily = "Viga";
controlsBtn.paddingBottom = "14px";
controlsBtn.cornerRadius = 14;
controlsBtn.fontSize = "12px";
// FIX: was `resumeBtn.textBlock.resizeToFit = true;` (copy-paste bug) —
// size this button's own label, not the resume button's.
controlsBtn.textBlock.resizeToFit = true;
controlsBtn.verticalAlignment = Control.VERTICAL_ALIGNMENT_CENTER;
controlsBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_LEFT;
stackPanel.addControl(controlsBtn);
//when the button is pressed, open the controls screen over the pause menu
controlsBtn.onPointerDownObservable.add(() => {
//open controls screen
this._controls.isVisible = true;
this._pauseMenu.isVisible = false;
//play transition sound
this._sfx.play();
});
const quitBtn = Button.CreateSimpleButton("quit", "QUIT");
quitBtn.width = 0.18;
quitBtn.height = "44px";
quitBtn.color = "white";
quitBtn.fontFamily = "Viga";
quitBtn.paddingBottom = "12px";
quitBtn.cornerRadius = 14;
quitBtn.fontSize = "12px";
// FIX: was `resumeBtn.textBlock.resizeToFit = true;` (copy-paste bug).
quitBtn.textBlock.resizeToFit = true;
quitBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_LEFT;
quitBtn.verticalAlignment = Control.VERTICAL_ALIGNMENT_CENTER;
stackPanel.addControl(quitBtn);
//set up transition effect: a full-screen fade driven by the `fadeLevel` uniform
Effect.RegisterShader("fade",
"precision highp float;" +
"varying vec2 vUV;" +
"uniform sampler2D textureSampler; " +
"uniform float fadeLevel; " +
"void main(void){" +
"vec4 baseColor = texture2D(textureSampler, vUV) * fadeLevel;" +
"baseColor.a = 1.0;" +
"gl_FragColor = baseColor;" +
"}");
this.fadeLevel = 1.0;
// Quit: attach the fade post-process to the camera and start the transition.
quitBtn.onPointerDownObservable.add(() => {
const postProcess = new PostProcess("Fade", "fade", ["fadeLevel"], null, 1.0, this._scene.getCameraByName("cam"));
postProcess.onApply = (effect) => {
effect.setFloat("fadeLevel", this.fadeLevel);
};
this.transition = true;
//--SOUNDS--
this.quitSfx.play();
if(this._pause.isPlaying){
this._pause.stop();
}
})
}
//---- Controls Menu Popup ----
/** Build the (initially hidden) controls help screen with a back button. */
private _createControlsMenu(): void {
  const controlsPanel = new Rectangle();
  controlsPanel.width = 0.5;
  controlsPanel.height = 0.8;
  controlsPanel.thickness = 0;
  controlsPanel.color = "white";
  controlsPanel.isVisible = false;
  controlsPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_CENTER;
  controlsPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_CENTER;
  this._playerUI.addControl(controlsPanel);
  this._controls = controlsPanel;

  //background image
  const background = new Image("controls", "sprites/controls.jpeg");
  controlsPanel.addControl(background);

  // Screen title, pinned to the top of the panel.
  const heading = new TextBlock("title", "CONTROLS");
  heading.resizeToFit = true;
  heading.fontFamily = "Viga";
  heading.fontSize = "32px";
  heading.top = "14px";
  heading.verticalAlignment = Control.VERTICAL_ALIGNMENT_TOP;
  controlsPanel.addControl(heading);

  // Back button in the top-right corner.
  const backBtn = Button.CreateImageOnlyButton("back", "./sprites/lanternbutton.jpeg");
  backBtn.width = "40px";
  backBtn.height = "40px";
  backBtn.top = "14px";
  backBtn.thickness = 0;
  backBtn.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT;
  backBtn.verticalAlignment = Control.VERTICAL_ALIGNMENT_TOP;
  controlsPanel.addControl(backBtn);

  // Pressing back returns to the pause menu and plays the transition sound.
  backBtn.onPointerDownObservable.add(() => {
    this._pauseMenu.isVisible = true;
    this._controls.isVisible = false;
    //play transition sound
    this._sfx.play();
  });
}
//load all sounds needed for game ui interactions
/** Create the UI sounds: pause music, menu select, quit, and the sparkler warning loop. */
private _loadSounds(scene: Scene): void {
  const noop = () => { /* ready-callback intentionally unused */ };
  this._pause = new Sound("pauseSong", "./sounds/Snowland.wav", scene, noop, { volume: 0.2 });
  this._sfx = new Sound("selection", "./sounds/vgmenuselect.wav", scene, noop);
  this.quitSfx = new Sound("quit", "./sounds/Retro Event UI 13.wav", scene, noop);
  // Slow, looping low-volume drip played when the sparkler is nearly burnt out.
  this._sparkWarningSfx = new Sound("sparkWarning", "./sounds/Retro Water Drop 01.wav", scene, noop, {
    loop: true,
    volume: 0.5,
    playbackRate: 0.6,
  });
}
} | the_stack |
import { constants } from '@scalecube/utils';
import { createMS, ASYNC_MODEL_TYPES } from '../../mocks/microserviceFactory';
import {
getInvalidMethodReferenceError,
SERVICES_IS_NOT_ARRAY,
SERVICE_IS_NOT_OBJECT,
getInvalidServiceReferenceError,
getServiceReferenceNotProvidedError,
} from '../../../src/helpers/constants';
import { getQualifier, constants as commonConstants } from '@scalecube/utils';
const {
getIncorrectMethodValueError,
getInvalidAsyncModelError,
DEFINITION_MISSING_METHODS,
getServiceNameInvalid,
SERVICE_NAME_NOT_PROVIDED,
getAsynModelNotProvidedError,
} = commonConstants;
// Validation suite for microservice creation: each scenario feeds a malformed
// service / definition / option into createMS and asserts that the exact
// error message from the shared constants module is surfaced.
describe('Test the creation of Microservice', () => {
  const baseServiceDefinition = {
    serviceName: 'GreetingService',
  };
  // #1 - definition has a method that is not contained in the reference.
  const scenario1service = {
    definition: {
      ...baseServiceDefinition,
      methods: {
        hello: {
          asyncModel: ASYNC_MODEL_TYPES.REQUEST_RESPONSE,
        },
      },
    },
    reference: {},
  };
  // #2 - definition has a method that is not contained in the reference.
  const scenario2service = {
    definition: {
      ...baseServiceDefinition,
      methods: {
        hello: {
          asyncModel: ASYNC_MODEL_TYPES.REQUEST_STREAM,
        },
      },
    },
    reference: {},
  };
  // Qualifier ("GreetingService/hello") expected inside the error messages below.
  const qualifier = getQualifier({ serviceName: baseServiceDefinition.serviceName, methodName: 'hello' });
  // Definition declares a method the reference object does not implement.
  test.each([
    {
      service: scenario1service,
      exceptionMsg: getInvalidMethodReferenceError(qualifier),
    },
    {
      service: scenario2service,
      exceptionMsg: getInvalidMethodReferenceError(qualifier),
    },
  ])(
    `
Scenario: Fail to register a service,
Given 'serviceData' with 'service' and 'exceptionMsg'
scenario |service |definition | reference |
#1. definition does not match reference |greetingService |hello : REQUEST_RESPONSE | |
#2. definition does not match reference |greetingService |hello : REQUEST_STREAM | |
When creating microservice with a given 'service'
Then an exception will occur.
`,
    (serviceData) => {
      const { service, exceptionMsg } = serviceData;
      expect.assertions(1);
      try {
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(exceptionMsg);
      }
    }
  );
  // Missing serviceName entirely.
  test(`
Scenario: Service name is not provided in service definition
Given Service definition without serviceName
When creating a microservice
Then exception will occur: serviceDefinition.serviceName is not defined
`, () => {
    expect.assertions(1);
    const service = {
      definition: {
        methods: {
          hello: {
            asyncModel: ASYNC_MODEL_TYPES.REQUEST_RESPONSE,
          },
        },
      },
      reference: {},
    };
    try {
      // @ts-ignore
      createMS({ services: [service] });
    } catch (error) {
      expect(error.message).toMatch(SERVICE_NAME_NOT_PROVIDED);
    }
  });
  // serviceName present but not a non-empty string.
  // @ts-ignore
  test.each(['', [], {}, false, true, 10, null, Symbol()])(
    `
Scenario: serviceDefinition with invalid 'serviceName' value
Given invalid 'serviceName' value
When creating a microservice
And serviceDefinition has invalid 'serviceName' values
|definition | value
|empty string | ''
|array | []
|object | {}
|boolean | false
|boolean | true
|number | 10
|null | null
|symbol | Symbol()
Then invalid service error will occur
`,
    (serviceName) => {
      const service = {
        definition: {
          serviceName,
        },
        reference: {},
      };
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(getServiceNameInvalid(serviceName));
      }
    }
  );
  // Definition lacks the 'methods' map.
  test(`
Scenario: serviceDefinition without 'methods' key
Given a serviceDefinition without a 'methods' key
When creating a microservice
Then exception will occur: Definition missing methods:object`, () => {
    expect.assertions(1);
    const service = {
      definition: {
        serviceName: 'service',
        // no methods key
      },
      reference: {},
    };
    try {
      // @ts-ignore
      createMS({ services: [service] });
    } catch (e) {
      expect(e.message).toBe(DEFINITION_MISSING_METHODS);
    }
  });
  // A method entry must be an object holding an asyncModel.
  // @ts-ignore
  test.each(['string', -100, 10, 0, 1, 10.1, [], {}, undefined, null, Symbol('10')])(
    `
Scenario: serviceDefinition with invalid 'method' value
Given invalid 'method' value
When creating a microservice
And serviceDefinition has invalid 'method' values
|definition | value
|string | 'string'
|negative number | -100
|number | 10
|false convert | 0
|true convert | 1
|double | 10.1
|array | []
|object | {}
|undefined | undefined
|null | null
|symbol | Symbol('10')
Then invalid service error will occur
`,
    (methodValue) => {
      const service = {
        definition: {
          ...baseServiceDefinition,
          methods: {
            hello: methodValue,
          },
        },
      };
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(getIncorrectMethodValueError(qualifier));
      }
    }
  );
  // Method object present but without an asyncModel field.
  test(`
Scenario: Async model not provided
Given Service definition
And no async model provided for hello method
When creating a microservice
Then exception will occur: Async model is not provided in service definition for service/method.
`, () => {
    expect.assertions(1);
    const service = {
      definition: {
        ...baseServiceDefinition,
        methods: {
          hello: {
            noAsyncModel: 'xxx',
          },
        },
      },
      reference: {},
    };
    try {
      // @ts-ignore
      createMS({ services: [service] });
    } catch (error) {
      expect(error.message).toMatch(getAsynModelNotProvidedError(qualifier));
    }
  });
  // asyncModel present but not one of the known ASYNC_MODEL_TYPES values.
  // @ts-ignore
  test.each(['string', -100, 0, 1, 10.1, [], {}, null, Symbol()])(
    `
Scenario: serviceDefinition with invalid 'asyncModel' values
Given invalid 'asyncModel' value
When creating a microservice
And serviceDefinition has invalid 'asyncModel' values
|definition | value
|string | 'string'
|negative number | -100
|false convert | 0
|true convert | 1
|double | 10.1
|array | []
|object | {}
|null | null
|Symbol | Symbol()
Then exeption will occur: Invalid async model in service definition for ${qualifier}
`,
    (asyncModel) => {
      const service = {
        definition: {
          ...baseServiceDefinition,
          methods: {
            hello: {
              asyncModel,
            },
          },
        },
        reference: {},
      };
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(getInvalidAsyncModelError(qualifier));
      }
    }
  );
  // Reference object omitted entirely.
  test(`
Scenario: Service reference not provided
Given Service reference is not provided
And a definition with 'hello service'
When creating a microservice
Then exception will occur: service reference is not defined.
`, () => {
    expect.assertions(1);
    const service = {
      definition: {
        ...baseServiceDefinition,
        methods: {
          hello: {
            asyncModel: ASYNC_MODEL_TYPES.REQUEST_RESPONSE,
          },
        },
      },
      // no reference
    };
    try {
      // @ts-ignore
      createMS({ services: [service] });
    } catch (error) {
      expect(error.message).toMatch(getServiceReferenceNotProvidedError(baseServiceDefinition.serviceName));
    }
  });
  // Reference must be a plain object (a bare function or primitive is invalid).
  test.each([() => 'test', () => 123456, () => true, () => [], null, 'hello', 3, true, false, []])(
    `
Scenario: Testing reference format
type | value |
function | ()=>'test' |
function | ()=>123456 |
function | ()=>true |
null | const hello = null |
string | const hello = 'hello' |
number | const hello = 3 |
boolean | const hello = true |
boolean | const hello = false |
array | const hello = [] |
Given a reference for 'hello service' of type 'value'
And a definition with 'hello service'
When creating a microservice
Then exception will occur: definition has a method but the reference is not a function.
`,
    (helloService) => {
      const service = {
        definition: {
          ...baseServiceDefinition,
          methods: {
            hello: {
              asyncModel: ASYNC_MODEL_TYPES.REQUEST_RESPONSE,
            },
          },
        },
        reference: helloService,
      };
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(getInvalidServiceReferenceError(baseServiceDefinition.serviceName));
      }
    }
  );
  // Each method on the reference must itself be a function.
  test.each([null, 'hello', 3, true, false, []])(
    `
Scenario: Testing service reference method format
type | value |
null | const hello = null |
undefined |const hello = undefined |
string | const hello = 'hello' |
number | const hello = 3 |
boolean | const hello = true |
boolean | const hello = false |
array | const hello = [] |
Given a reference for 'hello service' with hello method of type 'value'
And a definition with 'hello service'
When creating a microservice
Then exception will occur: definition has a method but the reference is not a function.
`,
    (hello) => {
      const service = {
        definition: {
          ...baseServiceDefinition,
          methods: {
            hello: {
              asyncModel: ASYNC_MODEL_TYPES.REQUEST_RESPONSE,
            },
          },
        },
        reference: { hello },
      };
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(getInvalidMethodReferenceError(qualifier));
      }
    }
  );
  // 'services' option itself must be an array.
  // @ts-ignore
  test.each(['test', '', 0, 1, true, false, -100, 10, 10.1, {}, null])(
    `
Scenario: services not of type array
Given a 'service'
And using it as 'service'.
When creating a Microservice from the 'service'
Then exception will occur.
|definition | value
|string | 'test'
|empty string | ''
|number false | 0
|number true | 1
|boolean | true
|boolean | false
|negative number | -100
|positive number | 10
|double | 10.1
|object | {}
|null | null
Then invalid service error will occur
`,
    (services) => {
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services });
      } catch (error) {
        expect(error.message).toMatch(SERVICES_IS_NOT_ARRAY);
      }
    }
  );
  // Individual service entries must be objects.
  // @ts-ignore
  test.each([null, undefined])(
    `
Scenario: service is null or undefined
Given a 'service'
And using it as 'service'.
When creating a Microservice from the 'service'
Then exception will occur.
|service | value
|null | null
|undefined | undefined
Then invalid service error will occur
`,
    (service) => {
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [service] });
      } catch (error) {
        expect(error.message).toMatch(SERVICE_IS_NOT_OBJECT);
      }
    }
  );
  // Missing options object falls back to defaults instead of throwing.
  // @ts-ignore
  test.each([null, undefined])(
    `
Scenario: microservice options is null or undefined
Given a 'microserviceOptions'
And using it as 'microserviceOptions'.
When creating a Microservice from the 'microserviceOptions'
Then no error thrown and microservice instantiates with default options
|microserviceOptions | value
|null | null
|undefined | undefined
Then invalid service error will occur
`,
    (microserviceOptions) => {
      expect.assertions(1);
      expect(() => createMS(microserviceOptions)).not.toThrow();
    }
  );
  // seedAddress must be a non-empty string when provided.
  // @ts-ignore
  test.each([[], false, true, 10, null, Symbol()])(
    `
Scenario: microservise option with invalid seedAddress value
Given a 'microserviceOptions'
And seedAddress has invalid value
|definition | value
|array | []
|boolean | false
|boolean | true
|number | 10
|null | null
|symbol | Symbol()
When creating a microservice
Then exception will occur: seed address should be non empty string
`,
    (seedAddress) => {
      expect.assertions(1);
      try {
        // @ts-ignore
        createMS({ services: [], seedAddress });
      } catch (error) {
        expect(error.message).toMatch(constants.NOT_VALID_ADDRESS);
      }
    }
  );
});
// TODO: add a test scenario covering failures that are currently swallowed silently during service registration
import { XPortalService, XPortalOverlayRef, XPortalConnectedPosition } from '@ng-nest/ui/portal';
import { Subject } from 'rxjs';
import {
Component,
OnInit,
ViewEncapsulation,
ChangeDetectionStrategy,
ChangeDetectorRef,
Renderer2,
ElementRef,
OnChanges,
ViewContainerRef,
ViewChild,
SimpleChanges
} from '@angular/core';
import { XDatePickerModelType, XDateRangePrefix, XDateRangeProperty } from './date-picker.property';
import {
XIsEmpty,
XIsDate,
XIsNumber,
XIsChange,
XCorner,
XClearClass,
XIsString,
XConfigService,
XIsUndefined,
XBoolean
} from '@ng-nest/ui/core';
import { XInputComponent, XInputGroupComponent } from '@ng-nest/ui/input';
import { DatePipe } from '@angular/common';
import { Overlay, OverlayConfig, FlexibleConnectedPositionStrategy, ConnectedOverlayPositionChange } from '@angular/cdk/overlay';
import { takeUntil } from 'rxjs/operators';
import { XValueAccessor } from '@ng-nest/ui/base-form';
import { XDateRangePortalComponent } from './date-range-portal.component';
@Component({
selector: `${XDateRangePrefix}`,
templateUrl: './date-range.component.html',
styleUrls: ['./date-range.component.scss'],
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
providers: [XValueAccessor(XDateRangeComponent), DatePipe]
})
export class XDateRangeComponent extends XDateRangeProperty implements OnInit, OnChanges {
@ViewChild('dateRange', { static: true }) dateRange!: ElementRef;
@ViewChild('inputGroup', { static: true }) inputGroup!: XInputGroupComponent;
@ViewChild('inputStartCom', { static: true }) inputStartCom!: XInputComponent;
@ViewChild('inputEndCom', { static: true }) inputEndCom!: XInputComponent;
modelType: XDatePickerModelType = 'date';
numberValue!: number[];
isInput = false;
get getRequired() {
return this.required && XIsEmpty(this.value);
}
override writeValue(value: any) {
if (XIsUndefined(value)) value = [];
if (value.length > 0) {
if (XIsDate(value[0])) {
this.modelType = 'date';
this.numberValue = value.getTime();
} else if (XIsNumber(value[0])) {
this.modelType = 'number';
this.numberValue = value;
} else if (XIsString(value[0])) {
this.modelType = 'string';
const valueTime = value.map((x: string) => new Date(x).getTime());
this.numberValue = !isNaN(valueTime[0]) ? valueTime : [];
}
}
this.value = value;
this.setDisplayValue(this.numberValue);
this.valueChange.next(this.numberValue);
this.cdr.detectChanges();
}
enter: boolean = false;
inputClearable: boolean = false;
animating = false;
displayValue: string[] = [];
portal!: XPortalOverlayRef<XDateRangePortalComponent>;
icon: string = 'fto-calendar';
box!: DOMRect;
protalHeight!: number;
maxNodes: number = 6;
protalTobottom: boolean = true;
valueChange: Subject<any> = new Subject();
dataChange: Subject<any> = new Subject();
positionChange: Subject<any> = new Subject();
inputActiveChange: Subject<string> = new Subject();
closeSubject: Subject<void> = new Subject();
startDisplay: string | number = '';
endDisplay: string | number = '';
startActive: XBoolean = false;
endActive: XBoolean = false;
activeType?: 'start' | 'end';
private _unSubject = new Subject<void>();
constructor(
public renderer: Renderer2,
public configService: XConfigService,
private cdr: ChangeDetectorRef,
private portalService: XPortalService,
private viewContainerRef: ViewContainerRef,
private datePipe: DatePipe,
private overlay: Overlay
) {
super();
}
ngOnInit() {
this.setFlex(this.dateRange.nativeElement, this.renderer, this.justify, this.align, this.direction);
this.setFormat();
this.setClassMap();
this.setSubject();
}
ngAfterViewInit() {
this.setPortal();
}
ngOnChanges(changes: SimpleChanges): void {
const { type } = changes;
if (XIsChange(type)) {
this.setFormat();
this.setDisplayValue(this.numberValue);
}
}
ngOnDestroy(): void {
this._unSubject.next();
this._unSubject.unsubscribe();
}
setSubject() {
this.closeSubject.pipe(takeUntil(this._unSubject)).subscribe(() => {
this.closePortal();
});
}
setFormat() {
if (this.format !== 'yyyy-MM-dd') return;
if (this.type === 'date') {
this.format = 'yyyy-MM-dd';
} else if (this.type === 'year') {
this.format = 'yyyy';
} else if (this.type === 'month') {
this.format = 'yyyy-MM';
} else if (this.type === 'date-time') {
this.format = 'yyyy-MM-dd HH:mm:ss';
} else if (this.type === 'date-hour') {
this.format = 'yyyy-MM-dd HH';
} else if (this.type === 'date-minute') {
this.format = 'yyyy-MM-dd HH:mm';
}
}
menter() {
if (this.disabled) return;
this.enter = true;
if (!XIsEmpty(this.numberValue)) {
this.icon = '';
this.inputClearable = true;
this.cdr.detectChanges();
}
}
mleave() {
if (this.disabled) return;
this.enter = false;
if (this.inputClearable) {
this.icon = 'fto-calendar';
this.inputClearable = false;
this.cdr.detectChanges();
}
}
clearEmit() {
this.value = [];
this.numberValue = [];
this.displayValue = [];
this.mleave();
this.valueChange.next(this.numberValue);
this.modelChange();
}
modelChange() {
if (this.onChange) {
this.onChange(this.getValue());
}
}
getValue() {
return this.modelType === 'date'
? this.numberValue.map((x) => new Date(x))
: this.modelType === 'string'
? this.numberValue.map((x) => this.datePipe.transform(x, this.format))
: this.numberValue;
}
portalAttached() {
return this.portal?.overlayRef?.hasAttached();
}
closePortal() {
if (this.portalAttached()) {
this.portal?.overlayRef?.detach();
this.active = false;
this.startActive = false;
this.endActive = false;
this.cdr.detectChanges();
return true;
}
return false;
}
destroyPortal() {
this.portal?.overlayRef?.dispose();
}
showPortal($event: Event, type?: 'start' | 'end') {
type && $event.stopPropagation();
if (this.disabled || this.animating) return;
this.startActive = type === 'start';
this.endActive = type === 'end';
this.activeType = type;
this.activeType && this.inputActiveChange.next(this.activeType);
if (this.active) return;
this.active = true;
const config: OverlayConfig = {
backdropClass: '',
positionStrategy: this.setPlacement(),
scrollStrategy: this.overlay.scrollStrategies.reposition()
};
this.setPosition(config);
this.portal = this.portalService.attach({
content: XDateRangePortalComponent,
viewContainerRef: this.viewContainerRef,
overlayConfig: config
});
this.portal.overlayRef
?.outsidePointerEvents()
.pipe(takeUntil(this._unSubject))
.subscribe((event: MouseEvent) => {
const clickTarget = event.target as HTMLElement;
if (clickTarget !== this.inputStartCom.inputRef.nativeElement && clickTarget !== this.inputEndCom.inputRef.nativeElement) {
this.setDisplayValue(this.numberValue);
this.closeSubject.next();
}
});
this.setInstance();
}
setPosition(config: OverlayConfig) {
let position = config.positionStrategy as FlexibleConnectedPositionStrategy;
position.positionChanges.pipe(takeUntil(this._unSubject)).subscribe((pos: ConnectedOverlayPositionChange) => {
const place = XPortalConnectedPosition.get(pos.connectionPair) as XCorner;
place !== this.placement && this.positionChange.next(place);
});
}
setInstance() {
let componentRef = this.portal?.componentRef;
if (!componentRef) return;
Object.assign(componentRef.instance, {
type: this.type,
value: this.numberValue,
placement: this.placement,
preset: this.preset,
valueChange: this.valueChange,
positionChange: this.positionChange,
inputActiveChange: this.inputActiveChange,
activeType: this.activeType,
closePortal: () => this.closeSubject.next(),
destroyPortal: () => this.destroyPortal(),
nodeEmit: (dates: Date[], sure = true) => this.onNodeClick(dates, sure),
startNodeEmit: (node: Date) => this.startNodeClick(node),
endNodeEmit: (node: Date) => this.endNodeClick(node),
animating: (ing: boolean) => (this.animating = ing)
});
componentRef.changeDetectorRef.detectChanges();
}
startNodeClick(node: Date) {
this.startDisplay = this.datePipe.transform(node, this.format) as string;
this.startActive = false;
this.endActive = true;
this.inputEndCom.inputFocus('after');
this.cdr.detectChanges();
}
endNodeClick(node: Date) {
this.endDisplay = this.datePipe.transform(node, this.format) as string;
this.cdr.detectChanges();
}
onNodeClick(dates: Date[], sure = true) {
this.isInput = false;
if (sure) {
this.numberValue = dates.map((x) => x.getTime());
this.value = this.getValue();
this.setDisplayValue(this.numberValue);
this.closeSubject.next();
this.modelChange();
this.nodeEmit.emit(this.numberValue);
} else {
this.setDisplayValue(dates.map((x) => x.getTime()));
this.cdr.markForCheck();
}
}
onInput() {
this.isInput = true;
}
setDisplayValue(dateNumber: number[]) {
// if (this.isInput && isNaN(this.startDisplay) && !isNaN(Date.parse(this.displayValue))) {
// this.displayValue = this.datePipe.transform(this.displayValue, this.format);
// this.numberValue = new Date(this.displayValue).getTime();
// this.value = this.getValue();
// this.modelChange();
// this.isInput = false;
// } else {
if (!dateNumber) return;
this.displayValue = dateNumber.map((x) => this.datePipe.transform(x, this.format)) as string[];
// }
}
setPlacement() {
return this.portalService.setPlacement({
elementRef: this.inputGroup.elementRef,
placement: [this.placement as XCorner, 'bottom-start', 'bottom-end', 'top-start', 'top-end'],
transformOriginOn: 'x-date-range-portal'
});
}
setPortal() {
this.portalAttached() && this.portal?.overlayRef?.updatePositionStrategy(this.setPlacement());
}
setClassMap() {
XClearClass(this.labelMap);
this.labelMap[`x-text-align-${this.labelAlign}`] = this.labelAlign ? true : false;
}
formControlChanges() {
this.ngOnInit();
this.cdr.detectChanges();
}
} | the_stack |
import {
assertEquals,
assertMatch,
assertThrows,
} from "https://deno.land/std@0.53.0/testing/asserts.ts";
import { DB, Status } from "./mod.ts";
import { SqliteError } from "./src/error.ts";
// file used for fs io tests
const testDbFile = "test.db";
// Snapshot read/write permission state once at module load (top-level await)
// so fs-dependent tests can skip themselves when the required --allow-read /
// --allow-write flags were not granted.
const permRead =
  (await Deno.permissions.query({ name: "read", path: "./" })).state ===
    "granted";
const permWrite =
  (await Deno.permissions.query({ name: "write", path: "./" })).state ===
    "granted";
/**
 * Best-effort removal of a test database file and its SQLite rollback journal.
 * All errors (e.g. file does not exist) are deliberately swallowed.
 */
async function removeTestDb(name: string) {
  try {
    await Deno.remove(name);
  } catch { /* no op */ }
  try {
    // FIX: remove the journal belonging to `name`, not to the module-level
    // `testDbFile` constant, so cleanup works for any database path passed in.
    await Deno.remove(`${name}-journal`);
  } catch { /* no op */ }
}
/** Return true iff a file exists at `path` (any stat failure counts as absent). */
async function dbExists(path: string) {
  return Deno.stat(path).then(() => true, () => false);
}
// Smoke test mirroring the current README: open an in-memory DB,
// insert one row, read it back, close.
Deno.test("README example", function () {
  // Open a database (in-memory, so no file permissions are required)
  const db = new DB();
  db.query(
    "CREATE TABLE IF NOT EXISTS people (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)",
  );
  const candidates = ["Peter Parker", "Clark Kent", "Bruce Wane"];
  const name = candidates[Math.floor(Math.random() * 3)];
  // Run a simple query
  db.query("INSERT INTO people (name) VALUES (?)", [name]);
  // Drain the result set without logging anything.
  for (const [_name] of db.query("SELECT name FROM people")) continue;
  db.close();
});
// Regression smoke test kept from an earlier README revision: inserts random
// users and reads them back, exercising SELECT with a bound boolean filter.
Deno.test("old README example", function () {
  const db = new DB();
  const first = ["Bruce", "Clark", "Peter"];
  const last = ["Wane", "Kent", "Parker"];
  db.query(
    "CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT, email TEXT, subscribed INTEGER)",
  );
  for (let i = 0; i < 100; i++) {
    const name = `${first[Math.floor(Math.random() * first.length)]} ${
      last[
        Math.floor(
          Math.random() * last.length,
        )
      ]
    }`;
    const email = `${name.replace(" ", "-")}@deno.land`;
    // booleans are stored in an INTEGER column (SQLite has no bool type)
    const subscribed = Math.random() > 0.5 ? true : false;
    db.query("INSERT INTO users (name, email, subscribed) VALUES (?, ?, ?)", [
      name,
      email,
      subscribed,
    ]);
  }
  for (
    const [
      name,
      email,
    ] of db.query<[string, string]>(
      "SELECT name, email FROM users WHERE subscribed = ? LIMIT 100",
      [true],
    )
  ) {
    assertMatch(name, /(Bruce|Clark|Peter) (Wane|Kent|Parker)/);
    assertEquals(email, `${name.replace(" ", "-")}@deno.land`);
  }
  const res = db.query("SELECT email FROM users WHERE name LIKE ?", [
    "Robert Parr",
  ]);
  assertEquals(res, []);
  // Omit write tests, as we don't want to require ---allow-write
  // and have a write test, which checks for the flag and skips itself.
  const subscribers = db.query(
    "SELECT name, email FROM users WHERE subscribed = ?",
    [true],
  );
  for (const [_name, _email] of subscribers) {
    if (Math.random() > 0.5) continue;
    break;
  }
  db.close();
});
// Round-trips every supported JS value type through bound query parameters
// and checks the values (and their SQLite coercions) come back intact.
Deno.test("bind values", function () {
  const db = new DB();
  let vals, rows;
  // string
  db.query(
    "CREATE TABLE strings (id INTEGER PRIMARY KEY AUTOINCREMENT, val TEXT)",
  );
  vals = ["Hello World!", "I love Deno.", "Täst strüng..."];
  for (const val of vals) {
    db.query("INSERT INTO strings (val) VALUES (?)", [val]);
  }
  rows = [...db.query("SELECT val FROM strings")].map(([v]) => v);
  assertEquals(rows.length, vals.length);
  assertEquals(rows, vals);
  // integer (includes values above 2^32 to exercise 64-bit handling)
  db.query(
    "CREATE TABLE ints (id INTEGER PRIMARY KEY AUTOINCREMENT, val INTEGER)",
  );
  vals = [42, 1, 2, 3, 4, 3453246, 4536787093, 45536787093];
  for (const val of vals) db.query("INSERT INTO ints (val) VALUES (?)", [val]);
  rows = [...db.query("SELECT val FROM ints")].map(([v]) => v);
  assertEquals(rows.length, vals.length);
  assertEquals(rows, vals);
  // float
  db.query(
    "CREATE TABLE floats (id INTEGER PRIMARY KEY AUTOINCREMENT, val REAL)",
  );
  vals = [42.1, 1.235, 2.999, 1 / 3, 4.2345, 345.3246, 4536787.953e-8];
  for (const val of vals) {
    db.query("INSERT INTO floats (val) VALUES (?)", [val]);
  }
  rows = [...db.query("SELECT val FROM floats")].map(([v]) => v);
  assertEquals(rows.length, vals.length);
  assertEquals(rows, vals);
  // boolean (stored as INTEGER: true -> 1, false -> 0)
  db.query(
    "CREATE TABLE bools (id INTEGER PRIMARY KEY AUTOINCREMENT, val INTEGER)",
  );
  vals = [true, false];
  for (const val of vals) {
    db.query(
      "INSERT INTO bools (val) VALUES (?)",
      [val],
    );
  }
  rows = [...db.query("SELECT val FROM bools")].map(([v]) => v);
  assertEquals(rows.length, vals.length);
  assertEquals(rows, [1, 0]);
  // date (stored as TEXT, reconstructed via the Date constructor)
  db.query("CREATE TABLE dates (date TEXT NOT NULL)");
  vals = [new Date(), new Date("2018-11-20"), new Date(123456789)];
  for (const val of vals) {
    db.query("INSERT INTO dates (date) VALUES (?)", [val]);
  }
  rows = db.query<[string]>("SELECT date FROM dates").map(([d]) => new Date(d));
  assertEquals(rows, vals);
  // blob
  db.query(
    "CREATE TABLE blobs (id INTEGER PRIMARY KEY AUTOINCREMENT, val BLOB)",
  );
  vals = [
    new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 0]),
    new Uint8Array([3, 57, 45]),
  ];
  for (const val of vals) {
    db.query(
      "INSERT INTO blobs (val) VALUES (?)",
      [val],
    );
  }
  rows = [...db.query("SELECT val FROM blobs")].map(([v]) => v);
  assertEquals(rows.length, vals.length);
  assertEquals(rows, vals);
  // big int
  db.query(
    "CREATE TABLE bigints (id INTEGER PRIMARY KEY AUTOINCREMENT, val INTEGER)",
  );
  const intVals: (bigint | number)[] = [9007199254741991n, 100n];
  for (const val of intVals) {
    db.query(
      "INSERT INTO bigints (val) VALUES (?)",
      [val],
    );
  }
  rows = [...db.query("SELECT val FROM bigints")].map(([v]) => v);
  // small integers are returned as plain numbers, not bigints, so the
  // expected value for 100n is adjusted before comparison
  intVals[1] = 100;
  assertEquals(rows, intVals);
  // null & undefined (both bind as SQL NULL)
  db.query(
    "CREATE TABLE nulls (id INTEGER PRIMARY KEY AUTOINCREMENT, val INTEGER)",
  );
  vals = [null, undefined];
  for (const val of vals) {
    db.query(
      "INSERT INTO nulls (val) VALUES (?)",
      [val],
    );
  }
  rows = [...db.query("SELECT val FROM nulls")].map(([v]) => v);
  assertEquals(rows.length, vals.length);
  assertEquals(rows, [null, null]);
  // mixed
  db.query(
    "CREATE TABLE mix (id INTEGER PRIMARY KEY AUTOINCREMENT, val1 INTEGER, val2 TEXT, val3 REAL, val4 TEXT)",
  );
  vals = [42, "Hello World!", 0.33333, null];
  db.query(
    "INSERT INTO mix (val1, val2, val3, val4) VALUES (?, ?, ?, ?)",
    vals,
  );
  rows = [...db.query("SELECT val1, val2, val3, val4 FROM mix")];
  assertEquals(rows.length, 1);
  assertEquals(rows[0], vals);
  // too many
  assertThrows(() => {
    db.query("SELECT * FROM strings", [null]);
  });
  assertThrows(() => {
    db.query("SELECT * FROM strings LIMIT ?", [5, "extra"]);
  });
  // too few
  assertThrows(() => {
    db.query("SELECT * FROM strings LIMIT ?");
  });
  assertThrows(() => {
    db.query(
      "SELECT * FROM mix WHERE val1 = ? AND val2 = ? AND val3 = ? LIMIT ?",
      [
        1,
        "second",
      ],
    );
  });
  // omitted is null
  db.query(
    "CREATE TABLE omit_is_null (idx INTEGER PRIMARY KEY AUTOINCREMENT, val TEXT)",
  );
  db.query("INSERT INTO omit_is_null (val) VALUES (?)");
  rows = [...db.query("SELECT val FROM omit_is_null")].map(([val]) => val);
  assertEquals(rows, [null]);
  db.close();
});
// Exercises every named-parameter syntax SQLite supports (:name, @name,
// $name) plus explicit positional (?N) binding, and checks ordering.
Deno.test("bind named parameters", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, val TEXT)",
  );
  // default named syntax
  db.query("INSERT INTO test (val) VALUES (:val)", { val: "value" });
  db.query(
    "INSERT INTO test (val) VALUES (:otherVal)",
    { otherVal: "value other" },
  );
  // @ named syntax
  db.query(
    "INSERT INTO test (val) VALUES (@someName)",
    { ["@someName"]: "@value" },
  );
  // $ names syntax
  db.query(
    "INSERT INTO test (val) VALUES ($var::Name)",
    { ["$var::Name"]: "$value" },
  );
  // explicit positional syntax
  db.query("INSERT INTO test (id, val) VALUES (?2, ?1)", ["this-is-it", 1000]);
  // names must exist (binding keys are case-sensitive, so `Val` must throw)
  assertThrows(() => {
    db.query(
      "INSERT INTO test (val) VALUES (:val)",
      { Val: "miss-spelled :(" },
    );
  });
  // Make sure the data came through correctly
  const vals = [...db.query("SELECT val FROM test ORDER BY id ASC")].map(
    (row) => row[0],
  );
  assertEquals(
    vals,
    ["value", "value other", "@value", "$value", "this-is-it"],
  );
  db.close();
});
// Prepared-query API: iter() yields rows lazily.
Deno.test("query from prepared query", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT)");
  db.query("INSERT INTO test (id) VALUES (1), (2), (3)");
  const res = [];
  const query = db.prepareQuery<[number]>("SELECT id FROM test");
  for (const [id] of query.iter()) {
    res.push(id);
  }
  assertEquals(res, [1, 2, 3]);
  query.finalize();
  db.close();
});
// Prepared-query API: all() materializes every row, and re-running the same
// prepared statement reflects rows inserted in between.
Deno.test("query all from prepared query", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT)");
  const query = db.prepareQuery("SELECT id FROM test");
  assertEquals(query.all(), []);
  db.query("INSERT INTO test (id) VALUES (1), (2), (3)");
  assertEquals(query.all(), [[1], [2], [3]]);
  query.finalize();
  db.close();
});
// Prepared-query API: one() returns exactly one row or throws.
Deno.test("query one from prepared query", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT)");
  db.query("INSERT INTO test (id) VALUES (1), (2), (3)");
  const queryOne = db.prepareQuery<[number]>(
    "SELECT id FROM test WHERE id = ?",
  );
  assertEquals(queryOne.one([2]), [2]);
  queryOne.finalize();
  const queryAll = db.prepareQuery("SELECT id FROM test");
  // one() must throw when the statement yields more than one row
  assertThrows(() => queryAll.one());
  queryAll.finalize();
  db.close();
});
// Prepared-query API: execute() for statements without result rows.
Deno.test("execute from prepared query", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT)");
  const insert = db.prepareQuery("INSERT INTO test (id) VALUES (:id)");
  for (const id of [1, 2, 3]) {
    insert.execute({ id });
  }
  insert.finalize();
  assertEquals(db.query("SELECT id FROM test"), [[1], [2], [3]]);
  db.close();
});
// BLOB results must be independent copies: mutating a returned buffer may
// not affect other results or the stored data, and vice versa.
Deno.test("blobs are copies", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, val BLOB)",
  );
  const data = new Uint8Array([1, 2, 3, 4, 5]);
  db.query("INSERT INTO test (val) VALUES (?)", [data]);
  const [[a]] = db.query<[Uint8Array]>("SELECT val FROM test");
  const [[b]] = db.query<[Uint8Array]>("SELECT val FROM test");
  assertEquals(data, a);
  assertEquals(data, b);
  assertEquals(a, b);
  a[0] = 100;
  assertEquals(a[0], 100);
  assertEquals(b[0], 1);
  assertEquals(data[0], 1);
  data[0] = 5;
  const [[c]] = db.query<[Uint8Array]>("SELECT val FROM test");
  assertEquals(c[0], 1);
  db.close();
});
// Writes rows to an on-disk database, then reopens the same file with a
// second connection and verifies the data was persisted.
Deno.test({
  name: "save to file",
  ignore: !permRead || !permWrite,
  fn: async function () {
    const data = [
      "Hello World!",
      "Hello Deno!",
      "JavaScript <3",
      "This costs 0€!",
      "Wéll, hällö thėrè¿",
    ];
    // Ensure test file does not exist
    await removeTestDb(testDbFile);
    // Write data to db
    const db = new DB(testDbFile);
    db.query(
      "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, val TEXT)",
    );
    for (const val of data) {
      db.query("INSERT INTO test (val) VALUES (?)", [val]);
    }
    // Read db and check the data is restored
    // (the DB constructor is synchronous; the stray `await` on it was a no-op)
    const db2 = new DB(testDbFile);
    for (const [id, val] of db2.query<[number, string]>("SELECT * FROM test")) {
      assertEquals(data[id - 1], val);
    }
    // Clean up
    await Deno.remove(testDbFile);
    db.close();
    db2.close();
  },
});
// An empty string path asks SQLite for a temporary on-disk database,
// which still requires fs permissions, hence the `ignore` guard.
Deno.test({
  name: "temporary database",
  ignore: !permRead || !permWrite,
  fn: function () {
    const data = [
      "Hello World!",
      "Hello Deno!",
      "JavaScript <3",
      "This costs 0€!",
      "Wéll, hällö thėrè¿",
    ];
    const db = new DB("");
    db.query(
      "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, val TEXT)",
    );
    for (const val of data) {
      db.query("INSERT INTO test (val) VALUES (?)", [val]);
    }
    // Read db and check the data is restored
    for (const [id, val] of db.query<[number, string]>("SELECT * FROM test")) {
      assertEquals(data[id - 1], val);
    }
    db.close();
  },
});
// Any malformed SQL must raise rather than silently no-op.
Deno.test("invalid SQL", function () {
  const db = new DB();
  const queries = [
    "INSERT INTO does_not_exist (balance) VALUES (5)",
    "this is not sql",
    ";;;",
  ];
  for (const query of queries) assertThrows(() => db.query(query));
  db.close();
});
// Foreign-key enforcement must be ON by default (SQLite ships with it off).
Deno.test("foreign key constraints enabled", function () {
  const db = new DB();
  db.query("CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT)");
  db.query(
    "CREATE TABLE orders (id INTEGER PRIMARY KEY AUTOINCREMENT, user INTEGER, FOREIGN KEY(user) REFERENCES users(id))",
  );
  db.query("INSERT INTO users (id) VALUES (1)");
  const [[id]] = db.query<[number]>("SELECT id FROM users");
  // User must exist
  assertThrows(() => {
    db.query("INSERT INTO orders (user) VALUES (?)", [id + 1]);
  });
  db.query("INSERT INTO orders (user) VALUES (?)", [id]);
  // Can't delete if that violates the constraint
  assertThrows(() => {
    db.query("DELETE FROM users WHERE id = ?", [id]);
  });
  // Now deleting is OK
  db.query("DELETE FROM orders WHERE user = ?", [id]);
  db.query("DELETE FROM users WHERE id = ?", [id]);
  db.close();
});
Deno.test("close database", function () {
  const db = new DB();
  db.close();
  // queries on a closed database must throw
  assertThrows(() => db.query("CREATE TABLE test (name TEXT PRIMARY KEY)"));
  db.close(); // check close is idempotent and won't throw
});
// close() must refuse while prepared statements are still open ...
Deno.test("open queries block close", function () {
  const db = new DB();
  db.query("CREATE TABLE test (name TEXT PRIMARY KEY)");
  const query = db.prepareQuery("SELECT name FROM test");
  assertThrows(() => db.close());
  query.finalize();
  db.close();
});
// ... unless the caller explicitly forces cleanup with close(true).
Deno.test("open queries cleaned up by forced close", function () {
  const db = new DB();
  db.query("CREATE TABLE test (name TEXT PRIMARY KEY)");
  db.query("INSERT INTO test (name) VALUES (?)", ["Deno"]);
  db.prepareQuery("SELECT name FROM test WHERE name like '%test%'");
  assertThrows(() => db.close());
  db.close(true);
});
// Errors must surface the underlying SQLite status code and its name.
Deno.test("constraint error code is correct", function () {
  const db = new DB();
  db.query("CREATE TABLE test (name TEXT PRIMARY KEY)");
  db.query("INSERT INTO test (name) VALUES (?)", ["A"]);
  const e = assertThrows(() =>
    db.query("INSERT INTO test (name) VALUES (?)", ["A"])
  ) as SqliteError;
  assertEquals(e.code, Status.SqliteConstraint, "Got wrong error code");
  assertEquals(
    Status[e.codeName],
    Status.SqliteConstraint,
    "Got wrong error code name",
  );
  // close the handle like every other test does (was previously leaked)
  db.close();
});
Deno.test("syntax error code is correct", function () {
  const db = new DB();
  const e = assertThrows(() =>
    db.query("CREATE TABLEX test (name TEXT PRIMARY KEY)")
  ) as SqliteError;
  assertEquals(e.code, Status.SqliteError, "Got wrong error code");
  assertEquals(
    Status[e.codeName],
    Status.SqliteError,
    "Got wrong error code name",
  );
  // close the handle like every other test does (was previously leaked)
  db.close();
});
// Binding a value of an unsupported type (a plain object) must throw.
Deno.test("invalid binding throws", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER)");
  assertThrows(() => {
    // deno-lint-ignore no-explicit-any
    const badBinding: any = [{}];
    // BUG FIX: the SQL previously read "SELECT * FORM test ..." — a syntax
    // error — so the assertion passed for the wrong reason (the statement
    // failed to prepare before the binding was ever attempted).
    db.query("SELECT * FROM test WHERE id = ?", badBinding);
  });
  db.close();
});
// Repeatedly failing binds must not leak statement handles: after 200
// failed attempts a regular insert still has to succeed.
Deno.test("invalid bind does not leak statements", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER)");
  for (let n = 0; n < 100; n++) {
    assertThrows(() => {
      // deno-lint-ignore no-explicit-any
      const badBinding: any = [{}];
      db.query("INSERT INTO test (id) VALUES (?)", badBinding);
    });
    assertThrows(() => {
      // named bindings with an unknown key must also throw (and not leak)
      const badBinding = { missingKey: null };
      db.query("INSERT INTO test (id) VALUES (?)", badBinding);
    });
  }
  db.query("INSERT INTO test (id) VALUES (1)");
  db.close();
});
// columns() must report name / origin / table metadata for a plain SELECT.
Deno.test("get columns from select query", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)",
  );
  const query = db.prepareQuery("SELECT id, name from test");
  assertEquals(query.columns(), [
    { name: "id", originName: "id", tableName: "test" },
    { name: "name", originName: "name", tableName: "test" },
  ]);
  // release resources (the query and db were previously leaked)
  query.finalize();
  db.close();
});
// columns() must also work for DML statements with a RETURNING clause.
Deno.test("get columns from returning query", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)",
  );
  const query = db.prepareQuery(
    "INSERT INTO test (name) VALUES (?) RETURNING *",
  );
  assertEquals(query.columns(), [
    { name: "id", originName: "id", tableName: "test" },
    { name: "name", originName: "name", tableName: "test" },
  ]);
  assertEquals(query.all(["name"]), [[1, "name"]]);
  // release resources (the query and db were previously leaked)
  query.finalize();
  db.close();
});
// Aliased columns report the alias as `name` and the real column as origin.
Deno.test("get columns with renamed column", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)",
  );
  db.query("INSERT INTO test (name) VALUES (?)", ["name"]);
  const query = db.prepareQuery(
    "SELECT id AS test_id, name AS test_name from test",
  );
  const columns = query.columns();
  assertEquals(columns, [
    { name: "test_id", originName: "id", tableName: "test" },
    { name: "test_name", originName: "name", tableName: "test" },
  ]);
  // release resources (the query and db were previously leaked)
  query.finalize();
  db.close();
});
// Metadata access after finalize() is a programming error and must throw.
Deno.test("get columns from finalized query throws", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT)");
  const query = db.prepareQuery("SELECT id from test");
  query.finalize();
  // after iteration is done
  assertThrows(() => {
    query.columns();
  });
  // close the handle like every other test does (was previously leaked)
  db.close();
});
Deno.test("date time is correct", function () {
  const db = new DB();
  // the date/ time is passed from JS and should be current (note that it is GMT)
  const [[now]] = [...db.query("SELECT current_timestamp")];
  // NOTE(review): current_timestamp has second resolution while `new Date()`
  // carries milliseconds — this relies on assertEquals' Date comparison
  // semantics and could flake across a second boundary; confirm.
  assertEquals(new Date(now + "Z"), new Date());
  db.close();
});
// lastInsertRowId must start at 0, track AUTOINCREMENT ids while open,
// and reset to 0 after close().
Deno.test("last inserted id", function () {
  const db = new DB();
  // By default, lastInsertRowId must be 0
  assertEquals(db.lastInsertRowId, 0);
  // Create table and insert value
  db.query("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)");
  const insertRowIds = [];
  // Insert data to table and collect their ids
  for (let i = 0; i < 10; i++) {
    db.query("INSERT INTO users (name) VALUES ('John Doe')");
    insertRowIds.push(db.lastInsertRowId);
  }
  // Now, the last inserted row id must be 10
  assertEquals(db.lastInsertRowId, 10);
  // All collected row ids must be the same as in the database
  assertEquals(
    insertRowIds,
    [...db.query("SELECT id FROM users")].map(([i]) => i),
  );
  db.close();
  // When the database is closed, the value
  // will be reset to 0 again
  assertEquals(db.lastInsertRowId, 0);
});
// `changes` reports rows affected by the last statement; `totalChanges`
// accumulates over the connection's lifetime (3 inserts + 3 updates = 6).
Deno.test("changes is correct", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)",
  );
  for (const name of ["a", "b", "c"]) {
    db.query("INSERT INTO test (name) VALUES (?)", [name]);
    assertEquals(1, db.changes);
  }
  db.query("UPDATE test SET name = ?", ["new name"]);
  assertEquals(3, db.changes);
  assertEquals(6, db.totalChanges);
  // close the handle like every other test does (was previously leaked)
  db.close();
});
// The bundled SQLite must be compiled with the JSON1 extension.
Deno.test("json functions exist", function () {
  const db = new DB();
  // The JSON1 functions should exist and we should be able to call them without unexpected errors
  db.query(`SELECT json('{"this is": ["json"]}')`);
  // We should expect an error if we pass invalid JSON where valid JSON is expected
  assertThrows(() => {
    db.query(`SELECT json('this is not json')`);
  });
  // We should be able to use bound values as arguments to the JSON1 functions,
  // and they should produce the expected results for these simple expressions.
  const [[objectType]] = db.query(`SELECT json_type('{}')`);
  assertEquals(objectType, "object");
  const [[integerType]] = db.query(`SELECT json_type(?)`, ["2"]);
  assertEquals(integerType, "integer");
  const [[realType]] = db.query(`SELECT json_type(?)`, ["2.5"]);
  assertEquals(realType, "real");
  const [[stringType]] = db.query(`SELECT json_type(?)`, [`"hello"`]);
  assertEquals(stringType, "text");
  const [[integerTypeAtPath]] = db.query(
    `SELECT json_type(?, ?)`,
    [`["hello", 2, {"world": 4}]`, `$[2].world`],
  );
  assertEquals(integerTypeAtPath, "integer");
  // close the handle like every other test does (was previously leaked)
  db.close();
});
// REAL columns must round-trip IEEE 754 infinities and very large doubles.
Deno.test("very large numbers", function () {
  const db = new DB();
  db.query("CREATE TABLE numbers (id INTEGER PRIMARY KEY, number REAL)");
  db.query("INSERT INTO numbers (number) VALUES (?)", [+Infinity]);
  db.query("INSERT INTO numbers (number) VALUES (?)", [-Infinity]);
  db.query("INSERT INTO numbers (number) VALUES (?)", [+20e20]);
  db.query("INSERT INTO numbers (number) VALUES (?)", [-20e20]);
  const [
    [positiveInfinity],
    [negativeInfinity],
    [positiveTwentyTwenty],
    [negativeTwentyTwenty],
  ] = db.query("SELECT number FROM numbers");
  assertEquals(negativeInfinity, -Infinity);
  assertEquals(positiveInfinity, +Infinity);
  assertEquals(positiveTwentyTwenty, +20e20);
  assertEquals(negativeTwentyTwenty, -20e20);
  // close the handle like every other test does (was previously leaked)
  db.close();
});
// Opt-in test against a pre-generated >2GB database file, exercising
// 64-bit file offsets; skipped unless the fixture exists.
Deno.test({
  name: "database larger than 2GB",
  ignore: !permRead || !permWrite || !(await dbExists("./build/2GB_test.db")),
  fn: function () {
    const db = new DB("./build/2GB_test.db"); // can be generated with `cd build && make testdb`
    db.query("INSERT INTO test (value) VALUES (?)", ["This is a test..."]);
    const rows = [
      ...db.query("SELECT value FROM test ORDER BY id DESC LIMIT 10"),
    ];
    assertEquals(rows.length, 10);
    assertEquals(rows[0][0], "This is a test...");
    db.close();
  },
});
Deno.test("empty query returns empty array", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY)");
  assertEquals([], db.query("SELECT * FROM test"));
  db.close();
});
// A prepared statement may be executed any number of times before finalize.
Deno.test("prepared query can be reused", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY)");
  const query = db.prepareQuery("INSERT INTO test (id) VALUES (?)");
  query.execute([1]);
  query.execute([2]);
  query.execute([3]);
  assertEquals([[1], [2], [3]], db.query("SELECT id FROM test"));
  query.finalize();
  db.close();
});
// Re-executing without arguments must not reuse the previous binding:
// the second insert stores NULL, not the stale value 1.
Deno.test("prepared query clears bindings before reused", function () {
  const db = new DB();
  db.query("CREATE TABLE test (id INTEGER PRIMARY KEY, value INTEGER)");
  const query = db.prepareQuery("INSERT INTO test (value) VALUES (?)");
  query.execute([1]);
  query.execute();
  assertEquals([[1], [null]], db.query("SELECT value FROM test"));
  query.finalize();
  db.close();
});
// BigInt bindings must cover the full signed 64-bit range and reject
// values that overflow it.
Deno.test("big integers bind correctly", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, val INTEGER)",
  );
  const goodValues = [
    0n,
    42n,
    -42n,
    9223372036854775807n,
    -9223372036854775808n,
  ];
  const overflowValues = [
    9223372036854775807n + 1n,
    -9223372036854775808n - 1n,
    2352359223372036854775807n,
    -32453249223372036854775807n,
  ];
  const query = db.prepareQuery("INSERT INTO test (val) VALUES (?)");
  for (const val of goodValues) {
    query.execute([val]);
  }
  // normalize to bigint for comparison: small values come back as number
  const dbValues = db.query<[number | bigint]>(
    "SELECT val FROM test ORDER BY id",
  ).map((
    [id],
  ) => BigInt(id));
  assertEquals(goodValues, dbValues);
  // any value outside the signed 64-bit range must throw at bind time
  for (const bigVal of overflowValues) {
    assertThrows(() => {
      query.execute([bigVal]);
    });
  }
  query.finalize();
  db.close();
});
// execute() on a finalized statement is a programming error and must throw.
Deno.test("using finalized prepared query throws", function () {
  const db = new DB();
  db.query("CREATE TABLE test (name TEXT)");
  const query = db.prepareQuery("INSERT INTO test (name) VALUES (?)");
  query.finalize();
  assertThrows(() => query.execute(["test"]));
  db.close();
});
// columns() must be available even for a statement that yields no rows,
// and must match the metadata of an equivalent non-empty statement.
Deno.test("columns can be obtained from empty prepared query", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEST, age INTEGER)",
  );
  db.query("INSERT INTO test (name, age) VALUES (?, ?)", ["Peter Parker", 21]);
  const query = db.prepareQuery("SELECT * FROM test");
  const columnsFromPreparedQuery = query.columns();
  query.finalize();
  const queryEmpty = db.prepareQuery("SELECT * FROM test WHERE 1 = 0");
  const columnsFromPreparedQueryWithEmptyQuery = queryEmpty.columns();
  assertEquals(queryEmpty.all(), []);
  // BUG FIX: `query` was finalized a second time here while `queryEmpty`
  // leaked; finalize the statement that is actually still open.
  queryEmpty.finalize();
  assertEquals(
    [{ name: "id", originName: "id", tableName: "test" }, {
      name: "name",
      originName: "name",
      tableName: "test",
    }, { name: "age", originName: "age", tableName: "test" }],
    columnsFromPreparedQuery,
  );
  assertEquals(
    columnsFromPreparedQueryWithEmptyQuery,
    columnsFromPreparedQuery,
  );
  // close the handle like every other test does (was previously leaked)
  db.close();
});
// SQLite's 'localtime' modifier must agree with the host's local timezone
// as seen by JS. NOTE(review): could flake across a second boundary.
Deno.test("SQL localtime reflects system locale", function () {
  const db = new DB();
  const [[timeDb]] = db.query("SELECT datetime('now', 'localtime')");
  const now = new Date();
  const jsMonth = `${now.getMonth() + 1}`.padStart(2, "0");
  const jsDate = `${now.getDate()}`.padStart(2, "0");
  const jsHour = `${now.getHours()}`.padStart(2, "0");
  const jsMinute = `${now.getMinutes()}`.padStart(2, "0");
  const jsSecond = `${now.getSeconds()}`.padStart(2, "0");
  const timeJs =
    `${now.getFullYear()}-${jsMonth}-${jsDate} ${jsHour}:${jsMinute}:${jsSecond}`;
  assertEquals(timeDb, timeJs);
  // close the handle like every other test does (was previously leaked)
  db.close();
});
// The *Entries variants (allEntries / oneEntry / iterEntries / queryEntries)
// must return rows as column-name keyed objects, not positional arrays.
Deno.test("object query functions work correctly", function () {
  const db = new DB();
  db.query(
    "CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, height REAL)",
  );
  const rowsOrig = [
    { id: 1, name: "Peter Parker", height: 1.5 },
    { id: 2, name: "Clark Kent", height: 1.9 },
    { id: 3, name: "Robert Paar", height: 2.1 },
  ];
  const insertQuery = db.prepareQuery(
    "INSERT INTO test (id, name, height) VALUES (:id, :name, :height)",
  );
  for (const row of rowsOrig) {
    insertQuery.execute(row);
  }
  insertQuery.finalize();
  const query = db.prepareQuery("SELECT * FROM test LIMIT ?");
  assertEquals(rowsOrig, query.allEntries([rowsOrig.length]));
  assertEquals(rowsOrig[0], query.oneEntry([1]));
  const rowsIter = [];
  for (const row of query.iterEntries([rowsOrig.length])) {
    rowsIter.push(row);
  }
  assertEquals(rowsOrig, rowsIter);
  assertEquals(rowsOrig, db.queryEntries("SELECT * FROM test"));
  query.finalize();
  db.close();
});
// Tests which drop the permission from read + write to read only
// and should run after any other test.
Deno.test({
name: "database open options",
ignore: !permRead || !permWrite,
fn: async function () {
await removeTestDb(testDbFile);
// when no file exists, these should error
assertThrows(() => new DB(testDbFile, { mode: "write" }));
assertThrows(() => new DB(testDbFile, { mode: "read" }));
// create the database
const dbCreate = new DB(testDbFile, { mode: "create" });
dbCreate.query(
"CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL)",
);
dbCreate.close();
const dbWrite = new DB(testDbFile, { mode: "write" });
dbWrite.query("INSERT INTO test (name) VALUES (?)", ["open-options-test"]);
dbWrite.close();
const dbRead = new DB(testDbFile, { mode: "read" });
const rows = [...dbRead.query("SELECT id, name FROM test")];
assertEquals(rows, [[1, "open-options-test"]]);
assertThrows(() =>
dbRead.query("INTERT INTO test (name) VALUES (?)", ["this-fails"])
);
dbRead.close();
await Deno.permissions.revoke({ name: "write" });
assertThrows(() => new DB(testDbFile));
assertThrows(() => new DB(testDbFile, { mode: "create" }));
assertThrows(() => new DB(testDbFile, { mode: "write" }));
(new DB(testDbFile, { mode: "read" })).close();
// with memory flag set, the database will be in memory and
// not require any permissions
await Deno.permissions.revoke({ name: "read" });
assertThrows(() => new DB(testDbFile, { mode: "read" }));
(new DB(testDbFile, { memory: true })).close();
// the mode can also be specified via uri flag and setting the
// relevant parameter
(new DB(`file:${testDbFile}?mode=memory`, { uri: true })).close();
},
}); | the_stack |
import * as codepipeline from '@aws-cdk/aws-codepipeline';
import * as iam from '@aws-cdk/aws-iam';
import * as cdk from '@aws-cdk/core';
import { Construct } from 'constructs';
/**
 * Options in common between both StackSet actions
 *
 * These map directly onto the CloudFormation StackSet deployment
 * preferences of the underlying CodePipeline action configuration.
 */
export interface CommonCloudFormationStackSetOptions {
  /**
   * The percentage of accounts per Region for which this stack operation can fail before AWS CloudFormation stops the operation in that Region. If
   * the operation is stopped in a Region, AWS CloudFormation doesn't attempt the operation in subsequent Regions. When calculating the number
   * of accounts based on the specified percentage, AWS CloudFormation rounds down to the next whole number.
   *
   * @default 0%
   */
  readonly failureTolerancePercentage?: number;
  /**
   * The maximum percentage of accounts in which to perform this operation at one time. When calculating the number of accounts based on the specified
   * percentage, AWS CloudFormation rounds down to the next whole number. If rounding down would result in zero, AWS CloudFormation sets the number as
   * one instead. Although you use this setting to specify the maximum, for large deployments the actual number of accounts acted upon concurrently
   * may be lower due to service throttling.
   *
   * @default 1%
   */
  readonly maxAccountConcurrencyPercentage?: number;
  /**
   * The AWS Region the StackSet is in.
   *
   * Note that a cross-region Pipeline requires replication buckets to function correctly.
   * You can provide their names with the `PipelineProps.crossRegionReplicationBuckets` property.
   * If you don't, the CodePipeline Construct will create new Stacks in your CDK app containing those buckets,
   * that you will need to `cdk deploy` before deploying the main, Pipeline-containing Stack.
   *
   * @default - same region as the Pipeline
   */
  readonly stackSetRegion?: string;
}
/**
 * The source of a StackSet template
 */
export abstract class StackSetTemplate {
  /**
   * Use a file in an artifact as Stack Template.
   */
  public static fromArtifactPath(artifactPath: codepipeline.ArtifactPath): StackSetTemplate {
    // Named local subclass (rather than an inline anonymous class) that
    // closes over the artifact path and registers its artifact.
    class ArtifactPathTemplate extends StackSetTemplate {
      public readonly _artifactsReferenced?: codepipeline.Artifact[] | undefined = [artifactPath.artifact];
      public _render() {
        return artifactPath.location;
      }
    }
    return new ArtifactPathTemplate();
  }
  /**
   * Which artifacts are referenced by this template
   *
   * Does not need to be called by app builders.
   *
   * @internal
   */
  public abstract readonly _artifactsReferenced?: codepipeline.Artifact[] | undefined;
  /**
   * Render the template to the pipeline
   *
   * Does not need to be called by app builders.
   *
   * @internal
   */
  public abstract _render(): any;
}
/**
 * Where Stack Instances will be created from the StackSet
 */
export abstract class StackInstances {
  /**
   * Create stack instances in a set of accounts and regions passed as literal lists
   *
   * Stack Instances will be created in every combination of region and account.
   *
   * > NOTE: `StackInstances.inAccounts()` and `StackInstances.inOrganizationalUnits()`
   * > have exactly the same behavior, and you can use them interchangeably if you want.
   * > The only difference between them is that your code clearly indicates what entity
   * > it's working with.
   */
  public static inAccounts(accounts: string[], regions: string[]): StackInstances {
    return StackInstances.fromList(accounts, regions);
  }
  /**
   * Create stack instances in all accounts in a set of Organizational Units (OUs) and regions passed as literal lists
   *
   * If you want to deploy to Organization Units, you must choose have created the StackSet
   * with `deploymentModel: DeploymentModel.organizations()`.
   *
   * Stack Instances will be created in every combination of region and account.
   *
   * > NOTE: `StackInstances.inAccounts()` and `StackInstances.inOrganizationalUnits()`
   * > have exactly the same behavior, and you can use them interchangeably if you want.
   * > The only difference between them is that your code clearly indicates what entity
   * > it's working with.
   */
  public static inOrganizationalUnits(ous: string[], regions: string[]): StackInstances {
    return StackInstances.fromList(ous, regions);
  }
  /**
   * Create stack instances in a set of accounts or organizational units taken from the pipeline artifacts, and a set of regions
   *
   * The file must be a JSON file containing a list of strings. For example:
   *
   * ```json
   * [
   *   "111111111111",
   *   "222222222222",
   *   "333333333333"
   * ]
   * ```
   *
   * Stack Instances will be created in every combination of region and account, or region and
   * Organizational Units (OUs).
   *
   * If this is set of Organizational Units, you must have selected `StackSetDeploymentModel.organizations()`
   * as deployment model.
   */
  public static fromArtifactPath(artifactPath: codepipeline.ArtifactPath, regions: string[]): StackInstances {
    if (regions.length === 0) {
      throw new Error("'regions' may not be an empty list");
    }
    return new class extends StackInstances {
      // the artifact must be registered so the pipeline grants access to it
      public readonly _artifactsReferenced?: codepipeline.Artifact[] | undefined = [artifactPath.artifact];
      public _bind(_scope: Construct): StackInstancesBindResult {
        return {
          stackSetConfiguration: {
            DeploymentTargets: artifactPath.location,
            Regions: regions.join(','),
          },
        };
      }
    }();
  }
  /**
   * Create stack instances in a literal set of accounts or organizational units, and a set of regions
   *
   * Stack Instances will be created in every combination of region and account, or region and
   * Organizational Units (OUs).
   *
   * If this is set of Organizational Units, you must have selected `StackSetDeploymentModel.organizations()`
   * as deployment model.
   */
  private static fromList(targets: string[], regions: string[]): StackInstances {
    // Both dimensions must be non-empty: the action would otherwise create no instances.
    if (targets.length === 0) {
      throw new Error("'targets' may not be an empty list");
    }
    if (regions.length === 0) {
      throw new Error("'regions' may not be an empty list");
    }
    return new class extends StackInstances {
      public _bind(_scope: Construct): StackInstancesBindResult {
        return {
          // the action configuration expects comma-separated lists here
          stackSetConfiguration: {
            DeploymentTargets: targets.join(','),
            Regions: regions.join(','),
          },
        };
      }
    }();
  }
  /**
   * The artifacts referenced by the properties of this deployment target
   *
   * Does not need to be called by app builders.
   *
   * @internal
   */
  readonly _artifactsReferenced?: codepipeline.Artifact[];
  /**
   * Called to attach the stack set instances to a stackset action
   *
   * Does not need to be called by app builders.
   *
   * @internal
   */
  public abstract _bind(scope: Construct): StackInstancesBindResult;
}
/**
 * Returned by the StackInstances.bind() function
 *
 * Does not need to be used by app builders.
 *
 * @internal
 */
export interface StackInstancesBindResult {
  /**
   * Properties to mix into the Action configuration
   *
   * (Raw key/value pairs merged into the CodePipeline action's configuration.)
   */
  readonly stackSetConfiguration: any;
}
/**
 * Base parameters for the StackSet
 */
export abstract class StackSetParameters {
  /**
   * A list of template parameters for your stack set.
   *
   * You must specify all template parameters. Parameters you don't specify will revert
   * to their `Default` values as specified in the template.
   *
   * Specify the names of parameters you want to retain their existing values,
   * without specifying what those values are, in an array in the second
   * argument to this function. Use of this feature is discouraged. CDK is for
   * specifying desired-state infrastructure, and use of this feature makes the
   * parameter values unmanaged.
   *
   * @example
   *
   * const parameters = codepipeline_actions.StackSetParameters.fromLiteral({
   *   BucketName: 'my-bucket',
   *   Asset1: 'true',
   * });
   */
  public static fromLiteral(parameters: Record<string, string>, usePreviousValues?: string[]): StackSetParameters {
    return new class extends StackSetParameters {
      // Literal parameters reference no pipeline artifacts.
      public readonly _artifactsReferenced: codepipeline.Artifact[] = [];
      _render(): string {
        // Render in the space-separated `ParameterKey=...,ParameterValue=...`
        // syntax the StackSet action configuration expects.
        return [
          ...Object.entries(parameters).map(([key, value]) =>
            `ParameterKey=${key},ParameterValue=${value}`),
          ...(usePreviousValues ?? []).map((key) =>
            `ParameterKey=${key},UsePreviousValue=true`),
        ].join(' ');
      }
    }();
  }
  /**
   * Read the parameters from a JSON file from one of the pipeline's artifacts
   *
   * The file needs to contain a list of `{ ParameterKey, ParameterValue, UsePreviousValue }` objects, like
   * this:
   *
   * ```
   * [
   *     {
   *         "ParameterKey": "BucketName",
   *         "ParameterValue": "my-bucket"
   *     },
   *     {
   *         "ParameterKey": "Asset1",
   *         "ParameterValue": "true"
   *     },
   *     {
   *         "ParameterKey": "Asset2",
   *         "UsePreviousValue": true
   *     }
   * ]
   * ```
   *
   * You must specify all template parameters. Parameters you don't specify will revert
   * to their `Default` values as specified in the template.
   *
   * For parameters you want to retain their existing values
   * without specifying what those values are, set `UsePreviousValue: true`.
   * Use of this feature is discouraged. CDK is for
   * specifying desired-state infrastructure, and use of this feature makes the
   * parameter values unmanaged.
   */
  public static fromArtifactPath(artifactPath: codepipeline.ArtifactPath): StackSetParameters {
    return new class extends StackSetParameters {
      // FIX: marked `readonly` for consistency with the abstract declaration
      // below (it was previously a mutable override of a readonly member).
      public readonly _artifactsReferenced: codepipeline.Artifact[] = [artifactPath.artifact];
      public _render(): string {
        return artifactPath.location;
      }
    }();
  }
  /**
   * Artifacts referenced by this parameter set
   *
   * @internal
   */
  public abstract readonly _artifactsReferenced: codepipeline.Artifact[];
  /**
   * Converts Parameters to a string.
   *
   * @internal
   */
  public abstract _render(): string;
}
/**
 * Determines how IAM roles are created and managed.
 */
export abstract class StackSetDeploymentModel {
  /**
   * Deploy to AWS Organizations accounts.
   *
   * AWS CloudFormation StackSets automatically creates the IAM roles required
   * to deploy to accounts managed by AWS Organizations. This requires an
   * account to be a member of an Organization.
   *
   * Using this deployment model, you can specify either AWS Account Ids or
   * Organization Unit Ids in the `stackInstances` parameter.
   */
  public static organizations(props: OrganizationsDeploymentProps = {}): StackSetDeploymentModel {
    return new class extends StackSetDeploymentModel {
      _bind() {
        return {
          stackSetConfiguration: {
            PermissionModel: 'SERVICE_MANAGED',
            // May be undefined when no auto-deployment behavior was requested;
            // NOTE(review): presumably undefined keys are dropped when the
            // action configuration is serialized — confirm in the action code.
            OrganizationsAutoDeployment: props.autoDeployment,
          },
        };
      }
    }();
  }
  /**
   * Deploy to AWS Accounts not managed by AWS Organizations
   *
   * You are responsible for creating Execution Roles in every account you will
   * be deploying to in advance to create the actual stack instances. Unless you
   * specify overrides, StackSets expects the execution roles you create to have
   * the default name `AWSCloudFormationStackSetExecutionRole`. See the [Grant
   * self-managed
   * permissions](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-prereqs-self-managed.html)
   * section of the CloudFormation documentation.
   *
   * The CDK will automatically create the central Administration Role in the
   * Pipeline account which will be used to assume the Execution Role in each of
   * the target accounts.
   *
   * If you wish to use a pre-created Administration Role, use `Role.fromRoleName()`
   * or `Role.fromRoleArn()` to import it, and pass it to this function:
   *
   * ```ts
   * const existingAdminRole = iam.Role.fromRoleName(this, 'AdminRole', 'AWSCloudFormationStackSetAdministrationRole');
   *
   * const deploymentModel = codepipeline_actions.StackSetDeploymentModel.selfManaged({
   *   // Use an existing Role. Leave this out to create a new Role.
   *   administrationRole: existingAdminRole,
   * });
   * ```
   *
   * Using this deployment model, you can only specify AWS Account Ids in the
   * `stackInstances` parameter.
   *
   * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-prereqs-self-managed.html
   */
  public static selfManaged(props: SelfManagedDeploymentProps = {}): StackSetDeploymentModel {
    return new class extends StackSetDeploymentModel {
      _bind(scope: Construct) {
        let administrationRole = props.administrationRole;
        if (!administrationRole) {
          // No role supplied: create the central administration role in the
          // given scope. CloudFormation may only assume it for stack sets in
          // this account (confused-deputy protection below).
          administrationRole = new iam.Role(scope, 'StackSetAdministrationRole', {
            assumedBy: new iam.ServicePrincipal('cloudformation.amazonaws.com', {
              conditions: {
                // Confused deputy protection
                StringLike: {
                  'aws:SourceArn': `arn:${cdk.Aws.PARTITION}:cloudformation:*:${cdk.Aws.ACCOUNT_ID}:stackset/*`,
                },
              },
            }),
          });
          // Allow the administration role to assume the (possibly custom-named)
          // execution role in every target account.
          administrationRole.addToPrincipalPolicy(new iam.PolicyStatement({
            actions: ['sts:AssumeRole'],
            resources: [`arn:${cdk.Aws.PARTITION}:iam::*:role/${props.executionRoleName ?? 'AWSCloudFormationStackSetExecutionRole'}`],
          }));
        }
        return {
          stackSetConfiguration: {
            PermissionModel: 'SELF_MANAGED',
            AdministrationRoleArn: administrationRole.roleArn,
            ExecutionRoleName: props.executionRoleName,
          },
          // Roles that need to be passed by the pipeline action.
          passedRoles: [administrationRole],
        } as StackSetDeploymentModelBindResult;
      }
    }();
  }
  /**
   * Bind to the Stack Set action and return the Action configuration
   *
   * Does not need to be called by app builders.
   *
   * @internal
   */
  public abstract _bind(scope: Construct): StackSetDeploymentModelBindResult;
}
/**
 * Returned by the StackSetDeploymentModel.bind() function
 *
 * Does not need to be used by app builders.
 *
 * @internal
 */
export interface StackSetDeploymentModelBindResult {
  /**
   * Properties to mix into the Action configuration
   *
   * Typed `any` because it is raw action configuration; keys produced in this
   * file include `PermissionModel`, `AdministrationRoleArn` and `ExecutionRoleName`.
   */
  readonly stackSetConfiguration: any;
  /**
   * Roles that need to be passed by the pipeline action
   *
   * @default - No roles
   */
  readonly passedRoles?: iam.IRole[];
}
/**
 * Properties for configuring service-managed (Organizations) permissions
 */
export interface OrganizationsDeploymentProps {
  /**
   * Automatically deploy to new accounts added to Organizational Units
   *
   * Whether AWS CloudFormation StackSets automatically deploys to AWS
   * Organizations accounts that are added to a target organization or
   * organizational unit (OU).
   *
   * @default Disabled
   */
  readonly autoDeployment?: StackSetOrganizationsAutoDeployment;
}
/**
 * Describes whether AWS CloudFormation StackSets automatically deploys to AWS Organizations accounts that are added to a target organization or
 * organizational unit (OU).
 *
 * The string values are the literal values accepted by the action's
 * `OrganizationsAutoDeployment` configuration key.
 */
export enum StackSetOrganizationsAutoDeployment {
  /**
   * StackSets automatically deploys additional stack instances to AWS Organizations accounts that are added to a target organization or
   * organizational unit (OU) in the specified Regions. If an account is removed from a target organization or OU, AWS CloudFormation StackSets
   * deletes stack instances from the account in the specified Regions.
   */
  ENABLED = 'Enabled',
  /**
   * StackSets does not automatically deploy additional stack instances to AWS Organizations accounts that are added to a target organization or
   * organizational unit (OU) in the specified Regions.
   */
  DISABLED = 'Disabled',
  /**
   * Stack resources are retained when an account is removed from a target organization or OU.
   */
  ENABLED_WITH_STACK_RETENTION = 'EnabledWithStackRetention'
}
/**
 * Properties for configuring self-managed permissions
 */
export interface SelfManagedDeploymentProps {
  /**
   * The IAM role in the administrator account used to assume execution roles in the target accounts
   *
   * You must create this role before using the StackSet action.
   *
   * The role needs to be assumable by CloudFormation, and it needs to be able
   * to `sts:AssumeRole` each of the execution roles (whose names are specified
   * in the `executionRoleName` parameter) in each of the target accounts.
   *
   * If you do not specify the role, `StackSetDeploymentModel.selfManaged()`
   * creates a suitable administration role in the pipeline account for you.
   *
   * @default - A role is created by `StackSetDeploymentModel.selfManaged()` when none is supplied.
   * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-prereqs-self-managed.html
   */
  readonly administrationRole?: iam.IRole;
  /**
   * The name of the IAM role in the target accounts used to perform stack set operations.
   *
   * You must create these roles in each of the target accounts before using the
   * StackSet action.
   *
   * The roles need to be assumable by the `administrationRole`, and need to
   * have the permissions necessary to successfully create and modify the
   * resources that the subsequent CloudFormation deployments need.
   * Administrator permissions would be commonly granted to these, but if you can
   * scope the permissions down from there you would be safer.
   *
   * @default AWSCloudFormationStackSetExecutionRole
   * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-prereqs-self-managed.html
   */
  readonly executionRoleName?: string;
}
import * as jsStyles from './AssignsStyles';
import * as React from 'react';
import InfiniteScroll from 'react-infinite-scroller';
import spservices from '../../../../services/spservices';
import styles from './Assigns.module.scss';
import * as strings from 'MyTasksWebPartStrings';
import {
Callout,
DefaultPalette,
FontWeights,
Icon,
IconButton,
IFacepilePersona,
IPersonaSharedProps,
Label,
MessageBar,
MessageBarType,
Persona,
PersonaBase,
PersonaSize,
Stack,
TextField,
Spinner,
SpinnerSize,
Dialog,
DialogType,
DialogFooter
} from 'office-ui-fabric-react';
import { FontSizes } from '@uifabric/fluent-theme/lib/fluent/FluentType';
import { IAssignments } from '../../../../services/IAssignments';
import { IAssignsProps } from './IAssignsProps';
import { IAssignsState } from './IAssignsState';
import { IGroupMember, IMember } from '../../../../services/IGroupMembers';
import { IAssign } from './IAssign';
import { AssignMode } from './../Assigns/EAssignMode';
import { stringIsNullOrEmpty } from '@pnp/pnpjs';
export class Assigns extends React.Component<IAssignsProps, IAssignsState> {
  // Working copies of the member lists. React state holds the *rendered* JSX
  // for each list; these fields hold the underlying data.
  private _spservices: spservices = this.props.spservice;
  private _membersSkipToken: string = undefined;
  private _unAssignsMembers: IMember[] = [];
  private _assigns: IMember[] = [];
  private _nonMembers: IMember[] = [];
  constructor(props: IAssignsProps) {
    super(props);
    this.state = {
      unAssigns: [],
      assigns: [],
      nonMembers: [],
      hasError: false,
      hasMoreMembers: false,
      messageError: '',
      isloading: true,
      searchValue: ''
    };
  }
  /**
   * Load the next page of group members that are not yet assigned
   * (triggered by the infinite scroller).
   */
  private _loadMoreMembers = async (ev: any) => {
    try {
      await this._getunAssigns(this.props.plannerPlan.owner);
      const unAssigns = await this._renderUnAssigns(this._unAssignsMembers);
      this.setState({
        unAssigns: unAssigns,
        hasError: false,
        messageError: '',
        // more pages exist only while the service returned a skip token
        hasMoreMembers: this._membersSkipToken ? true : false
      });
    } catch (error) {
      this.setState({ hasError: true, messageError: error.message });
    }
  }
  /**
   * Move a clicked unassigned group member into the assigned list and
   * re-render both lists.
   */
  private _onClickAddUnAssignMember = async (ev: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
    ev.preventDefault();
    let renderAssigns: JSX.Element[] = [];
    let renderUnAssigns: JSX.Element[] = [];
    const memberId = ev.currentTarget.getAttribute('data-memberId');
    const user = this._unAssignsMembers.filter(unAssignMember => {
      return unAssignMember.id === memberId;
    });
    const idx = this._unAssignsMembers.indexOf(user[0]);
    const rtnMember = this._unAssignsMembers.splice(idx, 1);
    this._assigns.push(rtnMember[0]);
    renderAssigns = await this._renderAssigns(this._assigns);
    renderUnAssigns = await this._renderUnAssigns(this._unAssignsMembers);
    this.setState({ assigns: renderAssigns, unAssigns: renderUnAssigns, searchValue: '' });
  }
  /**
   * Move a clicked non-member (search result outside the group) into the
   * assigned list and re-render all three lists.
   */
  private _onClickAssignNonMember = async (ev: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
    ev.preventDefault();
    let renderAssigns: JSX.Element[] = [];
    let renderNonMembers: JSX.Element[] = [];
    let renderUnAssigns: JSX.Element[] = [];
    const memberId = ev.currentTarget.getAttribute('data-memberId');
    const user = this._nonMembers.filter(nonMember => {
      return nonMember.id === memberId;
    });
    const idx = this._nonMembers.indexOf(user[0]);
    const rtnMember = this._nonMembers.splice(idx, 1);
    this._assigns.push(rtnMember[0]);
    renderAssigns = await this._renderAssigns(this._assigns);
    renderUnAssigns = await this._renderUnAssigns(this._unAssignsMembers);
    renderNonMembers = await this._renderNonMembers(this._nonMembers);
    this.setState({ assigns: renderAssigns, unAssigns: renderUnAssigns, nonMembers: renderNonMembers, searchValue: '' });
  }
  /**
   * Move a clicked assigned member back into the unassigned list and
   * re-render both lists.
   */
  private _onClickRemoveAssign = async (ev: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
    ev.preventDefault();
    let renderAssigns: JSX.Element[] = [];
    let renderUnAssigns: JSX.Element[] = [];
    const memberId = ev.currentTarget.getAttribute('data-memberId');
    const user = this._assigns.filter(assignMember => {
      return assignMember.id === memberId;
    });
    const idx = this._assigns.indexOf(user[0]);
    const rtnMember = this._assigns.splice(idx, 1);
    this._unAssignsMembers.push(rtnMember[0]);
    renderAssigns = await this._renderAssigns(this._assigns);
    renderUnAssigns = await this._renderUnAssigns(this._unAssignsMembers);
    this.setState({ assigns: renderAssigns, unAssigns: renderUnAssigns, searchValue: '' });
  }
  /**
   * Resolve the users of an assignments map (keyed by user id) and append
   * them to the assigned list.
   * @param assignments assignments map whose keys are user ids
   */
  private async _getAssignments(assignments: IAssignments): Promise<void> {
    let assignmentsKeys: string[] = [];
    assignmentsKeys = Object.keys(assignments);
    for (const userId of assignmentsKeys) {
      try {
        const user = await this.props.spservice.getUser(userId);
        this._assigns.push(user);
      } catch (error) {
        throw new Error(error);
      }
    }
  }
  /**
   * Render personas for users that are not members of the group; clicking
   * one assigns the user.
   */
  private _renderNonMembers = async (nonMembers: IMember[]): Promise<JSX.Element[]> => {
    let persona: IPersonaSharedProps = undefined;
    let renderNonMembers: JSX.Element[] = [];
    try {
      for (const user of nonMembers) {
        const userPhoto = await this.props.spservice.getUserPhoto(user.userPrincipalName);
        persona = {
          style: { paddingRight: 5, cursor: 'default' },
          text: user.displayName,
          imageUrl: userPhoto
        };
        renderNonMembers.push(
          <div className={styles.renderMemberItem} key={user.id}>
            <Persona
              {...persona}
              data-memberId={user.id}
              size={PersonaSize.size32}
              styles={jsStyles.memberPersonaStyle}
              onClick={this._onClickAssignNonMember}
            />
          </div>
        );
      }
      return renderNonMembers;
    } catch (error) {
      throw new Error(error);
    }
  };
  /**
   * Render personas (each with a remove button) for the assigned users.
   */
  private _renderAssigns = async (assigns: IMember[]): Promise<JSX.Element[]> => {
    let persona: IPersonaSharedProps = undefined;
    let renderAssigns: JSX.Element[] = [];
    try {
      for (const user of assigns) {
        const userPhoto = await this.props.spservice.getUserPhoto(user.userPrincipalName);
        persona = {
          style: { paddingRight: 5, cursor: 'default' },
          text: user.displayName,
          imageUrl: userPhoto
        };
        renderAssigns.push(
          <div className={styles.renderMemberItem} key={user.id}>
            <Persona
              {...persona}
              data-memberId={user.id}
              size={PersonaSize.size32}
              styles={jsStyles.memberPersonaStyle}
            />
            <IconButton
              iconProps={{ iconName: 'ChromeClose', styles: { ...jsStyles.chromeCloseButtomStyle } }}
              styles={{ root: { paddingRight: 10 } }}
              data-memberId={user.id}
              onClick={this._onClickRemoveAssign}
            />
          </div>
        );
      }
      return renderAssigns;
    } catch (error) {
      throw new Error(error);
    }
  }
  /**
   * Render personas for group members that are not assigned yet; clicking
   * one assigns the member.
   */
  private _renderUnAssigns = async (unAssignsMembers: IMember[]): Promise<JSX.Element[]> => {
    let persona: IPersonaSharedProps = undefined;
    let renderMembers: JSX.Element[] = [];
    try {
      for (const member of unAssignsMembers) {
        // If in Edit Task Mode check Assigns
        if (this.props.AssignMode === AssignMode.Edit) {
          // if (await this._checkIfUserAssigned(member.id)) continue;
        } // don't show members that are already assigned
        const userPhoto = await this.props.spservice.getUserPhoto(member.userPrincipalName);
        persona = {
          style: { paddingRight: 5 },
          text: member.displayName,
          imageUrl: userPhoto
        };
        renderMembers.push(
          <div className={styles.renderMemberItem} key={member.id}>
            <Persona
              {...persona}
              data-memberId={member.id}
              size={PersonaSize.size32}
              styles={jsStyles.memberPersonaStyle}
              onClick={this._onClickAddUnAssignMember}
            />
          </div>
        );
      }
      return renderMembers;
    } catch (error) {
      throw new Error(error);
    }
  };
  /**
   * Load a page of group members, append those not already assigned to the
   * unassigned list, and keep that list sorted by display name.
   * @param groupId the id of the group that owns the planner plan
   */
  private async _getunAssigns(groupId: string): Promise<void> {
    try {
      const groupMembers: IGroupMember = await this._spservices.getGroupMembers(groupId, this._membersSkipToken);
      // Remember the paging token (if any) for the next call.
      if (groupMembers && groupMembers['@odata.nextLink']) {
        const URLQueryString = new URLSearchParams(groupMembers['@odata.nextLink']);
        this._membersSkipToken = URLQueryString.get('$skiptoken');
      } else {
        this._membersSkipToken = undefined;
      }
      // skip users already assigned
      if (groupMembers && groupMembers.value) {
        for (const groupMember of groupMembers.value) {
          const isAssigned = await this._checkIfUserAssigned(groupMember.id);
          if (isAssigned) {
            continue;
          } else {
            this._unAssignsMembers.push(groupMember);
          }
        }
      }
      // Sort members case-insensitively by display name.
      // FIX: the comparator previously used toLocaleUpperCase() for the `<`
      // branch but toLocaleLowerCase() for the `>` branch, which can make it
      // inconsistent for locale-dependent casings; both branches now use the
      // same casing.
      this._unAssignsMembers = this._unAssignsMembers.sort((a, b) => {
        if (a.displayName.toLocaleUpperCase() < b.displayName.toLocaleUpperCase()) return -1;
        if (a.displayName.toLocaleUpperCase() > b.displayName.toLocaleUpperCase()) return 1;
        return 0;
      });
    } catch (error) {
      throw new Error(error);
    }
  }
  /**
   * Close the dialog, handing the current assignment list back to the caller.
   */
  private _onCalloutDismiss = (ev: any) => {
    this.props.onDismiss(this._assigns);
  }
  /**
   * Load the initial assigned / unassigned lists when the dialog mounts.
   */
  public async componentDidMount(): Promise<void> {
    let unAssigns: JSX.Element[] = [];
    let assigns: JSX.Element[] = [];
    try {
      this._assigns = this.props.assigns;
      assigns = await this._renderAssigns(this._assigns);
      await this._getunAssigns(this.props.plannerPlan.owner);
      unAssigns = await this._renderUnAssigns(this._unAssignsMembers);
      this.setState({
        assigns: assigns,
        unAssigns: unAssigns,
        hasError: false,
        messageError: '',
        isloading: false,
        hasMoreMembers: this._membersSkipToken ? true : false
      });
    } catch (error) {
      this.setState({ unAssigns: unAssigns, assigns: assigns, hasError: true, messageError: error.message });
    }
  }
  /**
   * Whether the given user id is in the currently loaded unassigned member list.
   */
  private _checkIfUserIsMember = async (userId: string): Promise<boolean> => {
    const foundUser = this._unAssignsMembers.filter(user => {
      return user.id === userId;
    });
    return foundUser.length > 0 ? true : false;
  }
  /**
   * Whether the given user id is already in the assigned list.
   */
  private _checkIfUserAssigned = async (userId: string): Promise<boolean> => {
    const user = this._assigns.filter(assignMember => {
      return assignMember.id === userId;
    });
    return user.length > 0 ? true : false;
  }
  /**
   * Handle search box changes. For 3+ characters, search users and bucket the
   * results into assigned / unassigned-member / non-member lists; for shorter
   * input, restore the unfiltered lists.
   */
  private _onSearchUser = async (event: React.FormEvent<HTMLInputElement | HTMLTextAreaElement>, newValue: string) => {
    event.preventDefault();
    event.stopPropagation();
    this.setState({ searchValue: newValue });
    try {
      let renderAssigns: JSX.Element[] = [];
      let renderUnAssigns: JSX.Element[] = [];
      let renderNonMembers: JSX.Element[] = [];
      if (newValue.length > 2) {
        const users: IMember[] = await this._spservices.searchUsers(newValue);
        const unAssignMembers: IMember[] = [];
        const assigns: IMember[] = [];
        this._nonMembers = [];
        for (const user of users) {
          const userAssigned = await this._checkIfUserAssigned(user.id);
          if (userAssigned) {
            assigns.push(user);
          } else {
            const found = await this._checkIfUserIsMember(user.id);
            if (found) {
              unAssignMembers.push(user);
            } else {
              this._nonMembers.push(user);
            }
          }
        }
        renderAssigns = await this._renderAssigns(assigns);
        renderUnAssigns = await this._renderUnAssigns(unAssignMembers);
        renderNonMembers = await this._renderNonMembers(this._nonMembers);
        this.setState({
          assigns: renderAssigns,
          unAssigns: renderUnAssigns,
          searchValue: newValue,
          nonMembers: renderNonMembers,
          hasError: false,
          messageError: ''
        });
      }
      if (newValue.length <= 2) {
        renderAssigns = await this._renderAssigns(this._assigns);
        renderUnAssigns = await this._renderUnAssigns(this._unAssignsMembers);
        this.setState({
          assigns: renderAssigns,
          unAssigns: renderUnAssigns,
          searchValue: newValue,
          nonMembers: renderNonMembers,
          hasError: false,
          messageError: ''
        });
      }
    } catch (error) {
      this.setState({ hasError: true, messageError: error.message });
    }
  }
  public componentDidUpdate(prevProps: IAssignsProps, prevState: IAssignsState): void {}
  public render(): React.ReactElement<IAssignsProps> {
    return (
      <div>
        <Dialog
          hidden={false}
          onDismiss={this._onCalloutDismiss}
          minWidth={350}
          title={strings.AssignsLabel}
          dialogContentProps={{
            type: DialogType.normal
          }}
          modalProps={{
            isBlocking: false,
            styles: { main: { maxWidth: 350 } }
          }}>
          <div className={styles.calloutHeader}>
            <TextField
              value={this.state.searchValue}
              placeholder={strings.TypeUserOrEmailLabel}
              borderless={true}
              styles={jsStyles.textFieldSearchStyles}
              onChange={this._onSearchUser}></TextField>
          </div>
          <div className={styles.calloutContent}>
            {this.state.isloading ? (
              <Spinner size={SpinnerSize.small} label={strings.LoadingAssignLabel} />
            ) : (
              <Stack styles={{ root: { height: '100%' } }}>
                {this.state.assigns.length > 0 && (
                  <>
                    <h4 style={{ margin: 10 }}>{strings.AssignedLabel}</h4>
                    {this.state.assigns}
                  </>
                )}
                {(this.state.unAssigns.length > 0 || this.state.hasMoreMembers) && ( // Has Member or has more pages to load, show unassigns
                  <>
                    <h4 style={{ margin: 10 }}>{strings.UnassignedLabel}</h4>
                    {this.state.hasError ? (
                      <MessageBar messageBarType={MessageBarType.error}>{this.state.messageError}</MessageBar>
                    ) : (
                      <InfiniteScroll
                        pageStart={0}
                        loadMore={this._loadMoreMembers}
                        hasMore={this.state.hasMoreMembers}
                        threshold={15}
                        useWindow={false}>
                        {this.state.unAssigns}
                      </InfiniteScroll>
                    )}
                  </>
                )}
                {this.state.nonMembers.length > 0 && (
                  <>
                    <h4 style={{ margin: 10 }}>{strings.NonMembersLabel}</h4>
                    {this.state.nonMembers}
                  </>
                )}
              </Stack>
            )}
          </div>
        </Dialog>
      </div>
    );
  }
}
import { RuleTester, Failure, Position, dedent } from './ruleTester';
const ruleTester = new RuleTester('brace-style');
// The failure messages emitted by the brace-style rule, keyed by scenario.
const fail = {
  open: 'Opening curly brace does not appear on the same line as controlling statement.',
  openAllman: 'Opening curly brace appears on the same line as controlling statement.',
  body: 'Statement inside of curly braces should be on next line.',
  close: 'Closing curly brace does not appear on the same line as the subsequent block.',
  closeSingle: 'Closing curly brace should be on the same line as opening curly brace or on the line after the previous block.',
  closeStroustrupAllman: 'Closing curly brace appears on the same line as the subsequent block.'
};
// Only checking for messages for now. We should revisit these tests specifying the line numbers
// and make sure that we are getting the correct errors.
// Note: so far there are no test that check for `fail.close` and `fail.closeStroustrupAllman`.
/**
 * Build Failure records (with default/placeholder positions) from a list of
 * expected failure messages.
 */
function expecting(errors: string[]): Failure[] {
  return errors.map((failure) => ({
    failure,
    startPosition: new Position(),
    endPosition: new Position()
  }));
}
ruleTester.addTestGroupWithConfig('onetbs-valid', 'should pass when "1tbs"', ['1tbs'], [
dedent`
function foo() {
return true;
}`,
dedent`
if (foo) {
bar();
}`,
dedent`
if (foo) {
bar();
} else {
baz();
}`,
dedent`
try {
somethingRisky();
} catch(e) {
handleError();
}`,
dedent`
try {
somethingRisky();
} catch(e) {
handleError();
} finally() {
doSomething();
}`,
dedent`
try {
somethingRisky();
} finally() {
doSomething();
} catch(e) {
handleError();
}`,
dedent`
try {
somethingRisky();
} finally() {
doSomething();
}`,
// when there are no braces, there are no problems
dedent`
if (foo) bar();
else if (baz) boom();`
]);
ruleTester.addTestGroupWithConfig('onetbs-invalid', 'should fail when "1tbs"', ['1tbs'], [
  {
    code: dedent`
      function foo()
      {
        return true;
      }`,
    errors: expecting([fail.open])
  },
  {
    code: dedent`
      if (foo)
      {
        bar();
      }`,
    errors: expecting([fail.open])
  },
  {
    code: dedent`
      try
      {
        somethingRisky();
      } catch(e)
      {
        handleError();
      }`,
    errors: expecting([fail.open])
  },
  {
    // FIX: this fixture contained a stray extra `}` between the catch block
    // and `finally`, leaving the snippet with unbalanced braces.
    // NOTE(review): the fixtures in this file also write `finally()` /
    // `finally(e)` with parentheses, which is not valid JavaScript — confirm
    // the rule's parser tolerates it before normalizing.
    code: dedent`
      try
      {
        somethingRisky();
      } catch(e)
      {
        handleError();
      } finally(e)
      {
        doSomething();
      }`,
    errors: expecting([fail.open])
  },
  {
    code: dedent`
      try
      {
        somethingRisky();
      } finally(e)
      {
        doSomething();
      }`,
    errors: expecting([fail.open])
  },
  {
    code: dedent`
      if (foo) {
        bar();
      }
      else {
        baz();
      }`,
    errors: expecting([fail.open])
  },
  {
    code: dedent`
      if (foo) {
        bar();
      } else { baz(); }`,
    errors: expecting([fail.body, fail.closeSingle])
  }
]);
ruleTester.addTestGroupWithConfig('stroustrup-valid', 'should pass when "stroustrup"', ['stroustrup'], [
dedent`
function foo() {
return true;
}`,
dedent`
if (foo) {
bar();
}`,
dedent`
if (foo) {
bar();
}
else {
baz();
}`,
dedent`
try {
somethingRisky();
}
catch(e) {
handleError();
}`,
dedent`
try {
somethingRisky();
}
catch(e) {
handleError();
}
finally() {
doSomething();
}
`,
dedent`
try {
somethingRisky();
}
finally {
doSomething();
}`,
// when there are no braces, there are no problems
dedent`
if (foo) bar();
else if (baz) boom();`
]);
ruleTester.addTestGroupWithConfig('stroustrup-invalid', 'should fail when "stroustrup"', ['stroustrup'], [
{
code: dedent`
function foo()
{
return true;
}`,
errors: expecting([fail.open])
},
{
code: dedent`
if (foo)
{
bar();
}`,
errors: expecting([fail.open])
},
{
code: dedent`
try
{
somethingRisky();
} catch(e)
{
handleError();
}`,
errors: expecting([fail.open, fail.openAllman])
},
{
code: dedent`
try
{
somethingRisky();
} catch(e)
{
handleError();
} finally()
{
doSomething();
}`,
errors: expecting([fail.open, fail.openAllman])
},
{
code: dedent`
try
{
somethingRisky();
} finally()
{
doSomething();
}`,
errors: expecting([fail.open, fail.openAllman])
},
{
code: dedent`
if (foo) {
bar();
} else {
baz();
}`,
errors: expecting([fail.openAllman])
}
]);
ruleTester.addTestGroupWithConfig('allman-valid', 'should pass when "allman"', ['allman'], [
dedent`
function foo()
{
return true;
}`,
dedent`
if (foo)
{
bar();
}`,
dedent`
if (foo)
{
bar();
}
else
{
baz();
}`,
dedent`
try
{
somethingRisky();
}
catch(e)
{
handleError();
}`,
dedent`
try
{
somethingRisky();
}
catch(e)
{
handleError();
}
finally()
{
doSomething();
}`,
dedent`
try
{
somethingRisky();
}
finally()
{
doSomething();
}`,
// when there are no braces, there are no problems
dedent`
if (foo) bar();
else if (baz) boom();`
]);
ruleTester.addTestGroupWithConfig('allman-invalid', 'should fail when "allman"', ['allman'], [
{
code: dedent`
function foo() {
return true;
}`,
errors: expecting([fail.openAllman])
},
{
code: dedent`
if (foo)
{
bar(); }`,
errors: expecting([fail.closeSingle])
},
{
code: dedent`
try
{
somethingRisky();
} catch(e)
{
handleError();
}`,
errors: expecting([fail.openAllman])
},
{
code: dedent`
try {
somethingRisky();
} catch(e)
{
handleError();
} finally()
{
doSomething();
}`,
errors: expecting([fail.openAllman])
},
{
code: dedent`
try {
somethingRisky();
} finally()
{
doSomething();
}`,
errors: expecting([fail.openAllman])
},
{
code: dedent`
if (foo) {
bar();
} else {
baz();
}`,
errors: expecting([fail.openAllman])
}
]);
ruleTester.addTestGroupWithConfig(
  'allowSingleLine-onetbs',
  'should pass when "1tbs" and "allowSingleLine" is true',
  ['1tbs', { allowSingleLine: true }],
  [
    `function nop() { return; }`,
    `if (foo) { bar(); }`,
    `if (foo) { bar(); } else { baz(); }`,
    `try { somethingRisky(); } catch(e) { handleError(); }`,
    `try { somethingRisky(); } catch(e) { handleError(); } finally() { doSomething(); }`,
    `try { somethingRisky(); } finally(e) { doSomething(); }`,
    dedent`
      if (foo) {
        bar();
      } else { baz(); }`,
    dedent`
      try {
        foo();
      } catch(e) { bar(); }`,
    // FIX: this fixture had a stray leading `}` on the `finally` line, which
    // made the snippet unbalanced; under 1tbs `finally` must also sit on the
    // same line as the previous block's closing brace, so the two lines are
    // merged.
    dedent`
      try {
        foo();
      } catch(e) { bar(); } finally() { doSomething(); }`,
    dedent`
      try {
        foo();
      } finally() { doSomething(); }`
  ]
);
ruleTester.addTestGroupWithConfig(
'allowSingleLine-onetbs-invalid',
'should fail when "1tbs" and "allowSingleLine" is false',
['1tbs', { allowSingleLine: false }],
[
{
code: `function nop() { return; }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: `if (foo) { bar(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: `if (foo) { bar(); } else { baz(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: `try { somethingRisky(); } catch(e) { handleError(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: `try { somethingRisky(); } catch(e) { handleError(); } finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: `try { somethingRisky(); } finally(e) { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
if (foo) {
bar();
} else { baz(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try {
foo();
} catch(e) { bar(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try {
foo();
} catch(e) { bar(); }
} finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try {
foo();
} finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
}
]
);
ruleTester.addTestGroupWithConfig(
'allowSingleLine-stroustrup',
'should pass when "stroustrup" and "allowSingleLine" is true',
['stroustrup', { allowSingleLine: true }],
[
`function nop() { return; }`,
`if (foo) { bar(); }`,
dedent`
if (foo) { bar(); }
else { baz(); }`,
dedent`
try { somethingRisky(); }
catch(e) { handleError(); }`,
dedent`
try { somethingRisky(); }
catch(e) { handleError(); }
finally() { doSomething(); }`,
dedent`
try { somethingRisky(); }
finally() { doSomething(); }`,
dedent`
if (foo) {
bar();
}
else { baz(); }`,
dedent`
try {
foo();
}
catch(e) { bar(); }`,
dedent`
try {
foo();
}
catch(e) { bar(); }
finally() { doSomething(); }`,
dedent`
try {
foo();
}
finally() { doSomething(); }`
]
);
ruleTester.addTestGroupWithConfig(
'allowSingleLine-stroustrup-invalid',
'should fail when "stroustrup" and "allowSingleLine" is false',
['stroustrup', { allowSingleLine: false }],
[
{
code: `function nop() { return; }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: `if (foo) { bar(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
if (foo) { bar(); }
else { baz(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try { somethingRisky(); }
catch(e) { handleError(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try { somethingRisky(); }
catch(e) { handleError(); }
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try { somethingRisky(); }
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
if (foo) {
bar();
}
else { baz(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try {
foo();
}
catch(e) { bar(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try {
foo();
}
catch(e) { bar(); }
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
},
{
code: dedent`
try {
foo();
}
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle])
}
]
);
ruleTester.addTestGroupWithConfig(
'allowSingleLine-allman',
'should pass when "allman" and "allowSingleLine" is true',
['allman', { allowSingleLine: true }],
[
`function nop() { return; }`,
`if (foo) { bar(); }`,
dedent`
if (foo) { bar(); }
else { baz(); }`,
dedent`
try { somethingRisky(); }
catch(e) { handleError(); }`,
dedent`
try { somethingRisky(); }
catch(e) { handleError(); },
finally() { doSomething(); }`,
dedent`
try { somethingRisky(); }
finally(e) { doSomething(); }`,
dedent`
if (foo)
{
bar();
} else { baz(); }`,
dedent`
try
{
foo();
}
catch(e) { bar(); }`,
dedent`
try
{
foo();
}
catch(e) { bar(); }
finally() { doSomething(); }`,
dedent`
try
{
foo();
}
finally() { doSomething(); }`
]
);
ruleTester.addTestGroupWithConfig(
'allowSingleLine-allman-invalid',
'should fail when "allman" and "allowSingleLine" is false',
['allman', { allowSingleLine: false }],
[
{
code: `function nop() { return; }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: `if (foo) { bar(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
if (foo) { bar(); }
else { baz(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
try { somethingRisky(); }
catch(e) { handleError(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
try { somethingRisky(); }
catch(e) { handleError(); },
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
try { somethingRisky(); }
finally(e) { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
if (foo)
{
bar();
} else { baz(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
try
{
foo();
}
catch(e) { bar(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
try
{
foo();
}
catch(e) { bar(); }
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
},
{
code: dedent`
try
{
foo();
}
finally() { doSomething(); }`,
errors: expecting([fail.body, fail.closeSingle, fail.openAllman])
}
]
);
ruleTester.runTests();
module StockChart {
  /** Options accepted by KLine on top of the base chart options. */
  interface IKLineOptions extends IChartOptions {
    ohlcPrices: CandleBar[] // one open/high/low/close record per period
    volumes: number[] // trade volume per period
    dates: string[] // x-axis date label per period
    dataCount: number // maximum number of data points to render
    riseColor: string // candle color when close > open
    fallColor: string // candle color when close < open
    period: Period,
    maLists: MAList[] // moving-average series to overlay
  }
  /** One candlestick: open / high / low / close. */
  type CandleBar = {
    o: number
    h: number
    l: number
    c: number
  }
  /** An x-axis label plus the data index it belongs to. */
  type AxisDate = {
    text: string
    index: number
  }
  /**
   * A moving-average series. `color` may be a CSS color string or an index
   * into MAColors; when omitted, the series' position picks the color.
   */
  type MAList = {
    title: string,
    color?: string | number,
    prices: number[]
  }
  // Candle aggregation period. Kept as a numeric enum because the reverse
  // mapping (Period[period] -> name) is used in KLine.drawGridX.
  enum Period { Day, Week, Month }
  // Default MA line colors by series position; names map into COLOR keys
  // via MAColors[i].toUpperCase() (see KLine.getMaColor).
  enum MAColors {
    Yellow,
    Blue,
    Pink,
    Green
  }
  // Semi-transparent palette for the MA lines, keyed by upper-cased color name.
  const COLOR = {
    YELLOW: 'rgba(219,169,83,.5)',
    BLUE: 'rgba(99,179,243,.5)',
    PINK: 'rgba(223,140,201,.5)',
    GREEN: 'rgba(151,192,57,.5)'
  }
  /**
   * Candlestick (K-line) chart: price bars, volume bars, moving-average
   * lines and axis labels.
   *
   * NOTE(review): the drawing helpers (drawLine/drawText/drawRound/drawText),
   * the canvas context `ctx` and the layout fields (height, figureWidth,
   * figureHeight, volumeHeight, textOffsetY, figureOffsetY, font, dpr, ...)
   * are inherited from `Chart`, which is not visible in this section —
   * confirm their semantics against the base class.
   */
  class KLine extends Chart {
    ohlcPrices: CandleBar[] // open/high/low/close per period
    volumes: number[] // trade volume per period
    dates: string[] // x-axis date label per period
    volumeTopHeight: number // y coordinate of the volume pane's top line
    fillColor: string // NOTE(review): declared but never assigned in this class
    volumeColor: string // NOTE(review): declared but never assigned in this class
    unitX: number // horizontal pixels per data point
    unitY: number // vertical pixels per unit of price (set in drawPriceBar)
    roofPrice: number // price at the top of the y axis (set in drawPriceBar)
    floorPrice: number // price at the bottom of the y axis (set in drawPriceBar)
    dataCount: number // maximum number of data points to render
    riseColor: string // bar color when close > open
    fallColor: string // bar color when close < open
    period: Period // Day / Week / Month
    maLists: MAList[] // moving-average series to overlay
    calcY: (price: number) => number // price -> y coordinate (set in drawPriceBar)
    /**
     * @param options K-line data and settings; forwarded to the base
     *        Chart constructor before the K-line specific fields are copied.
     */
    constructor(options?: IKLineOptions) {
      super(options)
      this.ohlcPrices = options.ohlcPrices
      this.volumes = options.volumes
      this.dates = options.dates
      this.dataCount = options.dataCount
      // Top edge of the volume pane, measured from the canvas top.
      this.volumeTopHeight = this.height - this.volumeHeight + this.textOffsetY
      // Horizontal pixels available per data point.
      this.unitX = this.figureWidth / this.dataCount
      this.riseColor = options.riseColor
      this.fallColor = options.fallColor
      this.period = options.period
      this.maLists = options.maLists
    }
    // Render the full chart: grid, candles, MA lines, volume bars, y labels.
    initialize() {
      super.initialize()
      this.drawGrid()
      this.drawPriceBar()
      this.drawMaLines()
      this.drawVolumeBar()
      this.drawAxisYText()
    }
    // Draw the horizontal frame lines, the horizontal grid and the x grid.
    private drawGrid() {
      const {ctx, grid, height, figureWidth, figureHeight, figureOffsetHeight, figureOffsetY, volumeTopHeight} = this
      ctx.beginPath()
      // Baseline of the price figure (bottom of the candle area)
      this.drawLine({
        color: grid.color,
        startPoint: [0, this.figureHeight],
        points: [[figureWidth, figureHeight]]
      }, false)
      // Top line of the volume pane
      this.drawLine({
        color: grid.color,
        startPoint: [0, volumeTopHeight],
        points: [[figureWidth, volumeTopHeight]]
      }, false)
      // Top line of the chart
      this.drawLine({
        color: grid.color,
        startPoint: [0, 0],
        points: [[figureWidth, 0]]
      }, false)
      // Bottom line of the chart
      this.drawLine({
        color: grid.color,
        startPoint: [0, this.height],
        points: [[figureWidth, height]]
      })
      // Horizontal grid lines, grid.y of them across the figure area
      const gridY = figureOffsetHeight / grid.y
      for (let i = 0; i < grid.y; i++) {
        ctx.beginPath()
        this.drawLine({
          color: grid.color,
          startPoint: [0, gridY * i + figureOffsetY],
          points: [[figureWidth, gridY * i + figureOffsetY]]
        })
      }
      this.drawGridX()
    }
    // Draw the candlestick bars and establish the price -> y mapping
    // (unitY, roofPrice, floorPrice, calcY) used by the MA lines and labels.
    private drawPriceBar() {
      const {ctx, ohlcPrices, dataCount, figureHeight, figureOffsetHeight} = this
      const len = ohlcPrices.length
      const count = Math.min(len, dataCount)
      const highPrices = ohlcPrices.map((price: CandleBar) => {
        return price.h
      })
      const lowPrices = ohlcPrices.map((price: CandleBar) => {
        return price.l
      })
      let maxPrice = Math.max.apply(null, highPrices)
      let minPrice = Math.min.apply(null, lowPrices)
      // Unit sizes along the x and y axes
      const unitX = this.unitX
      const unitY = this.unitY = (figureOffsetHeight - 10) / (maxPrice - minPrice)
      // Prices at the top and bottom of the y axis — normally the
      // maxPrice / minPrice computed above.
      this.roofPrice = maxPrice
      // this.floorPrice = (minPrice * unitY - 10) / unitY
      this.floorPrice = minPrice
      // Compute the y coordinate for a given price.
      const calcY = this.calcY = (price: number): number => {
        return Math.round(figureHeight - Math.abs(price - minPrice) * unitY - 10)
      }
      // Draw the candlestick bars
      for (let i = 0; i < count; i++) {
        const openPrice = ohlcPrices[i].o
        const highPrice = ohlcPrices[i].h
        const lowPrice = ohlcPrices[i].l
        const closePrice = ohlcPrices[i].c
        const barX = unitX * i + unitX / 2
        const barColor = this.getBarColor(ohlcPrices, i)
        ctx.beginPath()
        // Wick: a thin vertical line from high to low
        this.drawLine({
          color: barColor,
          size: 1,
          startPoint: [barX, calcY(highPrice)],
          points: [[barX, calcY(lowPrice)]]
        })
        ctx.beginPath()
        if (calcY(openPrice) === calcY(closePrice)) {
          // Doji: open and close land on the same pixel row — draw a short
          // horizontal tick instead of a zero-height body.
          this.drawLine({
            color: barColor,
            size: 1,
            startPoint: [barX - 2, calcY(openPrice)],
            points: [[barX + 2, calcY(closePrice)]]
          })
        } else {
          // Body: a thick vertical line from open to close
          this.drawLine({
            color: barColor,
            size: 4,
            startPoint: [barX, calcY(openPrice)],
            points: [[barX, calcY(closePrice)]]
          })
        }
      }
    }
    // Draw the volume bars in the bottom pane, colored like their candles.
    private drawVolumeBar() {
      const {ctx, volumes, ohlcPrices, dataCount, height, volumeHeight, textOffsetY, unitX} = this
      const len = volumes.length
      const count = Math.min(len, dataCount)
      let maxVolume = Math.max.apply(null, volumes)
      let minVolume = Math.min.apply(null, volumes)
      // If max volume equals min volume, shift min down by one so the
      // unit-height denominator below is never zero.
      if (maxVolume === minVolume) {
        minVolume = maxVolume - 1
      }
      // Vertical pixels per unit of volume
      const volumeUnitY = (volumeHeight - textOffsetY) / (maxVolume - minVolume)
      // Draw the volume bars
      let currentVolumeHeight
      for (let i = 0; i < count; i++) {
        const barX = unitX * i + unitX / 2
        currentVolumeHeight = Math.round(height - (volumes[i] - minVolume) * volumeUnitY)
        // Guarantee at least one pixel of bar even for the minimum volume
        if (currentVolumeHeight === height) {
          currentVolumeHeight = height - 1
        }
        ctx.beginPath()
        this.drawLine({
          color: this.getBarColor(ohlcPrices, i),
          size: 4,
          startPoint: [barX, height],
          points: [[barX, currentVolumeHeight]]
        })
      }
    }
    // Draw every configured moving-average line, then its legend.
    private drawMaLines() {
      const {maLists} = this
      if (!maLists.length) {
        return
      }
      maLists.forEach((maList: MAList) => {
        this.drawMaLine(maList)
      })
      // Draw the MA legend
      this.drawMaLegend()
    }
    // Draw a single moving-average polyline over the price figure.
    private drawMaLine(maList: MAList) {
      const {ctx, unitX, calcY, roofPrice} = this
      const len = maList.prices.length
      const count = Math.min(len, this.dataCount)
      let points: Point[]
      let maColor: string
      if (count === 0) {
        return
      }
      maColor = this.getMaColor(maList)
      // When a price exceeds the top of the axis the MA line should be
      // hidden at that point, hence NaN for the y coordinate.
      points = maList.prices.map((price: number, index: number): Point => {
        return [unitX * index, price > roofPrice ? NaN : calcY(price)]
      })
      ctx.beginPath()
      this.drawLine({
        color: maColor,
        size: 1,
        // splice removes the first point to use as the start of the polyline
        startPoint: points.splice(0, 1)[0],
        points: points
      })
    }
    /**
     * Resolve a series' color: explicit string wins; a numeric color is an
     * index into MAColors; otherwise the series' position in maLists picks
     * the default palette entry.
     */
    private getMaColor(maList) {
      let maColor
      if (typeof maList.color === 'undefined') {
        // NOTE(review): arrayObjectIndexOf is defined elsewhere — presumably
        // it finds the index of maList within this.maLists by title.
        const index = arrayObjectIndexOf(this.maLists, 'title', maList.title)
        maColor = COLOR[MAColors[index].toUpperCase()]
      } else if (typeof maList.color === 'number') {
        maColor = COLOR[MAColors[maList.color].toUpperCase()]
      } else {
        maColor = maList.color.toString()
      }
      return maColor
    }
    // Draw the legend (colored dot + "title lastPrice") right-aligned above
    // the figure, iterating series from last to first.
    private drawMaLegend() {
      const {ctx, dpr, maLists, figureWidth, figureOffsetY} = this
      const len = maLists.length
      const dot = {
        radius: 3,
        paddingLeft: 10,
        paddingRight: 3
      }
      let text: string
      // NOTE(review): this outer textWidth is shadowed by the loop-header
      // declaration below and is never used.
      let textWidth: number
      let prices: number[]
      let lastPrice: number
      for (let i = len - 1, textWidth = 0; i >= 0; i--) {
        const title = maLists[i].title
        // Re-set the canvas font here; otherwise the font handling inside
        // drawText would skew measureText's results.
        ctx.font = this.font
        // Drop empty values — "empty" here means NaN.
        prices = maLists[i].prices.filter((price) => {
          return Boolean(price)
        })
        if (!prices.length) {
          continue
        }
        lastPrice = prices[prices.length - 1]
        text = `${title} ${lastPrice}`
        textWidth += ctx.measureText(text).width
        this.drawText(text, [figureWidth - textWidth, figureOffsetY - 5])
        this.drawRound({
          point: [figureWidth - textWidth - dot.radius - dot.paddingRight, figureOffsetY - 5 - dot.radius],
          radius: dot.radius,
          isFill: true,
          fillColor: this.getMaColor(maLists[i])
        })
        // Advance past the dot and its padding for the next (left) entry
        textWidth += dot.radius * 2 + dot.paddingLeft + dot.paddingRight
      }
    }
    // Label the y axis with the top (roof) and bottom (floor) prices.
    private drawAxisYText() {
      this.drawText(this.roofPrice.toFixed(2), [0, this.figureOffsetY - this.textOffsetY])
      this.drawText(this.floorPrice.toFixed(2), [0, this.figureHeight - this.textOffsetY])
    }
    // Draw the x-axis date labels and vertical grid lines, with a labeling
    // strategy that depends on the candle period.
    private drawGridX() {
      const {ctx, dataCount, period} = this
      let {dates} = this
      const len = dates.length
      const count = Math.min(len, dataCount)
      // The first two points get no x-axis label; there is not enough room.
      if (count <= 2) {
        return
      }
      // Start a fresh path so the horizontal grid lines do not get thicker.
      ctx.beginPath()
      // Period[period] uses the enum's reverse mapping (number -> name).
      switch (Period[period]) {
        // Weekly candles: label every third date change
        case 'Week':
          let uniqDates: AxisDate[] = []
          let weekDates: AxisDate[] = []
          for (let i = 2; i < count - 2; i++) {
            if (dates[i] !== dates[i - 1]) {
              uniqDates.push({
                text: dates[i],
                index: i
              })
            }
          }
          weekDates = uniqDates.filter((date, index) => {
            return !(index % 3)
          })
          if (!weekDates.length) {
            return
          }
          weekDates.forEach((date) => {
            this.drawGridTextX(date.text, date.index)
          })
          break
        // Monthly candles: label each year change (the part before '-')
        case 'Month':
          let monthDates: AxisDate[] = []
          dates = dates.map((date) => {
            return date.split('-')[0]
          })
          for (let i = 2; i < count - 2; i++) {
            if (dates[i] !== dates[i - 1]) {
              monthDates.push({
                text: dates[i],
                index: i
              })
            }
          }
          monthDates.forEach((date) => {
            this.drawGridTextX(date.text, date.index)
          })
          break
        // Default Period 'Day': daily candles
        default:
          let dayDates: AxisDate[] = []
          for (let i = 2; i < count - 2; i++) {
            if (dates[i] !== dates[i - 1]) {
              dayDates.push({
                text: dates[i],
                index: i
              })
            }
          }
          for (let j = 1; j < dayDates.length; j++) {
            // Skip labels that are too close together (within 7 points) to
            // avoid overlapping text.
            if (dayDates[j].index - dayDates[j - 1].index < 7) {
              dayDates.splice(j, 1)
              j--;
            }
          }
          dayDates.forEach((date) => {
            this.drawGridTextX(date.text, date.index)
          })
          break
      }
    }
    // Draw one x-axis label below the figure plus its vertical grid line.
    private drawGridTextX(date: string, index: number) {
      const {grid, dataCount, height, volumeHeight, figureWidth, figureHeight, figureOffsetY} = this
      const unitX = figureWidth / dataCount
      const axisY = height - volumeHeight
      const barX = unitX * index + unitX / 2
      this.drawText(date, [unitX * index - 15, axisY])
      this.drawLine({
        color: grid.color,
        startPoint: [barX, figureOffsetY],
        points: [[barX, figureHeight]]
      })
    }
    /**
     * Bar color for the candle at `index`: riseColor when close > open,
     * fallColor when close < open; on an unchanged bar, compare against the
     * previous close (black for the very first bar, which has none).
     */
    private getBarColor(bars: CandleBar[], index: number) {
      const {riseColor, fallColor} = this
      const openPrice = bars[index].o
      const closePrice = bars[index].c
      // Rise
      if (closePrice > openPrice) {
        return riseColor
      }
      // Fall
      else if (closePrice < openPrice) {
        return fallColor
      }
      // Unchanged
      else {
        if (index === 0) {
          return 'black'
        } else {
          const preClosePrice = bars[index - 1].c
          return closePrice > preClosePrice ? riseColor : fallColor
        }
      }
    }
  }
export function drawKLine(options) {
const defaultOptions = {
dataCount: 50,
grid: {
y: 4,
color: 'rgba(221,221,221,1)'
},
lineColor: 'rgba(94,168,199,1)',
volumeColor: 'rgba(130,152,200,1)',
riseColor: 'rgba(252,63,29,1)',
fallColor: 'rgba(85,170,48,1)',
period: 0
}
options = mixins({}, defaultOptions, options)
const kLine = new KLine(options)
kLine.initialize()
}
} | the_stack |
import { App, Stack } from '@aws-cdk/core';
import * as iam from '../lib';
import { PolicyStatement } from '../lib';
// Fixture principals shared by all of the merge tests below.
const PRINCIPAL_ARN1 = 'arn:aws:iam::111111111:user/user-name';
const principal1 = new iam.ArnPrincipal(PRINCIPAL_ARN1);
const PRINCIPAL_ARN2 = 'arn:aws:iam::111111111:role/role-name';
const principal2 = new iam.ArnPrincipal(PRINCIPAL_ARN2);
// Check that 'resource' statements are merged, and that 'notResource' statements are not,
// if the statements are otherwise the same.
test.each([
  ['resources', true],
  ['notResources', false],
] as Array<['resources' | 'notResources', boolean]>)
('merge %p statements: %p', (key, doMerge) => {
  assertMergedC(doMerge, [
    new iam.PolicyStatement({
      [key]: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      [key]: ['b'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: ['a', 'b'],
      Action: 'service:Action',
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
// Check that 'action' statements are merged, and that 'notAction' statements are not,
// if the statements are otherwise the same.
test.each([
  ['actions', true],
  ['notActions', false],
] as Array<['actions' | 'notActions', boolean]>)
('merge %p statements: %p', (key, doMerge) => {
  assertMergedC(doMerge, [
    new iam.PolicyStatement({
      resources: ['a'],
      [key]: ['service:Action1'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['a'],
      [key]: ['service:Action2'],
      principals: [principal1],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: 'a',
      Action: ['service:Action1', 'service:Action2'],
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
// Check that 'principal' statements are merged, and that 'notPrincipal' statements are not,
// if the statements are otherwise the same.
test.each([
  ['principals', true],
  ['notPrincipals', false],
] as Array<['principals' | 'notPrincipals', boolean]>)
('merge %p statements: %p', (key, doMerge) => {
  assertMergedC(doMerge, [
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      [key]: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      [key]: [principal2],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: 'a',
      Action: 'service:Action',
      // sort() pins the expected ordering of the merged ARN list.
      Principal: { AWS: [PRINCIPAL_ARN1, PRINCIPAL_ARN2].sort() },
    },
  ]);
});
// An AWS principal and a Service principal combine into one Principal map.
test('merge multiple types of principals', () => {
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [new iam.ServicePrincipal('service.amazonaws.com')],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: 'a',
      Action: 'service:Action',
      Principal: {
        AWS: PRINCIPAL_ARN1,
        Service: 'service.amazonaws.com',
      },
    },
  ]);
});
// Statements differing in more than one mergeable dimension stay separate.
test('multiple mergeable keys are not merged', () => {
  assertNoMerge([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action1'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['b'],
      actions: ['service:Action2'],
      principals: [principal1],
    }),
  ]);
});
// Principal-less statements are still eligible for merging.
test('can merge statements without principals', () => {
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
    }),
    new iam.PolicyStatement({
      resources: ['b'],
      actions: ['service:Action'],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: ['a', 'b'],
      Action: 'service:Action',
    },
  ]);
});
test('if conditions are different, statements are not merged', () => {
  assertNoMerge([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
      conditions: {
        StringLike: {
          something: 'value',
        },
      },
    }),
    new iam.PolicyStatement({
      resources: ['b'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
  ]);
});
test('if conditions are the same, statements are merged', () => {
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
      conditions: {
        StringLike: {
          something: 'value',
        },
      },
    }),
    new iam.PolicyStatement({
      resources: ['b'],
      actions: ['service:Action'],
      principals: [principal1],
      conditions: {
        StringLike: {
          something: 'value',
        },
      },
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: ['a', 'b'],
      Action: 'service:Action',
      Principal: { AWS: PRINCIPAL_ARN1 },
      Condition: {
        StringLike: {
          something: 'value',
        },
      },
    },
  ]);
});
// Merging is not restricted to Allow statements.
test('also merge Deny statements', () => {
  assertMerged([
    new iam.PolicyStatement({
      effect: iam.Effect.DENY,
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      effect: iam.Effect.DENY,
      resources: ['b'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
  ], [
    {
      Effect: 'Deny',
      Resource: ['a', 'b'],
      Action: 'service:Action',
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
test('merges 3 statements in multiple steps', () => {
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['b'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    // This can combine with the previous two once they have been merged
    new iam.PolicyStatement({
      resources: ['a', 'b'],
      actions: ['service:Action2'],
      principals: [principal1],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: ['a', 'b'],
      Action: ['service:Action', 'service:Action2'],
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
// Duplicate resource entries collapse into one after merging.
test('winnow down literal duplicates', () => {
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['a', 'b'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: ['a', 'b'],
      Action: 'service:Action',
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
// Deduplication also applies to unresolved CloudFormation intrinsics.
test('winnow down literal duplicates if they are Refs', () => {
  const stack = new Stack();
  const user1 = new iam.User(stack, 'User1');
  const user2 = new iam.User(stack, 'User2');
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [user1],
    }),
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [user1, user2],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: 'a',
      Action: 'service:Action',
      Principal: {
        AWS: [
          { 'Fn::GetAtt': ['User1E278A736', 'Arn'] },
          { 'Fn::GetAtt': ['User21F1486D1', 'Arn'] },
        ],
      },
    },
  ]);
});
test('merges 2 pairs separately', () => {
  // Merges pairs (0,2) and (1,3)
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['c'],
      actions: ['service:Action1'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['b'],
      actions: ['service:Action'],
      principals: [principal1],
    }),
    new iam.PolicyStatement({
      resources: ['c'],
      actions: ['service:Action2'],
      principals: [principal1],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: ['a', 'b'],
      Action: 'service:Action',
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
    {
      Effect: 'Allow',
      Resource: 'c',
      Action: ['service:Action1', 'service:Action2'],
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
// Intrinsics stay opaque: two GetAtt principals are listed side by side,
// never merged into a single deep structure.
test('do not deep-merge info Refs and GetAtts', () => {
  const stack = new Stack();
  const user1 = new iam.User(stack, 'User1');
  const user2 = new iam.User(stack, 'User2');
  assertMerged([
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [user1],
    }),
    new iam.PolicyStatement({
      resources: ['a'],
      actions: ['service:Action'],
      principals: [user2],
    }),
  ], [
    {
      Effect: 'Allow',
      Resource: 'a',
      Action: 'service:Action',
      Principal: {
        AWS: [
          { 'Fn::GetAtt': ['User1E278A736', 'Arn'] },
          { 'Fn::GetAtt': ['User21F1486D1', 'Arn'] },
        ],
      },
    },
  ]);
});
// Two statements with the bare '*' principal merge, keeping '*'.
test('properly merge untyped principals (star)', () => {
  const statements = [
    PolicyStatement.fromJson({
      Action: ['service:Action1'],
      Effect: 'Allow',
      Resource: ['Resource'],
      Principal: '*',
    }),
    PolicyStatement.fromJson({
      Action: ['service:Action2'],
      Effect: 'Allow',
      Resource: ['Resource'],
      Principal: '*',
    }),
  ];
  assertMerged(statements, [
    {
      Action: ['service:Action1', 'service:Action2'],
      Effect: 'Allow',
      Resource: 'Resource',
      Principal: '*',
    },
  ]);
});
// A '*' principal and a typed AWS principal must remain separate statements.
test('fail merging typed and untyped principals', () => {
  const statements = [
    PolicyStatement.fromJson({
      Action: ['service:Action'],
      Effect: 'Allow',
      Resource: ['Resource'],
      Principal: '*',
    }),
    PolicyStatement.fromJson({
      Action: ['service:Action'],
      Effect: 'Allow',
      Resource: ['Resource'],
      Principal: { AWS: PRINCIPAL_ARN1 },
    }),
  ];
  assertMerged(statements, [
    {
      Action: 'service:Action',
      Effect: 'Allow',
      Resource: 'Resource',
      Principal: '*',
    },
    {
      Action: 'service:Action',
      Effect: 'Allow',
      Resource: 'Resource',
      Principal: { AWS: PRINCIPAL_ARN1 },
    },
  ]);
});
test('keep merging even if it requires multiple passes', () => {
  // [A, R1], [B, R1], [A, R2], [B, R2]
  // -> [{A, B}, R1], [{A, B], R2]
  // -> [{A, B}, {R1, R2}]
  assertMerged([
    new iam.PolicyStatement({
      actions: ['service:A'],
      resources: ['R1'],
    }),
    new iam.PolicyStatement({
      actions: ['service:B'],
      resources: ['R1'],
    }),
    new iam.PolicyStatement({
      actions: ['service:A'],
      resources: ['R2'],
    }),
    new iam.PolicyStatement({
      actions: ['service:B'],
      resources: ['R2'],
    }),
  ], [
    {
      Effect: 'Allow',
      Action: ['service:A', 'service:B'],
      Resource: ['R1', 'R2'],
    },
  ]);
});
/**
 * Assert that minimization leaves the given statements untouched: the
 * document resolved with `minimize: true` must equal the one resolved
 * with `minimize: false`.
 */
function assertNoMerge(statements: iam.PolicyStatement[]) {
  const app = new App();
  const stack = new Stack(app, 'Stack');
  const resolveDoc = (minimize: boolean) =>
    stack.resolve(new iam.PolicyDocument({ minimize, statements }));
  expect(resolveDoc(true)).toEqual(resolveDoc(false));
}
function assertMerged(statements: iam.PolicyStatement[], expected: any[]) {
const app = new App();
const stack = new Stack(app, 'Stack');
const minResult = stack.resolve(new iam.PolicyDocument({ minimize: true, statements }));
expect(minResult.Statement).toEqual(expected);
}
/**
* Assert Merged Conditional
*
* Based on a boolean, either call assertMerged or assertNoMerge. The 'expected'
* argument only applies in the case where `doMerge` is true.
*/
function assertMergedC(doMerge: boolean, statements: iam.PolicyStatement[], expected: any[]) {
return doMerge ? assertMerged(statements, expected) : assertNoMerge(statements);
} | the_stack |
import quip from "quip-apps-api";
import quiptext from "quiptext";
import moment from "moment";
import isDate from "date-fns/isDate";
import startOfWeek from "date-fns/startOfWeek";
import {isSameDay} from "./util";
import debounce from "lodash.debounce";
import {DateRange} from "./types";
export type Event = {
color: "RED" | "ORANGE" | "YELLOW" | "GREEN" | "BLUE" | "VIOLET";
dateRange: string;
titleText: string;
};
export const colors = [
quip.apps.ui.ColorMap.RED.KEY,
quip.apps.ui.ColorMap.ORANGE.KEY,
quip.apps.ui.ColorMap.YELLOW.KEY,
quip.apps.ui.ColorMap.GREEN.KEY,
quip.apps.ui.ColorMap.BLUE.KEY,
quip.apps.ui.ColorMap.VIOLET.KEY,
];
const formatDisplayMonthForStorage = (d: Date): string => {
return String(d);
};
const formatDefaultMonthForApi = (d: Date): string => {
return moment(d).format("YYYY-MM");
};
const parseDefaultMonthFromStorage = (displayMonth: string): Date => {
// Fixes a previous display month data type by converting
// New date format ranges from 2020-01 to 2020-12 (zero-padded, eg 05).
// Date(string) parses it after adding one to the month.
const regex = /^\d{4}-[01][0-9]$/g;
if (regex.test(displayMonth)) {
const tempDate = new Date(displayMonth);
return new Date(tempDate.setMonth(tempDate.getMonth() + 1));
}
return new Date(displayMonth);
};
const parseDisplayMonthFromApi = (s: string): Date => {
const date = moment(s, "YYYY-MM").toDate();
// Adding 5 days to potentially avoid any time-zone differences
// since parsing date from API sets it to the beginning of the month.
date.setDate(date.getDate() + 5);
return date;
};
const formatAllDayDateForStorage = (d: Date): string => {
// Stores it as zero-based months - eg. 2020,0,1 (Jan 1st, 2020)
return `${d.getFullYear()},${d.getMonth()},${d.getDate()}`;
};
const formatAllDayDateForApi = (d: Date): string => {
return moment(d).format("YYYY-MM-DD");
};
const parseAllDayStringFromStorage = (s: string): Date => {
if (typeof s === "number") {
// We formerly used timestamps in storage but that has tz issues.
return new Date(s);
} else if (s.includes(",")) {
const [year, monthIndex, day] = s.split(",");
return new Date(
window.parseInt(year, 10),
window.parseInt(monthIndex, 10),
window.parseInt(day, 10));
} else {
throw new Error(s + " is not a valid date from storage.");
}
};
const parseAllDayStringFromApi = (s: string): Date => {
return moment(s, "YYYY-MM-DD").toDate();
};
const eventPlaceholderText = quiptext("New Event");
export class RootRecord extends quip.apps.RootRecord {
private listener;
static getProperties() {
return {
events: quip.apps.RecordList.Type(EventRecord),
displayMonth: "string",
};
}
static getDefaultProperties() {
return {
displayMonth: String(new Date()),
events: [],
};
}
initialize() {
// TODO(kz2): this should probably unlisten at some point
this.listener = this.listen(this.setStatePayload_);
}
setStatePayload_ = debounce(() => {
// @ts-ignore remove this ignore with quip-apps-private
if (typeof quip.apps.setPayload === "function") {
// @ts-ignore remove this ignore with quip-apps-private
quip.apps.setPayload(this.getExportState());
}
}, 2000);
getEvents(): Array<EventRecord> {
return this.get("events").getRecords();
}
getEventsByStartDate(start: Date): Array<EventRecord> {
return this.getEvents().filter(event =>
isSameDay(event.getDateRange().start, start)
);
}
getLastEvent() {
const records = this.get("events").getRecords();
if (!records.length) {
return;
}
return records.reduce((a, b) => {
return b.get("created") > a.get("created") ? b : a;
});
}
populateDisplayMonth(displayMonth: string): void {
const displayMonthDate = parseDisplayMonthFromApi(displayMonth);
this.set(
"displayMonth",
formatDisplayMonthForStorage(displayMonthDate));
}
populateEvents(events: Event[]): Array<EventRecord> {
events.forEach(event => {
// @ts-ignore TODO(kz2): dateRange has type string
const start = parseAllDayStringFromApi(event.dateRange.start);
// @ts-ignore TODO(kz2): dateRange has type string
const end = parseAllDayStringFromApi(event.dateRange.end);
this.get("events").add(
{
dateRange: JSON.stringify({
start: formatAllDayDateForStorage(start),
end: formatAllDayDateForStorage(end),
}),
color: event.color,
title: {
"RichText_placeholderText": eventPlaceholderText,
// @ts-ignore TODO(kz2) content does not exist on type
// Event
"RichText_defaultText": event.content,
},
},
this.getNextIndexForStartDate(start));
});
quip.apps.recordQuipMetric("events_populated", undefined);
return this.get("events");
}
addEvent(start: Date, end: Date): EventRecord {
let color = quip.apps.ui.ColorMap.RED.KEY;
const lastEvent = this.getLastEvent();
if (lastEvent) {
const lastColor = lastEvent.getColor();
const lastColorIndex = colors.indexOf(lastColor);
let nextColorIndex = lastColorIndex + 1;
if (nextColorIndex >= colors.length) {
nextColorIndex = 0;
}
color = colors[nextColorIndex];
}
const newEvent = this.get("events").add(
{
dateRange: JSON.stringify({
start: formatAllDayDateForStorage(start),
end: formatAllDayDateForStorage(end),
}),
color,
},
this.getNextIndexForStartDate(start));
quip.apps.recordQuipMetric("add_event", {
event_id: newEvent.id(),
});
return newEvent;
}
getExportState(): String {
let events = this.getEvents().map(event => {
const dateRange = event.getDateRange();
// @ts-ignore TODO(kz2): dateRange.start is type `Date`, but
// the assigned value is type `string`
dateRange.start = formatAllDayDateForApi(dateRange.start);
// @ts-ignore TODO(kz2): dateRange.end is type `Date`, but
// the assigned value is type `string`
dateRange.end = formatAllDayDateForApi(dateRange.end);
return {
color: event.getColor(),
dateRange,
content: event.getTitleText(),
};
});
return JSON.stringify({
events,
displayMonth: formatDefaultMonthForApi(this.getDisplayMonth()),
});
}
getNextIndexForStartDate(
startDate: Date,
excludeEvent?: EventRecord | null
): number {
let nextIndex = 0;
let events = this.getEvents();
if (excludeEvent) {
events = events.filter(event => event.id() !== excludeEvent.id());
}
if (!events.length) {
return nextIndex;
}
const t = startDate.getTime();
for (let i = 0, event; (event = events[i]); i++) {
if (t < event.getDateRange().start.getTime()) {
nextIndex = i;
break;
}
nextIndex = i + 1;
}
return nextIndex;
}
getDisplayMonth() {
const displayMonth = this.get("displayMonth");
return parseDefaultMonthFromStorage(displayMonth);
}
setDisplayMonth(date: Date) {
this.set("displayMonth", formatDisplayMonthForStorage(date));
quip.apps.recordQuipMetric("set_display_month", {});
}
}
export class EventRecord extends quip.apps.Record {
private listener;
private commentsListener;
private titleContentListener;
domNode: Element | undefined | null;
// An array of startOfWeek time -> el on that week.
domNodesEvent: {
[x: number]: Element;
};
static getProperties() {
return {
color: "string",
created: "number",
dateRange: "string",
title: quip.apps.RichTextRecord,
};
}
static getDefaultProperties() {
return {
color: quip.apps.ui.ColorMap.RED.KEY,
created: Date.now(),
title: {
RichText_placeholderText: eventPlaceholderText,
},
};
}
initialize() {
this.domNode = null;
this.domNodesEvent = {};
this.listener = this.listen(this.notifyParent);
this.commentsListener = this.listenToComments(this.notifyParent);
if (this.get("title")) {
this.titleContentListener = this.get("title").listenToContent(
this.notifyParent);
}
}
delete() {
if (this.listener) {
this.unlisten(this.notifyParent);
}
if (this.commentsListener) {
this.unlistenToComments(this.notifyParent);
}
if (this.titleContentListener) {
if (this.get("title")) {
this.get("title").unlistenToContent(this.notifyParent);
}
}
quip.apps.recordQuipMetric("delete_event", {
event_id: this.id(),
});
super.delete();
}
notifyParent = () => {
this.getParentRecord().notifyListeners();
};
supportsComments() {
return true;
}
getTitleText() {
if (this.get("title")) {
return this.get("title").getTextContent();
}
}
getDom() {
return this.domNode;
}
setDom(el?: Element | null) {
this.domNode = el;
}
getDomNodesForEvent(): object {
return this.domNodesEvent;
}
getDomEvent(weekStartTime?: number): Element | undefined | null {
if (!this.domNodesEvent) {
return;
}
weekStartTime =
weekStartTime || startOfWeek(this.getDateRange().start).getTime();
return this.domNodesEvent[weekStartTime];
}
setDomEvent(eventForWeek?: {el: Element; weekStartTime: number}) {
if (!eventForWeek) {
this.domNodesEvent = {};
return;
}
if (!this.domNodesEvent) {
this.domNodesEvent = {};
}
const {el, weekStartTime} = eventForWeek;
this.domNodesEvent[weekStartTime] = el;
}
getIndex() {
return this.getContainingList().indexOf(this.id());
}
setIndex(i: number) {
if (typeof i !== "number") {
throw new Error("Cannot setIndex without a number value: " + i);
}
this.getContainingList().move(this, i);
}
getDateRange(): DateRange {
const {start, end} = JSON.parse(this.get("dateRange"));
// TODO: update when we support time
return {
start: parseAllDayStringFromStorage(start),
end: parseAllDayStringFromStorage(end),
};
}
setDateRange(start: Date, end: Date) {
if (!(isDate(start) && isDate(end))) {
console.error("start", start, "end", end);
throw new Error("start and end must both be Date types");
}
// TODO: update when we start using time
this.set(
"dateRange",
JSON.stringify({
start: formatAllDayDateForStorage(start),
end: formatAllDayDateForStorage(end),
}));
quip.apps.recordQuipMetric("move_event", {
event_id: this.id(),
});
}
getColor() {
return this.get("color");
}
setColor(color: string) {
this.set("color", color);
}
}
quip.apps.registerClass(RootRecord, "Root");
quip.apps.registerClass(EventRecord, "project-calendar-event"); | the_stack |
import { KitchenSinkCreateInput } from '../generated/binding';
export const KITCHEN_SINKS: KitchenSinkCreateInput[] = [
{
dateField: '2000-03-26T19:39:08.597Z',
stringField: 'Trantow',
emailField: 'consequuntur-94489@a.com',
integerField: 41,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.3885,
nullableStringField: null,
dateOnlyField: '2018-08-19',
dateTimeField: '2019-11-25T22:03:38.286Z',
jsonField: {
foo: 'bar',
quia: 'autem'
},
arrayOfInts: [1],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2018-06-09T17:19:07.764Z',
stringField: 'BOSCO',
emailField: 'consequuntur-94490@odit.com',
integerField: -77,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.2352,
nullableStringField: null,
dateOnlyField: '2020-02-27',
dateTimeField: '2019-02-14T08:01:37.290Z',
jsonField: {
foo: 'bar',
repellat: 'autem'
},
arrayOfInts: [5, 4],
arrayOfStrings: ['cat', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2010-06-15T02:46:13.288Z',
stringField: 'SCHOEN',
emailField: 'deleniti-94490@voluptate.com',
integerField: 59,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.6309,
nullableStringField: null,
dateOnlyField: '2019-06-01',
dateTimeField: '2020-12-25T11:57:22.725Z',
jsonField: {
foo: 'baz',
ab: 'est'
},
arrayOfInts: [4, 0, 5, 2],
arrayOfStrings: ['cat', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2015-09-20T05:30:11.138Z',
stringField: 'VOLUPTATUM NULLA',
emailField: 'vitae-94490@dolore.com',
integerField: -70,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.9273,
nullableStringField: null,
dateOnlyField: '2020-12-03',
dateTimeField: '2018-09-03T00:38:19.337Z',
jsonField: {
foo: 'baz',
voluptatum: 'dignissimos'
},
arrayOfInts: [6, 0, 1],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2007-11-02T14:52:54.467Z',
stringField: 'Wisoky',
emailField: 'illum-94490@et.com',
integerField: 60,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.0425,
nullableStringField: 'not-null',
dateOnlyField: '2020-02-25',
dateTimeField: '2018-05-31T19:22:05.341Z',
jsonField: {
foo: 'baz',
molestias: 'repellat'
},
arrayOfInts: [2, 4, 4],
arrayOfStrings: ['pony', 'turtle'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2019-11-07T04:11:01.926Z',
stringField: 'SCHINNER',
emailField: 'nihil-94490@laboriosam.com',
integerField: 72,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.9199,
nullableStringField: null,
dateOnlyField: '2020-08-18',
dateTimeField: '2020-04-25T02:02:08.170Z',
jsonField: {
foo: 'baz',
blanditiis: 'perspiciatis'
},
arrayOfInts: [2, 0],
arrayOfStrings: ['dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2006-12-02T23:23:07.874Z',
stringField: 'maximillia',
emailField: 'voluptas-94490@dolores.com',
integerField: -73,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.6969,
nullableStringField: null,
dateOnlyField: '2018-04-14',
dateTimeField: '2020-09-13T04:05:32.491Z',
jsonField: {
foo: 'bar',
omnis: 'laboriosam'
},
arrayOfInts: [1],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2013-07-23T13:06:48.326Z',
stringField: 'koelpin',
emailField: 'facere-94490@eius.com',
integerField: -10,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.4273,
nullableStringField: 'not-null',
dateOnlyField: '2020-10-01',
dateTimeField: '2020-08-25T00:43:52.898Z',
jsonField: {
foo: 'biz',
et: 'voluptatum'
},
arrayOfInts: [5, 5],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2000-01-12T09:14:05.317Z',
stringField: 'DELENITI SUNT',
emailField: 'voluptatem-94490@quisquam.com',
integerField: 60,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.9735,
nullableStringField: 'not-null',
dateOnlyField: '2018-09-17',
dateTimeField: '2018-07-05T11:32:01.838Z',
jsonField: {
foo: 'baz',
qui: 'asperiores'
},
arrayOfInts: [0, 0, 7],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2013-04-12T16:21:23.172Z',
stringField: 'inventore velit',
emailField: 'unde-94490@quis.com',
integerField: 60,
stringEnumField: 'BAR',
booleanField: true,
floatField: -1.7707,
nullableStringField: 'not-null',
dateOnlyField: '2020-07-10',
dateTimeField: '2018-04-28T06:17:13.736Z',
jsonField: {
foo: 'baz',
ad: 'perspiciatis'
},
arrayOfInts: [],
arrayOfStrings: ['dog', 'fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2007-12-19T04:35:25.042Z',
stringField: 'QUAM CULPA',
emailField: 'porro-94490@voluptatem.com',
integerField: -28,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.7448,
nullableStringField: null,
dateOnlyField: '2020-03-31',
dateTimeField: '2019-03-30T19:45:37.259Z',
jsonField: {
foo: 'baz',
et: 'est'
},
arrayOfInts: [1, 2, 1, 6],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2019-05-03T15:48:00.387Z',
stringField: 'SCHIMMEL',
emailField: 'at-94490@culpa.com',
integerField: -40,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.3528,
nullableStringField: null,
dateOnlyField: '2018-12-28',
dateTimeField: '2019-04-26T22:33:32.962Z',
jsonField: {
foo: 'biz',
non: 'rerum'
},
arrayOfInts: [8, 4, 7],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2010-08-26T05:32:14.613Z',
stringField: 'Madeline',
emailField: 'nulla-94490@quia.com',
integerField: -88,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.9475,
nullableStringField: 'not-null',
dateOnlyField: '2019-08-17',
dateTimeField: '2019-11-11T20:22:39.955Z',
jsonField: {
foo: 'bar',
blanditiis: 'eligendi'
},
arrayOfInts: [4],
arrayOfStrings: ['fox', 'turtle'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2008-06-23T01:50:57.367Z',
stringField: 'NAKIA',
emailField: 'maiores-94490@quia.com',
integerField: 20,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.3992,
nullableStringField: null,
dateOnlyField: '2020-06-20',
dateTimeField: '2019-04-17T01:25:34.424Z',
jsonField: {
foo: 'baz',
hic: 'voluptatem'
},
arrayOfInts: [5],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2009-06-05T09:17:38.251Z',
stringField: 'nakia',
emailField: 'illum-94490@odit.com',
integerField: 36,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.0651,
nullableStringField: 'not-null',
dateOnlyField: '2020-10-28',
dateTimeField: '2020-09-29T08:57:05.059Z',
jsonField: {
foo: 'bar',
ut: 'expedita'
},
arrayOfInts: [7],
arrayOfStrings: ['dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2013-03-07T16:28:03.213Z',
stringField: 'Nakia',
emailField: 'fugit-94490@doloremque.com',
integerField: 37,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.4664,
nullableStringField: 'not-null',
dateOnlyField: '2018-06-02',
dateTimeField: '2018-06-21T08:52:15.890Z',
jsonField: {
foo: 'baz',
repellat: 'rem'
},
arrayOfInts: [3, 0, 8],
arrayOfStrings: ['fox', 'pony'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2010-03-21T16:34:30.420Z',
stringField: 'sed praesentium',
emailField: 'illo-94490@aut.com',
integerField: -18,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.9525,
nullableStringField: 'not-null',
dateOnlyField: '2020-09-21',
dateTimeField: '2018-03-10T17:25:14.892Z',
jsonField: {
foo: 'biz',
et: 'laboriosam'
},
arrayOfInts: [5],
arrayOfStrings: ['dog', 'fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2019-03-15T10:02:32.875Z',
stringField: 'LUEILWITZ',
emailField: 'ipsa-94490@animi.com',
integerField: -34,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.7865,
nullableStringField: 'not-null',
dateOnlyField: '2020-08-24',
dateTimeField: '2018-10-26T12:04:40.017Z',
jsonField: {
foo: 'baz',
doloremque: 'labore'
},
arrayOfInts: [1, 8],
arrayOfStrings: ['cat'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '3'
},
type: 'Baz'
}
}
},
{
dateField: '2002-04-13T06:42:01.063Z',
stringField: 'BERENICE',
emailField: 'sapiente-94490@quia.com',
integerField: 72,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.1031,
nullableStringField: 'not-null',
dateOnlyField: '2018-10-13',
dateTimeField: '2020-12-24T09:25:47.128Z',
jsonField: {
foo: 'biz',
et: 'nulla'
},
arrayOfInts: [7, 0, 0, 3],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '3'
},
type: 'Baz'
}
}
},
{
dateField: '2015-11-13T18:10:22.996Z',
stringField: 'amber',
emailField: 'ducimus-94490@occaecati.com',
integerField: -38,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.9494,
nullableStringField: null,
dateOnlyField: '2018-07-03',
dateTimeField: '2018-08-13T16:06:07.214Z',
jsonField: {
foo: 'biz',
perferendis: 'et'
},
arrayOfInts: [0, 1, 7, 0],
arrayOfStrings: ['turtle', 'fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '4'
},
type: 'Baz'
}
}
},
{
dateField: '2003-10-28T14:00:51.027Z',
stringField: 'JEROMY',
emailField: 'sit-94490@facilis.com',
integerField: -85,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.6478,
nullableStringField: null,
dateOnlyField: '2019-05-29',
dateTimeField: '2018-12-11T07:56:46.405Z',
jsonField: {
foo: 'baz',
earum: 'reiciendis'
},
arrayOfInts: [],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2017-07-08T05:55:21.859Z',
stringField: 'RYLEIGH',
emailField: 'qui-94490@exercitationem.com',
integerField: -77,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.4488,
nullableStringField: 'not-null',
dateOnlyField: '2020-03-30',
dateTimeField: '2018-01-11T01:58:14.032Z',
jsonField: {
foo: 'baz',
quaerat: 'dignissimos'
},
arrayOfInts: [6, 2, 0, 7],
arrayOfStrings: ['dog', 'fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2004-03-05T21:41:32.408Z',
stringField: 'delfinaz',
emailField: 'et-94490@in.com',
integerField: 21,
stringEnumField: 'FOO',
booleanField: true,
floatField: -1.1396,
nullableStringField: 'not-null',
dateOnlyField: '2020-05-15',
dateTimeField: '2018-03-09T14:25:42.617Z',
jsonField: {
foo: 'baz',
quia: 'assumenda'
},
arrayOfInts: [7],
arrayOfStrings: ['turtle'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2016-02-16T00:51:08.515Z',
stringField: 'SHEMAR',
emailField: 'officia-94490@officia.com',
integerField: -64,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.869,
nullableStringField: null,
dateOnlyField: '2018-03-15',
dateTimeField: '2019-12-29T20:30:17.183Z',
jsonField: {
foo: 'bar',
earum: 'in'
},
arrayOfInts: [],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2015-11-16T20:13:42.749Z',
stringField: 'Wilmer',
emailField: 'eligendi-94490@mollitia.com',
integerField: 1,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.4152,
nullableStringField: 'not-null',
dateOnlyField: '2018-06-26',
dateTimeField: '2018-10-25T06:53:58.190Z',
jsonField: {
foo: 'biz',
molestiae: 'voluptatem'
},
arrayOfInts: [],
arrayOfStrings: ['cat'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2001-12-22T04:20:50.000Z',
stringField: 'stark',
emailField: 'qui-94490@placeat.com',
integerField: 93,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.076,
nullableStringField: 'not-null',
dateOnlyField: '2018-12-25',
dateTimeField: '2018-06-10T11:31:29.657Z',
jsonField: {
foo: 'biz',
hic: 'architecto'
},
arrayOfInts: [5],
arrayOfStrings: ['turtle', 'turtle'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2000-10-24T14:17:23.329Z',
stringField: 'elda',
emailField: 'nostrum-94490@ex.com',
integerField: 31,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.771,
nullableStringField: null,
dateOnlyField: '2020-12-13',
dateTimeField: '2018-03-13T18:09:40.750Z',
jsonField: {
foo: 'baz',
qui: 'aut'
},
arrayOfInts: [0, 8, 5, 5],
arrayOfStrings: ['turtle', 'cat'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2011-07-27T20:53:06.011Z',
stringField: 'Damien',
emailField: 'consectetur-94490@maiores.com',
integerField: 14,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.2581,
nullableStringField: null,
dateOnlyField: '2020-06-02',
dateTimeField: '2019-06-19T04:58:36.438Z',
jsonField: {
foo: 'bar',
voluptatem: 'quasi'
},
arrayOfInts: [0],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2012-06-19T19:25:51.258Z',
stringField: 'earum nesciunt',
emailField: 'rerum-94490@voluptatem.com',
integerField: 38,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.6917,
nullableStringField: 'not-null',
dateOnlyField: '2018-05-18',
dateTimeField: '2018-06-06T19:52:48.715Z',
jsonField: {
foo: 'baz',
ipsum: 'eveniet'
},
arrayOfInts: [6, 3, 4, 2],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2007-01-23T16:37:38.001Z',
stringField: 'illum atque',
emailField: 'nemo-94490@quis.com',
integerField: 80,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.7094,
nullableStringField: 'not-null',
dateOnlyField: '2018-09-21',
dateTimeField: '2020-12-16T14:16:54.067Z',
jsonField: {
foo: 'baz',
atque: 'non'
},
arrayOfInts: [5, 6],
arrayOfStrings: ['pony', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2009-10-01T13:30:44.281Z',
stringField: 'horowitz',
emailField: 'ab-94490@expedita.com',
integerField: -7,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.4361,
nullableStringField: null,
dateOnlyField: '2019-06-25',
dateTimeField: '2018-07-29T01:17:45.596Z',
jsonField: {
foo: 'biz',
nulla: 'explicabo'
},
arrayOfInts: [0, 4, 6],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2013-06-11T14:11:47.706Z',
stringField: 'schowalter',
emailField: 'autem-94490@ea.com',
integerField: -55,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.1803,
nullableStringField: 'not-null',
dateOnlyField: '2018-02-07',
dateTimeField: '2018-03-24T06:41:17.641Z',
jsonField: {
foo: 'biz',
laborum: 'qui'
},
arrayOfInts: [2, 1, 1],
arrayOfStrings: ['pony'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2002-03-26T22:04:29.419Z',
stringField: 'MAIORES ADIPISCI',
emailField: 'aperiam-94490@consequatur.com',
integerField: 19,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.2365,
nullableStringField: 'not-null',
dateOnlyField: '2018-04-18',
dateTimeField: '2018-12-25T17:14:31.792Z',
jsonField: {
foo: 'biz',
corrupti: 'placeat'
},
arrayOfInts: [0, 7, 8],
arrayOfStrings: ['turtle'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2002-12-07T20:12:25.832Z',
stringField: 'devin',
emailField: 'officiis-94490@repellat.com',
integerField: -30,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.1463,
nullableStringField: null,
dateOnlyField: '2018-12-05',
dateTimeField: '2019-11-13T18:22:51.055Z',
jsonField: {
foo: 'bar',
qui: 'quis'
},
arrayOfInts: [],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2015-06-06T19:51:16.479Z',
stringField: 'VELIT ADIPISCI',
emailField: 'repellat-94490@qui.com',
integerField: 47,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.6576,
nullableStringField: 'not-null',
dateOnlyField: '2018-10-07',
dateTimeField: '2019-11-20T11:42:01.432Z',
jsonField: {
foo: 'biz',
dolores: 'quia'
},
arrayOfInts: [3, 6],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2002-03-22T12:13:38.674Z',
stringField: 'AMET QUI',
emailField: 'sint-94490@quia.com',
integerField: -87,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.1504,
nullableStringField: null,
dateOnlyField: '2019-12-12',
dateTimeField: '2018-02-23T17:27:00.987Z',
jsonField: {
foo: 'biz',
dolorem: 'ea'
},
arrayOfInts: [8, 4, 3, 6],
arrayOfStrings: ['pony', 'cat'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2012-11-07T20:20:21.500Z',
stringField: 'maiores praesentium',
emailField: 'et-94490@velit.com',
integerField: -83,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.7952,
nullableStringField: 'not-null',
dateOnlyField: '2019-10-17',
dateTimeField: '2019-10-15T13:37:32.192Z',
jsonField: {
foo: 'baz',
officiis: 'ut'
},
arrayOfInts: [4, 5, 3, 8],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2006-06-24T23:17:19.558Z',
stringField: 'rerum et',
emailField: 'laudantium-94490@illo.com',
integerField: 50,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.7105,
nullableStringField: null,
dateOnlyField: '2019-09-19',
dateTimeField: '2019-02-11T05:37:09.516Z',
jsonField: {
foo: 'baz',
corrupti: 'quos'
},
arrayOfInts: [1, 8, 3],
arrayOfStrings: ['cat', 'pony'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2003-06-16T03:09:06.824Z',
stringField: 'erling',
emailField: 'non-94490@corrupti.com',
integerField: -73,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.5513,
nullableStringField: 'not-null',
dateOnlyField: '2019-11-29',
dateTimeField: '2019-02-08T16:41:13.768Z',
jsonField: {
foo: 'bar',
corrupti: 'sit'
},
arrayOfInts: [6],
arrayOfStrings: ['turtle', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2019-01-25T19:47:42.092Z',
stringField: 'KAELYN',
emailField: 'vero-94490@qui.com',
integerField: 24,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.4497,
nullableStringField: null,
dateOnlyField: '2020-11-21',
dateTimeField: '2020-06-10T10:53:56.599Z',
jsonField: {
foo: 'biz',
aliquid: 'ut'
},
arrayOfInts: [8],
arrayOfStrings: ['cat'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2019-08-01T03:49:51.824Z',
stringField: 'alias sint',
emailField: 'modi-94490@animi.com',
integerField: 62,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.5469,
nullableStringField: null,
dateOnlyField: '2019-08-02',
dateTimeField: '2019-05-02T11:56:08.871Z',
jsonField: {
foo: 'baz',
quae: 'nemo'
},
arrayOfInts: [3, 4, 6, 1],
arrayOfStrings: ['cat'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2009-01-09T09:22:12.609Z',
stringField: 'molestiae praesentium',
emailField: 'error-94490@eveniet.com',
integerField: -22,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.5727,
nullableStringField: null,
dateOnlyField: '2019-12-25',
dateTimeField: '2018-12-23T05:29:36.961Z',
jsonField: {
foo: 'bar',
nisi: 'aut'
},
arrayOfInts: [7],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2003-05-03T23:37:31.552Z',
stringField: 'Okuneva',
emailField: 'sed-94490@dolores.com',
integerField: -93,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.1316,
nullableStringField: null,
dateOnlyField: '2018-04-30',
dateTimeField: '2020-12-26T01:23:27.078Z',
jsonField: {
foo: 'bar',
molestias: 'quo'
},
arrayOfInts: [],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2013-01-05T06:08:23.118Z',
stringField: 'hartmann',
emailField: 'non-94490@perspiciatis.com',
integerField: -39,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.4536,
nullableStringField: 'not-null',
dateOnlyField: '2018-07-08',
dateTimeField: '2018-02-21T06:42:38.139Z',
jsonField: {
foo: 'baz',
iusto: 'exercitationem'
},
arrayOfInts: [3, 7, 8, 5],
arrayOfStrings: ['dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2006-12-21T16:26:17.369Z',
stringField: 'raquel',
emailField: 'necessitatibus-94490@quia.com',
integerField: -35,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.3465,
nullableStringField: 'not-null',
dateOnlyField: '2019-10-06',
dateTimeField: '2018-04-28T05:35:05.513Z',
jsonField: {
foo: 'biz',
labore: 'nihil'
},
arrayOfInts: [],
arrayOfStrings: ['turtle'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2008-02-09T14:54:40.128Z',
stringField: 'quas fugit',
emailField: 'a-94490@a.com',
integerField: -66,
stringEnumField: 'FOO',
booleanField: true,
floatField: -1.8619,
nullableStringField: 'not-null',
dateOnlyField: '2019-04-21',
dateTimeField: '2020-03-08T08:10:54.980Z',
jsonField: {
foo: 'baz',
dolorem: 'ut'
},
arrayOfInts: [7, 7, 1],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '5'
},
type: 'Baz'
}
}
},
{
dateField: '2006-02-27T00:18:17.121Z',
stringField: 'ODIO ID',
emailField: 'totam-94490@rem.com',
integerField: -90,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.827,
nullableStringField: null,
dateOnlyField: '2019-04-12',
dateTimeField: '2019-10-12T02:20:37.708Z',
jsonField: {
foo: 'baz',
ipsum: 'consequuntur'
},
arrayOfInts: [3, 0, 4],
arrayOfStrings: ['dog'],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2001-02-07T13:01:49.310Z',
stringField: 'iste in',
emailField: 'et-94491@sed.com',
integerField: -24,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.5139,
nullableStringField: 'not-null',
dateOnlyField: '2020-10-11',
dateTimeField: '2020-03-16T17:15:27.711Z',
jsonField: {
foo: 'baz',
est: 'corporis'
},
arrayOfInts: [0, 8],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2012-05-08T03:31:01.786Z',
stringField: 'quia et',
emailField: 'inventore-94491@quos.com',
integerField: 60,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.8093,
nullableStringField: null,
dateOnlyField: '2018-08-28',
dateTimeField: '2019-05-27T03:22:02.440Z',
jsonField: {
foo: 'bar',
et: 'incidunt'
},
arrayOfInts: [7, 3, 0],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2009-01-16T13:52:39.606Z',
stringField: 'HOPPE',
emailField: 'officia-94491@dolorum.com',
integerField: 87,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.7151,
nullableStringField: null,
dateOnlyField: '2019-10-23',
dateTimeField: '2018-02-06T23:50:49.092Z',
jsonField: {
foo: 'biz',
nisi: 'est'
},
arrayOfInts: [0],
arrayOfStrings: ['dog'],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2003-09-29T06:20:32.429Z',
stringField: 'korey',
emailField: 'dolores-94491@excepturi.com',
integerField: 1,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.3781,
nullableStringField: null,
dateOnlyField: '2019-06-08',
dateTimeField: '2020-05-28T19:08:17.354Z',
jsonField: {
foo: 'bar',
quisquam: 'perspiciatis'
},
arrayOfInts: [],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2012-05-12T13:46:41.150Z',
stringField: 'VON',
emailField: 'facere-94491@blanditiis.com',
integerField: -8,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.0037,
nullableStringField: 'not-null',
dateOnlyField: '2019-08-05',
dateTimeField: '2019-02-25T02:47:46.670Z',
jsonField: {
foo: 'biz',
nam: 'numquam'
},
arrayOfInts: [0, 8, 2, 1],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2008-10-03T07:03:20.899Z',
stringField: 'neque tempore',
emailField: 'sint-94491@sunt.com',
integerField: 0,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.753,
nullableStringField: 'not-null',
dateOnlyField: '2019-03-24',
dateTimeField: '2019-09-09T21:30:46.841Z',
jsonField: {
foo: 'bar',
ut: 'enim'
},
arrayOfInts: [],
arrayOfStrings: ['cat'],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2011-11-11T14:15:49.472Z',
stringField: 'ut consequatur',
emailField: 'et-94491@minus.com',
integerField: -18,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.9532,
nullableStringField: null,
dateOnlyField: '2020-07-07',
dateTimeField: '2020-05-26T01:29:21.907Z',
jsonField: {
foo: 'baz',
accusamus: 'voluptatem'
},
arrayOfInts: [0, 0],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2006-03-23T09:17:56.199Z',
stringField: 'fermin',
emailField: 'doloremque-94491@aliquid.com',
integerField: 82,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.6261,
nullableStringField: 'not-null',
dateOnlyField: '2019-03-03',
dateTimeField: '2020-06-07T00:03:51.058Z',
jsonField: {
foo: 'biz',
voluptatum: 'ullam'
},
arrayOfInts: [3],
arrayOfStrings: ['pony'],
typedJsonField: {
params: {
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2005-04-20T12:50:32.785Z',
stringField: 'eum reiciendis',
emailField: 'officia-94491@sit.com',
integerField: -96,
stringEnumField: 'FOO',
booleanField: true,
floatField: -1.9195,
nullableStringField: 'not-null',
dateOnlyField: '2020-09-26',
dateTimeField: '2018-08-07T10:43:25.410Z',
jsonField: {
foo: 'baz',
corporis: 'necessitatibus'
},
arrayOfInts: [],
arrayOfStrings: [],
typedJsonField: {
params: {
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2010-07-11T22:23:16.212Z',
stringField: 'iusto perspiciatis',
emailField: 'hic-94491@praesentium.com',
integerField: -72,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.87,
nullableStringField: 'not-null',
dateOnlyField: '2020-09-18',
dateTimeField: '2018-07-02T03:09:42.268Z',
jsonField: {
foo: 'biz',
quo: 'ipsa'
},
arrayOfInts: [],
arrayOfStrings: ['pony'],
typedJsonField: {
params: {
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2018-07-16T01:22:41.394Z',
stringField: 'HAMILL',
emailField: 'suscipit-94491@temporibus.com',
integerField: 13,
stringEnumField: 'FOO',
booleanField: true,
floatField: -1.2896,
nullableStringField: null,
dateOnlyField: '2019-03-11',
dateTimeField: '2020-02-26T06:11:11.991Z',
jsonField: {
foo: 'bar',
nobis: 'recusandae'
},
arrayOfInts: [2],
arrayOfStrings: ['dog', 'dog'],
typedJsonField: {
params: {
value: {
deepParam: '1'
},
type: 'Baz'
}
}
},
{
dateField: '2006-07-08T19:12:18.422Z',
stringField: "o'reilly",
emailField: 'dolorem-94491@dolorem.com',
integerField: 74,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.2316,
nullableStringField: 'not-null',
dateOnlyField: '2019-07-11',
dateTimeField: '2020-10-15T18:50:09.459Z',
jsonField: {
foo: 'bar',
eum: 'tenetur'
},
arrayOfInts: [6, 7],
arrayOfStrings: ['turtle'],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2009-02-22T14:33:33.660Z',
stringField: 'LABORE CULPA',
emailField: 'commodi-94491@adipisci.com',
integerField: 5,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.0592,
nullableStringField: 'not-null',
dateOnlyField: '2018-10-11',
dateTimeField: '2018-04-04T22:09:44.507Z',
jsonField: {
foo: 'baz',
eveniet: 'placeat'
},
arrayOfInts: [4],
arrayOfStrings: ['dog'],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2014-05-23T17:56:47.416Z',
stringField: 'VERITATIS EIUS',
emailField: 'aut-94491@culpa.com',
integerField: -54,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.8332,
nullableStringField: 'not-null',
dateOnlyField: '2020-10-14',
dateTimeField: '2018-01-25T05:43:10.415Z',
jsonField: {
foo: 'bar',
incidunt: 'placeat'
},
arrayOfInts: [1, 8, 1, 7],
arrayOfStrings: ['turtle', 'fox'],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2008-06-13T19:50:17.111Z',
stringField: 'billie',
emailField: 'aut-94491@debitis.com',
integerField: 71,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.5285,
nullableStringField: 'not-null',
dateOnlyField: '2018-12-04',
dateTimeField: '2018-04-05T21:24:11.767Z',
jsonField: {
foo: 'biz',
voluptatem: 'debitis'
},
arrayOfInts: [3, 7, 6, 8],
arrayOfStrings: ['turtle'],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2012-03-04T04:26:12.703Z',
stringField: 'winston',
emailField: 'ipsum-94491@reprehenderit.com',
integerField: -84,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.4247,
nullableStringField: null,
dateOnlyField: '2019-10-08',
dateTimeField: '2020-06-13T13:36:37.319Z',
jsonField: {
foo: 'biz',
consequuntur: 'dicta'
},
arrayOfInts: [4, 2, 5],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2008-03-01T12:47:24.590Z',
stringField: 'HELGA',
emailField: 'qui-94491@illum.com',
integerField: -77,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.106,
nullableStringField: null,
dateOnlyField: '2018-04-11',
dateTimeField: '2018-04-15T06:49:17.394Z',
jsonField: {
foo: 'baz',
et: 'occaecati'
},
arrayOfInts: [],
arrayOfStrings: ['fox', 'dog'],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2003-04-15T10:12:41.904Z',
stringField: 'ipsum voluptas',
emailField: 'totam-94491@eos.com',
integerField: -94,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.2852,
nullableStringField: null,
dateOnlyField: '2018-08-31',
dateTimeField: '2020-01-06T00:08:52.001Z',
jsonField: {
foo: 'bar',
exercitationem: 'natus'
},
arrayOfInts: [4, 3, 0],
arrayOfStrings: [],
typedJsonField: {
params: {
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2008-06-18T10:28:28.356Z',
stringField: 'luettgen',
emailField: 'optio-94491@voluptas.com',
integerField: 4,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.7963,
nullableStringField: 'not-null',
dateOnlyField: '2019-01-23',
dateTimeField: '2020-06-27T03:45:14.668Z',
jsonField: {
foo: 'biz',
ex: 'alias'
},
arrayOfInts: [0, 6, 5],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2013-06-19T13:12:38.556Z',
stringField: 'dietrich',
emailField: 'necessitatibus-94491@natus.com',
integerField: 87,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.9018,
nullableStringField: null,
dateOnlyField: '2018-06-14',
dateTimeField: '2019-11-05T13:56:17.060Z',
jsonField: {
foo: 'bar',
praesentium: 'rerum'
},
arrayOfInts: [0],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2018-03-05T11:05:35.164Z',
stringField: 'DOVIE',
emailField: 'omnis-94491@dolore.com',
integerField: 74,
stringEnumField: 'BAR',
booleanField: true,
floatField: -1.5775,
nullableStringField: null,
dateOnlyField: '2018-07-31',
dateTimeField: '2018-07-22T09:57:28.210Z',
jsonField: {
foo: 'bar',
laboriosam: 'ipsum'
},
arrayOfInts: [7, 2],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2011-08-25T12:17:02.677Z',
stringField: 'linnea',
emailField: 'delectus-94491@quia.com',
integerField: 55,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.5746,
nullableStringField: 'not-null',
dateOnlyField: '2018-10-11',
dateTimeField: '2019-12-27T01:42:13.975Z',
jsonField: {
foo: 'biz',
ipsa: 'quibusdam'
},
arrayOfInts: [5, 8, 1, 3],
arrayOfStrings: ['dog', 'fox'],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2003-01-04T16:15:21.008Z',
stringField: 'madison',
emailField: 'quae-94491@aspernatur.com',
integerField: -6,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.328,
nullableStringField: null,
dateOnlyField: '2018-11-09',
dateTimeField: '2018-04-23T20:20:23.845Z',
jsonField: {
foo: 'biz',
omnis: 'a'
},
arrayOfInts: [2],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2010-10-31T21:19:02.009Z',
stringField: 'NUMQUAM ALIQUAM',
emailField: 'odit-94491@dolor.com',
integerField: 14,
stringEnumField: 'BAR',
booleanField: true,
floatField: -1.6389,
nullableStringField: null,
dateOnlyField: '2018-09-16',
dateTimeField: '2020-12-02T20:53:30.328Z',
jsonField: {
foo: 'bar',
natus: 'dolores'
},
arrayOfInts: [3],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2016-08-25T11:53:41.092Z',
stringField: 'macy',
emailField: 'qui-94491@maiores.com',
integerField: -80,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.7988,
nullableStringField: 'not-null',
dateOnlyField: '2020-12-03',
dateTimeField: '2019-07-18T17:41:43.042Z',
jsonField: {
foo: 'bar',
et: 'omnis'
},
arrayOfInts: [1, 0, 6, 1],
arrayOfStrings: ['cat'],
typedJsonField: {
params: {
name: 'Bar',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2009-01-13T18:03:41.540Z',
stringField: 'GERLACH',
emailField: 'in-94491@amet.com',
integerField: 8,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.1804,
nullableStringField: 'not-null',
dateOnlyField: '2020-02-10',
dateTimeField: '2019-02-13T01:58:53.948Z',
jsonField: {
foo: 'baz',
sint: 'quia'
},
arrayOfInts: [5, 5, 8],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2011-02-27T02:39:55.805Z',
stringField: 'padberg',
emailField: 'consequatur-94491@corporis.com',
integerField: 76,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.169,
nullableStringField: null,
dateOnlyField: '2018-04-24',
dateTimeField: '2018-09-22T19:08:17.847Z',
jsonField: {
foo: 'baz',
est: 'dignissimos'
},
arrayOfInts: [],
arrayOfStrings: ['cat', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2015-07-25T12:02:55.232Z',
stringField: 'ENIM DICTA',
emailField: 'est-94491@quasi.com',
integerField: -47,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.8532,
nullableStringField: null,
dateOnlyField: '2020-07-08',
dateTimeField: '2020-11-19T06:20:26.510Z',
jsonField: {
foo: 'biz',
rerum: 'et'
},
arrayOfInts: [4, 1, 4, 3],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2004-11-09T21:46:40.870Z',
stringField: 'amalia',
emailField: 'sequi-94491@quibusdam.com',
integerField: 37,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.7813,
nullableStringField: 'not-null',
dateOnlyField: '2020-12-11',
dateTimeField: '2020-05-23T14:10:01.169Z',
jsonField: {
foo: 'bar',
autem: 'consectetur'
},
arrayOfInts: [3, 6],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2015-04-07T13:16:25.215Z',
stringField: 'QUAS PERSPICIATIS',
emailField: 'et-94491@nesciunt.com',
integerField: -48,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.3683,
nullableStringField: null,
dateOnlyField: '2019-12-10',
dateTimeField: '2018-08-14T06:26:06.480Z',
jsonField: {
foo: 'biz',
numquam: 'ut'
},
arrayOfInts: [],
arrayOfStrings: ['fox'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2019-03-12T06:06:42.021Z',
stringField: 'OMNIS ERROR',
emailField: 'illum-94491@quam.com',
integerField: 32,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.0507,
nullableStringField: null,
dateOnlyField: '2018-10-21',
dateTimeField: '2019-08-03T23:12:04.505Z',
jsonField: {
foo: 'baz',
ratione: 'ipsam'
},
arrayOfInts: [],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2007-03-20T05:22:37.593Z',
stringField: 'Williamson',
emailField: 'quia-94491@ducimus.com',
integerField: -2,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.7467,
nullableStringField: 'not-null',
dateOnlyField: '2019-02-01',
dateTimeField: '2019-02-01T23:53:26.248Z',
jsonField: {
foo: 'bar',
repellat: 'id'
},
arrayOfInts: [1, 7, 6],
arrayOfStrings: ['pony'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2013-05-18T19:08:26.697Z',
stringField: 'schamberger',
emailField: 'impedit-94491@asperiores.com',
integerField: -20,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.0223,
nullableStringField: null,
dateOnlyField: '2020-03-04',
dateTimeField: '2018-01-07T06:38:48.310Z',
jsonField: {
foo: 'baz',
rerum: 'aperiam'
},
arrayOfInts: [5, 8, 3, 3],
arrayOfStrings: [],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2008-12-02T19:29:26.187Z',
stringField: 'Jared',
emailField: 'aut-94491@et.com',
integerField: -27,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.8873,
nullableStringField: null,
dateOnlyField: '2019-03-22',
dateTimeField: '2020-05-16T19:55:00.212Z',
jsonField: {
foo: 'biz',
qui: 'incidunt'
},
arrayOfInts: [],
arrayOfStrings: ['fox', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2018-05-01T02:05:33.699Z',
stringField: 'QUIDEM FUGIAT',
emailField: 'corrupti-94491@vel.com',
integerField: 30,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.5086,
nullableStringField: 'not-null',
dateOnlyField: '2018-09-22',
dateTimeField: '2020-04-23T06:43:46.127Z',
jsonField: {
foo: 'biz',
laborum: 'deserunt'
},
arrayOfInts: [8],
arrayOfStrings: ['pony', 'dog'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2002-02-16T10:44:45.995Z',
stringField: 'MAGNAM REICIENDIS',
emailField: 'aut-94491@aspernatur.com',
integerField: 32,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.707,
nullableStringField: null,
dateOnlyField: '2020-09-07',
dateTimeField: '2019-06-11T23:31:27.825Z',
jsonField: {
foo: 'baz',
odit: 'assumenda'
},
arrayOfInts: [],
arrayOfStrings: ['turtle', 'pony'],
typedJsonField: {
params: {
name: 'Foo',
value: {
deepParam: '8'
},
type: 'Baz'
}
}
},
{
dateField: '2018-01-23T15:15:00.879Z',
stringField: 'DESTIN',
emailField: 'non-94491@beatae.com',
integerField: -33,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.566,
nullableStringField: null,
dateOnlyField: '2020-10-01',
dateTimeField: '2020-03-02T16:04:50.581Z',
jsonField: {
foo: 'bar',
et: 'eos'
},
arrayOfInts: [3, 5, 1, 4],
arrayOfStrings: ['fox']
},
{
dateField: '2013-11-05T10:53:37.270Z',
stringField: 'Block',
emailField: 'veniam-94491@officiis.com',
integerField: -98,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.3233,
nullableStringField: null,
dateOnlyField: '2019-01-21',
dateTimeField: '2020-08-27T14:41:13.386Z',
jsonField: {
foo: 'biz',
quas: 'similique'
},
arrayOfInts: [1, 8, 4],
arrayOfStrings: ['fox']
},
{
dateField: '2006-08-12T21:38:30.945Z',
stringField: 'jaskolski',
emailField: 'tenetur-94491@sit.com',
integerField: -59,
stringEnumField: 'BAR',
booleanField: true,
floatField: -1.2778,
nullableStringField: 'not-null',
dateOnlyField: '2019-05-27',
dateTimeField: '2018-08-22T05:05:10.761Z',
jsonField: {
foo: 'biz',
reiciendis: 'nobis'
},
arrayOfInts: [3, 2, 8],
arrayOfStrings: []
},
{
dateField: '2013-06-03T03:27:45.308Z',
stringField: 'BAUCH',
emailField: 'repudiandae-94491@quae.com',
integerField: 75,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.2883,
nullableStringField: 'not-null',
dateOnlyField: '2019-08-18',
dateTimeField: '2020-07-16T01:56:30.373Z',
jsonField: {
foo: 'biz',
ut: 'ad'
},
arrayOfInts: [4, 3],
arrayOfStrings: []
},
{
dateField: '2017-03-24T03:26:25.419Z',
stringField: 'Wisoky',
emailField: 'dolorem-94491@et.com',
integerField: 92,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.7888,
nullableStringField: null,
dateOnlyField: '2019-02-02',
dateTimeField: '2019-09-09T03:24:37.315Z',
jsonField: {
foo: 'biz',
beatae: 'eveniet'
},
arrayOfInts: [5, 7, 6, 7],
arrayOfStrings: ['turtle', 'fox']
},
{
dateField: '2014-03-15T22:36:04.533Z',
stringField: 'paucek',
emailField: 'suscipit-94491@qui.com',
integerField: -99,
stringEnumField: 'BAR',
booleanField: false,
floatField: -0.8793,
nullableStringField: null,
dateOnlyField: '2018-11-11',
dateTimeField: '2018-08-28T10:28:33.206Z',
jsonField: {
foo: 'bar',
et: 'et'
},
arrayOfInts: [5],
arrayOfStrings: ['fox']
},
{
dateField: '2011-01-30T04:08:46.682Z',
stringField: 'sed quod',
emailField: 'rerum-94491@totam.com',
integerField: 94,
stringEnumField: 'BAR',
booleanField: false,
floatField: 1.4452,
nullableStringField: null,
dateOnlyField: '2018-06-14',
dateTimeField: '2020-10-20T20:01:21.746Z',
jsonField: {
foo: 'baz',
possimus: 'rerum'
},
arrayOfInts: [8, 6],
arrayOfStrings: ['fox']
},
{
dateField: '2004-04-18T05:25:16.045Z',
stringField: 'kuhic',
emailField: 'delectus-94491@veniam.com',
integerField: 29,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.155,
nullableStringField: 'not-null',
dateOnlyField: '2018-07-13',
dateTimeField: '2018-10-20T06:13:15.849Z',
jsonField: {
foo: 'bar',
illum: 'eum'
},
arrayOfInts: [3],
arrayOfStrings: ['pony', 'fox']
},
{
dateField: '2007-01-12T22:16:15.486Z',
stringField: 'WITTING',
emailField: 'expedita-94491@et.com',
integerField: -32,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.8227,
nullableStringField: null,
dateOnlyField: '2018-09-27',
dateTimeField: '2019-04-12T20:45:07.338Z',
jsonField: {
foo: 'baz',
iste: 'rerum'
},
arrayOfInts: [0, 4, 8],
arrayOfStrings: []
},
{
dateField: '2018-01-10T18:22:09.406Z',
stringField: 'VOLUPTATE ATQUE',
emailField: 'fuga-94491@quia.com',
integerField: -52,
stringEnumField: 'FOO',
booleanField: true,
floatField: 0.2288,
nullableStringField: 'not-null',
dateOnlyField: '2019-11-30',
dateTimeField: '2018-03-27T22:53:48.958Z',
jsonField: {
foo: 'biz',
nesciunt: 'cumque'
},
arrayOfInts: [7, 5, 3],
arrayOfStrings: ['fox']
},
{
dateField: '2010-05-18T02:18:18.985Z',
stringField: 'placeat voluptate',
emailField: 'distinctio-94491@quod.com',
integerField: -56,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.0476,
nullableStringField: 'not-null',
dateOnlyField: '2018-08-21',
dateTimeField: '2019-03-07T05:12:30.173Z',
jsonField: {
foo: 'biz',
aut: 'aut'
},
arrayOfInts: [],
arrayOfStrings: []
},
{
dateField: '2016-05-17T05:10:53.438Z',
stringField: 'SIT ODIO',
emailField: 'architecto-94491@assumenda.com',
integerField: -48,
stringEnumField: 'BAR',
booleanField: true,
floatField: -1.9699,
nullableStringField: 'not-null',
dateOnlyField: '2018-04-20',
dateTimeField: '2020-03-01T10:34:35.584Z',
jsonField: {
foo: 'baz',
tempore: 'eum'
},
arrayOfInts: [7, 2, 6, 4],
arrayOfStrings: []
},
{
dateField: '2015-08-15T07:07:17.217Z',
stringField: 'pagac',
emailField: 'ipsam-94491@suscipit.com',
integerField: -31,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.4011,
nullableStringField: 'not-null',
dateOnlyField: '2018-06-02',
dateTimeField: '2019-07-20T05:45:49.910Z',
jsonField: {
foo: 'biz',
minus: 'consequuntur'
},
arrayOfInts: [4, 0, 0, 2],
arrayOfStrings: ['cat']
},
{
dateField: '2019-08-01T09:40:01.190Z',
stringField: 'eius necessitatibus',
emailField: 'itaque-94491@enim.com',
integerField: -91,
stringEnumField: 'FOO',
booleanField: true,
floatField: 1.253,
nullableStringField: 'not-null',
dateOnlyField: '2020-12-30',
dateTimeField: '2018-05-17T00:44:29.364Z',
jsonField: {
foo: 'bar',
non: 'voluptatem'
},
arrayOfInts: [6, 5, 1, 3],
arrayOfStrings: []
},
{
dateField: '2005-06-06T18:49:01.780Z',
stringField: 'RICE',
emailField: 'in-94491@et.com',
integerField: -43,
stringEnumField: 'BAR',
booleanField: false,
floatField: -1.2905,
nullableStringField: null,
dateOnlyField: '2020-12-20',
dateTimeField: '2020-11-26T21:51:44.294Z',
jsonField: {
foo: 'bar',
ipsum: 'nihil'
},
arrayOfInts: [8, 7],
arrayOfStrings: ['fox']
},
{
dateField: '2002-12-21T01:03:05.343Z',
stringField: 'Mortimer',
emailField: 'est-94491@illo.com',
integerField: -90,
stringEnumField: 'BAR',
booleanField: false,
floatField: 0.7536,
nullableStringField: 'not-null',
dateOnlyField: '2018-01-19',
dateTimeField: '2019-10-21T19:02:03.628Z',
jsonField: {
foo: 'baz',
sequi: 'fuga'
},
arrayOfInts: [2, 1, 5, 7],
arrayOfStrings: []
},
{
dateField: '2014-10-25T23:56:54.458Z',
stringField: 'EX POSSIMUS',
emailField: 'est-94491@aperiam.com',
integerField: -63,
stringEnumField: 'FOO',
booleanField: true,
floatField: -0.78,
nullableStringField: 'not-null',
dateOnlyField: '2018-05-14',
dateTimeField: '2020-07-26T19:01:52.792Z',
jsonField: {
foo: 'biz',
doloribus: 'dolore'
},
arrayOfInts: [8, 6, 6],
arrayOfStrings: []
}
];
// // For running directly with ts-node
// if (require.main === module) {
// const results = [...KITCHEN_SINKS];
// for (let index = 0; index < results.length; index++) {
// // const random = new Date()
// // .getTime()
// // .toString()
// // .substring(8, 13);
// // const nullableStringField = Faker.random.arrayElement([null, 'not-null']);
// // const lorem = Faker.lorem.words(2);
// // const name = Faker.name.firstName();
// // const last = Faker.name.lastName();
// // const string = Faker.random.arrayElement([
// // lorem,
// // lorem.toUpperCase(),
// // lorem.toLowerCase(),
// // name,
// // name.toUpperCase(),
// // name.toLowerCase(),
// // last,
// // last.toUpperCase(),
// // last.toLowerCase()
// // ]);
// // To add a new field, just attach to the results
// // results[index].nullableStringField = nullableStringField;
// results[index].dateOnlyField = Faker.date
// .between('2018-01-01', '2020-12-31')
// .toISOString()
// .substring(0, 10);
// results[index].dateTimeField = Faker.date.between('2018-01-01', '2020-12-31').toISOString();
// results[index].jsonField = {
// foo: Faker.random.arrayElement(['bar', 'baz', 'biz']),
// [Faker.lorem.word()]: Faker.lorem.word()
// } as JsonObject;
// results[index].arrayOfInts = [...Array(Faker.random.number(4))].map(() =>
// Faker.random.number(8)
// );
// results[index].arrayOfStrings = [...Array(Faker.random.number(2))].map(() => {
// return Faker.random.arrayElement(['dog', 'cat', 'pony', 'turtle', 'fox']);
// });
// }
// fs.writeFileSync('./sinks.json', JSON.stringify(results, undefined, 2));
// } | the_stack |
import {
generateBookmark,
generateHistory,
generateTab,
} from "cypress/fixtures/fixtures";
import { setupExtensionEnvironment } from "cypress/support/support";
import { SEARCH_PREFIX } from "~/constants";
// End-to-end tests for the extension popup.
// Covers searching (tabs / bookmarks / histories / browser-search fallback),
// keyboard interaction, favorites, and the info / setting pages.
// All elements are located via [data-cy=*] test hooks.
describe("App", () => {
  beforeEach(() => {
    // Open the built popup page with a stubbed extension environment:
    // one bookmark folder (containing a nested folder) plus three flat
    // bookmarks, three histories, and three tabs.
    cy.visit("/dist/popup/index.html", {
      onBeforeLoad(win: Cypress.AUTWindow & { chrome: typeof chrome }) {
        setupExtensionEnvironment({
          win,
          bookmarks: [
            generateBookmark({
              title: "folder",
              type: "folder",
              children: [
                generateBookmark({ title: `folder-item` }),
                generateBookmark({
                  title: `nested-folder`,
                  type: "folder",
                  children: [generateBookmark({ title: `nested-folder-item` })],
                }),
              ],
            }),
            ...[...Array(3)].map((_, i) =>
              generateBookmark({ title: `bookmark-item-${i}` })
            ),
          ],
          histories: [...Array(3)].map((_, i) =>
            generateHistory({ title: `history-item-${i}` })
          ),
          tabs: [...Array(3)].map((_, i) =>
            generateTab({ title: `tab-item-${i}` })
          ),
        });
      },
    });
  });
  afterEach(() => {
    // Return to the search tab, clear the query, and wipe persisted storage
    // so every test starts from a clean popup state.
    cy.get("[data-cy=search-tab-btn]").click();
    cy.get("[data-cy=search-input]").clear();
    cy.clearLocalStorage();
  });
  // Typing a tab title lists it with the "tab" result type.
  it("show tabs", () => {
    cy.get("[data-cy=search-input]").type("tab-item-0");
    cy.get("[data-cy=search-result-0]").should("include.text", "tab-item-0");
    cy.get("[data-cy=search-result-type-0]").should("have.text", "tab");
  });
  // Typing a bookmark title lists it with the "bookmark" result type.
  it("show bookmarks", () => {
    cy.get("[data-cy=search-input]").type("bookmark-item-0");
    cy.get("[data-cy=search-result-0]").should(
      "include.text",
      "bookmark-item-0"
    );
    cy.get("[data-cy=search-result-type-0]").should("have.text", "bookmark");
    cy.get("[data-cy=search-input]").clear();
  });
  // Bookmark results are prefixed with their (possibly nested) folder path.
  it("show bookmarks folder name", () => {
    cy.get("[data-cy=search-input]").type("folder-item");
    cy.get("[data-cy=search-result-0]").should(
      "include.text",
      "[folder]folder-item"
    );
    cy.get("[data-cy=search-result-1]").should(
      "include.text",
      "[folder/nested-folder]nested-folder-item"
    );
  });
  // Typing a history title lists it with the "history" result type.
  it("show histories", () => {
    cy.get("[data-cy=search-input]").type("history-item-0");
    cy.get("[data-cy=search-result-0]").should(
      "include.text",
      "history-item-0"
    );
    cy.get("[data-cy=search-result-type-0]").should("have.text", "history");
  });
  // A query matching nothing offers the browser-search fallback.
  it("show search on browser", () => {
    cy.get("[data-cy=search-input]").type("unknown-item");
    cy.get("[data-cy=search-result-wrapper]").should(
      "include.text",
      '"unknown-item" search with browser'
    );
  });
  // ctrl-n / down-arrow move the selection down, ctrl-p / up-arrow move it
  // up, and the selection is clamped at both ends of the result list.
  it("change selected item", () => {
    cy.get("[data-cy=search-input]").type("history-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{ctrl}n");
    cy.get("[data-cy=search-result-1]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{downArrow}");
    cy.get("[data-cy=search-result-2]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{ctrl}n");
    cy.get("[data-cy=search-result-2]").should("have.class", "selected-item"); // check overflow control
    cy.get("[data-cy=search-input]").type("{ctrl}p");
    cy.get("[data-cy=search-result-1]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{upArrow}");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{ctrl}p");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item"); // check overflow control
  });
  // Escape closes the popup window.
  it("on esc key", () => {
    cy.get("[data-cy=search-input]").type("{esc}");
    cy.window().its("close").should("be.called");
  });
  // Enter opens the selected result in the current tab (message to the
  // background page over the extension port).
  it("on Enter key", () => {
    cy.get("[data-cy=search-input]").type("history-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{enter}");
    cy.get("@connect-postMessage").should("be.calledWithMatch", {
      messageID: "update-current-page",
    });
  });
  // ctrl+Enter opens the selected result in a new tab.
  it("on ctrl Enter key", () => {
    cy.get("[data-cy=search-input]").type("history-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{ctrl}{enter}");
    cy.get("@connect-postMessage").should("be.calledWithMatch", {
      messageID: "open-new-tab-page",
    });
  });
  // meta+Enter behaves like ctrl+Enter (macOS variant).
  it("on meta Enter key", () => {
    cy.get("[data-cy=search-input]").type("history-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{meta}{enter}");
    cy.get("@connect-postMessage").should("be.calledWithMatch", {
      messageID: "open-new-tab-page",
    });
  });
  // Enter on an unmatched query delegates to the browser's search API.
  it("on enter when browser search", () => {
    cy.get("[data-cy=search-input]").type("unknown-item");
    cy.get("[data-cy=browser-search-btn]");
    cy.get("[data-cy=search-input]").type("{enter}");
    cy.window()
      .its("chrome.search.query")
      .should("be.calledWith", { text: "unknown-item", disposition: 1 });
  });
  // Favorites can be toggled via the ctrl-f shortcut or by clicking the star;
  // favorited items remain listed when the query is cleared, and tabs are
  // excluded from being favorited.
  it("favorite", () => {
    // check favorite
    cy.get("[data-cy=search-input]").type("history-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{ctrl}f"); // with shortcut
    cy.get("[data-cy=search-result-0]")
      .get("[data-cy=icon-star]")
      .should("be.visible");
    cy.get("[data-cy=search-input]").type("{ctrl}n");
    cy.get("[data-cy=search-result-1]").should("have.class", "selected-item");
    cy.get("[data-cy=search-result-favorite-1]").click(); // with click
    cy.get("[data-cy=search-result-1]")
      .get("[data-cy=icon-star]")
      .should("be.visible");
    cy.get("[data-cy=search-input]").clear();
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-result-0]")
      .get("[data-cy=icon-star]")
      .should("be.visible");
    cy.get("[data-cy=search-result-1]")
      .get("[data-cy=icon-star]")
      .should("be.visible");
    // un check favorite
    cy.get("[data-cy=search-input]").type("history-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-input]").type("{ctrl}f");
    cy.get("[data-cy=search-result-0]")
      .get("[data-cy=icon-star-border]")
      .should("be.visible");
    cy.get("[data-cy=search-input]").type("{ctrl}n");
    cy.get("[data-cy=search-result-1]").should("have.class", "selected-item");
    cy.get("[data-cy=search-result-favorite-1]").click();
    cy.get("[data-cy=search-result-1]")
      .get("[data-cy=icon-star-border]")
      .should("be.visible");
    cy.get("[data-cy=search-input]").clear();
    cy.get("[data-cy=search-result-empty]").should("be.visible");
    // check tabs can't be made favorites.
    cy.get("[data-cy=search-input]").type("tab-item");
    cy.get("[data-cy=search-result-0]").should("have.class", "selected-item");
    cy.get("[data-cy=search-result-favorite-0]")
      .get("[data-cy=toggle-star]")
      .should("not.exist");
    cy.get("[data-cy=search-input]").type("{ctrl}f"); // with shortcut
  });
  // The info tab button shows the info page.
  it("show info page", () => {
    cy.get("[data-cy=info-tab-btn]").click();
    cy.get("[data-cy=page-info]").should("be.visible");
  });
  // The setting tab button shows the setting page.
  it("show setting page", () => {
    cy.get("[data-cy=setting-tab-btn]").click();
    cy.get("[data-cy=page-setting]").should("be.visible");
  });
  // Selecting a search prefix in settings prefills the search input with it
  // and pre-filters results to the matching source; empty prefix resets.
  it("change prefix setting", () => {
    // bookmark
    cy.get("[data-cy=setting-tab-btn]").click();
    cy.get("[data-cy=page-setting]").should("be.visible");
    cy.get("[data-cy=select-prefix]").select(SEARCH_PREFIX.BOOKMARK);
    cy.get("[data-cy=search-tab-btn]").click();
    cy.get("[data-cy=page-search]").should("be.visible");
    cy.get("[data-cy=search-input]").should(
      "have.value",
      SEARCH_PREFIX.BOOKMARK
    );
    cy.get("[data-cy=search-result-0]").should("include.text", "bookmark-item");
    // history
    cy.get("[data-cy=setting-tab-btn]").click();
    cy.get("[data-cy=page-setting]").should("be.visible");
    cy.get("[data-cy=select-prefix]").select(SEARCH_PREFIX.HISTORY);
    cy.get("[data-cy=search-tab-btn]").click();
    cy.get("[data-cy=page-search]").should("be.visible");
    cy.get("[data-cy=search-input]").should(
      "have.value",
      SEARCH_PREFIX.HISTORY
    );
    cy.get("[data-cy=search-result-0]").should("include.text", "history-item");
    // tab
    cy.get("[data-cy=setting-tab-btn]").click();
    cy.get("[data-cy=page-setting]").should("be.visible");
    cy.get("[data-cy=select-prefix]").select(SEARCH_PREFIX.TAB);
    cy.get("[data-cy=search-tab-btn]").click();
    cy.get("[data-cy=page-search]").should("be.visible");
    cy.get("[data-cy=search-input]").should("have.value", SEARCH_PREFIX.TAB);
    cy.get("[data-cy=search-result-0]").should("include.text", "tab-item");
    // none
    cy.get("[data-cy=setting-tab-btn]").click();
    cy.get("[data-cy=page-setting]").should("be.visible");
    cy.get("[data-cy=select-prefix]").select("");
    cy.get("[data-cy=search-tab-btn]").click();
    cy.get("[data-cy=page-search]").should("be.visible");
    cy.get("[data-cy=search-input]").should("have.value", "");
    cy.get("[data-cy=search-result-empty]").should("be.visible");
  });
  // Theme radios toggle the "dark" class on the root html element.
  it("change dark mode setting", () => {
    cy.get("[data-cy=setting-tab-btn]").click();
    cy.get("[data-cy=page-setting]").should("be.visible");
    cy.get("[data-cy=radio-light]").check();
    cy.get("html").should("not.have.class", "dark");
    cy.get("[data-cy=radio-dark]").check();
    cy.get("[data-cy=search-tab-btn]").click();
    cy.get("[data-cy=page-search]").should("be.visible");
  });
});
import { assert, expect } from "chai";
import { Matrix3d, Matrix4d, Point3d, Transform, Vector3d } from "@itwin/core-geometry";
import { fromNormalizedCrossProduct, Matrix3, Matrix4, normalizedDifference } from "../../../render/webgl/Matrix";
// Unit tests for the WebGL Matrix3 wrapper.
// Matrix3.data is stored column-major (data[col * 3 + row]) while
// Matrix3d.coffs is row-major (coffs[row * 3 + col]); the loops below assert
// exactly the same element correspondences as the original hand-written list.
describe("Matrix3", () => {
  it("constructor works as expected", () => {
    // ensure correct conversion from 64 bit number to 32 bit number
    const big = 9007199254740991;
    const mat = Matrix3.fromValues(big, big, big, big, big, big, big, big, big);
    for (const value of mat.data) {
      assert.isTrue(value === 9007199254740992);
    }
  });
  it("toMatrix3d works as expected", () => {
    const mat = Matrix3.fromValues(1, 2, 3, 4, 5, 6, 7, 8, 9);
    const rotMat = mat.toMatrix3d();
    assert.isTrue(rotMat instanceof Matrix3d, "is an instance of Matrix3d");
    // every (row, col) entry survives the column-major -> row-major round trip
    for (let row = 0; row < 3; row++) {
      for (let col = 0; col < 3; col++) {
        assert.isTrue(
          mat.data[col * 3 + row] === rotMat.coffs[row * 3 + col],
          `(${row},${col}) is equivalent`
        );
      }
    }
  });
  it("fromMatrix3d works as expected", () => {
    const rotMat = Matrix3d.createRowValues(1, 2, 3, 4, 5, 6, 7, 8, 9);
    const mat = Matrix3.fromMatrix3d(rotMat);
    assert.isTrue(mat instanceof Matrix3, "is an instance of Matrix3");
    for (let row = 0; row < 3; row++) {
      for (let col = 0; col < 3; col++) {
        assert.isTrue(
          mat.data[col * 3 + row] === rotMat.coffs[row * 3 + col],
          `(${row},${col}) is equivalent`
        );
      }
    }
  });
  it("transpose works as expected", () => {
    const mat = Matrix3.fromValues(1, 2, 3, 4, 5, 6, 7, 8, 9);
    const transposedMat = Matrix3.fromTranspose(mat);
    // entry (row, col) of the transpose equals entry (col, row) of the source
    for (let row = 0; row < 3; row++) {
      for (let col = 0; col < 3; col++) {
        expect(mat.data[col * 3 + row]).to.equal(
          transposedMat.data[row * 3 + col]
        );
      }
    }
  });
});
// Unit tests for the WebGL Matrix4 wrapper.
// Matrix4.data is stored column-major (data[col * 4 + row]); Matrix4d uses
// atIJ(row, col). The loops assert exactly the element correspondences and
// messages the original hand-written assertion lists produced.
describe("Matrix4", () => {
  it("constructor works as expected", () => {
    // ensure correct conversion from 64 bit number to 32 bit number
    const big = 9007199254740991;
    const mat = Matrix4.fromValues(big, big, big, big, big, big, big, big, big, big, big, big, big, big, big, big);
    for (const value of mat.data) {
      assert.isTrue(value === 9007199254740992);
    }
  });
  it("identity works as expected", () => {
    const mat = Matrix4.fromIdentity();
    // ones on the diagonal, zeros everywhere else
    for (let row = 0; row < 4; row++) {
      for (let col = 0; col < 4; col++) {
        const expectedValue = row === col ? 1 : 0;
        assert.isTrue(
          mat.data[col * 4 + row] === expectedValue,
          `(${row},${col}) --> ${expectedValue}`
        );
      }
    }
  });
  it("getRotation works as expected", () => {
    const mat4 = Matrix4.fromValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16);
    const mat3 = mat4.getRotation();
    // upper-left 3x3 of the 4x4 is extracted unchanged
    for (let row = 0; row < 3; row++) {
      for (let col = 0; col < 3; col++) {
        assert.isTrue(
          mat3.data[col * 3 + row] === mat4.data[col * 4 + row],
          `(${row},${col}) is equivalent`
        );
      }
    }
  });
  it("initFromTransform works as expected", () => {
    const origin = new Vector3d(10, 11, 12);
    const rotMat = Matrix3d.createRowValues(1, 2, 3, 4, 5, 6, 7, 8, 9);
    const tran = Transform.createOriginAndMatrix(origin, rotMat);
    const mat4 = Matrix4.fromIdentity();
    mat4.initFromTransform(tran);
    // expected row-major layout: rotation in the upper-left 3x3, the origin
    // in column 3, and an implicit (0, 0, 0, 1) bottom row.
    const expected = [
      [1, 2, 3, 10],
      [4, 5, 6, 11],
      [7, 8, 9, 12],
      [0, 0, 0, 1],
    ];
    for (let row = 0; row < 4; row++) {
      for (let col = 0; col < 4; col++) {
        assert.isTrue(
          mat4.data[col * 4 + row] === expected[row][col],
          `(${row},${col}) --> ${expected[row][col]}`
        );
      }
    }
  });
  it("toTransform works as expected", () => {
    const validMat = Matrix4.fromValues(1, 2, 3, 10, 4, 5, 6, 11, 7, 8, 9, 12, 0, 0, 0, 1);
    const tran = validMat.toTransform();
    const mat = tran.matrix;
    const origin = tran.origin;
    // the upper-left 3x3 becomes the transform's rotation part
    const rotation = [
      [1, 2, 3],
      [4, 5, 6],
      [7, 8, 9],
    ];
    for (let row = 0; row < 3; row++) {
      for (let col = 0; col < 3; col++) {
        assert.isTrue(
          mat.at(row, col) === rotation[row][col],
          `(${row},${col}) --> ${rotation[row][col]}`
        );
      }
    }
    // the fourth column becomes the transform's origin
    assert.isTrue(origin.x === 10, "(0,3) --> 10");
    assert.isTrue(origin.y === 11, "(1,3) --> 11");
    assert.isTrue(origin.z === 12, "(2,3) --> 12");
  });
  it("fromMatrix4d works as expected", () => {
    const mat4d = Matrix4d.createRowValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16);
    const mat4 = Matrix4.fromMatrix4d(mat4d);
    for (let row = 0; row < 4; row++) {
      for (let col = 0; col < 4; col++) {
        assert.isTrue(
          mat4.data[col * 4 + row] === mat4d.atIJ(row, col),
          `(${row},${col}) is equivalent`
        );
      }
    }
  });
  it("toMatrix4d works as expected", () => {
    const mat4 = Matrix4.fromValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16);
    const mat4d = mat4.toMatrix4d();
    for (let row = 0; row < 4; row++) {
      for (let col = 0; col < 4; col++) {
        assert.isTrue(
          mat4.data[col * 4 + row] === mat4d.atIJ(row, col),
          `(${row},${col}) is equivalent`
        );
      }
    }
  });
});
describe("Vector3d functions", () => {
it("fromNormalizedCrossProduct", () => {
const vec0 = new Vector3d(-1, 7, 4);
const vec1 = new Vector3d(-5, 8, 4);
const vec = Vector3d.createCrossProduct(vec0.x, vec0.y, vec0.z, vec1.x, vec1.y, vec1.z);
assert.isTrue(vec.isExactEqual(new Vector3d(-4, -16, 27)), "cross product is correct");
const nVec = vec.normalize();
// (-0.126428, -0.505712, 0.853388)
const expectedResult = new Vector3d(-0.126428, -0.505712, 0.853388);
assert.isTrue(nVec!.isAlmostEqual(expectedResult), "normalized is correct");
assert.isTrue(fromNormalizedCrossProduct(vec0, vec1)!.isAlmostEqual(expectedResult), "fromNormalizedCrossProduct works as expected");
});
it("normalizedDifference", () => {
const target = new Point3d(5, 6, 7);
const origin = new Point3d(1, 2, 3);
// expected result (0.57735, 0.57735, 0.57735)
const expectedResult = new Point3d(0.57735, 0.57735, 0.57735);
assert.isTrue(normalizedDifference(target, origin)!.isAlmostEqual(expectedResult), "normalizedDifference works as expected");
});
}); | the_stack |
import { CancellationToken } from 'vscode-languageserver';
import { assert } from '../common/debug';
import { combinePaths, getDirectoryPath } from '../common/pathUtils';
import { parseAndGetTestState } from './harness/fourslash/testState';
import { testRenameModule } from './renameModuleTestUtils';
// Renames module.py -> renamedModule.py and verifies that the parenthesized
// "from . import (module)" form, plus the later attribute reference, are both
// rewritten. The [|...|] spans in the fourslash template mark the expected
// edit locations; /*marker*/ identifies the file being renamed.
test('from import with paren', () => {
const code = `
// @filename: module.py
//// def getFilename(path):
//// [|/*marker*/pass|]
// @filename: test1.py
//// from . import ([|module|])
////
//// [|module|].getFilename("c")
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
// Last two args are presumably the original/replacement text expected at each
// marked span ('module' -> 'renamedModule') — confirm in renameModuleTestUtils.
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`,
'module',
'renamedModule'
);
});
test('from import with paren with alias', () => {
const code = `
// @filename: module.py
//// def getFilename(path):
//// [|/*marker*/pass|]
// @filename: test1.py
//// from . import ([|module|] as [|module|])
////
//// [|module|].getFilename("c")
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`,
'module',
'renamedModule'
);
});
test('from import with paren multiple import names', () => {
const code = `
// @filename: common/__init__.py
//// # empty
// @filename: module.py
//// def getFilename(path):
//// [|/*marker*/pass|]
// @filename: module2.py
//// # empty
// @filename: test1.py
//// [|{|"r":"from .common import renamedModule as renamedModule!n!"|}|]from . import ([|{|"r":""|}module as module, |]module2)
////
//// [|{|"r":"renamedModule"|}module|].getFilename("c")
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'renamedModule.py')}`);
});
test('rename - circular references', () => {
const code = `
// @filename: module1.py
//// from . import [|mySelf|] as [|mySelf|]
// @filename: mySelf.py
//// from module1 import *
//// [|/*marker*/mySelf|].foo()
////
//// def foo():
//// pass
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`,
'mySelf',
'renamedModule'
);
});
test('move - circular references', () => {
const code = `
// @filename: module1.py
//// from [|{|"r":".common"|}.|] import [|{|"r":"renamedModule"|}mySelf|] as [|{|"r":"renamedModule"|}mySelf|]
// @filename: common/__init__.py
//// # empty
// @filename: mySelf.py
//// [|/*marker*/|]
//// from module1 import *
//// [|{|"r":"renamedModule"|}mySelf|].foo()
//// def foo():
//// pass
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'renamedModule.py')}`);
});
test('py and pyi file update', () => {
const code = `
// @filename: module.py
//// def getFilename(path):
//// pass
// @filename: module.pyi
//// [|/*marker*/|]
//// def getFilename(path): ...
// @filename: test1.py
//// from . import [|module|] as [|module|]
////
//// [|module|].getFilename("c")
// @filename: test1.pyi
//// from . import [|module|] as [|module|]
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.pyi')}`,
'module',
'renamedModule'
);
});
test('py and pyi file update from py', () => {
// No reference. if both py and pyi exist, then given file must point to pyi not py.
const code = `
// @filename: module.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: module.pyi
//// def getFilename(path): ...
// @filename: test1.py
//// from . import module
////
//// module.getFilename("c")
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('handle __all__ reference', () => {
const code = `
// @filename: module.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from . import [|module|]
////
//// __all__ = [ "[|module|]" ]
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`,
'module',
'renamedModule'
);
});
test('handle __all__ re-export', () => {
const code = `
// @filename: module.py
//// [|/*marker*/|]
//// def foo(path):
//// pass
// @filename: common/__init__.py
//// # empty
// @filename: test1.py
//// from [|{|"r":".common"|}.|] import [|{|"r":"renamedModule"|}module|]
////
//// __all__ = [ "[|{|"r":"renamedModule"|}module|]" ]
// @filename: test2.py
//// from test1 import [|{|"r":"renamedModule"|}module|]
////
//// [|renamedModule|].foo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'renamedModule.py')}`);
});
test('__init__.py rename', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from [|common|] import getFilename
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`,
'common',
'common.renamedModule'
);
});
test('__init__.py rename import', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
// @filename: test1.py
//// import [|common|]
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`,
'common',
'common.renamedModule as renamedModule'
);
});
test('__init__.py move to nested folder', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from [|common|] import getFilename
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'nested', 'renamedModule.py')}`,
'common',
'common.nested.renamedModule'
);
});
test('__init__.py move to nested folder with same name', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from [|common|] import getFilename
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), 'nested', '__init__.py')}`,
'common',
'common.nested'
);
});
test('__init__.py move to parent folder', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from [|common|] import getFilename
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(
state,
fileName,
`${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`,
'common',
'renamedModule'
);
});
// Moving common/__init__.py onto the parent folder's __init__.py must be
// rejected: renameModule is expected to return no edits. (Presumably the
// target would collide with the parent package's own __init__.py — confirm
// against the renameModule implementation.) Unlike the other tests, this one
// calls program.renameModule directly instead of testRenameModule.
test('__init__.py move to parent folder with same name 1', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from [|common|] import getFilename
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
const edits = state.program.renameModule(
fileName,
`${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`,
CancellationToken.None
);
// The operation must produce no workspace edits at all.
assert(!edits);
});
test('__init__.py with alias', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// from [|{|"r":".common"|}.|] import [|{|"r":"renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|]
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('__init__.py import with alias', () => {
const code = `
// @filename: common/__init__.py
//// [|/*marker*/|]
//// def getFilename(path):
//// pass
// @filename: test1.py
//// import [|{|"r":"common.renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|]
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('__init__.py rename complex', () => {
const code = `
// @filename: common/__init__.py
//// import [|{|"r":"common.nested.renamedModule"|}common.nested.lib|] as [|{|"r":"renamedModule"|}lib|]
//// __all__ = ["[|{|"r":"renamedModule"|}lib|]"]
// @filename: reexport.py
//// from common import [|{|"r":"renamedModule"|}lib|] as [|{|"r":"renamedModule"|}lib|]
// @filename: common/nested/__init__.py
//// # empty
// @filename: common/nested/lib/__init__.py
//// [|/*marker*/|]
//// def foo():
//// pass
// @filename: test1.py
//// import common
//// common.[|{|"r":"renamedModule"|}lib|].foo()
// @filename: test2.py
//// from reexport import [|{|"r":"renamedModule"|}lib|]
//// [|{|"r":"renamedModule"|}lib|].foo()
// @filename: test3.py
//// from common import *
//// [|{|"r":"renamedModule"|}lib|].foo()
// @filename: test4.py
//// from reexport import *
//// [|{|"r":"renamedModule"|}lib|].foo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`);
});
test('__init__.py moved to parent folder with same name 2', () => {
const code = `
// @filename: common/__init__.py
//// import [|{|"r":"common.nested"|}common.nested.lib|] as [|{|"r":"nested"|}lib|]
//// __all__ = ["[|{|"r":"nested"|}lib|]"]
// @filename: reexport.py
//// from common import [|{|"r":"nested"|}lib|] as [|{|"r":"nested"|}lib|]
// @filename: common/nested/lib/__init__.py
//// [|/*marker*/|]
//// def foo():
//// pass
// @filename: test1.py
//// import common
//// common.[|{|"r":"nested"|}lib|].foo()
// @filename: test2.py
//// from reexport import [|{|"r":"nested"|}lib|]
//// [|{|"r":"nested"|}lib|].foo()
// @filename: test3.py
//// from common import *
//// [|{|"r":"nested"|}lib|].foo()
// @filename: test4.py
//// from reexport import *
//// [|{|"r":"nested"|}lib|].foo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`);
});
test('__init__.py changes middle of dotted name', () => {
const code = `
// @filename: common/__init__.py
//// # empty [|/*marker*/|]
//// from common.nested import lib as lib
// @filename: common/nested/lib.py
//// def libFoo():
//// pass
// @filename: common/nested/__init__.py
//// def nestedFoo():
//// pass
// @filename: test1.py
//// import common.nested.lib
//// common.nested.lib.libFoo()
// @filename: test2.py
//// from common import nested
//// nested.nestedFoo()
// @filename: test3.py
//// from [|{|"r":"common.renamedModule"|}common|] import *
//// lib.libFoo()
// @filename: test4.py
//// from [|{|"r":"common.renamedModule"|}common|] import lib
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('__init__.py - split from import statement', () => {
const code = `
// @filename: common/__init__.py
//// # empty [|/*marker*/|]
//// from common.nested import lib as lib
// @filename: common/nested/lib.py
//// def libFoo():
//// pass
// @filename: common/nested/__init__.py
//// def nestedFoo():
//// pass
// @filename: test1.py
//// from common import nested[|{|"r":""|}, lib|][|{|"r":"!n!from common.renamedModule import lib"|}|]
//// nested.nestedFoo()
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('__init__.py - split from import statement with multiple names', () => {
const code = `
// @filename: common/__init__.py
//// # empty [|/*marker*/|]
//// from common.nested import lib as lib
//// def commonFoo():
//// pass
// @filename: common/nested/lib.py
//// def libFoo():
//// pass
// @filename: common/nested/__init__.py
//// def nestedFoo():
//// pass
// @filename: test1.py
//// from common import nested[|{|"r":""|}, lib, commonFoo|][|{|"r":"!n!from common.renamedModule import commonFoo, lib"|}|]
//// nested.nestedFoo()
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('__init__.py - merge from import statement with multiple names', () => {
const code = `
// @filename: common/nested/__init__.py
//// # empty [|/*marker*/|]
//// from common.nested2 import lib as lib
//// def commonFoo():
//// pass
// @filename: common/nested/sub.py
//// # empty
// @filename: common/empty.py
//// # empty
// @filename: common/nested2/lib.py
//// def libFoo():
//// pass
// @filename: test1.py
//// from common.nested import [|{|"r":""|}commonFoo, lib, |]sub
//// from common import [|{|"r":"commonFoo, "|}|]empty[|{|"r":", lib"|}|]
////
//// nested.commonFoo()
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`);
});
test('__init__.py - split from import statement with multiple names with circular reference', () => {
const code = `
// @filename: common/__init__.py
//// # empty
//// from common.nested import lib as lib
//// from common.nested import [|/*marker*/{|"r":"renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|]
////
//// def commonFoo():
//// pass
// @filename: common/nested/lib.py
//// def libFoo():
//// pass
// @filename: common/nested/__init__.py
//// from [|{|"r":".."|}...|] import [|{|"r":"renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|]
// @filename: test1.py
//// from common import nested[|{|"r":""|}, lib, common|][|{|"r":"!n!from common.renamedModule import lib, renamedModule"|}|]
//// nested.[|{|"r":"renamedModule"|}common|].commonFoo()
//// [|{|"r":"renamedModule"|}common|].commonFoo()
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`);
});
test('__init__.py - merge from import statement with multiple names with circular reference', () => {
const code = `
// @filename: common/nested/__init__.py
//// # empty
//// from common.nested2 import lib as lib
//// from common.nested2 import [|/*marker*/{|"r":"common"|}nested|] as [|{|"r":"common"|}nested|]
////
//// def commonFoo():
//// pass
// @filename: common/nested/sub.py
//// # empty
// @filename: common/empty.py
//// # empty
// @filename: common/nested2/__init__.py
//// from [|{|"r":"..."|}..|] import [|{|"r":"common"|}nested|] as [|{|"r":"common"|}nested|]
// @filename: common/nested2/lib.py
//// def libFoo():
//// pass
// @filename: test1.py
//// from common.nested import [|{|"r":""|}nested, lib, |]sub
//// from common import [|{|"r":"common, "|}|]empty[|{|"r":", lib"|}|]
////
//// [|{|"r":"common"|}nested|].commonFoo()
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`);
});
test('__init__.py - merge from import statement with multiple names with circular reference with only name change', () => {
const code = `
// @filename: common/nested/__init__.py
//// # empty
//// from common.nested2 import lib as lib
//// from common.nested2 import [|/*marker*/{|"r":"renamedModule"|}nested|] as [|{|"r":"renamedModule"|}nested|]
////
//// def commonFoo():
//// pass
// @filename: common/nested/sub.py
//// # empty
// @filename: common/empty.py
//// # empty
// @filename: common/nested2/__init__.py
//// from .. import [|{|"r":"renamedModule"|}nested|] as [|{|"r":"renamedModule"|}nested|]
// @filename: common/nested2/lib.py
//// def libFoo():
//// pass
// @filename: test1.py
//// from common.nested import [|{|"r":""|}nested, lib, |]sub[|{|"r":"!n!from common.renamedModule import lib, renamedModule"|}|]
////
//// [|{|"r":"renamedModule"|}nested|].commonFoo()
//// lib.libFoo()
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`);
});
test('add and remove consecutive edits', () => {
const code = `
// @filename: a1.py
//// # empty [|/*marker*/|]
// @filename: a3.py
//// # empty
// @filename: test1.py
//// from . import [|{|"r":"a2"|}a1|], a3
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'a2.py')}`);
});
test('add and remove consecutive edits with alias 1', () => {
const code = `
// @filename: a1.py
//// # empty [|/*marker*/|]
// @filename: a3.py
//// # empty
// @filename: test1.py
//// from . import [|{|"r":"a2"|}a1|] as [|{|"r":"a2"|}a1|], a3
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'a2.py')}`);
});
test('add and remove consecutive edits with alias 2', () => {
const code = `
// @filename: a1.py
//// # empty [|/*marker*/|]
// @filename: a3.py
//// # empty
// @filename: test1.py
//// from . import [|{|"r":"a2"|}a1|] as a, a3
`;
const state = parseAndGetTestState(code).state;
const fileName = state.getMarkerByName('marker').fileName;
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'a2.py')}`);
}); | the_stack |
import { ScoredItem, SocialMediaItem } from '../common-types';
import {
applyCommentFilters,
itemHasImage,
itemIsVerified,
itemMeetsDateFilters,
itemMeetsRegexFilters,
itemMeetsToxicityRangeFilters,
} from './filter_utils';
describe('FilterUtils', () => {
it('gives correct output of applyCommentFilters', () => {
const regexFilters = [
{
regex: 'your',
include: true,
},
];
const currentTimeMs = Date.now();
const dateFilter = {
startDateTimeMs: currentTimeMs - 1000,
endDateTimeMs: currentTimeMs,
};
const toxicityRangeFilters = [
{
minScore: 0.8,
maxScore: 1,
includeUnscored: false,
},
];
const comments: Array<ScoredItem<SocialMediaItem>> = [
// Meets all filters.
{
item: {
id_str: 'a',
text: 'your mother was a hamster',
date: new Date(currentTimeMs - 1000),
hasImage: true,
verified: true,
},
scores: {
TOXICITY: 0.8,
},
},
// Only meets the regex filter.
{
item: {
id_str: 'b',
text: 'and your father smelt of elderberries',
date: new Date(currentTimeMs - 10000),
},
scores: {
TOXICITY: 0.7,
},
},
// Only meets the threshold filter.
{
item: {
id_str: 'c',
text: 'Now go away or I will taunt you a second time!',
date: new Date(currentTimeMs - 10000),
},
scores: {
TOXICITY: 0.8,
},
},
// Only meets the date filter.
{
item: {
id_str: 'd',
text:
'Supreme executive power derives from a mandate from the masses.',
date: new Date(currentTimeMs - 20),
},
scores: {
TOXICITY: 0.1,
},
},
// Meets threshold and regex filters.
{
item: {
id_str: 'e',
text: 'What is your quest??',
date: new Date(currentTimeMs - 1001),
},
scores: {
TOXICITY: 0.8,
},
},
// Meets regex and date filters.
{
item: {
id_str: 'f',
text: 'What is your favorite color?',
date: new Date(currentTimeMs - 1000),
},
scores: {
TOXICITY: 0.3,
},
},
// Meets threshold and date filters.
{
item: {
id_str: 'g',
text: 'She turned me into a newt!',
date: new Date(currentTimeMs - 1000),
},
scores: {
TOXICITY: 0.8,
},
},
// Only meets the image filter.
{
item: {
id_str: 'h',
text: 'What is the airspeed velocity of an unladen swallow?',
date: new Date(currentTimeMs - 1001),
hasImage: true,
},
scores: {
TOXICITY: 0.1,
},
},
// Only meets the verified filter.
{
item: {
id_str: 'i',
text: 'I thought we were an autonomous collective.',
date: new Date(currentTimeMs - 1001),
verified: true,
},
scores: {
TOXICITY: 0.1,
},
},
];
expect(
applyCommentFilters(comments, {
toxicityRangeFilters,
regexFilters,
dateFilter,
})
).toEqual([comments[0]]);
expect(
applyCommentFilters(comments, { regexFilters, dateFilter })
).toEqual([comments[0], comments[5]]);
expect(
applyCommentFilters(comments, { toxicityRangeFilters, dateFilter })
).toEqual([comments[0], comments[6]]);
expect(
applyCommentFilters(comments, { toxicityRangeFilters, regexFilters })
).toEqual([comments[0], comments[4]]);
expect(applyCommentFilters(comments, { toxicityRangeFilters })).toEqual([
comments[0],
comments[2],
comments[4],
comments[6],
]);
expect(applyCommentFilters(comments, { regexFilters })).toEqual([
comments[0],
comments[1],
comments[4],
comments[5],
]);
expect(applyCommentFilters(comments, { dateFilter })).toEqual([
comments[0],
comments[3],
comments[5],
comments[6],
]);
expect(applyCommentFilters(comments, { imageFilter: true })).toEqual([
comments[0],
comments[7],
]);
expect(applyCommentFilters(comments, { verifiedFilter: true })).toEqual([
comments[0],
comments[8],
]);
});
it(
'produces correct output of itemMeetsToxicityRangeFilters: single filter ' +
'min and max',
() => {
const filter = {
minScore: 0.5,
maxScore: 0.8,
includeUnscored: false,
};
const text = 'hello world';
const date = new Date();
const idStr = 'a';
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.1 }, item: { id_str: idStr, text, date } },
[filter]
)
).toBe(false);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.5 }, item: { id_str: idStr, text, date } },
[filter]
)
).toBe(true);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.799 }, item: { id_str: idStr, text, date } },
[filter]
)
).toBe(true);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.8 }, item: { id_str: idStr, text, date } },
[filter]
)
).toBe(false);
// Checks that scores of 1 are included for maxScore = 1.
const filter2 = {
minScore: 0.5,
maxScore: 1,
includeUnscored: false,
};
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 1 }, item: { id_str: idStr, text, date } },
[filter2]
)
).toBe(true);
}
);
it('produces correct output of itemMeetsToxicityRangeFilters: multiple filters', () => {
const filter1 = {
minScore: 0.2,
maxScore: 0.4,
includeUnscored: false,
};
const filter2 = {
minScore: 0.8,
maxScore: 1,
includeUnscored: false,
};
const filter3 = {
minScore: 0.7,
maxScore: 0.9,
includeUnscored: false,
};
const text = 'hello world';
const date = new Date();
const idStr = 'a';
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.19 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(false);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.3 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(true);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.4 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(false);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.5 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(false);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.7 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(true);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.8 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(true);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 0.9 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(true);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { TOXICITY: 1 }, item: { id_str: idStr, text, date } },
[filter1, filter2, filter3]
)
).toBe(true);
});
it('produces correct output of itemMeetsToxicityRangeFilter: missing TOXICITY score', () => {
const filter1 = {
minScore: 0.5,
maxScore: 1,
includeUnscored: false,
};
const filter2 = {
minScore: 0.5,
maxScore: 1,
includeUnscored: true,
};
const text = 'hello world';
const date = new Date();
const idStr = 'a';
expect(
itemMeetsToxicityRangeFilters(
{ scores: { INSULT: 0.5 }, item: { id_str: idStr, text, date } },
[filter1]
)
).toBe(false);
expect(
itemMeetsToxicityRangeFilters(
{ scores: { INSULT: 0.5 }, item: { id_str: idStr, text, date } },
[filter1, filter2]
)
).toBe(true);
});
// itemHasImage keys purely off the optional `hasImage` flag on the item:
// present-and-true -> true, absent -> false.
it('produces correct output of itemHasImage', () => {
    const scores = { TOXICITY: 0.2 };
    const withImage = {
      scores,
      item: { id_str: 'a', text: 'foo', date: new Date(), hasImage: true },
    };
    const withoutImage = {
      scores,
      item: { id_str: 'a', text: 'foo', date: new Date() },
    };
    expect(itemHasImage(withImage)).toBe(true);
    expect(itemHasImage(withoutImage)).toBe(false);
  });
// itemIsVerified keys purely off the optional `verified` flag on the item:
// present-and-true -> true, absent -> false.
it('produces correct output of itemIsVerified', () => {
    const scores = { TOXICITY: 0.2 };
    const verifiedItem = {
      scores,
      item: { id_str: 'a', text: 'foo', date: new Date(), verified: true },
    };
    const unverifiedItem = {
      scores,
      item: { id_str: 'a', text: 'foo', date: new Date() },
    };
    expect(itemIsVerified(verifiedItem)).toBe(true);
    expect(itemIsVerified(unverifiedItem)).toBe(false);
  });
it('produces correct output of itemMeetsRegexFilters: single filter include', () => {
const filters = [
{
regex: 'hello',
include: true,
},
];
const scores = { TOXICITY: 0.1 };
const date = new Date();
const idStr = 'a';
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello world', date } },
filters
)
).toBe(true);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'HELLO WORLD', date } },
filters
)
).toBe(true);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'goodbye', date } },
filters
)
).toBe(false);
});
it('produces correct output of itemMeetsRegexFilters: single filter exclude', () => {
const filters = [
{
regex: 'hello',
include: false,
},
];
const scores = { TOXICITY: 0.1 };
const date = new Date();
const idStr = 'a';
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello world', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'goodbye', date } },
filters
)
).toBe(true);
});
it('produces correct output of itemMeetsRegexFilters: author name and screen name', () => {
const filters = [
{
regex: 'hello',
include: true,
},
];
const scores = { TOXICITY: 0.1 };
const date = new Date();
const idStr = 'a';
expect(
itemMeetsRegexFilters(
{
scores,
item: {
id_str: idStr,
text: 'text',
authorName: 'hello123',
authorScreenName: 'screen name',
date,
},
},
filters
)
).toBe(true);
expect(
itemMeetsRegexFilters(
{
scores,
item: {
id_str: idStr,
text: 'text',
authorName: 'name',
authorScreenName: '@hElLo',
date,
},
},
filters
)
).toBe(true);
expect(
itemMeetsRegexFilters(
{
scores,
item: {
id_str: idStr,
text: 'text',
authorName: 'name',
authorScreenName: 'screen name',
date,
},
},
filters
)
).toBe(false);
});
it('produces correct output of itemMeetsRegexFilters: multiple filters include', () => {
const filters = [
{
regex: 'hello',
include: true,
},
{
regex: 'world',
include: true,
},
];
const scores = { TOXICITY: 0.1 };
const date = new Date();
const idStr = 'a';
// Include filters out comments that don't contain regexes from all the
// filters.
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello world', date } },
filters
)
).toBe(true);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'world', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'goodbye', date } },
filters
)
).toBe(false);
});
it('produces correct output of itemMeetsRegexFilters: multiple filters exclude', () => {
const filters = [
{
regex: 'hello',
include: false,
},
{
regex: 'world',
include: false,
},
];
const scores = { TOXICITY: 0.1 };
const date = new Date();
const idStr = 'a';
// Exclude filters out comments that contain any of the regexes in the
// filter.
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello world', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'world', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'goodbye', date } },
filters
)
).toBe(true);
});
it(
'produces correct output of itemMeetsRegexFilters: multiple filters include and ' +
'exclude',
() => {
const filters = [
{
regex: 'hello',
include: true,
},
{
regex: 'world',
include: true,
},
{
regex: 'wonderful',
include: false,
},
];
const scores = { TOXICITY: 0.1 };
const date = new Date();
const idStr = 'a';
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello world', date } },
filters
)
).toBe(true);
expect(
itemMeetsRegexFilters(
{
scores,
item: { id_str: idStr, text: 'hello wonderful world', date },
},
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'hello', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'world', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'wonderful', date } },
filters
)
).toBe(false);
expect(
itemMeetsRegexFilters(
{ scores, item: { id_str: idStr, text: 'goodbye', date } },
filters
)
).toBe(false);
}
);
it('produces correct output of itemMeetsDateFilters: date filter active', () => {
const currentTimeMs = Date.now();
const filter = {
startDateTimeMs: currentTimeMs - 1000,
endDateTimeMs: currentTimeMs,
};
const scores = { TOXICITY: 0.1 };
const text = 'hello world';
const idStr = 'a';
expect(
itemMeetsDateFilters(
{
scores,
item: { id_str: idStr, text, date: new Date(currentTimeMs - 1001) },
},
filter
)
).toBe(false);
expect(
itemMeetsDateFilters(
{
scores,
item: { id_str: idStr, text, date: new Date(currentTimeMs - 1000) },
},
filter
)
).toBe(true);
expect(
itemMeetsDateFilters(
{
scores,
item: { id_str: idStr, text, date: new Date(currentTimeMs - 1) },
},
filter
)
).toBe(true);
expect(
itemMeetsDateFilters(
{
scores,
item: { id_str: idStr, text, date: new Date(currentTimeMs) },
},
filter
)
).toBe(true);
expect(
itemMeetsDateFilters(
{
scores,
item: { id_str: idStr, text, date: new Date(currentTimeMs + 1) },
},
filter
)
).toBe(false);
});
}); | the_stack |
import { TreeSet } from 'jstreemap';
// Indices of the fields within a (seconds-first) cron expression,
// in token order.
const SECOND = 0;
const MINUTE = 1;
const HOUR = 2;
const DAY_OF_MONTH = 3;
const MONTH = 4;
const DAY_OF_WEEK = 5;
const YEAR = 6;
// Sentinel values stored in the per-field sets for the wildcard specifiers.
const ALL_SPEC_INT = 99; // '*'
const NO_SPEC_INT = 98; // '?'
// Three-letter month abbreviations -> 0-based month numbers.
const monthMap = new Map();
['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'].forEach(
  (name, index) => monthMap.set(name, index)
);
// Three-letter weekday abbreviations -> 1-based day numbers (SUN = 1),
// matching Quartz conventions.
const dayMap = new Map();
['SUN', 'MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT'].forEach((name, index) =>
  dayMap.set(name, index + 1)
);
export class CronExpression {
private cronExpression: string = null;
seconds = new TreeSet();
minutes = new TreeSet();
hours = new TreeSet();
daysOfMonth = new TreeSet();
months = new TreeSet();
daysOfWeek = new TreeSet();
years = new TreeSet();
lastdayOfWeek: boolean = false;
nthdayOfWeek: Number = 0;
lastdayOfMonth: boolean = false;
nearestWeekday: boolean = false;
expressionParsed: boolean = false;
strMinutes: string = null;
strHours: string = null;
strWeekdays: string = null;
strMonths: string = null;
strDaysOfMonth: string = null;
constructor(cronExpression: string) {
if (cronExpression === null) {
throw new Error('cronExpression cannot be null');
}
this.cronExpression = cronExpression;
this.buildExpressionSecondOptional(this.cronExpression.toUpperCase());
}
private buildExpressionSecondOptional = function(cronExpression: string): void {
let parsesWithMissingSeconds: boolean = false;
try {
//assume the expression doesn't contain seconds
this.buildExpression('0 ' + cronExpression);
parsesWithMissingSeconds = true;
} catch (e) {}
let parsesWithOriginal: boolean = false;
this.resetState();
try {
// check if the expression can be parsed as is
this.buildExpression(cronExpression);
parsesWithOriginal = true;
} catch (e) {
if (!parsesWithMissingSeconds) {
// the expression is definitely invalid
throw new Error(e);
} else {
this.resetState();
this.buildExpression('0 ' + cronExpression);
}
}
if (parsesWithMissingSeconds && parsesWithOriginal) {
throw new Error('Ambiguous cron expression' + String(-1));
}
};
buildExpression = function(cronExpression: string): void {
this.expressionParsed = true;
try {
if (this.seconds === null) {
this.seconds = new TreeSet();
}
if (this.minutes === null) {
this.minutes = new TreeSet();
}
if (this.hours === null) {
this.hours = new TreeSet();
}
if (this.daysOfMonth === null) {
this.daysOfMonth = new TreeSet();
}
if (this.months === null) {
this.months = new TreeSet();
}
if (this.daysOfWeek === null) {
this.daysOfWeek = new TreeSet();
}
if (this.years === null) {
this.years = new TreeSet();
}
let exprOn = SECOND;
let exprsTok: string[] = cronExpression.split(' ');
let len_exprsTok = 0;
while (len_exprsTok <= exprsTok.length - 1 && exprOn <= YEAR) {
if (exprsTok[len_exprsTok] != undefined) {
let expr: string = exprsTok[len_exprsTok].trim();
let vTok: string[] = expr.split(',');
let len_vTok = 0;
while (len_vTok <= vTok.length - 1) {
if (vTok[len_vTok] != undefined) {
let v: string = vTok[len_vTok];
this.storeExpressionVals(0, v, exprOn);
}
len_vTok++;
}
exprOn++;
}
len_exprsTok++;
}
if (exprOn <= DAY_OF_WEEK) {
throw new Error('Unexpected end of expression.');
}
if (exprOn <= YEAR) {
this.storeExpressionVals(0, '*', YEAR);
}
let dow: TreeSet<number> = this.getSet(DAY_OF_WEEK);
let dom: TreeSet<number> = this.getSet(DAY_OF_MONTH);
// Copying the logic from the UnsupportedOperationException below
let dayOfMSpec: boolean = !dom.has(NO_SPEC_INT);
let dayOfWSpec: boolean = !dow.has(NO_SPEC_INT);
if (dayOfMSpec && !dayOfWSpec) {
// skip
} else if (dayOfWSpec && !dayOfMSpec) {
// skip
} else {
throw new Error('Specifying both a day-of-week AND a day-of-month parameter is not supported.');
}
} catch (e) {
throw new Error(e);
}
};
storeExpressionVals = function(pos: number, s: string, type: number): number {
let incr: number = 0;
let i: number = this.skipWhiteSpace(pos, s);
if (i >= s.length) {
return i;
}
let c: String = s.charAt(i);
if (c >= 'A' && c <= 'Z' && !(s === 'L') && !(s === 'LW')) {
let sub: string = s.substring(i, i + 3);
let sVal = -1;
let eVal = -1;
if (type === MONTH) {
sVal = this.getMonthNumber(sub) + 1;
if (sVal <= 0) {
throw new Error("Invalid Month value: '" + sub + "'" + String(i));
}
if (s.length > i + 3) {
c = s.charAt(i + 3);
if (c === '-') {
i += 4;
sub = s.substring(i, i + 3);
eVal = this.getMonthNumber(sub) + 1;
if (eVal <= 0) {
throw new Error("Invalid Month value: '" + sub + "'" + String(i));
}
}
}
} else if (type === DAY_OF_WEEK) {
sVal = this.getDayOfWeekNumber(sub);
if (sVal < 0) {
throw new Error("Invalid Day-of-Week value: '" + sub + "'" + String(i));
}
if (s.length > i + 3) {
c = s.charAt(i + 3);
if (c === '-') {
i += 4;
sub = s.substring(i, i + 3);
eVal = this.getDayOfWeekNumber(sub);
if (eVal < 0) {
throw new Error("Invalid Day-of-Week value: '" + sub + "'" + String(i));
}
if (sVal > eVal) {
throw new Error('Invalid Day-of-Week sequence: ' + String(sVal) + ' > ' + String(eVal) + String(i));
}
} else if (c === '#') {
try {
i += 4;
this.nthdayOfWeek = Number(s.substring(i));
if (this.nthdayOfWeek < 1 || this.nthdayOfWeek > 5) {
throw new Error();
}
} catch (e) {
throw new Error("A numeric value between 1 and 5 must follow the '#' option" + String(i));
}
} else if (c === 'L') {
this.lastdayOfWeek = true;
i = i + 1;
}
}
} else {
throw new Error("Illegal characters for this position: '" + String(sub) + "'" + String(i));
}
if (eVal != -1) {
incr = 1;
}
this.addToSet(sVal, eVal, incr, type);
return i + 3;
}
if (c === '?') {
i++;
if (i + 1 < s.length && s.charAt(i) != ' ' && s.charAt(i + 1) != '\t') {
throw new Error("Illegal character after '?': " + s.charAt(i) + String(i));
}
if (type != DAY_OF_WEEK && type != DAY_OF_MONTH) {
throw new Error("'?' can only be specfied for Day-of-Month or Day-of-Week." + String(i));
}
if (type === DAY_OF_WEEK && !this.lastdayOfMonth) {
let val: number = this.daysOfMonth.last();
if (val === NO_SPEC_INT) {
throw new Error("'?' can only be specfied for Day-of-Month -OR- Day-of-Week." + String(i));
}
}
this.addToSet(NO_SPEC_INT, -1, 0, type);
return i;
}
if (c === '*' || c === '/') {
if (c === '*' && i + 1 >= s.length) {
this.addToSet(ALL_SPEC_INT, -1, incr, type);
return i + 1;
} else if (c === '/' && (i + 1 >= s.length || s.charAt(i + 1) === ' ' || s.charAt(i + 1) === '\t')) {
throw new Error("'/' must be followed by an integer." + String(i));
} else if (c === '*') {
i++;
}
c = s.charAt(i);
if (c === '/') {
// is an increment specified?
i++;
if (i >= s.length) {
throw new Error('Unexpected end of string.' + String(i));
}
incr = this.getNumericValue(s, i);
i++;
if (incr > 10) {
i++;
}
if (incr > 59 && (type === SECOND || type === MINUTE)) {
throw new Error('Increment > 60 : ' + incr + i);
} else if (incr > 23 && type === HOUR) {
throw new Error('Increment > 24 : ' + incr + i);
} else if (incr > 31 && type === DAY_OF_MONTH) {
throw new Error('Increment > 31 : ' + incr + i);
} else if (incr > 7 && type === DAY_OF_WEEK) {
throw new Error('Increment > 7 : ' + incr + i);
} else if (incr > 12 && type === MONTH) {
throw new Error('Increment > 12 : ' + incr + i);
}
} else {
incr = 1;
}
this.addToSet(ALL_SPEC_INT, -1, incr, type);
return i;
} else if (c === 'L') {
i++;
if (type === DAY_OF_MONTH) {
this.lastdayOfMonth = true;
}
if (type === DAY_OF_WEEK) {
this.addToSet(7, 7, 0, type);
}
if (type === DAY_OF_MONTH && s.length > i) {
c = s.charAt(i);
if (c === 'W') {
this.nearestWeekday = true;
i++;
}
}
return i;
} else if (c >= '0' && c <= '9') {
let val: number = Number(c.valueOf());
i++;
if (i >= s.length) {
this.addToSet(val, -1, -1, type);
} else {
c = s.charAt(i);
if (c >= '0' && c <= '9') {
let vs: [number, number] = this.getValue(val, s, i);
val = vs[1];
i = vs[0];
}
i = this.checkNext(i, s, val, type);
return i;
}
} else {
throw new Error('Unexpected character: ' + c + i);
}
return i;
};
skipWhiteSpace = function(i: number, s: String): number {
for (; i < s.length && (s.charAt(i) === ' ' || s.charAt(i) === '\t'); i++) {}
return i;
};
getMonthNumber = function(s: String): number {
let integer: number = monthMap.get(s);
if (integer === undefined) {
return -1;
}
return integer;
};
getDayOfWeekNumber = function(s: String): number {
let integer: number = dayMap.get(s);
if (integer === undefined) {
return -1;
}
return integer;
};
addToSet = function(val: number, end: number, incr: number, type: number) {
let set = this.getSet(type);
if (type === SECOND || type === MINUTE) {
if ((val < 0 || val > 59 || end > 59) && val != ALL_SPEC_INT) {
throw new Error('Minute and Second values must be between 0 and 59' + String(-1));
}
} else if (type === HOUR) {
if ((val < 0 || val > 23 || end > 23) && val != ALL_SPEC_INT) {
throw new Error('Hour values must be between 0 and 23' + String(-1));
}
} else if (type === DAY_OF_MONTH) {
if ((val < 1 || val > 31 || end > 31) && val != ALL_SPEC_INT && val != NO_SPEC_INT) {
throw new Error('Day of month values must be between 1 and 31' + String(-1));
}
} else if (type === MONTH) {
if ((val < 1 || val > 12 || end > 12) && val != ALL_SPEC_INT) {
throw new Error('Month values must be between 1 and 12' + String(-1));
}
} else if (type === DAY_OF_WEEK) {
if ((val === 0 || val > 7 || end > 7) && val != ALL_SPEC_INT && val != NO_SPEC_INT) {
throw new Error('Day-of-Week values must be between 1 and 7' + String(-1));
}
}
if ((incr === 0 || incr === -1) && val != ALL_SPEC_INT) {
if (val != -1) {
set.add(val);
} else {
set.add(NO_SPEC_INT);
}
return;
}
let startAt: number = val;
let stopAt: number = end;
if (val === ALL_SPEC_INT && incr <= 0) {
incr = 1;
set.add(ALL_SPEC_INT); // put in a marker, but also fill values
}
if (type === SECOND || type === MINUTE) {
if (stopAt === -1) {
stopAt = 59;
}
if (startAt === -1 || startAt === ALL_SPEC_INT) {
startAt = 0;
}
} else if (type === HOUR) {
if (stopAt === -1) {
stopAt = 23;
}
if (startAt === -1 || startAt === ALL_SPEC_INT) {
startAt = 0;
}
} else if (type === DAY_OF_MONTH) {
if (stopAt === -1) {
stopAt = 31;
}
if (startAt === -1 || startAt === ALL_SPEC_INT) {
startAt = 1;
}
} else if (type === MONTH) {
if (stopAt === -1) {
stopAt = 12;
}
if (startAt === -1 || startAt === ALL_SPEC_INT) {
startAt = 1;
}
} else if (type === DAY_OF_WEEK) {
if (stopAt === -1) {
stopAt = 7;
}
if (startAt === -1 || startAt === ALL_SPEC_INT) {
startAt = 1;
}
} else if (type === YEAR) {
if (stopAt === -1) {
stopAt = 2099;
}
if (startAt === -1 || startAt === ALL_SPEC_INT) {
startAt = 1970;
}
}
for (let i = startAt; i <= stopAt; i += incr) {
set.add(i);
}
};
getSet = function(type: number) {
switch (type) {
case SECOND:
return this.seconds;
case MINUTE:
return this.minutes;
case HOUR:
return this.hours;
case DAY_OF_MONTH:
return this.daysOfMonth;
case MONTH:
return this.months;
case DAY_OF_WEEK:
return this.daysOfWeek;
case YEAR:
return this.years;
}
};
// get the string value
getValue = function(v: number, s: string, i: number) {
let c: string = s.charAt(i);
let s1: string = String(v);
while (c >= '0' && c <= '9') {
s1 = s1.concat(c);
i++;
if (i >= s.length) {
break;
}
c = s.charAt(i);
}
let val: [number, number];
val = [i < s.length ? i : i + 1, Number(s1)];
return val;
};
getNumericValue = function(s: string, i: number): number {
let endOfVal = this.findNextWhiteSpace(i, s);
let val: string = s.substring(i, endOfVal);
return Number(val);
};
findNextWhiteSpace = function(i: number, s: String) {
for (; i < s.length && (s.charAt(i) != ' ' || s.charAt(i) != '\t'); i++) {}
return i;
};
checkNext = function(pos: number, s: string, val: number, type: number): number {
let end = -1;
let i = pos;
if (i >= s.length) {
this.addToSet(val, end, -1, type);
return i;
}
let c: string = s.charAt(pos);
if (c === 'L') {
if (type === DAY_OF_WEEK) {
this.lastdayOfWeek = true;
} else {
throw new Error("'L' option is not valid here. (pos=" + i + ')' + i);
}
let set = this.getSet(type);
set.add(val);
i++;
return i;
}
if (c === 'W') {
if (type === DAY_OF_MONTH) {
this.nearestWeekday = true;
} else {
throw new Error("'W' option is not valid here. (pos=" + i + ')' + i);
}
let set = this.getSet(type);
set.add(val);
i++;
return i;
}
if (c === '#') {
if (type != DAY_OF_WEEK) {
throw new Error("'#' option is not valid here. (pos=" + i + ')' + i);
}
i++;
try {
this.nthdayOfWeek = Number(s.substring(i));
if (this.nthdayOfWeek < 1 || this.nthdayOfWeek > 5) {
throw new Error();
}
} catch (e) {
throw new Error("A numeric value between 1 and 5 must follow the '#' option" + i);
}
let set = this.getSet(type);
set.add(val);
i++;
return i;
}
if (c === '-') {
i++;
c = s.charAt(i);
let v: number = Number(c);
end = v;
i++;
if (i >= s.length) {
this.addToSet(val, end, 1, type);
return i;
}
c = s.charAt(i);
if (c >= '0' && c <= '9') {
let vs: [number, number] = this.getValue(v, s, i);
let v1 = vs[1];
end = v1;
i = vs[0];
}
if (i < s.length && (c = s.charAt(i)) === '/') {
i++;
c = s.charAt(i);
let v2: number = Number(c);
i++;
if (i >= s.length) {
this.addToSet(val, end, v2, type);
return i;
}
c = s.charAt(i);
if (c >= '0' && c <= '9') {
let vs: [number, number] = this.getValue(v2, s, i);
let v3 = vs[1];
this.addToSet(val, end, v3, type);
i = vs[0];
return i;
} else {
this.addToSet(val, end, v2, type);
return i;
}
} else {
this.addToSet(val, end, 1, type);
return i;
}
}
if (c === '/') {
i++;
c = s.charAt(i);
let v2 = Number(c);
i++;
if (i >= s.length) {
this.addToSet(val, end, v2, type);
return i;
}
c = s.charAt(i);
if (c >= '0' && c <= '9') {
let vs: [number, number] = this.getValue(v2, s, i);
let v3 = vs[1];
this.addToSet(val, end, v3, type);
i = vs[1];
return i;
} else {
throw new Error("Unexpected character '" + c + "' after '/'" + i);
}
}
this.addToSet(val, end, 0, type);
i++;
return i;
};
resetState = function() {
// reset internal state
this.expressionParsed = false;
this.seconds = new TreeSet();
this.minutes = new TreeSet();
this.hours = new TreeSet();
this.daysOfMonth = new TreeSet();
this.months = new TreeSet();
this.daysOfWeek = new TreeSet();
this.years = new TreeSet();
this.lastdayOfWeek = false;
this.nthdayOfWeek = 0;
this.lastdayOfMonth = false;
this.nearestWeekday = false;
this.lastdayOffset = 0;
};
} | the_stack |
import { h, Component } from 'preact';
import { Side } from '@t/store/focus';
import { ColumnInfo, ComplexColumnInfo } from '@t/store/column';
import { Range } from '@t/store/selection';
import { ColGroup } from './colGroup';
import {
cls,
setCursorStyle,
getCoordinateWithOffset,
hasClass,
findParentByClassName,
} from '../helper/dom';
import { connect } from './hoc';
import { ColumnResizer } from './columnResizer';
import { DispatchProps } from '../dispatch/create';
import { getInstance } from '../instance';
import { isDraggableColumn, isParentColumnHeader } from '../query/column';
import { ComplexHeader } from './complexHeader';
import { ColumnHeader } from './columnHeader';
import { RIGHT_MOUSE_BUTTON } from '../helper/constant';
import Grid from '../grid';
import {
createDraggableColumnInfo,
DraggableColumnInfo,
getMovedPosAndIndexOfColumn,
PosInfo,
} from '../query/draggable';
import { findOffsetIndex } from '../helper/common';
import GridEvent from '../event/gridEvent';
import { EventBus, getEventBus } from '../event/eventBus';
// Props supplied directly by the parent component.
interface OwnProps {
  side: Side;
}
// Props derived from the grid store by the connect() mapper.
interface StoreProps {
  headerHeight: number;
  cellBorderWidth: number;
  columns: ColumnInfo[];
  scrollLeft: number;
  grid: Grid;
  columnSelectionRange: Range | null;
  complexColumnHeaders: ComplexColumnInfo[];
  eventBus: EventBus;
}
type Props = OwnProps & StoreProps & DispatchProps;
// Index and target-column name of the current drop position while dragging.
interface MovedIndexInfo {
  index: number;
  columnName: string | null;
}
// CSS class applied to a column while it is being dragged.
const DRAGGING_CLASS = 'dragging';
// Header area for one side ('L' frozen / 'R' scrollable) of the grid.
// Renders the column headers and wires up mouse-driven header selection and
// drag-and-drop column reordering (dragStart / drag / drop grid events).
class HeaderAreaComp extends Component<Props> {
  // Root element of the header area; set via the render ref.
  private el?: HTMLElement;
  // Ancestor element that hosts the floating (dragged) column while dragging.
  private container: HTMLElement | null = null;
  // Info about the column currently being dragged, if any.
  private dragColumnInfo: DraggableColumnInfo | null = null;
  // Width of the floating column element, cached at drag start.
  private floatingColumnWidth: number | null = null;
  // Column name captured on mousedown; used to tell drag from selection.
  private startSelectedName: string | null = null;
  // Mouse offsetX captured on the last mousemove before the drag starts.
  private offsetLeft: number | null = null;
  // Index/column of the most recent drag-move target (drop destination).
  private movedIndexInfo: MovedIndexInfo | null = null;
  private handleDblClick = (ev: MouseEvent) => {
    // Keep double-clicks on the header row from bubbling further up.
    ev.stopPropagation();
  };
  // Builds a position snapshot (page coords, element offset, scroll) for
  // the drag helpers in query/draggable.
  private getPosInfo(ev: MouseEvent, el: HTMLElement): PosInfo {
    const [pageX, pageY] = getCoordinateWithOffset(ev.pageX, ev.pageY);
    const { scrollTop, scrollLeft } = el;
    const { top, left } = el.getBoundingClientRect();
    return { pageX, pageY, left, top, scrollLeft, scrollTop };
  }
  // While the mouse button is held after a header mousedown: either start a
  // column drag (pointer moved down into the body over the same draggable
  // column) or extend the header selection.
  private handleMouseMove = (ev: MouseEvent) => {
    const { store } = this.context;
    this.offsetLeft = ev.offsetX;
    const posInfo = this.getPosInfo(ev, this.el!);
    const { pageX, pageY, scrollTop, top } = posInfo;
    const {
      targetColumn: { name: currentColumnName },
    } = getMovedPosAndIndexOfColumn(store, posInfo, this.offsetLeft!);
    if (
      currentColumnName === this.startSelectedName &&
      isDraggableColumn(store, currentColumnName) &&
      findOffsetIndex(store.rowCoords.offsets, pageY - top + scrollTop) > 0
    ) {
      // Pointer is still on the pressed column and over a body row: begin
      // dragging instead of selecting.
      this.startToDragColumn(posInfo);
      return;
    }
    this.props.dispatch('dragMoveHeader', { pageX, pageY }, this.startSelectedName!);
  };
  // Begins header selection on mousedown (unless the click hit a row header,
  // sort/filter button, or was a right-click) and arms the document-level
  // listeners that drive drag/selection until mouseup.
  private handleMouseDown = (ev: MouseEvent) => {
    const { dispatch, complexColumnHeaders } = this.props;
    const target = ev.target as HTMLElement;
    if (
      findParentByClassName(target, 'cell-row-header') ||
      hasClass(target, 'btn-sorting') ||
      hasClass(target, 'btn-filter') ||
      ev.button === RIGHT_MOUSE_BUTTON
    ) {
      return;
    }
    // Resolve the column name from the clicked element or its header cell.
    let name = target.getAttribute('data-column-name')!;
    if (!name) {
      const parent = findParentByClassName(target, 'cell-header');
      if (parent) {
        name = parent.getAttribute('data-column-name')!;
      }
    }
    const parentHeader = isParentColumnHeader(complexColumnHeaders, name);
    this.startSelectedName = name;
    dispatch('mouseDownHeader', name, parentHeader);
    document.addEventListener('mousemove', this.handleMouseMove);
    document.addEventListener('mouseup', this.clearDocumentEvents);
    document.addEventListener('selectstart', this.handleSelectStart);
  };
  // Ends a header-selection gesture and removes the document listeners.
  private clearDocumentEvents = () => {
    this.props.dispatch('dragEnd');
    setCursorStyle('');
    document.removeEventListener('mousemove', this.handleMouseMove);
    document.removeEventListener('mouseup', this.clearDocumentEvents);
    document.removeEventListener('selectstart', this.handleSelectStart);
  };
  // Suppress native text selection while dragging/selecting.
  private handleSelectStart = (ev: Event) => {
    ev.preventDefault();
  };
  // Whether the column at the given index lies in the current selection.
  private isSelected(index: number) {
    const { columnSelectionRange } = this.props;
    if (!columnSelectionRange) {
      return false;
    }
    const [start, end] = columnSelectionRange;
    return index >= start && index <= end;
  }
  // Switches from selection mode to drag mode: fires the cancelable
  // 'dragStart' grid event, mounts the floating column element, and swaps
  // the document mousemove/mouseup handlers for the drag-specific ones.
  private startToDragColumn = (posInfo: PosInfo) => {
    const { dispatch } = this.props;
    this.container = this.el?.parentElement?.parentElement!;
    posInfo.container = this.container;
    const draggableInfo = createDraggableColumnInfo(this.context.store, posInfo);
    const { column, columnName } = draggableInfo;
    const gridEvent = new GridEvent({ columnName, floatingColumn: column });
    /**
     * Occurs when starting to drag the column
     * @event Grid#dragStart
     * @property {Grid} instance - Current grid instance
     * @property {string} columnName - The column name of the column to drag
     * @property {HTMLElement} floatingColumn - The floating column DOM element
     */
    this.props.eventBus.trigger('dragStart', gridEvent);
    if (!gridEvent.isStopped()) {
      this.container.appendChild(column);
      this.floatingColumnWidth = column.clientWidth;
      this.dragColumnInfo = draggableInfo;
      dispatch('addColumnClassName', columnName, DRAGGING_CLASS);
      // Dragging invalidates focus and selection.
      dispatch('setFocusInfo', null, null, false);
      dispatch('initSelection');
      document.removeEventListener('mousemove', this.handleMouseMove);
      document.addEventListener('mousemove', this.dragColumn);
      document.addEventListener('mouseup', this.dropColumn);
    }
  };
  // Moves the floating column with the pointer, previews the reorder in the
  // store, and fires the 'drag' grid event.
  private dragColumn = (ev: MouseEvent) => {
    const posInfo = this.getPosInfo(ev, this.el!);
    const { index, offsetLeft, targetColumn } = getMovedPosAndIndexOfColumn(
      this.context.store,
      posInfo,
      this.offsetLeft!,
      this.floatingColumnWidth!
    );
    const { column, columnName } = this.dragColumnInfo!;
    column.style.left = `${offsetLeft}px`;
    this.movedIndexInfo = { index, columnName: targetColumn.name };
    this.props.dispatch('moveColumn', columnName, index);
    const gridEvent = new GridEvent({
      columnName,
      targetColumnName: targetColumn.name,
    });
    /**
     * Occurs when dragging the column
     * @event Grid#drag
     * @property {Grid} instance - Current grid instance
     * @property {string} columnName - The column name of the dragging column
     * @property {string} targetColumnName - The column name of the column at current dragging position
     */
    this.props.eventBus.trigger('drag', gridEvent);
  };
  // Commits the reorder on mouseup (unless the cancelable 'drop' grid event
  // is stopped) and tears down the drag state.
  private dropColumn = () => {
    const { columnName } = this.dragColumnInfo!;
    if (this.movedIndexInfo) {
      const { index, columnName: targetColumnName } = this.movedIndexInfo;
      const gridEvent = new GridEvent({
        columnName,
        targetColumnName,
      });
      /**
       * Occurs when dropping the column
       * @event Grid#drop
       * @property {Grid} instance - Current grid instance
       * @property {string} columnName - The column name of the dragging column
       * @property {string} targetColumnName - The column name of the column at current dragging position
       */
      this.props.eventBus.trigger('drop', gridEvent);
      if (!gridEvent.isStopped()) {
        this.props.dispatch('moveColumn', columnName, index);
      }
    }
    this.props.dispatch('removeColumnClassName', this.dragColumnInfo!.columnName!, DRAGGING_CLASS);
    this.clearDraggableInfo();
  };
  // Unmounts the floating column and resets all drag-related fields/listeners.
  private clearDraggableInfo = () => {
    this.container!.removeChild(this.dragColumnInfo!.column);
    this.dragColumnInfo = null;
    this.container = null;
    this.floatingColumnWidth = null;
    this.offsetLeft = null;
    this.movedIndexInfo = null;
    document.removeEventListener('mousemove', this.dragColumn);
    document.removeEventListener('mouseup', this.dropColumn);
  };
  public componentDidUpdate() {
    // Keep the header horizontally in sync with the viewport scroll.
    this.el!.scrollLeft = this.props.scrollLeft;
  }
  public render() {
    const { columns, headerHeight, side, complexColumnHeaders, grid } = this.props;
    const headerHeightStyle = { height: headerHeight };
    return (
      <div
        class={cls('header-area')}
        style={headerHeightStyle}
        ref={(el) => {
          this.el = el;
        }}
      >
        <table class={cls('table')} onMouseDown={this.handleMouseDown}>
          <ColGroup side={side} useViewport={false} />
          {complexColumnHeaders.length ? (
            <ComplexHeader side={side} grid={grid} />
          ) : (
            <tbody>
              <tr style={headerHeightStyle} onDblClick={this.handleDblClick}>
                {columns.map((columnInfo, index) => (
                  <ColumnHeader
                    key={columnInfo.name}
                    columnInfo={columnInfo}
                    selected={this.isSelected(index)}
                    grid={grid}
                  />
                ))}
              </tr>
            </tbody>
          )}
        </table>
        <ColumnResizer side={side} />
      </div>
    );
  }
}
export const HeaderArea = connect<StoreProps, OwnProps>((store, { side }) => {
const {
column: { visibleColumnsBySideWithRowHeader, complexColumnHeaders },
dimension: { headerHeight, cellBorderWidth },
selection: { rangeBySide },
viewport,
id,
} = store;
return {
headerHeight,
cellBorderWidth,
columns: visibleColumnsBySideWithRowHeader[side],
scrollLeft: side === 'L' ? 0 : viewport.scrollLeft,
grid: getInstance(id),
columnSelectionRange: rangeBySide && rangeBySide[side].column ? rangeBySide[side].column : null,
complexColumnHeaders,
eventBus: getEventBus(id),
};
})(HeaderAreaComp); | the_stack |
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* Creates a new object inside an existing bucket in Google cloud storage service (GCS).
* [ACLs](https://cloud.google.com/storage/docs/access-control/lists) can be applied using the `gcp.storage.ObjectACL` resource.
* For more information see
* [the official documentation](https://cloud.google.com/storage/docs/key-terms#objects)
* and
* [API](https://cloud.google.com/storage/docs/json_api/v1/objects).
*
* ## Example Usage
*
* Example creating a public object in an existing `image-store` bucket.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const picture = new gcp.storage.BucketObject("picture", {
* bucket: "image-store",
* source: new pulumi.asset.FileAsset("/images/nature/garden-tiger-moth.jpg"),
* });
* ```
*
* ## Import
*
* This resource does not support import.
*/
export class BucketObject extends pulumi.CustomResource {
/**
* Get an existing BucketObject resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: BucketObjectState, opts?: pulumi.CustomResourceOptions): BucketObject {
return new BucketObject(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'gcp:storage/bucketObject:BucketObject';
/**
* Returns true if the given object is an instance of BucketObject. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is BucketObject {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === BucketObject.__pulumiType;
}
/**
* The name of the containing bucket.
*/
public readonly bucket!: pulumi.Output<string>;
/**
* [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
* directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
*/
public readonly cacheControl!: pulumi.Output<string | undefined>;
/**
* Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
*/
public readonly content!: pulumi.Output<string | undefined>;
/**
* [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
*/
public readonly contentDisposition!: pulumi.Output<string | undefined>;
/**
* [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
*/
public readonly contentEncoding!: pulumi.Output<string | undefined>;
/**
* [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
*/
public readonly contentLanguage!: pulumi.Output<string | undefined>;
/**
* [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
*/
public readonly contentType!: pulumi.Output<string>;
/**
* (Computed) Base 64 CRC32 hash of the uploaded data.
*/
public /*out*/ readonly crc32c!: pulumi.Output<string>;
/**
* Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
* Structure is documented below.
*/
public readonly customerEncryption!: pulumi.Output<outputs.storage.BucketObjectCustomerEncryption | undefined>;
public readonly detectMd5hash!: pulumi.Output<string | undefined>;
/**
* Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
* signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
* will be subject to bucket-level retention (if any).
*/
public readonly eventBasedHold!: pulumi.Output<boolean | undefined>;
/**
* The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
*/
public readonly kmsKeyName!: pulumi.Output<string>;
/**
* (Computed) Base 64 MD5 hash of the uploaded data.
*/
public /*out*/ readonly md5hash!: pulumi.Output<string>;
/**
* (Computed) A url reference to download this object.
*/
public /*out*/ readonly mediaLink!: pulumi.Output<string>;
/**
* User-provided metadata, in key/value pairs.
*/
public readonly metadata!: pulumi.Output<{[key: string]: string} | undefined>;
/**
* The name of the object. If you're interpolating the name of this object, see `outputName` instead.
*/
public readonly name!: pulumi.Output<string>;
/**
* (Computed) The name of the object. Use this field in interpolations with `gcp.storage.ObjectACL` to recreate
* `gcp.storage.ObjectACL` resources when your `gcp.storage.BucketObject` is recreated.
*/
public /*out*/ readonly outputName!: pulumi.Output<string>;
/**
* (Computed) A url reference to this object.
*/
public /*out*/ readonly selfLink!: pulumi.Output<string>;
/**
* A path to the data you want to upload. Must be defined
* if `content` is not.
*/
public readonly source!: pulumi.Output<pulumi.asset.Asset | pulumi.asset.Archive | undefined>;
/**
* The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
* Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
* storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
*/
public readonly storageClass!: pulumi.Output<string>;
/**
* Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
* overwrites.
*/
public readonly temporaryHold!: pulumi.Output<boolean | undefined>;
/**
 * Create a BucketObject resource with the given unique name, arguments, and options.
 *
 * @param name The _unique_ name of the resource.
 * @param args The arguments to use to populate this resource's properties.
 * @param opts A bag of options that control this resource's behavior.
 */
constructor(name: string, args: BucketObjectArgs, opts?: pulumi.CustomResourceOptions)
constructor(name: string, argsOrState?: BucketObjectArgs | BucketObjectState, opts?: pulumi.CustomResourceOptions) {
    let inputs: pulumi.Inputs = {};
    opts = opts || {};
    if (opts.id) {
        // Looking up an existing resource by ID: argsOrState holds prior
        // state, so every property (inputs and computed outputs) is copied.
        const state = argsOrState as BucketObjectState | undefined;
        inputs["bucket"] = state ? state.bucket : undefined;
        inputs["cacheControl"] = state ? state.cacheControl : undefined;
        inputs["content"] = state ? state.content : undefined;
        inputs["contentDisposition"] = state ? state.contentDisposition : undefined;
        inputs["contentEncoding"] = state ? state.contentEncoding : undefined;
        inputs["contentLanguage"] = state ? state.contentLanguage : undefined;
        inputs["contentType"] = state ? state.contentType : undefined;
        inputs["crc32c"] = state ? state.crc32c : undefined;
        inputs["customerEncryption"] = state ? state.customerEncryption : undefined;
        inputs["detectMd5hash"] = state ? state.detectMd5hash : undefined;
        inputs["eventBasedHold"] = state ? state.eventBasedHold : undefined;
        inputs["kmsKeyName"] = state ? state.kmsKeyName : undefined;
        inputs["md5hash"] = state ? state.md5hash : undefined;
        inputs["mediaLink"] = state ? state.mediaLink : undefined;
        inputs["metadata"] = state ? state.metadata : undefined;
        inputs["name"] = state ? state.name : undefined;
        inputs["outputName"] = state ? state.outputName : undefined;
        inputs["selfLink"] = state ? state.selfLink : undefined;
        inputs["source"] = state ? state.source : undefined;
        inputs["storageClass"] = state ? state.storageClass : undefined;
        inputs["temporaryHold"] = state ? state.temporaryHold : undefined;
    } else {
        // Creating a new resource: `bucket` is the only required input.
        const args = argsOrState as BucketObjectArgs | undefined;
        if ((!args || args.bucket === undefined) && !opts.urn) {
            throw new Error("Missing required property 'bucket'");
        }
        inputs["bucket"] = args ? args.bucket : undefined;
        inputs["cacheControl"] = args ? args.cacheControl : undefined;
        inputs["content"] = args ? args.content : undefined;
        inputs["contentDisposition"] = args ? args.contentDisposition : undefined;
        inputs["contentEncoding"] = args ? args.contentEncoding : undefined;
        inputs["contentLanguage"] = args ? args.contentLanguage : undefined;
        inputs["contentType"] = args ? args.contentType : undefined;
        inputs["customerEncryption"] = args ? args.customerEncryption : undefined;
        inputs["detectMd5hash"] = args ? args.detectMd5hash : undefined;
        inputs["eventBasedHold"] = args ? args.eventBasedHold : undefined;
        inputs["kmsKeyName"] = args ? args.kmsKeyName : undefined;
        inputs["metadata"] = args ? args.metadata : undefined;
        inputs["name"] = args ? args.name : undefined;
        inputs["source"] = args ? args.source : undefined;
        inputs["storageClass"] = args ? args.storageClass : undefined;
        inputs["temporaryHold"] = args ? args.temporaryHold : undefined;
        // Output-only properties are seeded with undefined so the engine
        // knows to populate them after creation.
        inputs["crc32c"] = undefined /*out*/;
        inputs["md5hash"] = undefined /*out*/;
        inputs["mediaLink"] = undefined /*out*/;
        inputs["outputName"] = undefined /*out*/;
        inputs["selfLink"] = undefined /*out*/;
    }
    if (!opts.version) {
        opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
    }
    super(BucketObject.__pulumiType, name, inputs, opts);
}
}
/**
 * Input properties used for looking up and filtering BucketObject resources.
 */
export interface BucketObjectState {
    /**
     * The name of the containing bucket.
     */
    bucket?: pulumi.Input<string>;
    /**
     * [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
     * directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
     */
    cacheControl?: pulumi.Input<string>;
    /**
     * Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
     */
    content?: pulumi.Input<string>;
    /**
     * [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
     */
    contentDisposition?: pulumi.Input<string>;
    /**
     * [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
     */
    contentEncoding?: pulumi.Input<string>;
    /**
     * [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
     */
    contentLanguage?: pulumi.Input<string>;
    /**
     * [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
     */
    contentType?: pulumi.Input<string>;
    /**
     * (Computed) Base 64 CRC32 hash of the uploaded data.
     */
    crc32c?: pulumi.Input<string>;
    /**
     * Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
     * Structure is documented below.
     */
    customerEncryption?: pulumi.Input<inputs.storage.BucketObjectCustomerEncryption>;
    // NOTE(review): undocumented upstream — appears to carry an MD5 hash used
    // to detect drift in the uploaded content; confirm against provider docs.
    detectMd5hash?: pulumi.Input<string>;
    /**
     * Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
     * signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
     * will be subject to bucket-level retention (if any).
     */
    eventBasedHold?: pulumi.Input<boolean>;
    /**
     * The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
     */
    kmsKeyName?: pulumi.Input<string>;
    /**
     * (Computed) Base 64 MD5 hash of the uploaded data.
     */
    md5hash?: pulumi.Input<string>;
    /**
     * (Computed) A url reference to download this object.
     */
    mediaLink?: pulumi.Input<string>;
    /**
     * User-provided metadata, in key/value pairs.
     */
    metadata?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The name of the object. If you're interpolating the name of this object, see `outputName` instead.
     */
    name?: pulumi.Input<string>;
    /**
     * (Computed) The name of the object. Use this field in interpolations with `gcp.storage.ObjectACL` to recreate
     * `gcp.storage.ObjectACL` resources when your `gcp.storage.BucketObject` is recreated.
     */
    outputName?: pulumi.Input<string>;
    /**
     * (Computed) A url reference to this object.
     */
    selfLink?: pulumi.Input<string>;
    /**
     * A path to the data you want to upload. Must be defined
     * if `content` is not.
     */
    source?: pulumi.Input<pulumi.asset.Asset | pulumi.asset.Archive>;
    /**
     * The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
     * Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
     * storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
     */
    storageClass?: pulumi.Input<string>;
    /**
     * Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
     * overwrites.
     */
    temporaryHold?: pulumi.Input<boolean>;
}
/**
 * The set of arguments for constructing a BucketObject resource.
 */
export interface BucketObjectArgs {
    /**
     * The name of the containing bucket.
     */
    bucket: pulumi.Input<string>;
    /**
     * [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
     * directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
     */
    cacheControl?: pulumi.Input<string>;
    /**
     * Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
     */
    content?: pulumi.Input<string>;
    /**
     * [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
     */
    contentDisposition?: pulumi.Input<string>;
    /**
     * [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
     */
    contentEncoding?: pulumi.Input<string>;
    /**
     * [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
     */
    contentLanguage?: pulumi.Input<string>;
    /**
     * [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
     */
    contentType?: pulumi.Input<string>;
    /**
     * Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
     * Structure is documented below.
     */
    customerEncryption?: pulumi.Input<inputs.storage.BucketObjectCustomerEncryption>;
    // NOTE(review): undocumented upstream — appears to carry an MD5 hash used
    // to detect drift in the uploaded content; confirm against provider docs.
    detectMd5hash?: pulumi.Input<string>;
    /**
     * Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
     * signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
     * will be subject to bucket-level retention (if any).
     */
    eventBasedHold?: pulumi.Input<boolean>;
    /**
     * The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
     */
    kmsKeyName?: pulumi.Input<string>;
    /**
     * User-provided metadata, in key/value pairs.
     */
    metadata?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The name of the object. If you're interpolating the name of this object, see `outputName` instead.
     */
    name?: pulumi.Input<string>;
    /**
     * A path to the data you want to upload. Must be defined
     * if `content` is not.
     */
    source?: pulumi.Input<pulumi.asset.Asset | pulumi.asset.Archive>;
    /**
     * The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
     * Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
     * storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
     */
    storageClass?: pulumi.Input<string>;
    /**
     * Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
     * overwrites.
     */
    temporaryHold?: pulumi.Input<boolean>;
}
import { assertEq } from "./test"
//
// simple s-expressions parser made for arch rewrite rules DSL,
// but is generic and can be used independently.
* Do remember that changing this requires testing of the generator
// program in the arch directory.
//
// Character-code helpers and the code points the scanner switches on.
const char = (s :string) => s.charCodeAt(0)
const chr = (code :int) => String.fromCharCode(code)

// brackets and angle delimiters
const lparen = char("(")
const rparen = char(")")
const lbrack = char("[")
const rbrack = char("]")
const lbrace = char("{")
const rbrace = char("}")
const lt = char("<")
const gt = char(">")

// whitespace and line terminators
const sp = char(" ")
const tab = char("\t")
const cr = char("\r")
const lf = char("\n")

// comment, escape, union and quoting characters
const slash = char("/")
const bslash = char("\\")
const pipe = char("|")
const squote = char("'")
const dquote = char('"')
const semic = char(';')

// letters used in escape sequences (\r \n \t)
const ch_r = char("r")
const ch_n = char("n")
const ch_t = char("t")
// A parsed value is a symbol, a pre-form (verbatim delimited text),
// a list, or a union expansion like foo(bar|baz).
export type Value = Sym|Pre|List|Union
// Bracket kind that opened a list ("" for the implicit top-level list).
export type ListType = "" | "(" | "<" | "[" | "{"
// Delimiter kind that opened a pre-form (quote or bracket).
export type PreType = "'" | '"' | "<" | "[" | "{"
// Contract shared by every value kind.
interface IValue {
  line :int // 1-based
  col :int // 1-based
  isList() : this is List
  isUnion() : this is Union
  isSym() : this is Sym
  isPre() : this is Pre // note: Pre is a type of Sym
  toString(ln? :string) :string
}
// A parsed list. Extends Array so all array operations work directly
// on the parsed values.
export class List extends Array<Value> implements IValue {
  line :int
  col :int
  type :ListType                // which bracket opened this list
  _keycache? :Map<string,List>  // lazy index built by asMap()
  constructor(line :int, col :int, type :ListType) {
    super()
    this.line = line
    this.col = col
    this.type = type
  }
  // Builds a "("-type list containing the given values.
  static create(line :int, col :int, ...v :Value[]) :List {
    let l = new List(line, col, "(")
    l.splice(0, 0, ...v)
    return l
  }
  isList() : this is List { return true }
  isUnion() : this is Union { return false }
  isSym() : this is Sym { return false }
  isPre() : this is Pre { return false }
  // Renders the list in source form. `ln` is inserted before nested
  // lists; only "\n" and " " render sensibly (see the toString test).
  toString(ln :string = "") :string {
    // when ln starts with a newline, indent nested lists one extra space
    let ln2 = ln.charCodeAt(0) == 0xA ? ln + " " : ln
    let s = "("
    for (let i = 0; i < this.length; i++) {
      let v = this[i]
      let s2 = v.toString(ln2)
      if (v instanceof List) {
        s += (ln2 || i == 0 ? ln2 : " ") + s2
      } else {
        s += (i == 0 || s2.charCodeAt(0) == 0xA) ? s2 : " " + s2
      }
    }
    return s + ")"
  }
  // Lazily indexes child lists of the form (key ...) by their leading
  // symbol. NOTE(review): the cache is never invalidated on mutation.
  asMap() :Map<string,List> {
    if (!this._keycache) {
      this._keycache = new Map<string,List>()
      for (let v of this) {
        if (v instanceof List && v.length > 1) {
          let s = v[0]
          if (s instanceof Sym) {
            this._keycache.set(s.value, v)
          }
        }
      }
    }
    return this._keycache
  }
  // Returns the child list whose leading symbol is `key`, or null.
  get(key :string) :List|null {
    return this.asMap().get(key) || null
  }
}
// A union expansion such as foo(bar|baz), denoting the symbols foobar
// and foobaz. Extends Array<Sym>; each element stores the fully
// expanded name (prefix included).
export class Union extends Array<Sym> implements IValue {
  line :int
  col :int
  prefix :string  // common prefix shared by every member
  constructor(line :int, col :int, prefix :string) {
    super()
    this.line = line
    this.col = col
    this.prefix = prefix
  }
  isList() : this is List { return false }
  isUnion() : this is Union { return true }
  isSym() : this is Sym { return false }
  isPre() : this is Pre { return false }
  // Renders back to source form, e.g. foo(bar|baz).
  toString() {
    // slice() instead of the deprecated substr(); identical behavior
    // here since the start index is always >= 0.
    return `${this.prefix}(${this.map(s => s.value.slice(this.prefix.length)).join("|")})`
  }
}
// A bare symbol token, carrying its 1-based source position and text.
export class Sym implements IValue {
  constructor(public line :int, public col :int, public value :string) {}

  isList() : this is List { return false }
  isUnion() : this is Union { return false }
  isSym() : this is Sym { return true }
  isPre() : this is Pre { return false }

  toString() { return this.value }

  // Numeric interpretation of the symbol text, or null when not numeric.
  asMaybeNum() :int|null {
    const n = Number(this.value)
    return Number.isNaN(n) ? null : n
  }

  // Numeric interpretation; panics when the text is not a number.
  asNum() :int {
    const n = this.asMaybeNum()
    if (n !== null) {
      return n
    }
    panic(`${this.value} is not a number (${this.line}:${this.col})`)
    return 0
  }

  // Boolean interpretation; panics when the text is neither "true" nor "false".
  asBool() :bool {
    switch (this.value) {
      case "true": return true
      case "false": return false
    }
    panic(`${this.value} is not a boolean (${this.line}:${this.col})`)
    return false
  }
}
// Sentinel "nil" symbol, used by diff to mark a missing counterpart.
export const nil = new Sym(0,0,"nil")

// A Pre is a Sym that remembers its original delimiter so it can be
// printed back in source form ('..', "..", <..>, [..] or {..}).
export class Pre extends Sym {
  type :PreType
  constructor(line :int, col :int, value :string, type :PreType) {
    super(line, col, value)
    this.type = type
  }
  toString() {
    switch (this.type) {
      case "'": return `'${this.value}'`
      case '"': return `"${this.value}"`
      case "<": return `<${this.value}>`
      case "[": return `[${this.value}]`
      case "{": return `{${this.value}}`
    }
    return this.value
  }
  isPre() : this is Pre { return true }
}
// -------------------------------------------------------------
// parse
// InterpretAs is used for options to the parser, indicating how to interpret certain syntax.
export enum InterpretAs {
  Default = 0,
  Sym,  // the delimiter becomes part of the surrounding symbol token
  Pre,  // the delimited span is captured verbatim as a Pre
}
// Short aliases for the enum members.
export const DEFAULT = InterpretAs.Default
, AS_SYM = InterpretAs.Sym
, AS_PRE = InterpretAs.Pre
// Error thrown by parse(); position fields are filled in by the
// parser's syntaxErr helper after construction.
// NOTE(review): this shadows the global SyntaxError within this module.
export class SyntaxError extends Error {
  line :int
  col :int
  file :string
}
// Options accepted by parse().
export interface ParseOptions {
  filename? :string    // used in error messages; defaults to "<input>"
  lineOffset? :int // offset source location lines by N
  brack? :InterpretAs  // how to interpret [...]
  brace? :InterpretAs  // how to interpret {...}
  ltgt? :InterpretAs   // how to interpret <...>
}
// Overloads: callers may pass either a filename string or a full
// options object as the second argument.
export function parse(src :string, options? :ParseOptions) :List
export function parse(src :string, filename? :string) :List
export function parse(src :string, arg1? :string|ParseOptions) :List {
  let options :ParseOptions = (
    typeof arg1 == "string" ? {filename:arg1} :
    arg1 ? arg1 :
    {}
  )
  // Scanner state, shared by all the closures below.
  let i = 0          // offset of the next character to read
  , c = 0            // current character code
  , nextc = 0        // one-character lookahead (0 at end of input)
  , line = (options.lineOffset || 0) + 1
  , lineStart = 0    // offset of the start of the current line (for columns)
  , symstart = -1    // start offset of the symbol being scanned, or -1

  // Consumes input through the next line terminator (LF, CR or CRLF).
  function readRestOfLine() {
    while (i < src.length) {
      c = src.charCodeAt(i++)
      nextc = src.charCodeAt(i) || 0
      if (c == lf) {
        return
      }
      if (c == cr) {
        if (nextc == lf) {
          // CRLF
          i++
        }
        return
      }
    }
  }

  // Records that a new source line begins at the current offset.
  const newLine = () => {
    lineStart = i
    line++
  }

  // Throws a SyntaxError annotated with file, line and column.
  const syntaxErr = (msg :string) => {
    let col = i - lineStart
    let file = options.filename || "<input>"
    let e = new SyntaxError(`${file}:${line}:${col}: ${msg}`)
    e.name = "SyntaxError"
    e.file = file
    e.line = line
    e.col = col
    throw e
  }

  // Marks the start of a symbol token (no-op if one is already open).
  const startSym = () => {
    if (symstart == -1) {
      symstart = i - 1
    }
  }

  // If a symbol is currently being scanned, terminates it and appends
  // it to dst (with optional prefix). Returns true if one was flushed.
  function flushSym<T extends Union|List>(dst :T, prefix? :string) {
    if (symstart != -1) {
      let s = src.substring(symstart, i - 1)
      if (prefix) {
        s = prefix + s
      }
      dst.push(new Sym(line, symstart - lineStart + 1, s))
      symstart = -1
      return true
    }
    return false
  }

  // Parses a union expansion like foo(bar|baz). The prefix ("foo") has
  // already been scanned when the "(" is seen; i is just past it.
  const parseUnion = () => {
    let expectingPipe = false
    let prefix = src.substring(symstart, i - 1)
    symstart = -1
    let u = new Union(line, i - lineStart, prefix)
    while_loop: while (i < src.length) {
      c = src.charCodeAt(i++)
      switch (c) {
      case sp:
      case tab:
        if (flushSym(u, prefix)) {
          expectingPipe = true
        }
        break
      case pipe:
        if (!expectingPipe) {
          syntaxErr(`unexpected "|"`)
        }
        flushSym(u, prefix)
        expectingPipe = false
        break
      case rparen:
        if (!flushSym(u, prefix) && !expectingPipe) {
          // case: foo(bar|) => foobar, foo
          u.push(new Sym(line, i - lineStart, prefix))
        }
        break while_loop
      case cr:
      case lf:
      case lbrack:
      case lbrace:
      case rbrack:
      case rbrace:
        // newlines and brackets are not allowed inside a union
        syntaxErr(`unexpected ${repr(chr(c))} in union`)
        break
      default:
        if (symstart == -1) {
          if (expectingPipe) {
            syntaxErr(`expected "|" or ")" but found ${repr(chr(c))}`)
          }
          expectingPipe = true
        }
        startSym()
      }
    }
    return u
  }

  // Parses a delimited pre-form (e.g. '...' or "..."), handling the
  // escapes \r \n \t \\ and escaped delimiters.
  const parsePre = (startc :int, endc :int, type :PreType) :Pre => {
    let buf :int[] = []
    let value :string|null = null
    let startindex = i
    let startline = line
    while_loop: while (i < src.length) {
      c = src.charCodeAt(i++)
      switch (c) {
      case bslash:
        switch (c = src.charCodeAt(i++)) {
        case ch_r: buf.push(cr); break
        case ch_n: buf.push(lf); break
        case ch_t: buf.push(tab); break
        case bslash: buf.push(bslash); break
        case startc:
        case endc:
          buf.push(c)
          break
        default:
          // instead of causing an error on unknown escape, interpret vanilla.
          buf.push(bslash)
          buf.push(c)
          // syntaxErr(`invalid string escape seq "\\${chr(c)}"`)
          break
        }
        break
      case endc:
        value = String.fromCharCode.apply(String, buf)
        break while_loop
      default:
        buf.push(c)
      }
    }
    if (value === null) {
      syntaxErr(`unterminated string`)
    }
    return new Pre(startline, startindex - lineStart, value as string, type)
  }

  // How [..], {..} and <..> parse depends on the options: either as a
  // verbatim pre-form (AS_PRE) or as a nested list (the default).
  type PreOrListParser = (startc :int, endc :int, type :ListType&PreType)=>Value
  const parseBrack :PreOrListParser = (
    options.brack == AS_PRE ? parsePre : (_ :int, endc :int, type :ListType&PreType) :Value => {
      return parseList(endc, type)
    }
  )
  const parseBrace :PreOrListParser = (
    options.brace == AS_PRE ? parsePre : (_ :int, endc :int, type :ListType&PreType) :Value => {
      return parseList(endc, type)
    }
  )
  const parseLtgt :PreOrListParser = (
    options.ltgt == AS_PRE ? parsePre : (_ :int, endc :int, type :ListType&PreType) :Value => {
      return parseList(endc, type)
    }
  )

  // Parses list elements until endchar (0 = end of input) is seen.
  function parseList(endchar :int, type :ListType) :List {
    let list = new List(line, i - lineStart, type)
    while_loop: while (i < src.length) {
      c = src.charCodeAt(i++)
      nextc = src.charCodeAt(i) || 0
      switch (c) {
      case lparen:
        if (symstart != -1) {
          // expansion e.g. foo(bar|baz) => foobar foobaz
          list.push(parseUnion())
        } else {
          list.push(parseList(rparen, "("))
        }
        break
      case endchar:
        flushSym(list)
        break while_loop
      case rparen:
      case rbrack:
      case rbrace:
        // a closer that is not this list's endchar is unbalanced
        syntaxErr(`unbalanced ${repr(chr(c))}`)
        break
      case semic:
        // ";" comment — runs to end of line
        flushSym(list)
        // let commentStart = ++i
        readRestOfLine()
        // print(`comment: ${repr(src.substring(commentStart, i-1))}`)
        newLine()
        break
      case slash:
        if (nextc == slash) {
          // "//" comment — runs to end of line
          flushSym(list)
          // let commentStart = ++i
          readRestOfLine()
          // print(`comment: ${repr(src.substring(commentStart, i-1))}`)
          newLine()
        } else {
          startSym()
        }
        break
      case cr:
        flushSym(list)
        if (nextc == lf) {
          // consume LF after CR
          i++
        }
        newLine()
        break
      case sp:
      case tab:
      case lf:
        flushSym(list)
        if (c == lf) {
          newLine()
        }
        break
      case squote: // '...'
        flushSym(list)
        list.push(parsePre(c, squote, "'"))
        break
      case dquote: // "..."
        flushSym(list)
        list.push(parsePre(c, dquote, '"'))
        break
      case lbrack: // [...]
        if (options.brack == AS_SYM) {
          startSym()
        } else {
          flushSym(list)
          list.push(parseBrack(c, rbrack, "["))
        }
        break
      case lbrace: // {...}
        if (options.brace == AS_SYM) {
          startSym()
        } else {
          flushSym(list)
          list.push(parseBrace(c, rbrace, "{"))
        }
        break
      case lt: // <...>
        if (options.ltgt == AS_SYM) {
          startSym()
        } else {
          flushSym(list)
          list.push(parseLtgt(c, gt, "<"))
        }
        break
      default:
        // any other character is part of a token
        startSym()
      }
    }
    return list
  }
  // NOTE(review): a list left unterminated at end of input is returned
  // as-is rather than reported; the tests below rely on this.
  return parseList(0, "")
}
// ----------------------------------------------------------
// Tracing hooks for diff — compiled out as no-ops. To debug, swap in
// the commented implementations below.
let ind = ""
// const diffprint = print
// const indincr = () => ind += " "
// const inddecr = () => ind = ind.substr(0, ind.length-2)
function diffprint(..._args :any[]) {}
function indincr() {}
function inddecr() {}
// diff computes the set difference of right compared to left.
//
// Difference is not uniform, but:
// - left subset of or equal to right
// - right superset of or equal to left
//
// i.e. left ⊆ right ⊇ left
//
export function diff(L :Value, R :Value|null) :List|null {
  // Dispatch on the exact constructor: Pre extends Sym, so instanceof
  // would conflate the two — constructor identity keeps them distinct.
  switch (L.constructor) {
    case List:  return diffList(L as List, R)[1]
    case Sym:   return diffSym(L as Sym, R)
    case Pre:   return diffPre(L as Pre, R)
    case Union: return diffUnion(L as Union, R)
    default:    return null
  }
}
// Computes the difference of two lists (left ⊆ right). Returns a pair
// [partial, d] where `partial` is true when at least part of left
// matched right, and `d` is null when no difference was found,
// otherwise a list of (L R) substitution pairs.
function diffList(left :List, right :Value|null) :[bool,List|null] {
  // This is a total mess. Sorry.
  if (!right) {
    return [false, List.create(left.line, left.col, left, nil)]
  }
  // Wholly different when the shapes can't line up: not both lists,
  // bracket types differ, either side empty, or — for lists headed by
  // a non-list value — the heads differ.
  if (
    !(right instanceof List) ||
    right.type != left.type ||
    left.length == 0 ||
    right.length == 0 ||
    (!(left[0] instanceof List) && diff(left[0], right[0])) // (a x) (b x)
  ) {
    return [false, List.create(left.line, left.col, left, right)]
  }
  diffprint(`${ind}diffList ${left} ⊆ ${right}`)
  let partialMatchIndex = -1
  let bestMatch :Value|null = null
  // Scans right (past minIndex) for an element matching L; returns its
  // index or -1. Side effects: partialMatchIndex / bestMatch above.
  function find(L :Value, minIndex :int) :int {
    let index = 0
    partialMatchIndex = -1
    bestMatch = null
    for (let R of right as List) {
      if (R.constructor === L.constructor && index > minIndex) {
        let d :List|null
        if (L instanceof List) {
          let [partial, d2] = diffList(L, R)
          d = d2
          if (partial) {
            partialMatchIndex = index
          }
        } else {
          d = diff(L, R)
        }
        if (!d) {
          // exact match found
          return index
        }
        if (R.constructor === L.constructor) {
          bestMatch = R
        }
        diffprint(`${ind}res ${L} ⊆ ${R} ⇒ ${d}`)
      }
      index++
    }
    return -1
  }
  let d :List|null = null
  let lastIndex = -1
  let nmatches = 0
  // An "id list" is headed by a non-list value, e.g. (name arg1 arg2);
  // its head was already matched by the guard above, so skip it.
  let isIdList = !(left[0] instanceof List)
  for (let li = isIdList ? 1 : 0; li < left.length; li++) {
    let L = left[li]
    diffprint(`${ind}find ${L}`)
    indincr()
    let i = find(L, lastIndex)
    inddecr()
    if (i == -1) {
      // if (partialMatchIndex != -1) {
      //   diffprint(`${ind}PARTIAL match ${right[partialMatchIndex]}`)
      // }
      if (!d) {
        d = new List(left.line, left.col, "(")
        if (isIdList) {
          d.push(left[0])
        }
      }
      // choose the right-hand value to pair with the unmatched L
      let R = right[partialMatchIndex == -1 ? li : partialMatchIndex]
      if (!R || R instanceof Sym) {
        if (bestMatch && !((bestMatch as any) instanceof Sym)) {
          let R2 = bestMatch as any as Value
          if (isIdList && R2 instanceof List && diff(left[0], R2[0])) {
            // don't use best-match lists unless they match on arg0
            diffprint(`${ind}R2 erasure ${R2} <${R2.constructor.name}>`)
            R = nil
          } else {
            R = bestMatch
          }
        } else {
          // if (bestMatch) {
          //   diffprint(`${ind}ignoring bestMatch ${bestMatch}`)
          // }
          R = nil
        }
      }
      diffprint(`${ind}not found; select substitute ${R}`)
      d.push(List.create(L.line, L.col, L, R))
    } else {
      diffprint(`${ind}found at ${i}`)
      lastIndex = i
      nmatches++
    }
  }
  let partial = !!d && nmatches > 0
  if (!partial && !(left[0] instanceof List)) {
    // diffprint(`${ind}•• not partial but is id list`)
    partial = true
  }
  return [partial, d]
}
// Difference of two symbols: null when the texts are equal, otherwise
// an (L R) pair (R becomes nil when missing).
function diffSym(L :Sym, R :Value|null) :List|null {
  if (!R) {
    return List.create(L.line, L.col, L, nil)
  }
  if (R instanceof Sym && R.value === L.value) {
    return null
  }
  return List.create(L.line, L.col, L, R)
}
// Like diffSym, but pre-forms with equal text still differ when their
// delimiter types differ.
function diffPre(L :Pre, R :Value|null) :List|null {
  const d = diffSym(L, R)
  if (d === null && R instanceof Pre && L.type !== R.type) {
    return List.create(L.line, L.col, L, R)
  }
  return d
}
// Difference of two unions. Unlike lists, unions must match exactly:
// same prefix, same length, same members in the same order.
function diffUnion(L :Union, R :Value|null) :List|null {
  if (!R) {
    return List.create(L.line, L.col, L, nil)
  }
  if (!(R instanceof Union) || L.prefix !== R.prefix || L.length !== R.length) {
    return List.create(L.line, L.col, L, R)
  }
  for (const [i, sym] of L.entries()) {
    if (diffSym(sym, R[i]) !== null) {
      return List.create(L.line, L.col, L, R)
    }
  }
  return null
}
// ----------------------------------------------------------
// test
// Asserts v is a plain Sym (not a Pre subclass) with the given text.
function assertS(v :any, value :string) {
  assert(v.constructor === Sym)
  const got = v.value
  assert(got == value, `${got} == ${value}`)
}
// Asserts v is a Pre with the given text and delimiter type.
function assertP(v :any, value :string, type :PreType) {
  assert(v.constructor === Pre)
  const gotValue = v.value
  const gotType = v.type
  assert(gotValue == value, `${gotValue} == ${value}`)
  assert(gotType == type, `${gotType} == ${type}`)
}
// Asserts v is a List with the given bracket type and element count.
function assertL(v :any, type :ListType, length :int) {
  assert(v.constructor === List)
  const gotType = v.type
  const gotLen = v.length
  assert(gotType == type, `${gotType} == ${type}`)
  assert(gotLen == length, `${gotLen} == ${length}`)
}
// Parser smoke test: nested lists, bracket and angle lists, a line
// comment, and both quote forms.
TEST("parse", () => {
  let xs = parse(`
(a
(b c
[d e])
<f g> // comment
"h i"
'j k')
`) as any
  assertL(xs, "", 1) // parse returns an anonymous list of lists
  assertL(xs[0], "(", 5)
  assertS(xs[0][0], "a")
  assertL(xs[0][1], "(", 3)
  assertS(xs[0][1][0], "b")
  assertS(xs[0][1][1], "c")
  assertL(xs[0][1][2], "[", 2)
  assertL(xs[0][2], "<", 2)
  assertS(xs[0][2][0], "f")
  assertS(xs[0][2][1], "g")
  assertP(xs[0][3], "h i", '"')
  assertP(xs[0][4], "j k", "'")
})
// Round-trip rendering via List.toString, with and without line breaks.
TEST("toString", () => {
  let xs = parse(`(a (b c [d e]) <f g> "h i" 'j k')`)[0]
  assertEq(xs.toString(), `(a (b c (d e)) (f g) "h i" 'j k')`)
  assertEq(xs.toString("\n"), `
(a
(b c
(d e))
(f g) "h i" 'j k')
`.trim().replace(/\n /g, "\n"))
  // note that toString only really supports "\n" and " " as the provided
  // string is inserted at list starting points, as illustrated here:
  assertEq(xs.toString("•"), `(a•(b c•(d e))•(f g) "h i" 'j k')`)
})
// diff() semantics: left ⊆ right yields null; otherwise a list of
// (L R) substitution pairs.
// NOTE(review): several right-hand inputs below lack a closing ")" —
// the parser tolerates unterminated lists at end of input, so these
// still parse; presumably typos that happen to be harmless.
TEST('diff', () => {
  assertEq(diff(parse("(a c)")[0],
                parse("(a b c d)")[0])+"",
           "null") // no difference
  assertEq(diff(parse("(a (x y))")[0],
                parse("(a b (x y)")[0])+"",
           "null")
  assertEq(diff(parse("(a c x)")[0],
                parse("(a b c d e)")[0])+"",
           "(a (x nil))") // x not found
  assertEq(diff(parse("(a c x y)")[0],
                parse("(a b c d e)")[0])+"",
           "(a (x nil) (y nil))") // x and y not found
  assertEq(diff(parse("(a b (c (d x)))")[0],
                parse("(a b (c k (d z)))")[0])+"",
           "(a ((c (d x)) (c k (d z))))") // list "c" is different
  assertEq(diff(parse("(a (c (d x)) (g h))")[0],
                parse("(a (b 1) (c k (d z)) p (g h))")[0])+"",
           "(a ((c (d x)) (c k (d z))))") // list "c" is different
  assertEq(diff(parse("(a (x y))")[0],
                parse("(a b (y x)")[0])+"",
           "(a ((x y) nil))") // y out-of order
  assertEq(diff(parse("(a u(x|y))")[0],
                parse("(a b u(x|y)")[0])+"",
           "null") // unions
  assertEq(diff(parse("(a u(x|y))")[0],
                parse("(a b u(x|y|z)")[0])+"",
           "(a (u(x|y) u(x|y|z)))") // unions must match exactly
  assertEq(diff(parse("(a k(x|y))")[0],
                parse("(a b z(x|y)")[0])+"",
           "(a (k(x|y) z(x|y)))") // unions with different prefix
  assertEq(diff(parse(`
(a
(b b1)
a1
(b b2
(c c1
(d d1)))
a2)
`)[0], parse(`
(a
(b b1)
a1
(b b2
(c c1
(d d1)))
a2)
`)[0])+"",
           "null")
  diffprint("----------------------------")
  assertEq(diff(parse(`
(a
(b
(c d)))
`)[0], parse(`
(a
x
(b
y
(c k)))
`)[0])+"",
           "(a ((b (c d)) (b y (c k))))")
  assertEq(diff(parse(`
(a
(b b1 matches)
a1
(b
(c c1
(d d1)))
a2)
`)[0], parse(`
(a
(b b1 matches)
a1
ignored
(b
included
(c c1
(d dx)))
a2)
`)[0])+"",
           "(a ((b (c c1 (d d1))) (b included (c c1 (d dx)))))")
  // // source location
  // { let d = diff(parse(
  // ` // line 1
  // (a // line 2
  // (b b1) // line 3
  // a2) // line 4
  // `)[0], parse(
  // ` // line 1
  // (a // line 2
  // x // line 3
  // y // line 4
  // (b bx) // line 5
  // a2) // line 6
  // `)[0]) as any
  // assertEq(d+"", "(((b1 bx)))")
  // assertEq(d[0][0][0].line, 3) // b1
  // assertEq(d[0][0][1].line, 5) // bx
  // assertEq(d[0].line, 3) // synthetic lists inherit location of left side
  // }
})
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config-base';
// Empty placeholder — NOTE(review): presumably declared so these typings
// don't depend on the DOM lib's Blob type; confirm against sibling SDK files.
interface Blob {}
/**
 * Client for the Recycle Bin ("rbin") service: create/read/update/delete
 * of retention rules, plus resource tagging. Every operation is exposed
 * in two overloads — one taking request params, one callback-only.
 */
declare class Rbin extends Service {
  /**
   * Constructs a service object. This object has one method for each API operation.
   */
  constructor(options?: Rbin.Types.ClientConfiguration)
  config: Config & Rbin.Types.ClientConfiguration;
  /**
   * Creates a Recycle Bin retention rule. For more information, see  Create Recycle Bin retention rules in the Amazon EC2 User Guide.
   */
  createRule(params: Rbin.Types.CreateRuleRequest, callback?: (err: AWSError, data: Rbin.Types.CreateRuleResponse) => void): Request<Rbin.Types.CreateRuleResponse, AWSError>;
  /**
   * Creates a Recycle Bin retention rule. For more information, see  Create Recycle Bin retention rules in the Amazon EC2 User Guide.
   */
  createRule(callback?: (err: AWSError, data: Rbin.Types.CreateRuleResponse) => void): Request<Rbin.Types.CreateRuleResponse, AWSError>;
  /**
   * Deletes a Recycle Bin retention rule. For more information, see  Delete Recycle Bin retention rules in the Amazon EC2 User Guide.
   */
  deleteRule(params: Rbin.Types.DeleteRuleRequest, callback?: (err: AWSError, data: Rbin.Types.DeleteRuleResponse) => void): Request<Rbin.Types.DeleteRuleResponse, AWSError>;
  /**
   * Deletes a Recycle Bin retention rule. For more information, see  Delete Recycle Bin retention rules in the Amazon EC2 User Guide.
   */
  deleteRule(callback?: (err: AWSError, data: Rbin.Types.DeleteRuleResponse) => void): Request<Rbin.Types.DeleteRuleResponse, AWSError>;
  /**
   * Gets information about a Recycle Bin retention rule.
   */
  getRule(params: Rbin.Types.GetRuleRequest, callback?: (err: AWSError, data: Rbin.Types.GetRuleResponse) => void): Request<Rbin.Types.GetRuleResponse, AWSError>;
  /**
   * Gets information about a Recycle Bin retention rule.
   */
  getRule(callback?: (err: AWSError, data: Rbin.Types.GetRuleResponse) => void): Request<Rbin.Types.GetRuleResponse, AWSError>;
  /**
   * Lists the Recycle Bin retention rules in the Region.
   */
  listRules(params: Rbin.Types.ListRulesRequest, callback?: (err: AWSError, data: Rbin.Types.ListRulesResponse) => void): Request<Rbin.Types.ListRulesResponse, AWSError>;
  /**
   * Lists the Recycle Bin retention rules in the Region.
   */
  listRules(callback?: (err: AWSError, data: Rbin.Types.ListRulesResponse) => void): Request<Rbin.Types.ListRulesResponse, AWSError>;
  /**
   * Lists the tags assigned a specific resource.
   */
  listTagsForResource(params: Rbin.Types.ListTagsForResourceRequest, callback?: (err: AWSError, data: Rbin.Types.ListTagsForResourceResponse) => void): Request<Rbin.Types.ListTagsForResourceResponse, AWSError>;
  /**
   * Lists the tags assigned a specific resource.
   */
  listTagsForResource(callback?: (err: AWSError, data: Rbin.Types.ListTagsForResourceResponse) => void): Request<Rbin.Types.ListTagsForResourceResponse, AWSError>;
  /**
   * Assigns tags to the specified resource.
   */
  tagResource(params: Rbin.Types.TagResourceRequest, callback?: (err: AWSError, data: Rbin.Types.TagResourceResponse) => void): Request<Rbin.Types.TagResourceResponse, AWSError>;
  /**
   * Assigns tags to the specified resource.
   */
  tagResource(callback?: (err: AWSError, data: Rbin.Types.TagResourceResponse) => void): Request<Rbin.Types.TagResourceResponse, AWSError>;
  /**
   * Unassigns a tag from a resource.
   */
  untagResource(params: Rbin.Types.UntagResourceRequest, callback?: (err: AWSError, data: Rbin.Types.UntagResourceResponse) => void): Request<Rbin.Types.UntagResourceResponse, AWSError>;
  /**
   * Unassigns a tag from a resource.
   */
  untagResource(callback?: (err: AWSError, data: Rbin.Types.UntagResourceResponse) => void): Request<Rbin.Types.UntagResourceResponse, AWSError>;
  /**
   * Updates an existing Recycle Bin retention rule. For more information, see  Update Recycle Bin retention rules in the Amazon EC2 User Guide.
   */
  updateRule(params: Rbin.Types.UpdateRuleRequest, callback?: (err: AWSError, data: Rbin.Types.UpdateRuleResponse) => void): Request<Rbin.Types.UpdateRuleResponse, AWSError>;
  /**
   * Updates an existing Recycle Bin retention rule. For more information, see  Update Recycle Bin retention rules in the Amazon EC2 User Guide.
   */
  updateRule(callback?: (err: AWSError, data: Rbin.Types.UpdateRuleResponse) => void): Request<Rbin.Types.UpdateRuleResponse, AWSError>;
}
declare namespace Rbin {
export interface CreateRuleRequest {
    /**
     * Information about the retention period for which the retention rule is to retain resources.
     */
    RetentionPeriod: RetentionPeriod;
    /**
     * A brief description for the retention rule.
     */
    Description?: Description;
    /**
     * Information about the tags to assign to the retention rule.
     */
    Tags?: TagList;
    /**
     * The resource type to be retained by the retention rule. Currently, only Amazon EBS snapshots are supported.
     */
    ResourceType: ResourceType;
    /**
     * Information about the resource tags to use to identify resources that are to be retained by the retention rule. The retention rule retains only deleted snapshots that have one or more of the specified tag key and value pairs. If a snapshot is deleted, but it does not have any of the specified tag key and value pairs, it is immediately deleted without being retained by the retention rule.  You can add the same tag key and value pair to a maximum or five retention rules.
     */
    ResourceTags?: ResourceTags;
  }
  export interface CreateRuleResponse {
    /**
     * The unique identifier of the retention rule.
     */
    Identifier?: RuleIdentifier;
    /**
     * The retention period assigned to the retention rule.
     */
    RetentionPeriod?: RetentionPeriod;
    /**
     * The retention rule description.
     */
    Description?: Description;
    /**
     * The tags assigned to the retention rule.
     */
    Tags?: TagList;
    /**
     * The resource type retained by the retention rule.
     */
    ResourceType?: ResourceType;
    /**
     * Information about the resource tags used to identify resources that are retained by the retention rule.
     */
    ResourceTags?: ResourceTags;
    /**
     * The state of the retention rule. Only retention rules that are in the available state retain snapshots.
     */
    Status?: RuleStatus;
  }
  export interface DeleteRuleRequest {
    /**
     * The unique ID of the retention rule to delete.
     */
    Identifier: RuleIdentifier;
  }
  // Intentionally empty: a successful DeleteRule returns no data.
  export interface DeleteRuleResponse {
  }
  export type Description = string;
  export interface GetRuleRequest {
    /**
     * The unique ID of the retention rule.
     */
    Identifier: RuleIdentifier;
  }
  export interface GetRuleResponse {
    /**
     * The unique ID of the retention rule.
     */
    Identifier?: RuleIdentifier;
    /**
     * The description assigned to the retention rule.
     */
    Description?: Description;
    /**
     * The resource type retained by the retention rule. Currently, only Amazon EBS snapshots are supported.
     */
    ResourceType?: ResourceType;
    /**
     * Information about the period for which the retention rule retains resources.
     */
    RetentionPeriod?: RetentionPeriod;
    /**
     * The resource tags used to identify resources that are to be retained by the retention rule.
     */
    ResourceTags?: ResourceTags;
    /**
     * The state of the retention rule. Only retention rules that are in the available state retain snapshots.
     */
    Status?: RuleStatus;
  }
export interface ListRulesRequest {
/**
* The maximum number of results to return for the request in a single page. The remaining results can be seen by sending another request with the returned nextToken value. This value can be between 5 and 500. If maxResults is given a larger value than 500, you receive an error.
*/
MaxResults?: MaxResults;
/**
* The token to use to retrieve the next page of results.
*/
NextToken?: NextToken;
/**
* The resource type retained by the retention rule. Only retention rules that retain the specified resource type are listed.
*/
ResourceType: ResourceType;
/**
* The tags used to identify resources that are to be retained by the retention rule.
*/
ResourceTags?: ResourceTags;
}
export interface ListRulesResponse {
/**
* Information about the retention rules.
*/
Rules?: RuleSummaryList;
/**
* The token to use to retrieve the next page of results. This value is null when there are no more results to return.
*/
NextToken?: NextToken;
}
export interface ListTagsForResourceRequest {
/**
* The Amazon Resource Name (ARN) of the resource for which to list the tags.
*/
ResourceArn: RuleArn;
}
export interface ListTagsForResourceResponse {
/**
* Information about the tags assigned to the resource.
*/
Tags?: TagList;
}
export type MaxResults = number;
export type NextToken = string;
export interface ResourceTag {
/**
* The tag key.
*/
ResourceTagKey: ResourceTagKey;
/**
* The tag value.
*/
ResourceTagValue?: ResourceTagValue;
}
export type ResourceTagKey = string;
export type ResourceTagValue = string;
export type ResourceTags = ResourceTag[];
export type ResourceType = "EBS_SNAPSHOT"|string;
export interface RetentionPeriod {
/**
* The period value for which the retention rule is to retain resources. The period is measured using the unit specified for RetentionPeriodUnit.
*/
RetentionPeriodValue: RetentionPeriodValue;
/**
* The unit of time in which the retention period is measured. Currently, only DAYS is supported.
*/
RetentionPeriodUnit: RetentionPeriodUnit;
}
export type RetentionPeriodUnit = "DAYS"|string;
export type RetentionPeriodValue = number;
export type RuleArn = string;
export type RuleIdentifier = string;
export type RuleStatus = "pending"|"available"|string;
export interface RuleSummary {
/**
* The unique ID of the retention rule.
*/
Identifier?: RuleIdentifier;
/**
* The description for the retention rule.
*/
Description?: Description;
/**
* Information about the retention period for which the retention rule retains resources
*/
RetentionPeriod?: RetentionPeriod;
}
export type RuleSummaryList = RuleSummary[];
export interface Tag {
/**
* The tag key.
*/
Key: TagKey;
/**
* The tag value.
*/
Value: TagValue;
}
export type TagKey = string;
export type TagKeyList = TagKey[];
export type TagList = Tag[];
export interface TagResourceRequest {
/**
* The Amazon Resource Name (ARN) of the resource to which to assign the tags.
*/
ResourceArn: RuleArn;
/**
* Information about the tags to assign to the resource.
*/
Tags: TagList;
}
export interface TagResourceResponse {
}
export type TagValue = string;
export interface UntagResourceRequest {
/**
* The Amazon Resource Name (ARN) of the resource from which to unassign the tags.
*/
ResourceArn: RuleArn;
/**
* Information about the tags to unassign from the resource.
*/
TagKeys: TagKeyList;
}
export interface UntagResourceResponse {
}
export interface UpdateRuleRequest {
/**
* The unique ID of the retention rule to update.
*/
Identifier: RuleIdentifier;
/**
* Information about the retention period for which the retention rule is to retain resources.
*/
RetentionPeriod?: RetentionPeriod;
/**
* The retention rule description.
*/
Description?: Description;
/**
* The resource type to be retained by the retention rule. Currently, only Amazon EBS snapshots are supported.
*/
ResourceType?: ResourceType;
/**
* Information about the resource tags to use to identify resources that are to be retained by the retention rule. The retention rule retains only deleted snapshots that have one or more of the specified tag key and value pairs. If a snapshot is deleted, but it does not have any of the specified tag key and value pairs, it is immediately deleted without being retained by the retention rule. You can add the same tag key and value pair to a maximum or five retention rules.
*/
ResourceTags?: ResourceTags;
}
export interface UpdateRuleResponse {
/**
* The unique ID of the retention rule.
*/
Identifier?: RuleIdentifier;
RetentionPeriod?: RetentionPeriod;
/**
* The retention rule description.
*/
Description?: Description;
/**
* The resource type retained by the retention rule.
*/
ResourceType?: ResourceType;
/**
* Information about the resource tags used to identify resources that are retained by the retention rule.
*/
ResourceTags?: ResourceTags;
/**
* The state of the retention rule. Only retention rules that are in the available state retain snapshots.
*/
Status?: RuleStatus;
}
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
export type apiVersion = "2021-06-15"|"latest"|string;
export interface ClientApiVersions {
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
apiVersion?: apiVersion;
}
export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
/**
* Contains interfaces for use with the Rbin client.
*/
export import Types = Rbin;
}
export = Rbin;
import {IBlock} from './Blocks/IBlock';
// SOURCE BLOCKS //
import {Granular} from './Blocks/Sources/Granular';
import {Microphone} from './Blocks/Sources/Microphone';
import {Noise} from './Blocks/Sources/Noise';
import {Recorder} from './Blocks/Sources/Recorder';
import {Sample} from './Blocks/Sources/Sample';
import {ToneSource} from './Blocks/Sources/ToneSource';
import {SampleGen} from './Blocks/Sources/SampleGen';
// EFFECTS BLOCKS //
import {AutoWah} from 'Blocks/Effects/Post/AutoWah';
import {BitCrusher} from 'Blocks/Effects/Post/BitCrusher';
import {Chomp} from 'Blocks/Effects/Post/Chomp';
import {Chopper} from 'Blocks/Effects/Post/Chopper';
import {Chorus} from 'Blocks/Effects/Post/Chorus';
import {Convolver} from 'Blocks/Effects/Post/ConvolutionReverb';
import {Distortion} from 'Blocks/Effects/Post/Distortion';
import {Envelope} from 'Blocks/Effects/Pre/Envelope';
import {EQ} from 'Blocks/Effects/Post/EQ';
import {Filter} from 'Blocks/Effects/Post/Filter';
import {Volume} from 'Blocks/Effects/Post/Volume';
import {Phaser} from 'Blocks/Effects/Post/Phaser';
import {PitchShifter} from 'Blocks/Effects/Post/PitchShifter';
import {Reverb} from 'Blocks/Effects/Post/Reverb';
import {Scuzz} from 'Blocks/Effects/Pre/Scuzz';
import {StereoDelay} from 'Blocks/Effects/Post/StereoDelay';
import {Vibrato} from 'Blocks/Effects/Pre/Vibrato';
// POWER BLOCKS //
import {Laser} from './Blocks/Power/Laser';
import {Pulse} from './Blocks/Power/Logic/Pulse';
import {ParticleEmitter} from './Blocks/Power/ParticleEmitter';
import {Power} from './Blocks/Power/Power';
import {Toggle} from './Blocks/Power/Logic/Toggle';
import {Void} from './Blocks/Power/Void';
// INTERACTION BLOCKS //
import {ComputerKeyboard} from 'Blocks/Interaction/ComputerKeyboard';
import {MIDIController} from 'Blocks/Interaction/MIDIController';
import {IApp} from "./IApp";
declare var App: IApp;
export class BlockCreator {
// SOURCE BLOCKS //
public Granular = Granular;
public Microphone = Microphone;
public Noise = Noise;
public Recorder = Recorder;
public Sample = Sample;
public ToneSource = ToneSource;
public SampleGen = SampleGen;
// EFFECTS BLOCKS //
public AutoWah = AutoWah;
public BitCrusher = BitCrusher;
public Chomp = Chomp;
public Chopper = Chopper;
public Chorus = Chorus;
public Convolver = Convolver;
public Distortion = Distortion;
public Envelope = Envelope;
public EQ = EQ;
public Filter = Filter;
public Phaser = Phaser;
public PitchShifter = PitchShifter;
public Reverb = Reverb;
public Scuzz = Scuzz;
public StereoDelay = StereoDelay;
public Vibrato = Vibrato;
public Volume = Volume;
// POWER BLOCKS //
public Laser = Laser;
public ParticleEmitter = ParticleEmitter;
public Power = Power;
public Pulse = Pulse;
public Toggle = Toggle;
public Void = Void;
// INTERACTION BLOCKS //
public ComputerKeyboard = ComputerKeyboard;
public MIDIController = MIDIController;
constructor() {
}
public MenuJson: any = {
"categories": [
{
"name": App.L10n.Blocks.Source.Label,
"items": [
{
"name": App.L10n.Blocks.Source.Blocks.Tone.name,
"id": ToneSource,
"description": App.L10n.Blocks.Source.Blocks.Tone.description
},
{
"name": App.L10n.Blocks.Source.Blocks.Noise.name,
"id": Noise,
"description": App.L10n.Blocks.Source.Blocks.Noise.description
},
{
"name": App.L10n.Blocks.Source.Blocks.Microphone.name,
"id": Microphone,
"description": App.L10n.Blocks.Source.Blocks.Microphone.description
},
{
"name": App.L10n.Blocks.Source.Blocks.Sample.name,
"id": Sample,
"description": App.L10n.Blocks.Source.Blocks.Sample.description
},
{
"name": App.L10n.Blocks.Source.Blocks.Granular.name,
"id": Granular,
"description": App.L10n.Blocks.Source.Blocks.Granular.description
},
{
"name": App.L10n.Blocks.Source.Blocks.Recorder.name,
"id": Recorder,
"description": App.L10n.Blocks.Source.Blocks.Recorder.description
},
{
"name": App.L10n.Blocks.Source.Blocks.SampleGen.name,
"id": SampleGen,
"description": App.L10n.Blocks.Source.Blocks.SampleGen.description
}
]
},
{
"name": App.L10n.Blocks.Effect.Label,
"items": [
{
"name": App.L10n.Blocks.Effect.Blocks.AutoWah.name,
"id": AutoWah,
"description": App.L10n.Blocks.Effect.Blocks.AutoWah.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.BitCrusher.name,
"id": BitCrusher,
"description": App.L10n.Blocks.Effect.Blocks.BitCrusher.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Chomp.name,
"id": Chomp,
"description": App.L10n.Blocks.Effect.Blocks.Chomp.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Chopper.name,
"id": Chopper,
"description": App.L10n.Blocks.Effect.Blocks.Chopper.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Chorus.name,
"id": Chorus,
"description": App.L10n.Blocks.Effect.Blocks.Chorus.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Convolution.name,
"id": Convolver,
"description": App.L10n.Blocks.Effect.Blocks.Convolution.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Distortion.name,
"id": Distortion,
"description": App.L10n.Blocks.Effect.Blocks.Distortion.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Envelope.name,
"id": Envelope,
"description": App.L10n.Blocks.Effect.Blocks.Envelope.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Eq.name,
"id": EQ,
"description": App.L10n.Blocks.Effect.Blocks.Eq.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Filter.name,
"id": Filter,
"description": App.L10n.Blocks.Effect.Blocks.Filter.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Phaser.name,
"id": Phaser,
"description": App.L10n.Blocks.Effect.Blocks.Phaser.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.PitchShifter.name,
"id": PitchShifter,
"description": App.L10n.Blocks.Effect.Blocks.PitchShifter.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Reverb.name,
"id": Reverb,
"description": App.L10n.Blocks.Effect.Blocks.Reverb.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Scuzz.name,
"id": Scuzz,
"description": App.L10n.Blocks.Effect.Blocks.Scuzz.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.StereoDelay.name,
"id": StereoDelay,
"description": App.L10n.Blocks.Effect.Blocks.StereoDelay.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Vibrato.name,
"id": Vibrato,
"description": App.L10n.Blocks.Effect.Blocks.Vibrato.description
},
{
"name": App.L10n.Blocks.Effect.Blocks.Volume.name,
"id": Volume,
"description": App.L10n.Blocks.Effect.Blocks.Volume.description
}
]
},
{
"name": App.L10n.Blocks.Power.Label,
"items": [
{
"name": App.L10n.Blocks.Power.Blocks.ParticleEmitter.name,
"id": ParticleEmitter,
"description": App.L10n.Blocks.Power.Blocks.ParticleEmitter.description
},
{
"name": App.L10n.Blocks.Power.Blocks.Power.name,
"id": Power,
"description": App.L10n.Blocks.Power.Blocks.Power.description
},
{
"name": App.L10n.Blocks.Power.Blocks.TogglePower.name,
"id": Toggle,
"description": App.L10n.Blocks.Power.Blocks.TogglePower.description
},
{
"name": App.L10n.Blocks.Power.Blocks.Void.name,
"id": Void,
"description": App.L10n.Blocks.Power.Blocks.Void.description
},
{
"name": App.L10n.Blocks.Power.Blocks.PulsePower.name,
"id": Pulse,
"description": App.L10n.Blocks.Power.Blocks.PulsePower.description
},
{
"name": App.L10n.Blocks.Power.Blocks.Laser.name,
"id": Laser,
"description": App.L10n.Blocks.Power.Blocks.Laser.description
}
]
},
{
"name": App.L10n.Blocks.Interaction.Label,
"items": [
{
"name": App.L10n.Blocks.Interaction.Blocks.ComputerKeyboard.name,
"id": ComputerKeyboard,
"description": App.L10n.Blocks.Interaction.Blocks.ComputerKeyboard.description
},
{
"name": App.L10n.Blocks.Interaction.Blocks.MIDIController.name,
"id": MIDIController,
"description": App.L10n.Blocks.Interaction.Blocks.MIDIController.description
}
]
}
]
};
public GetBlock(type: string): IBlock {
type = this.BackwardsCompatibilityCheck(type);
var b = eval("new this." + type + "()");
b.Type = eval('this.' + type);
return b;
}
// BACKWARDS COMPATIBILITY //
// PATCH CHANGED BLOCK NAMES //
public BackwardsCompatibilityCheck(type: string): string {
switch (type) {
case 'Gain':
type = 'Volume';
break;
case "Pitch":
type = 'PitchShifter';
break;
case "LFO":
//TODO: if we want to use LFO in the future we need to check version number also
type = 'Vibrato';
break;
case "WaveGen":
//TODO: if we want to use Wavegen in the future we need to check version number also
type = 'SampleGen';
break;
case "Momentary":
//TODO: if we want to use Momentary in the future we need to check version number also
type = 'Pulse';
break;
case 'Soundcloud':
type = 'Sample';
break;
case 'Delay':
//TODO: if we want to use Delay in the future we need to check version number also
type = 'StereoDelay';
break;
}
return type;
}
} | the_stack |
(function parse_init() {
"use strict";
// Runs the configured lexer over the source text, validates the resulting
// parse table, and converts it to the requested output format: "csv",
// "markdown", "minimal", "objects", "testprep", or (default) the raw table.
const parser = function parse_parser():any {
    // remember caller-supplied language/lexer so "auto" detection can be undone
    const langstore:[string, string] = [sparser.options.language, sparser.options.lexer];
    parse.count = -1;
    parse.data = {
        begin: [],
        ender: [],
        lexer: [],
        lines: [],
        stack: [],
        token: [],
        types: []
    };
    parse.datanames = [
        "begin",
        "ender",
        "lexer",
        "lines",
        "stack",
        "token",
        "types"
    ];
    parse.linesSpace = 0;
    parse.lineNumber = 1;
    parse.structure = [
        ["global", -1]
    ];
    // custom pop that never removes the permanent "global" frame
    parse.structure.pop = function parse_structure_pop(): [string, number] {
        const len:number = parse.structure.length - 1,
            arr: [string, number] = parse.structure[len];
        if (len > 0) {
            parse
                .structure
                .splice(len, 1);
        }
        return arr;
    };
    if (sparser.options.language === "auto" || sparser.options.lexer === "auto") {
        let lang:[string, string, string] = sparser.libs.language.auto(sparser.options.source, "javascript");
        if (sparser.options.language === "auto") {
            sparser.options.language = lang[0];
        }
        if (sparser.options.lexer === "auto") {
            sparser.options.lexer = lang[1];
        }
    }
    if (typeof sparser.lexers[sparser.options.lexer] === "function") {
        sparser.parseerror = "";
        // reset references data if sparser is used on multiple files
        parse.references = [[]];
        sparser.options.lexer_options = (sparser.options.lexer_options || {});
        Object.keys(sparser.lexers).forEach(function parse_lexers(value) {
            sparser.options.lexer_options[value] = (sparser.options.lexer_options[value] || {});
        });
        // This line parses the code using a lexer file
        sparser.lexers[sparser.options.lexer](`${sparser.options.source} `);
        // restore language and lexer values
    } else {
        sparser.parseerror = `Specified lexer, ${sparser.options.lexer}, is not a function.`;
    }
    // validate that all the data arrays are the same length
    (function parser_checkLengths(): void {
        let a:number = 0,
            b:number = 0;
        const keys: string[] = Object.keys(parse.data),
            c: number = keys.length;
        do {
            b = a + 1;
            do {
                if (parse.data[keys[a]].length !== parse.data[keys[b]].length) {
                    sparser.parseerror = `"${keys[a]}" array is of different length than "${keys[b]}"`;
                    break;
                }
                b = b + 1;
            } while (b < c);
            a = a + 1;
        } while (a < c - 1);
    }());
    // fix begin values. They must be reconsidered after reordering from object sort
    if (parse.data.begin.length > 0 && (sparser.options.lexer_options[sparser.options.lexer].object_sort === true || sparser.options.lexer_options.markup.tag_sort === true)) {
        parse.sortCorrection(0, parse.count + 1);
    }
    if (sparser.options.format === "csv") {
        let a:number = 0;
        const d:data = parse.data,
            data:string[] = ["index,begin,ender,lexer,lines,stack,token,types"],
            len:number = parse.count + 1;
        do {
            data.push([
                a,
                d.begin[a],
                d.ender[a],
                `"${d.lexer[a].replace(/"/g, "\"\"")}"`,
                d.lines[a],
                `"${d.stack[a].replace(/"/g, "\"\"")}"`,
                `"${d.token[a].replace(/"/g, "\"\"")}"`,
                `"${d.types[a].replace(/"/g, "\"\"")}"`
            ].join(","));
            a = a + 1;
        } while (a < len);
        return data.join("\r\n");
    }
    if (sparser.options.format === "markdown") {
        let a:number = 0,
            b:number = 0,
            strlen:number = 0,
            row:string[] = [],
            heading:string = "",
            line:string = "",
            str:string = "",
            begin:number = 0;
        // Bug fix: the original code aliased parse.datanames and spliced an
        // "index" entry into it, permanently mutating the shared datanames
        // array. Build a local copy with the synthetic "index" column instead.
        const data:string[] = [],
            names:string[] = ["index"].concat(parse.datanames),
            longest:[number, number, number, number, number, number, number, number] = [1, 5, 5, 5, 5, 5, 5, 5],
            len = parse.count + 1;
        // first gather the string length of each data item
        longest[0] = String(parse.count).length;
        if (longest[0] < 5) {
            longest[0] = 5;
        }
        do {
            begin = String(parse.data.begin[a]).length;
            if (begin > longest[1]) {
                longest[1] = begin;
            }
            begin = String(parse.data.ender[a]).length;
            if (begin > longest[2]) {
                longest[2] = begin;
            }
            if (parse.data.lexer[a].length > longest[3]) {
                longest[3] = parse.data.lexer[a].length;
            }
            begin = String(parse.data.lines[a]).length;
            if (begin > longest[4]) {
                longest[4] = begin;
            }
            if (parse.data.stack[a].length > longest[5]) {
                longest[5] = parse.data.stack[a].length;
            }
            if (parse.data.token[a].length > longest[6]) {
                longest[6] = parse.data.token[a].length;
            }
            if (parse.data.types[a].length > longest[7]) {
                longest[7] = parse.data.types[a].length;
            }
            a = a + 1;
        } while (a < len);
        // second create the heading
        a = 0;
        do {
            row.push(names[a]);
            if (longest[a] > 5) {
                b = 5;
                do {
                    row.push(" ");
                    b = b + 1;
                } while (b < longest[a]);
            }
            row.push("|");
            a = a + 1;
        } while (a < 8);
        row.pop();
        heading = row.join("");
        row = [];
        // third create the line of dashes
        a = 0;
        do {
            b = 0;
            do {
                row.push("-");
                b = b + 1;
            } while (b < longest[a]);
            row.push("|");
            a = a + 1;
        } while (a < 8);
        row.pop();
        line = row.join("");
        row = [];
        // fourth create each data record
        a = 0;
        do {
            if (a % 100 === 0) {
                data.push(heading);
                data.push(line);
            }
            str = String(a);
            row.push(str);
            b = str.length;
            if (b < longest[0]) {
                do {
                    row.push(" ");
                    b = b + 1;
                } while (b < longest[0]);
            }
            row.push("|");
            b = 1;
            do {
                str = String(parse.data[names[b]][a]);
                row.push(str);
                strlen = str.length;
                if (strlen < longest[b]) {
                    do {
                        row.push(" ");
                        strlen = strlen + 1;
                    } while (strlen < longest[b]);
                }
                row.push("|");
                b = b + 1;
            } while (b < 8);
            row.pop();
            data.push(row.join(""));
            row = [];
            a = a + 1;
        } while (a < len);
        return data.join("\r\n");
    }
    if (sparser.options.format === "minimal") {
        let a:number = 0;
        const data:minimal[] = [],
            len = parse.count + 1;
        do {
            data.push([
                parse.data.begin[a],
                parse.data.ender[a],
                parse.data.lexer[a],
                parse.data.lines[a],
                parse.data.stack[a],
                parse.data.token[a],
                parse.data.types[a]
            ]);
            a = a + 1;
        } while (a < len);
        return data;
    }
    if (sparser.options.format === "objects") {
        let a:number = 0;
        const data:record[] = [],
            len = parse.count + 1;
        do {
            data.push({
                begin: parse.data.begin[a],
                ender: parse.data.ender[a],
                lexer: parse.data.lexer[a],
                lines: parse.data.lines[a],
                stack: parse.data.stack[a],
                token: parse.data.token[a],
                types: parse.data.types[a]
            });
            a = a + 1;
        } while (a < len);
        return data;
    }
    if (sparser.options.format === "testprep") {
        let a:number = 0;
        const data:string[] = [],
            len = parse.count + 1;
        if (sparser.parseerror !== "") {
            return sparser.parseerror;
        }
        do {
            data.push(JSON.stringify({
                begin: parse.data.begin[a],
                ender: parse.data.ender[a],
                lexer: parse.data.lexer[a],
                lines: parse.data.lines[a],
                stack: parse.data.stack[a],
                token: parse.data.token[a],
                types: parse.data.types[a]
            }));
            a = a + 1;
        } while (a < len);
        return `[\n${data.join(",\n")}\n]`;
    }
    sparser.options.language = langstore[0];
    sparser.options.lexer = langstore[1];
    return parse.data;
},
parse:parse = {
// stores the final index location of the data arrays
count: -1,
// stores the various data arrays of the parse table
data: {
begin: [],
ender: [],
lexer: [],
lines: [],
stack: [],
token: [],
types: []
},
// stores the name of the data arrays. This is used for internal automation
datanames: ["begin", "ender", "lexer", "lines", "stack", "token", "types"],
// stores the current line number from the input string for logging parse errors
lineNumber: 1,
// stores the 'lines' value before the next token
linesSpace: 0,
// stores the declared variable names for the script lexer. This must be stored outside the script lexer since some languages make recursive use of the script lexer
references: [[]],
// stores the stack and begin values by stacking depth
structure: [["global", -1]],
// an extension of Array.prototype.concat to work across the data structure. This is an expensive operation.
concat: function parse_concat(data:data, array:data):void {
    // append every column of "array" onto the matching column of "data"
    let a:number = 0;
    const fields:string[] = parse.datanames,
        fieldCount:number = fields.length;
    do {
        data[fields[a]] = data[fields[a]].concat(array[fields[a]]);
        a = a + 1;
    } while (a < fieldCount);
    // keep the global token counter in sync when the live table grew
    if (data === parse.data) {
        parse.count = data.token.length - 1;
    }
},
// the function that sorts object properties
object_sort: function parse_objectSort(data: data):void {
    // Sorts the properties of the object (or CSS rule block) most recently
    // completed in the parse table, walking backwards from parse.count to the
    // structure's begin index. Sorted records are rebuilt into "store" and
    // spliced back over the originals via parse.splice/parse.concat.
    let cc:number = parse.count,
        global:boolean = (data.lexer[cc] === "style" && parse.structure[parse.structure.length - 1][0] === "global"),
        dd:number = parse.structure[parse.structure.length - 1][1],
        ee:number = 0,
        ff:number = 0,
        gg:number = 0,
        behind:number = 0,
        commaTest:boolean = true,
        front:number = 0,
        keyend:number = 0,
        keylen:number = 0;
    const keys:Array<[number, number]> = [],
        length:number = parse.count,
        begin:number = dd,
        stack:string = (global === true)
            ? "global"
            : parse.structure[parse.structure.length - 1][0],
        style:boolean = (data.lexer[cc] === "style"),
        // delimiter token/type pair: ";" for CSS declarations, "," for script objects
        delim:[string, string] = (style === true)
            ? [";", "separator"]
            : [",", "separator"],
        lines:number = parse.linesSpace,
        // comparator over [start, end] key spans; skips leading comments and
        // applies CSS-specific type ordering before falling back to
        // case-insensitive token comparison
        sort = function parse_objectSort_sort(x:[number, number], y:[number, number]):number {
            let xx = x[0],
                yy = y[0];
            if (data.types[xx] === "comment") {
                do {
                    xx = xx + 1;
                } while (xx < length && (data.types[xx] === "comment"));
                if (data.token[xx] === undefined) {
                    return 1;
                }
            }
            if (data.types[yy] === "comment") {
                do {
                    yy = yy + 1;
                } while (yy < length && (data.types[yy] === "comment"));
                if (data.token[yy] === undefined) {
                    return 1;
                }
            }
            if (style === true) {
                // @import rules keep their original relative order
                if (data.token[xx].indexOf("@import") === 0 || data.token[yy].indexOf("@import") === 0) {
                    // JavaScript's standard array sort uses implementation specific algorithms.
                    // This simple numeric trick forces conformance.
                    if (xx < yy) {
                        return -1;
                    }
                    return 1;
                }
                if (data.types[xx] !== data.types[yy]) {
                    if (data.types[xx] === "function") {
                        return 1;
                    }
                    if (data.types[xx] === "variable") {
                        return -1;
                    }
                    if (data.types[xx] === "selector") {
                        return 1;
                    }
                    if (data.types[xx] === "property" && data.types[yy] !== "variable") {
                        return -1;
                    }
                    if (data.types[xx] === "mixin" && data.types[yy] !== "property" && data.types[yy] !== "variable") {
                        return -1;
                    }
                }
            }
            if (data.token[xx].toLowerCase() > data.token[yy].toLowerCase()) {
                return 1;
            }
            return -1;
        },
        // scratch parse table that receives the records in sorted order
        store:data = {
            begin: [],
            ender: [],
            lexer: [],
            lines: [],
            stack: [],
            token: [],
            types: []
        };
    behind = cc;
    // first pass: walk backwards collecting [front, behind] spans per key
    do {
        if (data.begin[cc] === dd || (global === true && cc < behind && data.token[cc] === "}" && data.begin[data.begin[cc]] === -1)) {
            // template tokens make sorting unsafe; bail out entirely
            if (data.types[cc].indexOf("template") > -1) {
                return;
            }
            if (data.token[cc] === delim[0] || (style === true && data.token[cc] === "}" && data.token[cc + 1] !== ";")) {
                commaTest = true;
                front = cc + 1;
            } else if (style === true && data.token[cc - 1] === "}") {
                commaTest = true;
                front = cc;
            }
            if (front === 0 && data.types[0] === "comment") {
                // keep top comments at the top
                do {
                    front = front + 1;
                } while (data.types[front] === "comment");
            } else if (data.types[front] === "comment" && data.lines[front] < 2) {
                // if a comment follows code on the same line then keep the comment next to the code it follows
                front = front + 1;
            }
            if (commaTest === true && (data.token[cc] === delim[0] || (style === true && data.token[cc - 1] === "}")) && front <= behind) {
                if (style === true && "};".indexOf(data.token[behind]) < 0) {
                    behind = behind + 1;
                } else if (style === false && data.token[behind] !== ",") {
                    behind = behind + 1;
                }
                keys.push([front, behind]);
                if (style === true && data.token[front] === "}") {
                    behind = front;
                } else {
                    behind = front - 1;
                }
            }
        }
        cc = cc - 1;
    } while (cc > dd);
    // capture the first key, which the backwards walk above never pushed
    if (keys.length > 0 && keys[keys.length - 1][0] > cc + 1) {
        ee = keys[keys.length - 1][0] - 1;
        if (data.types[ee] === "comment" && data.lines[ee] > 1) {
            do {
                ee = ee - 1;
            } while (ee > 0 && data.types[ee] === "comment");
            keys[keys.length - 1][0] = ee + 1;
        }
        if (data.types[cc + 1] === "comment" && cc === -1) {
            do {
                cc = cc + 1;
            } while (data.types[cc + 1] === "comment");
        }
        keys.push([
            cc + 1,
            ee
        ]);
    }
    if (keys.length > 1) {
        // only sort when the container is an object/argument-list context
        if (style === true || data.token[cc - 1] === "=" || data.token[cc - 1] === ":" || data.token[cc - 1] === "(" || data.token[cc - 1] === "[" || data.token[cc - 1] === "," || data.types[cc - 1] === "word" || cc === 0) {
            keys.sort(sort);
            keylen = keys.length;
            commaTest = false;
            dd = 0;
            // second pass: copy each key span, in sorted order, into "store"
            do {
                keyend = keys[dd][1];
                if (style === true) {
                    gg = keyend;
                    if (data.types[gg] === "comment") {
                        gg = gg - 1;
                    }
                    if (data.token[gg] === "}") {
                        keyend = keyend + 1;
                        delim[0] = "}";
                        delim[1] = "end";
                    } else {
                        delim[0] = ";";
                        delim[1] = "separator"
                    }
                }
                ee = keys[dd][0];
                if (style === true && data.types[keyend - 1] !== "end" && data.types[keyend] === "comment" && data.types[keyend + 1] !== "comment" && dd < keylen - 1) {
                    // missing a terminal comment causes many problems
                    keyend = keyend + 1;
                }
                if (ee < keyend) {
                    do {
                        if (style === false && dd === keylen - 1 && ee === keyend - 2 && data.token[ee] === "," && data.lexer[ee] === "script" && data.types[ee + 1] === "comment") {
                            // do not include terminal commas that are followed by a comment
                            ff = ff + 1;
                        } else {
                            parse.push(store, {
                                begin: data.begin[ee],
                                ender: data.begin[ee],
                                lexer: data.lexer[ee],
                                lines: data.lines[ee],
                                stack: data.stack[ee],
                                token: data.token[ee],
                                types: data.types[ee]
                            }, "");
                            ff = ff + 1;
                        }
                        //remove extra commas
                        if (data.token[ee] === delim[0] && (style === true || data.begin[ee] === data.begin[keys[dd][0]])) {
                            commaTest = true;
                        } else if (data.token[ee] !== delim[0] && data.types[ee] !== "comment") {
                            commaTest = false;
                        }
                        ee = ee + 1;
                    } while (ee < keyend);
                }
                // injecting the list delimiter
                if (commaTest === false && store.token[store.token.length - 1] !== "x;" && (style === true || dd < keylen - 1)) {
                    ee = store.types.length - 1;
                    if (store.types[ee] === "comment") {
                        do {
                            ee = ee - 1;
                        } while (
                            ee > 0 && (store.types[ee] === "comment")
                        );
                    }
                    ee = ee + 1;
                    parse.splice({
                        data : store,
                        howmany: 0,
                        index : ee,
                        record : {
                            begin: begin,
                            ender: parse.count,
                            lexer: store.lexer[ee - 1],
                            lines: 0,
                            stack: stack,
                            token: delim[0],
                            types: delim[1]
                        }
                    });
                    ff = ff + 1;
                }
                dd = dd + 1;
            } while (dd < keylen);
            // replace the original unsorted records with the sorted copies
            parse.splice({
                data : data,
                howmany: ff,
                index : cc + 1
            });
            parse.linesSpace = lines;
            parse.concat(data, store);
            return;
        }
    }
    return;
},
// an extension of Array.prototype.pop to work across the data structure
pop: function parse_pop(data: data): record {
    // remove the last value from every column and return them as one record
    const removed:any = {};
    parse.datanames.forEach(function parse_pop_datanames(value) {
        removed[value] = data[value].pop();
    });
    // shrink the global token counter when the live table was popped
    if (data === parse.data) {
        parse.count = parse.count - 1;
    }
    return removed;
},
// an extension of Array.prototype.push to work across the data structure
push: function parse_push(data:data, record:record, structure:string):void {
    // Appends one record to the parse table and, when pushing onto the live
    // table, maintains parse.count, parse.linesSpace, ender values, and the
    // structure stack for start/end/else style tokens.
    //
    // walks backwards from the new end token assigning ender indexes to the
    // records of the structure that just closed
    const ender = function parse_push_ender():void {
        let a:number = parse.count;
        const begin:number = data.begin[a];
        if (
            (data.lexer[a] === "markup" && sparser.options.lexer_options.markup.tag_sort === true) ||
            ((data.lexer[a] === "script" || data.lexer[a] === "style") && sparser.options.lexer_options[data.lexer[a]].object_sort === true)
        ) {
            // sorting can result in a token whose begin value is greater than either
            // its current index or the index of the end token, which results in an endless loop
            //
            // these end values are addressed at the end of the "parser" function with parse.sortCorrection
            return;
        }
        do {
            if (data.begin[a] === begin || (data.begin[data.begin[a]] === begin && data.types[a].indexOf("attribute") > -1 && data.types[a].indexOf("attribute_end") < 0)) {
                data.ender[a] = parse.count;
            } else {
                // jump over nested child structures
                a = data.begin[a];
            }
            a = a - 1;
        } while (a > begin);
        if (a > -1) {
            data.ender[a] = parse.count;
        }
    };
    parse
        .datanames
        .forEach(function parse_push_datanames(value) {
            data[value].push(record[value]);
        });
    if (data === parse.data) {
        parse.count = parse.count + 1;
        parse.linesSpace = 0;
        // derive a clean stack name from the supplied structure string
        if (record.lexer !== "style") {
            if (structure.replace(/(\{|\}|@|<|>|%|#|)/g, "") === "") {
                if (record.types === "else") {
                    structure = "else";
                } else {
                    structure = record.token;
                }
            } else if ((/^<\?(=|(php))/).test(structure) === false) {
                structure = structure.replace(/(\{|\}|@|<|>|%|#|)\s*/g, "");
            }
        }
        if (record.types === "start" || record.types.indexOf("_start") > 0) {
            parse.structure.push([structure, parse.count]);
        } else if (record.types === "end" || record.types.indexOf("_end") > 0) {
            // this big condition fixes language specific else blocks that are children of start/end blocks not associated with the if/else chain
            let case_ender:number = 0;
            if (
                parse.structure.length > 2 &&
                (data.types[parse.structure[parse.structure.length - 1][1]] === "else" || data.types[parse.structure[parse.structure.length - 1][1]].indexOf("_else") > 0) &&
                (data.types[parse.structure[parse.structure.length - 2][1]] === "start" || data.types[parse.structure[parse.structure.length - 2][1]].indexOf("_start") > 0) &&
                (data.types[parse.structure[parse.structure.length - 2][1] + 1] === "else" || data.types[parse.structure[parse.structure.length - 2][1] + 1].indexOf("_else") > 0)
            ) {
                parse.structure.pop();
                data.begin[parse.count] = parse.structure[parse.structure.length - 1][1];
                data.stack[parse.count] = parse.structure[parse.structure.length - 1][0];
                data.ender[parse.count - 1] = parse.count;
                case_ender = data.ender[data.begin[parse.count] + 1];
            }
            ender();
            if (case_ender > 0) {
                data.ender[data.begin[parse.count] + 1] = case_ender;
            }
            parse.structure.pop();
        } else if (record.types === "else" || record.types.indexOf("_else") > 0) {
            if (structure === "") {
                structure = "else";
            }
            if (parse.count > 0 && (data.types[parse.count - 1] === "start" || data.types[parse.count - 1].indexOf("_start") > 0)) {
                parse.structure.push([structure, parse.count]);
            } else {
                // an "else" replaces the current frame rather than nesting
                ender();
                if (structure === "") {
                    parse.structure[parse.structure.length - 1] = ["else", parse.count];
                } else {
                    parse.structure[parse.structure.length - 1] = [structure, parse.count];
                }
            }
        }
    }
},
// a custom sort tool that is a bit more intelligent and multidimensional than Array.prototype.sort
// Three modes: "ascend"/"descend" are stable-ish selection sorts that also order
// mixed types by their `typeof` string; "normal" collapses duplicates while
// preserving first-seen order. When `recursive` is true nested arrays are sorted too.
safeSort: function parse_safeSort(array: any[], operation:"ascend" | "descend" | "normal", recursive:boolean): any[] {
  // reassigned below to the recurse function of the selected mode so the shared
  // helpers can call back into it without forward references
  let extref = function parse_safeSort_extref(item: any):any {
    //worthless function for backwards compatibility with older versions of V8 node.
    return item;
  };
  const arTest = function parse_safeSort_arTest(item:any):boolean {
      if (Array.isArray(item) === true) {
        return true;
      }
      return false;
    },
    // "normal" mode: deduplicating pass; each recurse() removes every occurrence
    // of x from storeb and keeps the first remaining element
    normal = function parse_safeSort_normal(item:any[]):any[] {
      let storeb:any = item;
      const done:any = [item[0]],
        child = function safeSort_normal_child():void {
          let a:number = 0;
          const len:number = storeb.length;
          if (a < len) {
            do {
              if (arTest(storeb[a]) === true) {
                storeb[a] = parse_safeSort_normal(storeb[a]);
              }
              a = a + 1;
            } while (a < len);
          }
        },
        recurse = function parse_safeSort_normal_recurse(x:any) {
          let a:number = 0;
          const storea:any[] = [],
            len:number = storeb.length;
          if (a < len) {
            do {
              if (storeb[a] !== x) {
                storea.push(storeb[a]);
              }
              a = a + 1;
            } while (a < len);
          }
          storeb = storea;
          if (storea.length > 0) {
            done.push(storea[0]);
            extref(storea[0]);
          } else {
            if (recursive === true) {
              child();
            }
            item = storeb;
          }
        };
      extref = recurse;
      recurse(array[0]);
      return item;
    },
    // "descend" mode: in-place selection sort; each recurse() pass finds the
    // largest remaining key (or greatest typeof string) and swaps its whole run
    // of equal values into position c..c+d
    descend = function parse_safeSort_descend(item:any[]):any[] {
      let c:number = 0;
      const len:number = item.length,
        storeb:any[] = item,
        child = function parse_safeSort_descend_child():void {
          let a:number = 0;
          const lenc:number = storeb.length;
          if (a < lenc) {
            do {
              if (arTest(storeb[a]) === true) {
                storeb[a] = parse_safeSort_descend(storeb[a]);
              }
              a = a + 1;
            } while (a < lenc);
          }
        },
        recurse = function parse_safeSort_descend_recurse(value:string):string {
          let a:number = c,
            b:number = 0,
            d:number = 0,
            e:number = 0,
            ind:any[] = [],
            key:any = storeb[c],
            tstore:string = "";
          const tkey:string = typeof key;
          if (a < len) {
            do {
              tstore = typeof storeb[a];
              if (storeb[a] > key || (tstore > tkey)) {
                key = storeb[a];
                ind = [a];
              } else if (storeb[a] === key) {
                ind.push(a);
              }
              a = a + 1;
            } while (a < len);
          }
          d = ind.length;
          a = c;
          b = d + c;
          if (a < b) {
            do {
              storeb[ind[e]] = storeb[a];
              storeb[a] = key;
              e = e + 1;
              a = a + 1;
            } while (a < b);
          }
          c = c + d;
          if (c < len) {
            // more elements remain: continue via extref (points at this recurse)
            extref("");
          } else {
            if (recursive === true) {
              child();
            }
            item = storeb;
          }
          return value;
        };
      extref = recurse;
      recurse("");
      return item;
    },
    // "ascend" mode: mirror of descend with the comparison inverted
    ascend = function parse_safeSort_ascend(item:any[]):any[] {
      let c:number = 0;
      const len:number = item.length,
        storeb:any[] = item,
        child = function parse_safeSort_ascend_child():void {
          let a:number = 0;
          const lenc:number = storeb.length;
          if (a < lenc) {
            do {
              if (arTest(storeb[a]) === true) {
                storeb[a] = parse_safeSort_ascend(storeb[a]);
              }
              a = a + 1;
            } while (a < lenc);
          }
        },
        recurse = function parse_safeSort_ascend_recurse(value:string):string {
          let a:number = c,
            b:number = 0,
            d:number = 0,
            e:number = 0,
            ind:any[] = [],
            key:any = storeb[c],
            tstore:string = "";
          const tkey:string = typeof key;
          if (a < len) {
            do {
              tstore = typeof storeb[a];
              if (storeb[a] < key || tstore < tkey) {
                key = storeb[a];
                ind = [a];
              } else if (storeb[a] === key) {
                ind.push(a);
              }
              a = a + 1;
            } while (a < len);
          }
          d = ind.length;
          a = c;
          b = d + c;
          if (a < b) {
            do {
              storeb[ind[e]] = storeb[a];
              storeb[a] = key;
              e = e + 1;
              a = a + 1;
            } while (a < b);
          }
          c = c + d;
          if (c < len) {
            extref("");
          } else {
            if (recursive === true) {
              child();
            }
            item = storeb;
          }
          return value;
        };
      extref = recurse;
      recurse("");
      return item;
    };
  // non-arrays pass through untouched
  if (arTest(array) === false) {
    return array;
  }
  if (operation === "normal") {
    return normal(array);
  }
  if (operation === "descend") {
    return descend(array);
  }
  return ascend(array);
},
// this functionality provides corrections to the "begin" and "ender" values after use of object_sort
// Re-derives begin pointers with a forward scan maintaining a stack of open
// structures, then re-derives ender pointers with a backward scan over [start, end).
sortCorrection: function parse_sortCorrection(start:number, end:number):void {
  let a:number = start,
    endslen:number = -1;
  const data:data = parse.data,
    ends:number[] = [],
    // seed the stack with the enclosing structure (or -1 for global scope)
    structure: number[] = (parse.structure.length < 2)
      ? [-1]
      : [parse.structure[parse.structure.length - 2][1]];
  // this first loop solves for the begin values
  do {
    // markup attributes belong to the preceding tag token, not the current stack top
    if (
      a > 0 &&
      data.types[a].indexOf("attribute") > -1 &&
      data.types[a].indexOf("end") < 0 &&
      data.types[a - 1].indexOf("start") < 0 &&
      data.types[a - 1].indexOf("attribute") < 0 &&
      data.lexer[a] === "markup"
    ) {
      structure.push(a - 1);
    }
    // leaving an attribute run: pop the tag pushed above unless it opened a block
    if (
      a > 0 &&
      data.types[a - 1].indexOf("attribute") > -1 &&
      data.types[a].indexOf("attribute") < 0 &&
      data.lexer[structure[structure.length - 1]] === "markup" &&
      data.types[structure[structure.length - 1]].indexOf("start") < 0
    ) {
      structure.pop();
    }
    if (data.begin[a] !== structure[structure.length - 1]) {
      if (structure.length > 0) {
        data.begin[a] = structure[structure.length - 1];
      } else {
        data.begin[a] = -1;
      }
    }
    // an "else" token replaces the current block opener
    if (data.types[a].indexOf("else") > -1) {
      if (structure.length > 0) {
        structure[structure.length - 1] = a;
      } else {
        structure.push(a);
      }
    }
    if (data.types[a].indexOf("end") > -1) {
      structure.pop();
    }
    if (data.types[a].indexOf("start") > -1) {
      structure.push(a);
    }
    a = a + 1;
  } while (a < end);
  // and now for the ender values
  a = end;
  do {
    a = a - 1;
    if (data.types[a].indexOf("end") > -1) {
      ends.push(a);
      endslen = endslen + 1;
    }
    // every token enders at the innermost unmatched "end" seen so far (or -1)
    if (endslen > -1) {
      data.ender[a] = ends[endslen];
    } else {
      data.ender[a] = -1;
    }
    if (data.types[a].indexOf("start") > -1) {
      ends.pop();
      endslen = endslen - 1;
    }
  } while (a > start);
},
// a simple tool to take note of whitespace between tokens
// Advances args.index across a run of whitespace, counting newlines into
// parse.linesSpace / parse.lineNumber. Returns the index of the last
// whitespace character consumed.
spacer: function parse_spacer(args:spacer): number {
  // * array - the characters to scan
  // * index - the index to start scanning from
  // * end - the length of the array, to break the loop
  parse.linesSpace = 1;
  do {
    const current:string = args.array[args.index];
    if (current === "\n") {
      parse.linesSpace = parse.linesSpace + 1;
      parse.lineNumber = parse.lineNumber + 1;
    }
    // stop once the next character is no longer whitespace
    if ((/\s/).test(args.array[args.index + 1]) === false) {
      break;
    }
    args.index = args.index + 1;
  } while (args.index < args.end);
  return args.index;
},
// an extension of Array.prototype.splice to work across the data structure
// Removes `howmany` records at `index` from every parallel array of `data`,
// optionally inserting `record` in their place, and keeps parse.count and
// parse.linesSpace in sync when operating on the primary store.
splice: function parse_splice(spliceData: splice): void {
  // snapshot of the final record (begin + token) to detect whether the splice moved it
  const finalItem:[number, string] = [parse.data.begin[parse.count], parse.data.token[parse.count]];
  // * data - The data object to alter
  // * howmany - How many indexes to remove
  // * index - The index where to start
  // * record - A new record to insert
  const hasRecord:boolean = (spliceData.record !== undefined && spliceData.record.token !== "");
  parse
    .datanames
    .forEach(function parse_splice_datanames(value) {
      if (hasRecord === true) {
        spliceData
          .data[value]
          .splice(spliceData.index, spliceData.howmany, spliceData.record[value]);
      } else {
        spliceData
          .data[value]
          .splice(spliceData.index, spliceData.howmany);
      }
    });
  if (spliceData.data === parse.data) {
    if (hasRecord === true) {
      parse.count = (parse.count - spliceData.howmany) + 1;
      // only reset line spacing when the final record actually changed
      if (finalItem[0] !== parse.data.begin[parse.count] || finalItem[1] !== parse.data.token[parse.count]) {
        parse.linesSpace = 0;
      }
    } else {
      parse.count = parse.count - spliceData.howmany;
      parse.linesSpace = 0;
    }
  }
},
// parsing block comments and simultaneously applying word wrap
// Consumes a block comment from config.chars starting at config.start, honors
// parse-ignore-start/-end pragmas, and (unless preservation rules apply)
// re-flows the comment text to sparser.options.wrap columns.
// Returns [commentText, indexOfLastConsumedCharacter].
wrapCommentBlock: function parse_wrapCommentBlock(config: wrapConfig):[string, number] {
  let a:number = config.start,
    b:number = 0,
    c:number = 0,
    d:number = 0,
    len:number = 0,
    lines:string[] = [],
    space:string = "",
    bline:string = "",
    spaceLine:RegExp,
    emptyLine:boolean = false,
    bulletLine:boolean = false,
    numberLine:boolean = false,
    bigLine:boolean = false,
    output:string = "",
    terml:number = config.terminator.length - 1,
    term:string = config.terminator.charAt(terml),
    twrap:number = 0;
  const build:string[] = [],
    second:string[] = [],
    lf:"\r\n"|"\n" = (sparser.options.crlf === true)
      ? "\r\n"
      : "\n",
    // escapes regex metacharacters when building patterns from user delimiters
    sanitize = function parse_wrapCommentBlock_sanitize(input:string) {
      return `\\${input}`;
    },
    regEsc:RegExp = (/(\/|\\|\||\*|\[|\]|\{|\})/g),
    regEnd:RegExp = new RegExp(`\\s*${config.terminator.replace(regEsc, sanitize)}$`),
    regIgnore:RegExp = new RegExp(`^(${config.opening.replace(regEsc, sanitize)}\\s*parse-ignore-start)`),
    regStart:RegExp = new RegExp(`(${config.opening.replace(regEsc, sanitize)}\\s*)`),
    wrap:number = sparser.options.wrap,
    // skips over blank comment lines, emitting at most one empty output line
    emptyLines = function parse_wrapCommentBlock_emptyLines() {
      if ((/^\s+$/).test(lines[b + 1]) === true || lines[b + 1] === "") {
        do {
          b = b + 1;
        } while (b < len && ((/^\s+$/).test(lines[b + 1]) === true || lines[b + 1] === ""));
      }
      if (b < len - 1) {
        second.push("");
      }
    };
  // gather the raw comment text through the terminator
  do {
    build.push(config.chars[a]);
    if (config.chars[a] === "\n") {
      parse.lineNumber = parse.lineNumber + 1;
    }
    if (config.chars[a] === term && config.chars.slice(a - terml, a + 1).join("") === config.terminator) {
      break;
    }
    a = a + 1;
  } while (a < config.end);
  output = build.join("");
  // parse-ignore-start pragma: copy everything verbatim through parse-ignore-end
  if (regIgnore.test(output) === true) {
    let termination:string = "\n";
    a = a + 1;
    do {
      build.push(config.chars[a]);
      a = a + 1;
    } while (a < config.end && (config.chars[a - 1] !== "d" || (config.chars[a - 1] === "d" && build.slice(build.length - 16).join("") !== "parse-ignore-end")));
    b = a;
    terml = config.opening.length - 1;
    term = config.opening.charAt(terml);
    // walk backward to find what opened the ignored span
    do {
      if (config.opening === "/*" && config.chars[b - 1] === "/" && (config.chars[b] === "*" || config.chars[b] === "/")) {
        break; // for script
      }
      if (config.opening !== "/*" && config.chars[b] === term && config.chars.slice(b - terml, b + 1).join("") === config.opening) {
        break; // for markup
      }
      b = b - 1;
    } while (b > config.start);
    if (config.opening === "/*" && config.chars[b] === "*") {
      termination = "\u002a/";
    } else if (config.opening !== "/*") {
      termination = config.terminator;
    }
    terml = termination.length - 1;
    term = termination.charAt(terml);
    // consume up to and including the closing delimiter of the ignored span
    if (termination !== "\n" || config.chars[a] !== "\n") {
      do {
        build.push(config.chars[a]);
        if (termination === "\n" && config.chars[a + 1] === "\n") {
          break;
        }
        if (config.chars[a] === term && config.chars.slice(a - terml, a + 1).join("") === termination) {
          break;
        }
        a = a + 1;
      } while (a < config.end);
    }
    if (config.chars[a] === "\n") {
      a = a - 1;
    }
    output = build.join("").replace(/\s+$/, "");
    return [output, a];
  }
  // preservation: nothing to wrap, wrapping disabled, preservation requested, or a
  // conventional asterisk-aligned multi-line comment that should be left alone
  if (a === config.end || wrap < 1 || (output.length <= wrap && output.indexOf("\n") < 0) || sparser.options.preserve_comment === true || (config.opening === "/*" && output.indexOf("\n") > 0 && output.replace("\n", "").indexOf("\n") > 0 && (/\n(?!(\s*\*))/).test(output) === false)) {
    return [output, a];
  }
  // determine the indentation that precedes the comment so continuation lines can
  // be stripped of it before wrapping
  b = config.start;
  if (b > 0 && config.chars[b - 1] !== "\n" && (/\s/).test(config.chars[b - 1]) === true) {
    do {
      b = b - 1;
    } while (b > 0 && config.chars[b - 1] !== "\n" && (/\s/).test(config.chars[b - 1]) === true);
  }
  space = config.chars.slice(b, config.start).join("");
  spaceLine = new RegExp(`\n${space}`, "g");
  lines = output.replace(/\r\n/g, "\n").replace(spaceLine, "\n").split("\n");
  len = lines.length;
  lines[0] = lines[0].replace(regStart, "");
  lines[len - 1] = lines[len - 1].replace(regEnd, "");
  if (len < 2) {
    lines = lines[0].split(" ");
  }
  if (lines[0] === "") {
    lines[0] = config.opening;
  } else {
    lines.splice(0, 0, config.opening);
  }
  len = lines.length;
  b = 0;
  // re-flow each line against the wrap limit; `second` accumulates output lines
  do {
    // bline previews the next line (trimmed) for look-ahead joining decisions
    bline = (b < len - 1)
      ? lines[b + 1].replace(/^\s+/, "")
      : "";
    if ((/^\s+$/).test(lines[b]) === true || lines[b] === "") {
      emptyLines();
    } else if (lines[b].slice(0, 4) === " ") {
      second.push(lines[b]);
    } else if (lines[b].replace(/^\s+/, "").length > wrap && lines[b].replace(/^\s+/, "").indexOf(" ") > wrap) {
      // a single over-long word: emit it whole and retry the remainder
      lines[b] = lines[b].replace(/^\s+/, "");
      c = lines[b].indexOf(" ");
      second.push(lines[b].slice(0, c));
      lines[b] = lines[b].slice(c + 1);
      b = b - 1;
    } else {
      if (config.opening === "/*" && lines[b].indexOf("/*") !== 0) {
        lines[b] = ` ${lines[b].replace(/^\s+/, "").replace(/\s+$/, "").replace(/\s+/g, " ")}`;
      } else {
        lines[b] = `${lines[b].replace(/^\s+/, "").replace(/\s+$/, "").replace(/\s+/g, " ")}`;
      }
      // the first line must also accommodate the opening delimiter
      twrap = (b < 1)
        ? wrap - (config.opening.length + 1)
        : wrap;
      c = lines[b].length;
      d = lines[b].replace(/^\s+/, "").indexOf(" ");
      if (c > twrap && d > 0 && d < twrap) {
        c = twrap;
        // back up to the last space at or before the wrap column
        do {
          c = c - 1;
          if ((/\s/).test(lines[b].charAt(c)) === true && c <= wrap) {
            break;
          }
        } while (c > 0);
        // keep bullet and numbered list formatting across the wrap point
        if (lines[b].slice(0, 4) !== " " && (/^\s*(\*|-)\s/).test(lines[b]) === true && (/^\s*(\*|-)\s/).test(lines[b + 1]) === false) {
          lines.splice(b + 1, 0, "* ");
        }
        if (lines[b].slice(0, 4) !== " " && (/^\s*\d+\.\s/).test(lines[b]) === true && (/^\s*\d+\.\s/).test(lines[b + 1]) === false) {
          lines.splice(b + 1, 0, "1. ");
        }
        if (c < 4) {
          second.push(lines[b]);
          bigLine = true;
        } else if (b === len - 1) {
          second.push(lines[b].slice(0, c));
          lines[b] = lines[b].slice(c + 1);
          bigLine = true;
          b = b - 1;
        } else if ((/^\s+$/).test(lines[b + 1]) === true || lines[b + 1] === "") {
          second.push(lines[b].slice(0, c));
          lines[b] = lines[b].slice(c + 1);
          emptyLine = true;
          b = b - 1;
        } else if (lines[b + 1].slice(0, 4) !== " " && (/^\s*(\*|-)\s/).test(lines[b + 1]) === true) {
          second.push(lines[b].slice(0, c));
          lines[b] = lines[b].slice(c + 1);
          bulletLine = true;
          b = b - 1;
        } else if (lines[b + 1].slice(0, 4) !== " " && (/^\s*\d+\.\s/).test(lines[b + 1]) === true) {
          second.push(lines[b].slice(0, c));
          lines[b] = lines[b].slice(c + 1);
          numberLine = true;
          b = b - 1;
        } else if (lines[b + 1].slice(0, 4) === " ") {
          second.push(lines[b].slice(0, c));
          lines[b] = lines[b].slice(c + 1);
          bigLine = true;
          b = b - 1;
        } else if (c + bline.length > wrap && bline.indexOf(" ") < 0) {
          second.push(lines[b].slice(0, c));
          lines[b] = lines[b].slice(c + 1);
          bigLine = true;
          b = b - 1;
        } else if (lines[b].replace(/^\s+/, "").indexOf(" ") < wrap) {
          // push the overflow down into the next line
          if (lines[b].length > wrap) {
            lines[b + 1] = lines[b].slice(c + 1) + lf + lines[b + 1];
          } else {
            lines[b + 1] = `${lines[b].slice(c + 1)} ${lines[b + 1]}`;
          }
        }
        if (emptyLine === false && bulletLine === false && numberLine === false && bigLine === false) {
          lines[b] = lines[b].slice(0, c);
        }
      } else if (lines[b + 1] !== undefined && ((lines[b].length + bline.indexOf(" ") > wrap && bline.indexOf(" ") > 0) || (lines[b].length + bline.length > wrap && bline.indexOf(" ") < 0))) {
        second.push(lines[b]);
        b = b + 1;
      } else if (lines[b + 1] !== undefined && (/^\s+$/).test(lines[b + 1]) === false && lines[b + 1] !== "" && lines[b + 1].slice(0, 4) !== " " && (/^\s*(\*|-|(\d+\.))\s/).test(lines[b + 1]) === false) {
        // merge short continuation lines into the current paragraph
        lines[b + 1] = `${lines[b]} ${lines[b + 1]}`;
        emptyLine = true;
      }
      if (bigLine === false && bulletLine === false && numberLine === false) {
        if (emptyLine === true) {
          emptyLine = false;
        } else if ((/^\s*(\*|-|(\d+\.))\s*$/).test(lines[b]) === false) {
          if (b < len - 1 && lines[b + 1] !== "" && (/^\s+$/).test(lines[b]) === false && lines[b + 1].slice(0, 4) !== " " && (/^\s*(\*|-|(\d+\.))\s/).test(lines[b + 1]) === false) {
            lines[b] = `${lines[b]} ${lines[b + 1]}`;
            lines.splice(b + 1, 1);
            len = len - 1;
            b = b - 1;
          } else {
            if (config.opening === "/*" && lines[b].indexOf("/*") !== 0) {
              second.push(` ${lines[b].replace(/^\s+/, "").replace(/\s+$/, "").replace(/\s+/g, " ")}`);
            } else {
              second.push(`${lines[b].replace(/^\s+/, "").replace(/\s+$/, "").replace(/\s+/g, " ")}`);
            }
          }
        }
      }
      bigLine = false;
      bulletLine = false;
      numberLine = false;
    }
    b = b + 1;
  } while (b < len);
  // append the terminator and join with the configured line ending
  if (second.length > 0) {
    if (second[second.length - 1].length > wrap - (config.terminator.length + 1)) {
      second.push(config.terminator);
    } else {
      second[second.length - 1] = `${second[second.length - 1]} ${config.terminator}`;
    }
    output = second.join(lf);
  } else {
    lines[lines.length - 1] = lines[lines.length - 1] + config.terminator;
    output = lines.join(lf);
  }
  return [output, a];
},
// parsing line comments and simultaneously applying word wrap
// Reads a `//` comment from config.chars starting at config.start, merges any
// immediately-following `//` lines belonging to the same paragraph, honors the
// parse-ignore-start pragma, and re-flows the text to sparser.options.wrap
// columns (wrapped continuation lines are pushed directly into parse.data).
// Returns [commentText, indexOfLastConsumedCharacter].
wrapCommentLine: function parse_wrapCommentLine(config: wrapConfig):[string, number] {
  let a:number = config.start,
    b:number = 0,
    output:string = "",
    build:string[] = [];
  const wrap:number = sparser.options.wrap,
    // gathers subsequent "//" lines into `output` so the paragraph wraps as one unit
    recurse = function parse_wrapCommentLine_recurse():void {
      let line:string = "";
      do {
        b = b + 1;
        if (config.chars[b + 1] === "\n") {
          return;
        }
      } while (b < config.end && (/\s/).test(config.chars[b]) === true);
      if (config.chars[b] + config.chars[b + 1] === "//") {
        build = [];
        do {
          build.push(config.chars[b]);
          b = b + 1;
        } while (b < config.end && config.chars[b] !== "\n");
        line = build.join("");
        // stop merging at list items, indented code samples and empty comments
        if ((/^\/\/ (\*|-|(\d+\.))/).test(line) === false && line.slice(0, 6) !== "// " && (/^\/\/\s*$/).test(line) === false) {
          output = `${output} ${line.replace(/(^\/\/\s*)/, "").replace(/\s+$/, "")}`;
          a = b - 1;
          parse_wrapCommentLine_recurse();
        }
      }
    },
    // splits `output` at the wrap boundary and records each wrapped line as a comment token
    wordWrap = function parse_wrapCommentLine_wordWrap():void {
      let c:number = 0,
        d:number = 0;
      const lines:string[] = [],
        record:record = (parse.count > -1)
          ? {
            begin: parse.structure[parse.structure.length - 1][1],
            ender: -1,
            lexer: config.lexer,
            lines: parse.linesSpace,
            stack: parse.structure[parse.structure.length - 1][0],
            token: parse.data.token[parse.count],
            types: "comment"
          }
          : {
            begin: -1,
            ender: -1,
            lexer: config.lexer,
            lines: parse.linesSpace,
            stack: "global",
            token: "",
            types: "comment"
          };
      output = output.replace(/\s+/g, " ").replace(/\s+$/, "");
      d = output.length;
      if (wrap > d) {
        return;
      }
      do {
        c = wrap;
        // break at the last space before the wrap column, or the first one after
        // it when no earlier break point exists
        if (output.charAt(c) !== " ") {
          do {
            c = c - 1;
          } while (c > 0 && output.charAt(c) !== " ");
          if (c < 3) {
            c = wrap;
            do {
              c = c + 1;
            } while (c < d - 1 && output.charAt(c) !== " ");
          }
        }
        lines.push(output.slice(0, c));
        output = `// ${output.slice(c).replace(/^\s+/, "")}`;
        d = output.length;
      } while (wrap < d);
      c = 0;
      d = lines.length;
      do {
        record.token = lines[c];
        parse.push(parse.data, record, "");
        record.lines = 2;
        parse.linesSpace = 2;
        c = c + 1;
      } while (c < d);
    };
  // collect the raw comment text up to the end of the line
  do {
    build.push(config.chars[a]);
    a = a + 1;
  } while (a < config.end && config.chars[a] !== "\n");
  if (a === config.end) {
    // necessary because the wrapping logic expects line termination
    config.chars.push("\n");
  } else {
    a = a - 1;
  }
  output = build.join("").replace(/\s+$/, "");
  // parse-ignore-start pragma: copy everything verbatim through parse-ignore-end
  if ((/^(\/\/\s*parse-ignore\u002dstart)/).test(output) === true) {
    let termination:string = "\n";
    a = a + 1;
    do {
      build.push(config.chars[a]);
      a = a + 1;
    } while (a < config.end && (config.chars[a - 1] !== "d" || (config.chars[a - 1] === "d" && build.slice(build.length - 16).join("") !== "parse-ignore-end")));
    b = a;
    do {
      // FIX: this statement previously read `b - b - 1;`, a side-effect-free
      // expression that never changed `b` and made this loop useless (or
      // endless whenever its condition held). It must decrement `b`.
      b = b - 1;
    } while (b > config.start && config.chars[b - 1] === "/" && (config.chars[b] === "*" || config.chars[b] === "/"));
    if (config.chars[b] === "*") {
      termination = "\u002a/";
    }
    if (termination !== "\n" || config.chars[a] !== "\n") {
      do {
        build.push(config.chars[a]);
        if (termination === "\n" && config.chars[a + 1] === "\n") {
          break;
        }
        a = a + 1;
      } while (a < config.end && (termination === "\n" || (termination === "\u002a/" && (config.chars[a - 1] !== "*" || config.chars[a] !== "/"))));
    }
    if (config.chars[a] === "\n") {
      a = a - 1;
    }
    output = build.join("").replace(/\s+$/, "");
    return [output, a];
  }
  // preservation: empty comments, indented code samples, or preserve_comment option
  if (output === "//" || output.slice(0, 6) === "// " || sparser.options.preserve_comment === true) {
    return [output, a];
  }
  output = output.replace(/(\/\/\s*)/, "// ");
  if (wrap < 1 || (a === config.end - 1 && parse.data.begin[parse.count] < 1)) {
    return [output, a];
  }
  b = a + 1;
  recurse();
  wordWrap();
  return [output, a];
}
},
sparser:sparser = {
lexers: {},
libs: {},
options: {},
parse: parse,
parser: parser,
parseerror: "",
version: {
date: "",
number: ""
}
};
global.sparser = sparser;
}()); | the_stack |
import {Vector3, Matrix3} from '@math.gl/core';
import type {GLTFMeshPrimitive, GLTFWithBuffers} from '../types/gltf-types';
import type {GLTFLoaderOptions} from '../../gltf-loader';
import {getAccessorArrayTypeAndLength} from '../gltf-utils/gltf-utils';
import {BYTES, COMPONENTS} from '../gltf-utils/gltf-constants';
import {
Accessor,
BufferView,
MaterialNormalTextureInfo,
MaterialOcclusionTextureInfo,
TextureInfo as GLTFTextureInfo
} from '../types/gltf-json-schema';
import GLTFScenegraph from '../api/gltf-scenegraph';
/** Extension name.
 * NOTE(review): the constant is misleadingly named after "MESHOPT" while it
 * actually holds the KHR_texture_transform extension id; left as-is because the
 * constant is used across several functions in this module. */
const EXT_MESHOPT_TRANSFORM = 'KHR_texture_transform';
export const name = EXT_MESHOPT_TRANSFORM;
// Scratch objects reused by the transform math to avoid per-vertex allocations
const scratchVector = new Vector3();
const scratchRotationMatrix = new Matrix3();
const scratchScaleMatrix = new Matrix3();
/** Extension textureInfo https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_texture_transform#gltf-schema-updates */
type TextureInfo = {
  /** The offset of the UV coordinate origin as a factor of the texture dimensions. */
  offset?: [number, number];
  /** Rotate the UVs by this many radians counter-clockwise around the origin. This is equivalent to a similar rotation of the image clockwise. */
  rotation?: number;
  /** The scale factor applied to the components of the UV coordinates. */
  scale?: [number, number];
  /** Overrides the textureInfo texCoord value if supplied, and if this extension is supported. */
  texCoord?: number;
};
/** Intersection of all GLTF textures */
type CompoundGLTFTextureInfo = GLTFTextureInfo &
  MaterialNormalTextureInfo &
  MaterialOcclusionTextureInfo;
/** Parameters for TEXCOORD transformation */
type TransformParameters = {
  /** Original texCoord value https://www.khronos.org/registry/glTF/specs/2.0/glTF-2.0.html#_textureinfo_texcoord */
  originalTexCoord: number;
  /** New texCoord value from extension https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_texture_transform#gltf-schema-updates */
  texCoord: number;
  /** Transformation matrix */
  matrix: Matrix3;
};
/**
* The extension entry to process the transformation
* @param gltfData gltf buffers and json
* @param options GLTFLoader options
*/
export async function decode(gltfData: GLTFWithBuffers, options: GLTFLoaderOptions) {
const gltfScenegraph = new GLTFScenegraph(gltfData);
const extension = gltfScenegraph.getExtension(EXT_MESHOPT_TRANSFORM);
if (!extension) {
return;
}
const materials = gltfData.json.materials || [];
for (let i = 0; i < materials.length; i++) {
transformTexCoords(i, gltfData);
}
}
/**
 * Transform TEXCOORD by material
 * @param materialIndex processing material index
 * @param gltfData gltf buffers and json
 */
function transformTexCoords(materialIndex: number, gltfData: GLTFWithBuffers): void {
  // Save processed texCoords in order not to process the same couple twice
  const processedTexCoords: [number, number][] = [];
  const material = gltfData.json.materials?.[materialIndex];
  // Every texture slot of the material that may carry KHR_texture_transform,
  // in the same order the previous implementation visited them
  const textures = [
    material?.pbrMetallicRoughness?.baseColorTexture,
    material?.emissiveTexture,
    material?.normalTexture,
    material?.occlusionTexture,
    material?.pbrMetallicRoughness?.metallicRoughnessTexture
  ];
  for (const texture of textures) {
    if (texture) {
      transformPrimitives(gltfData, materialIndex, texture, processedTexCoords);
    }
  }
}
/**
 * Transform primitives of the particular material
 * @param gltfData gltf data
 * @param materialIndex primitives with this material will be transformed
 * @param texture texture object
 * @param processedTexCoords storage to save already processed texCoords
 */
function transformPrimitives(
  gltfData: GLTFWithBuffers,
  materialIndex: number,
  texture: CompoundGLTFTextureInfo,
  processedTexCoords: [number, number][]
) {
  const transformParameters = getTransformParameters(texture, processedTexCoords);
  if (!transformParameters) {
    return;
  }
  // Apply the transform to every primitive that references this material
  for (const mesh of gltfData.json.meshes || []) {
    for (const primitive of mesh.primitives) {
      if (Number.isFinite(primitive.material) && primitive.material === materialIndex) {
        transformPrimitive(gltfData, primitive, transformParameters);
      }
    }
  }
}
/**
 * Get parameters for TEXCOORD transformation
 * @param texture texture object
 * @param processedTexCoords storage to save already processed texCoords
 * @returns texCoord couple and transformation matrix, or null when the texture
 *   carries no KHR_texture_transform data or the couple was already processed
 */
function getTransformParameters(
  texture: CompoundGLTFTextureInfo,
  processedTexCoords: [number, number][]
): TransformParameters | null {
  const textureInfo = texture.extensions?.[EXT_MESHOPT_TRANSFORM];
  // FIX: a material can mix textures with and without this extension; the old
  // code destructured `textureInfo` unconditionally and threw a TypeError for
  // textures that do not carry it.
  if (!textureInfo) {
    return null;
  }
  const {texCoord: originalTexCoord = 0} = texture;
  // If texCoord is not set in the extension, original attribute data will be replaced
  const {texCoord = originalTexCoord} = textureInfo;
  // Make sure that couple [originalTexCoord, extensionTexCoord] is not processed twice
  const isProcessed =
    processedTexCoords.findIndex(
      ([original, newTexCoord]) => original === originalTexCoord && newTexCoord === texCoord
    ) !== -1;
  if (isProcessed) {
    return null;
  }
  const matrix = makeTransformationMatrix(textureInfo);
  if (originalTexCoord !== texCoord) {
    // Redirect the texture to the new TEXCOORD_n attribute created later
    texture.texCoord = texCoord;
  }
  processedTexCoords.push([originalTexCoord, texCoord]);
  return {originalTexCoord, texCoord, matrix};
}
/**
 * Transform `TEXCOORD_0` attribute in the primitive
 * @param gltfData gltf data
 * @param primitive primitive object
 * @param transformParameters texCoord couple and transformation matrix
 */
function transformPrimitive(
  gltfData: GLTFWithBuffers,
  primitive: GLTFMeshPrimitive,
  transformParameters: TransformParameters
) {
  const {originalTexCoord, texCoord, matrix} = transformParameters;
  const texCoordAccessor = primitive.attributes[`TEXCOORD_${originalTexCoord}`];
  if (Number.isFinite(texCoordAccessor)) {
    // Get accessor of the `TEXCOORD_0` attribute
    const accessor = gltfData.json.accessors?.[texCoordAccessor];
    if (accessor && accessor.bufferView) {
      // Get `bufferView` of the `accessor`
      const bufferView = gltfData.json.bufferViews?.[accessor.bufferView];
      if (bufferView) {
        // Get `arrayBuffer` the `bufferView` look at
        const {arrayBuffer, byteOffset: bufferByteOffset} = gltfData.buffers[bufferView.buffer];
        // Resulting byteOffset is sum of the buffer, accessor and bufferView byte offsets
        const byteOffset =
          (bufferByteOffset || 0) + (accessor.byteOffset || 0) + (bufferView.byteOffset || 0);
        // Deduce TypedArray type and its length from `accessor` and `bufferView` data
        const {ArrayType, length} = getAccessorArrayTypeAndLength(accessor, bufferView);
        // Number of bytes each component occupies
        const bytes = BYTES[accessor.componentType];
        // Number of components. For the `TEXCOORD_0` with `VEC2` type, it must return 2
        const components = COMPONENTS[accessor.type];
        // Multiplier to calculate the address of the `TEXCOORD_0` element in the arrayBuffer
        const elementAddressScale = bufferView.byteStride || bytes * components;
        // Data transform to Float32Array
        const result = new Float32Array(length);
        for (let i = 0; i < accessor.count; i++) {
          // Take [u, v] couple from the arrayBuffer
          // NOTE(review): constructing a typed array view here assumes
          // byteOffset + i * elementAddressScale is aligned to the component
          // size — TODO confirm for interleaved (byteStride) bufferViews
          const uv = new ArrayType(arrayBuffer, byteOffset + i * elementAddressScale, 2);
          // Set and transform Vector3 per https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_texture_transform#overview
          scratchVector.set(uv[0], uv[1], 1);
          scratchVector.transformByMatrix3(matrix);
          // Save result in Float32Array
          result.set([scratchVector[0], scratchVector[1]], i * components);
        }
        // If texCoord the same, replace gltf structural data
        if (originalTexCoord === texCoord) {
          updateGltf(accessor, bufferView, gltfData.buffers, result);
        } else {
          // If texCoord change, create new attribute
          createAttribute(texCoord, accessor, primitive, gltfData, result);
        }
      }
    }
  }
}
/**
 * Update GLTF structural objects with new data as we create new `Float32Array` for `TEXCOORD_0`.
 * @param accessor accessor to change
 * @param bufferView bufferView to change
 * @param buffers binary buffers
 * @param newTexCoordArray typed array with data after transformation
 */
function updateGltf(
  accessor: Accessor,
  bufferView: BufferView,
  buffers: {arrayBuffer: ArrayBuffer; byteOffset: number; byteLength: number}[],
  newTexCoordArray: Float32Array
): void {
  const {buffer} = newTexCoordArray;
  // 5126 is the glTF component-type code for FLOAT
  accessor.componentType = 5126;
  // Append the transformed data as a brand-new binary buffer...
  buffers.push({
    arrayBuffer: buffer,
    byteOffset: 0,
    byteLength: buffer.byteLength
  });
  // ...and repoint the existing bufferView at it, tightly packed
  bufferView.buffer = buffers.length - 1;
  bufferView.byteLength = buffer.byteLength;
  bufferView.byteOffset = 0;
  delete bufferView.byteStride;
}
/**
 * Create a new `TEXCOORD_n` attribute from transformed UV data: appends a new
 * buffer, bufferView and accessor, then wires the primitive's
 * `TEXCOORD_<newTexCoord>` attribute to the new accessor.
 * @param newTexCoord new `texCoord` value
 * @param originalAccessor original accessor object, that store data before transformation
 * @param primitive primitive object
 * @param gltfData gltf data
 * @param newTexCoordArray typed array with data after transformation
 * @returns
 */
function createAttribute(
  newTexCoord: number,
  originalAccessor: Accessor,
  primitive: GLTFMeshPrimitive,
  gltfData: GLTFWithBuffers,
  newTexCoordArray: Float32Array
) {
  const bufferViews = gltfData.json.bufferViews;
  const accessors = gltfData.json.accessors;
  // FIX: validate up front — the previous version pushed the buffer before
  // these checks and could return early leaving an orphaned buffer behind.
  if (!bufferViews || !accessors) {
    return;
  }
  gltfData.buffers.push({
    arrayBuffer: newTexCoordArray.buffer,
    byteOffset: 0,
    byteLength: newTexCoordArray.buffer.byteLength
  });
  bufferViews.push({
    buffer: gltfData.buffers.length - 1,
    byteLength: newTexCoordArray.buffer.byteLength,
    byteOffset: 0
  });
  accessors.push({
    bufferView: bufferViews.length - 1,
    byteOffset: 0,
    // 5126 = FLOAT
    componentType: 5126,
    count: originalAccessor.count,
    type: 'VEC2'
  });
  primitive.attributes[`TEXCOORD_${newTexCoord}`] = accessors.length - 1;
}
/**
* Construct transformation matrix from the extension data (transition, rotation, scale)
* @param extensionData extension data
* @returns transformation matrix
*/
function makeTransformationMatrix(extensionData: TextureInfo): Matrix3 {
const {offset = [0, 0], rotation = 0, scale = [1, 1]} = extensionData;
const translationMatirx = new Matrix3().set(1, 0, 0, 0, 1, 0, offset[0], offset[1], 1);
const rotationMatirx = scratchRotationMatrix.set(
Math.cos(rotation),
Math.sin(rotation),
0,
-Math.sin(rotation),
Math.cos(rotation),
0,
0,
0,
1
);
const scaleMatrix = scratchScaleMatrix.set(scale[0], 0, 0, 0, scale[1], 0, 0, 0, 1);
return translationMatirx.multiplyRight(rotationMatirx).multiplyRight(scaleMatrix);
} | the_stack |
declare let DecoderModule: any;
declare let TextDecoder: any;
export const TRACE_RENDERING = 0;
// Lookup table mapping a packed (y << 16 | u << 8 | v) triple to a packed
// BGR value; 256^3 Uint32 entries, filled once by buildYUVTable() below.
let YUV2RGB_TABLE = new Uint32Array(256 * 256 * 256);
// Table-based YUV -> packed BGR conversion; assumes y, u, v are in [0, 255].
function YUV2RGB(y, u, v) {
  return YUV2RGB_TABLE[(y << 16) | (u << 8) | v];
}
/**
 * Restricts v to the inclusive range [a, b] (lower bound applied first, so b
 * wins when the bounds are inverted, matching the original two-step logic).
 */
export function clamp(v, a, b) {
  const low = v < a ? a : v;
  return low > b ? b : low;
}
/**
 * Converts one 8-bit YUV sample to a packed BGR integer ((b << 16) | (g << 8) | r).
 */
function computeYUV2RGB(y, u, v) {
  const dv = v - 128;
  const du = u - 128;
  // Truncate toward zero first, then clamp into the 8-bit range — same order
  // as the original implementation.
  const r = clamp((y + 1.370705 * dv) | 0, 0, 255);
  const g = clamp((y - 0.698001 * dv - 0.337633 * du) | 0, 0, 255);
  const b = clamp((y + 1.732446 * du) | 0, 0, 255);
  return (b << 16) | (g << 8) | (r << 0);
}
// Populate the full 256^3-entry YUV -> BGR lookup table. Runs once at module
// load so per-pixel conversion in createImageData() is a single array read.
function buildYUVTable() {
  for (let y = 0; y < 256; y++) {
    for (let u = 0; u < 256; u++) {
      for (let v = 0; v < 256; v++) {
        YUV2RGB_TABLE[(y << 16) | (u << 8) | v] = computeYUV2RGB(y, u, v);
      }
    }
  }
}
buildYUVTable();
/** One plane (Y, U, or V) of a decoded frame. */
export interface FrameImagePlane {
  buffer: ArrayBuffer,
  depth: number; // bits per sample; createImageData() asserts 8
  width: number;
  height: number;
  stride: number; // bytes per row within buffer
  xdec: number; // horizontal subsampling shift relative to the Y plane
  ydec: number; // vertical subsampling shift relative to the Y plane
}
/** A decoded YUV frame as three planes. */
export interface FrameImage {
  hashCode: number,
  Y: FrameImagePlane,
  U: FrameImagePlane,
  V: FrameImagePlane
}
/**
 * Convert a decoded 8-bit YUV frame into an RGBA ImageData, honoring each
 * plane's stride and the chroma planes' subsampling shifts (xdec/ydec).
 */
function createImageData(image: FrameImage) {
  let w = image.Y.width;
  let h = image.Y.height;
  let depth = image.Y.depth;
  // Only 8-bit samples are supported by the lookup-table conversion.
  assert(depth == 8);
  let YH = new Uint8Array(image.Y.buffer);
  let UH = new Uint8Array(image.U.buffer);
  let VH = new Uint8Array(image.V.buffer);
  let Ys = image.Y.stride;
  let Us = image.U.stride;
  let Vs = image.V.stride;
  let imageData = new ImageData(w, h);
  let I = imageData.data;
  let p = 0;
  let bgr = 0;
  let uxdec = image.U.xdec;
  let vxdec = image.V.xdec;
  let uydec = image.U.ydec;
  let vydec = image.V.ydec;
  for (let y = 0; y < h; y++) {
    // Row base offsets; chroma rows advance at the subsampled rate.
    let yYs = y * Ys;
    let yUs = (y >> uydec) * Us;
    let yVs = (y >> vydec) * Vs;
    for (let x = 0; x < w; x++) {
      let Y = YH[yYs + x];
      let U = UH[yUs + (x >> uxdec)];
      let V = VH[yVs + (x >> vxdec)];
      // YUV2RGB returns a packed BGR word; unpack into RGBA bytes.
      bgr = YUV2RGB(Y, U, V);
      let r = (bgr >> 0) & 0xFF;
      let g = (bgr >> 8) & 0xFF;
      let b = (bgr >> 16) & 0xFF;
      let index = (Math.imul(y, w) + x) << 2;
      I[index + 0] = r;
      I[index + 1] = g;
      I[index + 2] = b;
      I[index + 3] = 255;
    }
  }
  return imageData;
}
/** Render a decoded YUV frame into a freshly created canvas element. */
function makeCanvas(image: FrameImage): HTMLCanvasElement {
  const imageData = createImageData(image);
  const canvas = document.createElement("canvas");
  canvas.width = imageData.width;
  canvas.height = imageData.height;
  const context = canvas.getContext("2d");
  context.putImageData(imageData, 0, 0);
  return canvas;
}
/**
 * Load an image from uri, scale it by the given factor with smoothing
 * disabled (crisp, pixelated result), and pass the resulting canvas to ready.
 * Asynchronous: ready fires from the image's onload handler.
 */
export function makePattern(uri: string, scale: number, ready: (canvas: HTMLCanvasElement) => void) {
  let image = new Image();
  image.onload = function () {
    var canvas = document.createElement("canvas");
    canvas.width = image.width * scale;
    canvas.height = image.height * scale;
    let ctx = canvas.getContext("2d");
    // Disable smoothing so upscaling stays blocky instead of blurring.
    ctx.imageSmoothingEnabled = false;
    ctx.drawImage(image, 0, 0, image.width, image.height, 0, 0, canvas.width, canvas.height);
    ready(canvas);
  }
  image.src = uri;
}
/** Throw an Error carrying `message` when the condition is falsy. */
export function assert(c: any, message: string = "") {
  if (c) {
    return;
  }
  throw new Error(message);
}
/** Marker for code paths that must never execute; always throws. */
export function unreachable() {
  const error = new Error("Unreachable");
  throw error;
}
/**
 * 31x multiplicative string hash (h = h * 31 + charCode), folded to a 32-bit
 * integer each step and returned as an unsigned 32-bit value.
 */
export function hashString(s: string) {
  let h = 0;
  for (let i = 0; i < s.length; i++) {
    h = ((h << 5) - h + s.charCodeAt(i)) | 0;
  }
  return h >>> 0;
}
// Use 31 colors, don't use 32 colors since hash(string) % 32 can cause color
// collisions.
// Categorical palette used by getColor() when no explicit palette entry exists.
export const COLORS = [
  "#126800",
  "#3e2dd5",
  "#87ba00",
  "#305eff",
  "#8eda53",
  "#37007f",
  "#e1c633",
  "#0055d0",
  "#ffab28",
  "#00267a",
  "#fc6800",
  "#016fc7",
  "#6e9000",
  "#b2007c",
  "#00ae63",
  "#d80048",
  "#00caed",
  "#a31500",
  "#02a4e3",
  "#ff4553",
  "#003d5b",
  "#ff6c7e",
  "#2a3700",
  "#ff95c5",
  "#a9d19d",
  "#5e0060",
  "#8f5600",
  "#dcbaed",
  "#511500",
  "#f3b9a2",
  "#5b0022"
];
/**
 * 256 heat-map colors, ramping from blue (hsl hue 240, cold) down to red
 * (hue approaching 0, hot). Populated once at module load.
 * Annotated as string[]: without it, strict TS infers never[] and rejects push().
 */
export const HEAT_COLORS: string[] = [];
function generateHeatColors() {
  // Map a value in [0, 1) to a hue between 240 (blue) and 0 (red).
  function color(value: number): string {
    var h = (1.0 - value) * 240;
    return "hsl(" + h + ", 100%, 50%)";
  }
  for (let i = 0; i < 256; i++) {
    HEAT_COLORS.push(color(i / 256));
  }
}
generateHeatColors();
/** One bit-accounting record: bits/samples spent on symbol `name` at block (x, y). */
export class AccountingSymbol {
  constructor(public name: string, public bits: number, public samples: number, public x: number, public y: number) {
    // ...
  }
}
export type AccountingSymbolMap = { [name: string]: AccountingSymbol };
/**
 * Aggregates per-symbol bit accounting for one frame. Derived views
 * (per-frame totals, per-filter block maps) are computed lazily and cached.
 */
export class Accounting {
  symbols: AccountingSymbol[] = null;
  // Lazily built name -> aggregated symbol map; see createFrameSymbols().
  frameSymbols: AccountingSymbolMap = null;
  constructor(symbols: AccountingSymbol[] = []) {
    this.symbols = symbols;
  }
  /** Aggregate symbols by name for the whole frame (cached after first call). */
  createFrameSymbols() {
    if (this.frameSymbols) {
      return this.frameSymbols;
    }
    this.frameSymbols = Object.create(null);
    this.frameSymbols = Accounting.flatten(this.symbols);
    return this.frameSymbols;
  }
  // Cache for countBits(), keyed by filter name ("__none__" = no filter).
  countCache: { [filter: string]: { blocks: number[][], total: number, leftover: number } } = {};
  /**
   * Sum bits per (x, y) block, optionally restricted to one symbol name.
   * Symbols with negative coordinates are tallied into `leftover`.
   */
  countBits(filter: string): { blocks: number[][], total: number } {
    if (!filter) {
      filter = "__none__";
    }
    if (this.countCache[filter]) {
      return this.countCache[filter];
    }
    let blocks = [];
    let total = 0;
    let leftover = 0;
    this.symbols.forEach(symbol => {
      if (filter !== "__none__" && symbol.name != filter) {
        return;
      }
      let { x, y } = symbol;
      if (x < 0 || y < 0) {
        leftover += symbol.bits;
        return;
      }
      if (!blocks[y]) {
        blocks[y] = [];
      }
      if (blocks[y][x] === undefined) {
        blocks[y][x] = 0;
      }
      blocks[y][x] += symbol.bits;
      total += symbol.bits;
    });
    return this.countCache[filter] = { blocks: blocks, total, leftover };
  }
  /** Aggregate the symbols belonging to the block at column c, row r. */
  createBlockSymbols(c: number, r: number) {
    return Accounting.flatten(this.symbols.filter(symbol => {
      return symbol.x === c && symbol.y === r;
    }));
  }
  /** Merge symbols by name (summing bits/samples), returned sorted by bits descending. */
  static flatten(sybmols: AccountingSymbol[]): AccountingSymbolMap {
    let map = Object.create(null);
    sybmols.forEach(symbol => {
      let s = map[symbol.name];
      if (!s) {
        s = map[symbol.name] = new AccountingSymbol(symbol.name, 0, 0, symbol.x, symbol.y);
      }
      s.bits += symbol.bits;
      s.samples += symbol.samples;
    });
    let ret = Object.create(null);
    let names = [];
    for (let name in map) names.push(name);
    // Sort by bits.
    names.sort((a, b) => map[b].bits - map[a].bits);
    names.forEach(name => {
      ret[name] = map[name];
    });
    return ret;
  }
  /** Union of symbol names across several accountings, alphabetically sorted. */
  static getSortedSymbolNames(accountings: Accounting[]): string[] {
    let set = {};
    accountings.forEach(accounting => {
      let frameSymbols = accounting.createFrameSymbols();
      for (let name in frameSymbols) {
        set[name] = undefined;
      }
    });
    let names = Object.keys(set);
    names.sort();
    return names;
  }
}
/**
 * Value-frequency counts for one per-block property, plus the name -> id map
 * used to label the counted values.
 */
export class Histogram {
  constructor(
    public counts: { [id: string]: number },
    public names: { [id: string]: number }) {
    // ...
  }
}
/**
 * One decoded frame: the raw analyzer JSON, derived histograms/accounting,
 * and a lazily rendered canvas of the frame image.
 */
export class AnalyzerFrame {
  // Raw per-frame metadata emitted by the decoder.
  json: {
    frameType: number;
    showFrame: number;
    baseQIndex: number;
    filter_level_y1: number;
    filter_level_y2: number;
    filter_level_u: number;
    filter_level_v: number;
    restoration_type_y: number;
    restoration_type_u: number;
    restoration_type_v: number;
    clpfSize: number;
    clpfStrengthY: number;
    deltaQRes: number;
    deltaQPresentFlag: number;
    config: {
      MI_SIZE: number
    };
  };
  accounting: Accounting;
  blockSizeHist: Histogram;
  transformSizeHist: Histogram;
  transformTypeHist: Histogram;
  predictionModeHist: Histogram;
  uvPredictionModeHist: Histogram;
  skipHist: Histogram;
  dualFilterTypeHist: Histogram;
  // Decoded YUV planes; released (nulled) once rendered into canvasImage.
  frameImage: FrameImage;
  decodeTime: number;
  canvasImage: HTMLCanvasElement;
  get image() : HTMLCanvasElement {
    if (this.canvasImage) {
      return this.canvasImage;
    }
    // Make canvas elements lazily, this speeds up loading.
    this.canvasImage = makeCanvas(this.frameImage);
    // Free frame image data, we don't need it anymore.
    this.frameImage = null;
    return this.canvasImage;
  }
  // Decoder build configuration string (from the worker's load reply).
  config: string;
  // id -> [log2 w, log2 h] lookups derived from the JSON enum maps.
  blockSizeLog2Map: [number, number][];
  transformSizeLog2Map: [number, number][];
  miSizeLog2: number;
  miSuperSizeLog2: number;
}
/**
 * Build an Accounting from a frame's JSON. `json.symbols` interleaves
 * two-element position entries [x, y] with three-element records
 * [nameIndex, bits, samples]; a position applies to all records after it.
 */
function getAccountingFromJson(json: any, name: string): Accounting {
  const accounting = new Accounting();
  if (!json[name]) {
    return accounting;
  }
  const nameTable = json[name + "Map"];
  const symbols = [];
  let x = -1;
  let y = -1;
  for (let i = 0; i < json.symbols.length; i++) {
    const entry = json.symbols[i];
    if (entry.length == 2) {
      // Position marker: subsequent records belong to block (x, y).
      x = entry[0];
      y = entry[1];
    } else {
      const [nameIndex, bits, samples] = entry;
      symbols.push(new AccountingSymbol(nameTable[nameIndex], bits, samples, x, y));
    }
  }
  accounting.symbols = symbols;
  return accounting;
}
/**
 * Count how often each value occurs in the 2D grid json[name]; returns null
 * when that grid is absent. The companion json[name + "Map"] labels the values.
 */
function getHistogramFromJson(json: any, name: string): Histogram {
  if (!json[name]) {
    return null;
  }
  const counts = {};
  for (const row of json[name]) {
    for (const value of row) {
      counts[value] = (counts[value] === undefined ? 0 : counts[value]) + 1;
    }
  }
  return new Histogram(counts, json[name + "Map"]);
}
/**
 * JSON arrays are RLE encoded. ..., x, [12], ... means that x repeats itself
 * an additional 12 times. The RLE marker is a single element array.
 * Returns a Uint8Array when every decoded value fits in a byte, otherwise a
 * plain array.
 */
function uncompressArray(src: any []) {
  const dst = [];
  let previous;
  let fitsInUint8 = true;
  for (let i = 0; i < src.length; i++) {
    const element = src[i];
    if (Array.isArray(element) && element.length == 1) {
      // RLE marker: repeat the previous value element[0] more times.
      for (let j = element[0]; j > 0; j--) {
        dst.push(previous);
      }
      previous = undefined;
    } else {
      previous = element;
      dst.push(previous);
      if (previous !== (previous & 0xFF)) {
        fitsInUint8 = false;
      }
    }
  }
  // Store byte-sized data compactly.
  return fitsInUint8 ? new Uint8Array(dst) : dst;
}
/** RLE-decode each row of a 2D array in place; no-op for null/undefined input. */
function uncompress(arrays) {
  if (!arrays) {
    return;
  }
  arrays.forEach((row, i) => {
    arrays[i] = uncompressArray(row);
  });
}
/**
 * Turn one frame's raw JSON into an AnalyzerFrame: RLE-decode all grid
 * arrays in place, then derive the accounting, histograms, and size maps.
 */
function readFrameFromJson(json): AnalyzerFrame {
  // All 2D grids arrive RLE encoded; decode them in place.
  uncompress(json["blockSize"]);
  uncompress(json["transformSize"]);
  uncompress(json["transformType"]);
  uncompress(json["mode"]);
  uncompress(json["uv_mode"]);
  uncompress(json["skip"]);
  uncompress(json["filter"]);
  uncompress(json["cdef_level"]);
  uncompress(json["cdef_strength"]);
  uncompress(json["motionVectors"]);
  uncompress(json["referenceFrame"]);
  uncompress(json["cfl_alpha_idx"]);
  uncompress(json["cfl_alpha_sign"]);
  uncompress(json["dualFilterType"]);
  uncompress(json["delta_q"]);
  uncompress(json["seg_id"]);
  let frame = new AnalyzerFrame();
  frame.json = json;
  frame.accounting = getAccountingFromJson(json, "symbols");
  frame.blockSizeHist = getHistogramFromJson(json, "blockSize");
  frame.skipHist = getHistogramFromJson(json, "skip");
  frame.transformSizeHist = getHistogramFromJson(json, "transformSize");
  frame.transformTypeHist = getHistogramFromJson(json, "transformType");
  frame.predictionModeHist = getHistogramFromJson(json, "mode");
  frame.uvPredictionModeHist = getHistogramFromJson(json, "uv_mode");
  frame.dualFilterTypeHist = getHistogramFromJson(json, "dualFilterType");
  frame.miSizeLog2 = log2(json.config.MI_SIZE);
  frame.miSuperSizeLog2 = log2(64); // TODO: Does this ever change?
  frame.blockSizeLog2Map = makeBlockSizeLog2MapByValue(json["blockSizeMap"]);
  frame.transformSizeLog2Map = makeTransformSizeLog2MapByValue(json["transformSizeMap"]);
  return frame;
}
/**
 * Download a binary resource as a Uint8Array.
 * URLs with the "local://" prefix are served from the in-memory localFiles
 * registry instead of the network. Rejects with a message string on a
 * missing local file or network error, and with no reason on a non-200
 * response (matching previous behavior).
 */
export function downloadFile(url: string): Promise<Uint8Array> {
  return new Promise((resolve, reject) => {
    if (url.startsWith(localFileProtocol)) {
      let localFile = url.substring(localFileProtocol.length);
      let file = localFiles[localFile];
      if (file) {
        resolve(new Uint8Array(file));
      } else {
        reject(`Local file "${localFile}" does not exist.`);
      }
      return;
    }
    let xhr = new XMLHttpRequest();
    xhr.open("GET", url, true);
    xhr.responseType = "arraybuffer";
    // Attach listeners before send(); XHR events fire asynchronously either way,
    // but this ordering is the conventional, unambiguous one.
    xhr.addEventListener("load", function () {
      if (xhr.status != 200) {
        reject();
        return;
      }
      resolve(new Uint8Array(this.response));
    });
    xhr.addEventListener("error", function () {
      reject(`Cannot download ${url}`);
    });
    xhr.send();
  });
}
/**
 * Download a JSON resource. Rejects with no reason on a non-200 response
 * (matching previous behavior) and with a message string on a network error.
 */
export function downloadJson(url: string): Promise<Object> {
  return new Promise((resolve, reject) => {
    let xhr = new XMLHttpRequest();
    xhr.open("GET", url, true);
    xhr.responseType = "json";
    xhr.addEventListener("load", function () {
      if (xhr.status != 200) {
        reject();
        return;
      }
      resolve(this.response);
    });
    // Previously there was no error listener, so the promise hung forever
    // on a network failure; reject instead.
    xhr.addEventListener("error", function () {
      reject(`Cannot download ${url}`);
    });
    xhr.send();
  });
}
/**
 * Download a JSON analyzer dump and convert each non-null entry into an
 * AnalyzerFrame. Download failures now propagate as a rejected promise;
 * previously the wrapping promise swallowed them and never settled.
 */
export function loadFramesFromJson(url: string): Promise<AnalyzerFrame[]> {
  return downloadJson(url).then((json: any) => {
    return json.filter(frame => !!frame).map(frame => readFrameFromJson(frame));
  });
}
/** A mutable width/height pair with chainable helpers. */
export class Size {
  constructor(public w: number, public h: number) {
    // ...
  }
  /** @returns an independent copy. */
  clone() {
    return new Size(this.w, this.h);
  }
  /**
   * True when BOTH dimensions match. Bug fix: this previously used ||,
   * which reported sizes as equal when only one dimension matched.
   */
  equals(other: Size) {
    return this.w == other.w && this.h == other.h;
  }
  area(): number {
    return this.w * this.h;
  }
  /** Scale both dimensions in place; a non-finite scalar collapses the size to 0x0. */
  multiplyScalar(scalar: number) {
    if (isFinite(scalar)) {
      this.w *= scalar;
      this.h *= scalar;
    } else {
      this.w = 0;
      this.h = 0;
    }
    return this;
  }
  /** Round both dimensions up to the nearest multiple of 2^roundToLog2. */
  roundUpToMultipleOfLog2(roundToLog2) {
    let roundTo = 1 << roundToLog2;
    this.w = (this.w + roundTo - 1) & ~(roundTo - 1);
    this.h = (this.h + roundTo - 1) & ~(roundTo - 1);
    return this;
  }
}
/** A mutable axis-aligned rectangle (origin x/y plus size w/h). */
export class Rectangle {
  constructor(public x: number, public y: number, public w: number, public h: number) {
    // ...
  }
  /** Rectangle of size w x h whose center sits at v. */
  static createRectangleCenteredAtPoint(v: Vector, w: number, h: number) {
    return new Rectangle(v.x - w / 2, v.y - h / 2, w, h);
  }
  /** Rectangle at the origin with the given size. */
  static createRectangleFromSize(size: Size) {
    return new Rectangle(0, 0, size.w, size.h);
  }
  set(x: number, y: number, w: number, h: number) {
    this.x = x;
    this.y = y;
    this.w = w;
    this.h = h;
    return this;
  }
  /** Half-open containment: left/top edges inclusive, right/bottom exclusive. */
  containsPoint(point: Vector): boolean {
    const insideX = point.x >= this.x && point.x < this.x + this.w;
    const insideY = point.y >= this.y && point.y < this.y + this.h;
    return insideX && insideY;
  }
  getCenter(): Vector {
    return new Vector(this.x + this.w / 2, this.y + this.h / 2);
  }
  clone(): Rectangle {
    return new Rectangle(this.x, this.y, this.w, this.h);
  }
  /** Scale origin and size in place. */
  multiplyScalar(scalar: number) {
    this.x *= scalar;
    this.y *= scalar;
    this.w *= scalar;
    this.h *= scalar;
    return this;
  }
}
/** A mutable 2D vector with chainable, in-place arithmetic helpers. */
export class Vector {
  x: number;
  y: number;
  constructor(x: number, y: number) {
    this.x = x;
    this.y = y;
  }
  set(x: number, y: number) {
    this.x = x;
    this.y = y;
    return this;
  }
  /** Move toward v by the fraction alpha (0 = stay, 1 = reach v). */
  lerp(v: Vector, alpha: number) {
    this.x += (v.x - this.x) * alpha;
    this.y += (v.y - this.y) * alpha;
    return this;
  }
  clone(): Vector {
    return new Vector(this.x, this.y);
  }
  lengthSq() {
    return this.x * this.x + this.y * this.y;
  }
  length() {
    return Math.sqrt(this.lengthSq());
  }
  distanceTo(v: Vector) {
    const dx = this.x - v.x;
    const dy = this.y - v.y;
    return Math.sqrt(dx * dx + dy * dy);
  }
  /** Scale to unit length (divides by the current length). */
  normalize() {
    return this.divideScalar(this.length());
  }
  /** Scale in place; a non-finite scalar zeroes the vector. */
  multiplyScalar(scalar) {
    if (!isFinite(scalar)) {
      this.x = 0;
      this.y = 0;
      return this;
    }
    this.x *= scalar;
    this.y *= scalar;
    return this;
  }
  divide(v) {
    this.x /= v.x;
    this.y /= v.y;
    return this;
  }
  divideScalar(scalar) {
    return this.multiplyScalar(1 / scalar);
  }
  /** Truncate both components toward zero. */
  snap() {
    // TODO: Snap to nearest pixel
    this.x = this.x | 0;
    this.y = this.y | 0;
    return this;
  }
  sub(v: Vector): Vector {
    this.x -= v.x;
    this.y -= v.y;
    return this;
  }
  add(v: Vector): Vector {
    this.x += v.x;
    this.y += v.y;
    return this;
  }
  /** Clamp the vector's length into [min, max], preserving direction. */
  clampLength(min: number, max: number) {
    const length = this.length();
    return this.multiplyScalar(Math.max(min, Math.min(max, length)) / length);
  }
  toString(): string {
    return this.x + "," + this.y;
  }
}
/** Column/row dimensions of a block grid. */
export class GridSize {
  constructor(public cols: number, public rows: number) {
    // ...
  }
}
/**
 * Count the frames in an IVF container by walking its frame headers.
 * Layout assumed here: a 32-byte file header, then per frame a 12-byte
 * header whose first four bytes are the little-endian payload size.
 */
function getFramesIvf(ivf: Uint8Array): number {
  let frames = 0;
  let offset = 32;
  while (offset < ivf.length) {
    const payloadSize =
      ivf[offset] + (ivf[offset + 1] << 8) + (ivf[offset + 2] << 16) + (ivf[offset + 3] << 24);
    offset += 12 + payloadSize;
    frames++;
  }
  return frames;
}
/**
 * Drives the analyzer decoder running in a Worker and accumulates decoded
 * AnalyzerFrames. Requests and worker replies are correlated through a
 * randomly generated id (see addWorkerCallback / initWorker).
 */
export class Decoder {
  worker: Worker;
  /** Pending one-shot reply handlers, keyed by request id. */
  workerCallbacks = [];
  /** Build information reported by the worker after load(). */
  workerInfo: any = {};
  decoder: string;
  buffer: Uint8Array;
  frames: AnalyzerFrame[] = [];
  frameRate: number = 30;
  totalFrames: number;
  /** Whether to read image data after decoding a frame. */
  shouldReadImageData: boolean = true;
  constructor(nativeModule, worker) {
    this.buffer = new Uint8Array(0);
    this.worker = worker;
    this.initWorker();
  }
  /** Drop references so the decoder, its buffer, and frames can be collected. */
  unload() {
    this.worker = null;
    this.buffer = null;
    this.frames = null;
  }
  /** Ask the worker to load the decoder at url; resolves once it reports back. */
  load(url): Promise<any> {
    if (url.indexOf("://") < 0) {
      url = window.location.origin + '/' + url;
    }
    return new Promise((resolve, reject) => {
      var id = String(Math.random());
      this.addWorkerCallback(id, (e) => {
        // (A stray no-op `3` expression statement was removed here.)
        if (e.data.payload) {
          this.workerInfo = {
            buildConfig: e.data.payload.buildConfig
          }
          resolve(null);
        } else {
          reject(`Cannot load decoder, check url: ${url}`);
        }
      });
      this.worker.postMessage({
        command: "load",
        payload: [url],
        id
      });
    });
  }
  /** Hand the IVF file bytes to the worker and read the container metadata. */
  openFileBytes(buffer: Uint8Array) {
    // Bytes 16-19 hold the frame rate as a 32-bit little-endian value.
    // Bug fix: the shifts were 24/16/24, which garbled every byte but the lowest.
    this.frameRate = buffer[16] | buffer[17] << 8 | buffer[18] << 16 | buffer[19] << 24;
    this.totalFrames = getFramesIvf(buffer);
    this.buffer = buffer;
    this.worker.postMessage({
      command: "openFileBytes",
      payload: buffer
    });
  }
  setLayers(layers: number) {
    this.worker.postMessage({
      command: "setLayers",
      payload: layers
    });
  }
  /** Route each worker reply to (and then clear) its registered callback. */
  initWorker() {
    this.worker.addEventListener("message", (e) => {
      if (!e.data.id) {
        return;
      }
      this.workerCallbacks[e.data.id](e);
      this.workerCallbacks[e.data.id] = null;
    });
  }
  addWorkerCallback(id: string, fn: (e: any) => void) {
    this.workerCallbacks[id] = fn;
  }
  /**
   * Transfer buffers back to the worker thread so they can be reused. This reduces
   * memory pressure.
   */
  releaseFrameImageBuffers(frameImage: FrameImage) {
    this.worker.postMessage({
      command: "releaseFrameBuffers",
      payload: {
        Y: frameImage.Y.buffer,
        U: frameImage.U.buffer,
        V: frameImage.V.buffer
      }
    }, [frameImage.Y.buffer, frameImage.U.buffer, frameImage.V.buffer]);
    // Transferred ArrayBuffers are detached (byteLength 0) on this side.
    assert(frameImage.Y.buffer.byteLength === 0 &&
      frameImage.U.buffer.byteLength === 0 &&
      frameImage.V.buffer.byteLength === 0, "Buffers must be transferred.");
  }
  /** Decode the next frame(s); the last entry carries image data and timing. */
  readFrame(): Promise<AnalyzerFrame[]> {
    let worker = this.worker;
    let self = this;
    let id = String(Math.random());
    return new Promise((resolve, reject) => {
      this.addWorkerCallback(id, function (e) {
        let o = e.data.payload.json as Object[];
        if (!o) {
          reject();
          return;
        }
        let frames: AnalyzerFrame[] = [];
        // The last JSON entry is a sentinel and is not converted to a frame.
        for (let i = 0; i < o.length - 1; i++) {
          let json = o[i];
          let frame = readFrameFromJson(json);
          frame.config = self.workerInfo.buildConfig;
          frames.push(frame);
          self.frames && self.frames.push(frame);
        }
        if (self.shouldReadImageData) {
          frames[frames.length - 1].frameImage = e.data.payload.image;
        }
        frames[frames.length - 1].decodeTime = e.data.payload.decodeTime;
        resolve(frames);
      });
      let shouldReadImageData = self.shouldReadImageData;
      worker.postMessage({
        command: "readFrame",
        id,
        shouldReadImageData
      });
    });
  }
  /** Spawn the analyzer worker bundle and load the decoder at url into it. */
  static loadDecoder(url: string): Promise<Decoder> {
    return new Promise((resolve, reject) => {
      let worker = new Worker("dist/analyzer_worker.bundle.js");
      let decoder = new Decoder(null, worker);
      decoder.load(url).then(() => {
        resolve(decoder);
      }).catch((x) => {
        reject(x);
      });
    });
  }
}
// URL prefix and in-memory registry used by downloadFile() to serve local files.
export let localFileProtocol = "local://";
export let localFiles = {};
// Block-size enum name -> [log2 width, log2 height] in pixels.
const blockSizeLog2MapByName = {
  BLOCK_2X2: [1, 1],
  BLOCK_2X4: [1, 2],
  BLOCK_4X2: [2, 1],
  BLOCK_4X4: [2, 2],
  BLOCK_4X8: [2, 3],
  BLOCK_8X4: [3, 2],
  BLOCK_8X8: [3, 3],
  BLOCK_8X16: [3, 4],
  BLOCK_16X8: [4, 3],
  BLOCK_16X16: [4, 4],
  BLOCK_16X32: [4, 5],
  BLOCK_32X16: [5, 4],
  BLOCK_32X32: [5, 5],
  BLOCK_32X64: [5, 6],
  BLOCK_64X32: [6, 5],
  BLOCK_64X64: [6, 6],
  BLOCK_64X128: [6, 7],
  BLOCK_128X64: [7, 6],
  BLOCK_128X128: [7, 7],
  BLOCK_4X16: [2, 4],
  BLOCK_16X4: [4, 2],
  BLOCK_8X32: [3, 5],
  BLOCK_32X8: [5, 3],
  BLOCK_16X64: [4, 6],
  BLOCK_64X16: [6, 4],
  BLOCK_32X128: [5, 7],
  BLOCK_128X32: [7, 5]
};
// Transform-size enum name -> [log2 width, log2 height] in pixels.
const transformSizeLog2MapByName = {
  TX_2X2: [1, 1],
  TX_4X4: [2, 2],
  TX_4X8: [2, 3],
  TX_4X16: [2, 4],
  TX_8X4: [3, 2],
  TX_8X8: [3, 3],
  TX_8X16: [3, 4],
  TX_8X32: [3, 5],
  TX_16X4: [4, 2],
  TX_16X8: [4, 3],
  TX_16X16: [4, 4],
  TX_16X32: [4, 5],
  TX_32X8: [5, 3],
  TX_32X16: [5, 4],
  TX_32X32: [5, 5],
  TX_32X64: [5, 6],
  TX_64X32: [6, 5],
  TX_64X64: [6, 6],
  TX_16X64: [4, 6],
  TX_64X16: [6, 4]
}
/** Left-pad the string form of v with spaces to a minimum width of n. */
export function padLeft(v, n) {
  const str = String(v);
  return str.length >= n ? str : " ".repeat(n - str.length) + str;
}
/**
 * Integer base-2 logarithm of a power of two.
 * Generalized from the previous 1..64 switch table to any positive power of
 * two (e.g. 128-pixel block sizes); any other input still aborts via
 * unreachable(), and the function now always returns on the success path
 * instead of falling through with undefined.
 */
export function log2(n: number): number {
  const result = Math.log2(n);
  if (!Number.isInteger(result)) {
    unreachable();
  }
  return result;
}
/**
 * Invert a block-size name -> id map into an id-indexed array of
 * [log2 width, log2 height] pairs; aborts on names the table doesn't know.
 */
export function makeBlockSizeLog2MapByValue(blockSizeMap): [number, number][] {
  const byValue = [];
  for (const key in blockSizeMap) {
    assert(key in blockSizeLog2MapByName, `Key ${key} not found in blockSizeLog2MapByName.`);
    byValue[blockSizeMap[key]] = blockSizeLog2MapByName[key];
  }
  return byValue;
}
/**
 * Invert a transform-size name -> id map into an id-indexed array of
 * [log2 width, log2 height] pairs; aborts on names the table doesn't know.
 */
export function makeTransformSizeLog2MapByValue(transformSizeMap): [number, number][] {
  const byValue = [];
  for (const key in transformSizeMap) {
    assert(key in transformSizeLog2MapByName, `Key ${key} not found in transformSizeLog2MapByName.`);
    byValue[transformSizeMap[key]] = transformSizeLog2MapByName[key];
  }
  return byValue;
}
/** Invert a name -> id map into an id-indexed lookup of names. */
export function reverseMap(map: { [name: string]: number }): { [id: number]: string } {
  const inverted = [];
  for (const key in map) {
    inverted[map[key]] = key;
  }
  return inverted;
}
/**
 * Hand selected using http://tools.medialab.sciences-po.fr/iwanthue/
 * Per-category color palettes consulted first by getColor(); any name not
 * listed here falls back to a hash-derived pick from COLORS.
 */
export const palette = {
  blockSize: {
    BLOCK_2X2: "#f4ffc3",
    BLOCK_2X4: "#622cd8",
    BLOCK_4X2: "#deff76",
    BLOCK_4X4: "#ff50ed",
    BLOCK_4X8: "#808900",
    BLOCK_8X4: "#014bb5",
    BLOCK_8X8: "#ffbd35",
    BLOCK_8X16: "#6895ff",
    BLOCK_16X8: "#e62b00",
    BLOCK_16X16: "#02b4e1",
    BLOCK_16X32: "#a45a00",
    BLOCK_32X16: "#00a781",
    BLOCK_32X32: "#ff70a6",
    BLOCK_32X64: "#00372a",
    BLOCK_64X32: "#ff9556",
    BLOCK_64X64: "#7a0032"
  },
  transformSize: {
    TX_2X2: "#f4ffc3",
    TX_4X4: "#622cd8",
    TX_4X8: "#deff76",
    TX_4X16: "#ff50ed",
    TX_8X4: "#808900",
    TX_8X8: "#014bb5",
    TX_8X16: "#ffbd35",
    TX_8X32: "#6895ff",
    TX_16X4: "#e62b00",
    TX_16X8: "#02b4e1",
    TX_16X16: "#a45a00",
    TX_16X32: "#00a781",
    TX_32X8: "#ff70a6",
    TX_32X16: "#00372a",
    TX_32X32: "#ff9556"
  },
  seg_id: {
    0: "#f4ffc3",
    1: "#622cd8",
    2: "#deff76",
    3: "#ff50ed",
    4: "#6895ff",
    5: "#014bb5",
    6: "#ffbd35",
    7: "#682bff",
    8: "#e62b00",
  },
  transformType: {
    DCT_DCT: "#f4ffc3",
    ADST_DCT: "#622cd8",
    DCT_ADST: "#deff76",
    ADST_ADST: "#ff50ed",
    FLIPADST_DCT: "#808900",
    DCT_FLIPADST: "#014bb5",
    FLIPADST_FLIPADST: "#ffbd35",
    ADST_FLIPADST: "#6895ff",
    FLIPADST_ADST: "#e62b00",
    IDTX: "#02b4e1",
    V_DCT: "#a45a00",
    H_DCT: "#00a781",
    V_ADST: "#ff70a6",
    H_ADST: "#00372a",
    V_FLIPADST: "#ff9556",
    H_FLIPADST: "#7a0032"
  },
  skip: {
    SKIP: "#6c0039",
    NO_SKIP: "#00d041"
  },
  predictionMode: {
    DC_PRED: "#6c0039",
    V_PRED: "#00d041",
    H_PRED: "#801cd1",
    D45_PRED: "#a0ff78",
    D135_PRED: "#ff4ff7",
    D113_PRED: "#02c45a",
    D157_PRED: "#2d64ff",
    D203_PRED: "#91b900",
    D67_PRED: "#001d80",
    SMOOTH_PRED: "#78ff9f",
    SMOOTH_V_PRED: "#08ff9f",
    SMOOTH_H_PRED: "#f8ff9f",
    PAETH_PRED: "#410065",
    NEARESTMV: "#8affe8",
    NEARMV: "#ee007d",
    ZEROMV: "#01ad84",
    NEWMV: "#c00045",
    NEWFROMNEARMV: "#6beeff",
    NEAREST_NEARESTMV: "#af1b00",
    NEAREST_NEARMV: "#00468f",
    NEAR_NEARESTMV: "#ff5a3b",
    NEAR_NEARMV: "#007e7c",
    NEAREST_NEWMV: "#ff696f",
    NEW_NEARESTMV: "#006a43",
    NEAR_NEWMV: "#b79dff",
    NEW_NEARMV: "#b17d00",
    ZERO_ZEROMV: "#00041a",
    NEW_NEWMV: "#ffa574"
  },
  referenceFrame: {
    INTRA_FRAME: "#f4ffc3",
    LAST_FRAME: "#622cd8",
    LAST2_FRAME: "#deff76",
    // NOTE(review): LAST3_FRAME and GOLDEN_FRAME share "#ff50ed" — looks like
    // an accidental duplicate; confirm and pick a distinct color if so.
    LAST3_FRAME: "#ff50ed",
    GOLDEN_FRAME: "#ff50ed",
    BWDREF_FRAME: "#808900",
    ALTREF_FRAME: "#014bb5"
  },
  dualFilterType: {
    REG_REG: "#c95f3f",
    REG_SMOOTH: "#4eb7a0",
    REG_SHARP: "#b459c0",
    SMOOTH_REG: "#77b84b",
    SMOOTH_SMOOTH: "#d0406d",
    SMOOTH_SHARP: "#627e3b",
    SHARP_REG: "#6f7dcb",
    SHARP_SMOOTH: "#c29743",
    SHARP_SHARP: "#c06d93"
  }
}
export function getColor(name: string, palette = undefined): string {
if (name === undefined) {
console.warn("Undefined name in getColor(), make sure ENUMs are exported correctly.");
return "#FF0000";
}
return (palette && palette[name]) || COLORS[hashString(name) % COLORS.length];
} | the_stack |
import React, {Component} from 'react';
import {View, StyleSheet, FlatList, ActivityIndicator, Text, ImageBackground} from 'react-native';
import {connect, DispatchProp} from 'react-redux';
import {NavigationInjectedProps, withNavigation} from 'react-navigation';
import MediaQuery from 'react-responsive';
import Message from './Message';
import {RootState} from '../../reducers';
import {addMessageToChat} from '../../slices/messages-slice';
import {getMessagesByChatId, getRepliesByThreadId} from '../../slices/messages-thunks';
import {markChatAsRead, getChatInfo, goToChannelDetails} from '../../slices/chats-thunks';
import Header from '../../components/Header';
import withTheme, {ThemeInjectedProps} from '../../contexts/theme/withTheme';
import InputToolbar from './InputToolbar';
import {getMember, goToUserProfile} from '../../slices/members-thunks';
import isLandscape from '../../utils/stylesheet/isLandscape';
import {meSelector, currentTeamTokenSelector} from '../../slices/teams-slice';
import px from '../../utils/normalizePixel';
import Touchable from '../../components/Touchable';
import ChannelMembersCount from './ChannelMembersCount';
import DirectPresense from './DirectPresense';
import Screen from '../../components/Screen';
import Typing from './Typing';
import {setCurrentChat, setCurrentThread} from '../../slices/chats-slice';
import select from '../../utils/select';
import UploadDropZoneWeb from './UploadDropZoneWeb';
import ChannelDetails from '../ChannelDetails';
// @ts-ignore
import ChannelDetailsIcon from '../../assets/icons/dock-right.svg';
import {openUploadDialog} from '../../slices/files-slice';
import {Platform} from '../../utils/platform';
// Kinds of conversations this screen can render.
export type ChatType = 'direct' | 'channel' | 'thread';
// Props are assembled from redux state, navigation, theming, and the owner.
type Props = ReturnType<typeof mapStateToProps> &
  NavigationInjectedProps &
  DispatchProp<any> &
  ThemeInjectedProps & {
    chatId: string;
    threadId: string;
    chatType: ChatType;
  };
/**
 * Chat screen: message list (channel, direct, or thread), header with
 * presence/typing info, input toolbar, and an optional side panel with
 * channel details on wide layouts.
 */
class ChatUI extends Component<Props> {
  _flatlistRef: any;
  _scrollNode: any;
  state = {
    isChannelDetailsOpen: true,
  };
  async componentDidMount() {
    let {chatType, chatId, dispatch} = this.props;
    if (chatType === 'channel' || chatType === 'direct') {
      this.getMessage();
      dispatch(getChatInfo(chatId));
    }
    if (chatType === 'thread') {
      this.getReplies();
    }
    // The inverted-wheel workaround is only needed for inverted lists (web).
    this.isInverted() && this.registerScrollHandlder();
  }
  componentDidUpdate(prevProps: Props) {
    // Did the unread counter grow since the previous render?
    let unreadCountIncreased = () => {
      let {currentChat, chatType} = this.props;
      let {currentChat: prevChat} = prevProps;
      if (currentChat && prevChat) {
        if (
          chatType === 'channel' &&
          currentChat.unread_count &&
          currentChat.unread_count > prevChat.unread_count
        )
          return true;
        if (
          chatType === 'direct' &&
          currentChat.dm_count &&
          currentChat.dm_count > prevChat.dm_count
        )
          return true;
      }
      return false;
    };
    if (unreadCountIncreased()) {
      this.markChatAsRead();
    }
    let chatChanged = this.props.chatId !== prevProps.chatId;
    if (chatChanged) {
      // Re-runs the mount logic for the newly selected chat.
      // NOTE(review): calling a lifecycle method directly is unusual —
      // consider extracting the shared init into a named method.
      this.componentDidMount();
    }
  }
  componentWillUnmount() {
    // Clear the "current" chat/thread selection when leaving the screen.
    let {chatType, dispatch} = this.props;
    if (chatType === 'channel' || chatType === 'direct') {
      dispatch(setCurrentChat({chatId: ''}));
    }
    if (chatType === 'thread') {
      dispatch(setCurrentThread({threadId: ''}));
    }
  }
  // Fixes inverted scroll on web
  registerScrollHandlder() {
    if (Platform.isNative) return;
    this._scrollNode = this._flatlistRef.getScrollableNode();
    this._scrollNode.addEventListener('wheel', this._invertedWheelEvent);
    // enable hardware acceleration
    // makes scrolling fast in safari and firefox
    // https://stackoverflow.com/a/24157294
    this._flatlistRef.setNativeProps({
      style: {
        transform: 'translate3d(0,0,0) scaleY(-1)',
      },
    });
  }
  // Reverses wheel direction so the visually-inverted list scrolls naturally.
  _invertedWheelEvent = (e: React.WheelEvent) => {
    this._scrollNode.scrollTop -= e.deltaY;
    e.preventDefault();
  };
  // Load the initial page of messages for a channel/direct chat and mark it read.
  async getMessage() {
    let {lastMessageStatus, lastMessage, nextCursor, messagesList, dispatch, chatId} = this.props;
    if (lastMessageStatus && lastMessageStatus.messageId && !lastMessageStatus.loading) {
      dispatch(addMessageToChat({messageId: lastMessageStatus.messageId, chatId}));
      dispatch(getMember(lastMessage.user));
    }
    if (nextCursor && nextCursor[chatId] !== 'end' && messagesList.length <= 1) {
      await dispatch(getMessagesByChatId(chatId));
    }
    this.markChatAsRead();
  }
  // Load the thread's root message and its replies.
  getReplies() {
    let {dispatch, threadId, chatId} = this.props;
    dispatch(addMessageToChat({messageId: threadId as string, threadId: threadId as string}));
    dispatch(getRepliesByThreadId(threadId, chatId));
  }
  // Pagination handler for FlatList's onEndReached.
  getOlderMessages = () => {
    let {chatType, chatId, threadId} = this.props;
    if (chatType === 'channel' || chatType === 'direct') {
      this.props.dispatch(getMessagesByChatId(chatId));
    }
    if (chatType === 'thread') {
      this.props.dispatch(getRepliesByThreadId(threadId, chatId));
    }
  };
  markChatAsRead = () => {
    let {dispatch, chatId} = this.props;
    dispatch(markChatAsRead(chatId, this.props.messagesList[0] as string));
  };
  openChatDetails = () => {
    let {chatId, chatType, currentUser, navigation, dispatch} = this.props;
    if (chatType === 'channel') dispatch(goToChannelDetails(chatId));
    if (chatType === 'direct') dispatch(goToUserProfile(currentUser.id, navigation));
  };
  // Threads render top-down; channels and directs render newest-at-bottom (inverted).
  isInverted = () => this.props.chatType !== 'thread';
  toggleChannelDetailsPanel = () => {
    // if (Platform.isElectron) {
    //   ipcRenderer.send('resize-main-window', {width: 1280});
    // }
    this.setState({isChannelDetailsOpen: !this.state.isChannelDetailsOpen});
  };
  handleFileDropWeb = (files: File[]) => {
    let {chatId, threadId} = this.props;
    this.props.dispatch(openUploadDialog({params: {files, chatId, threadId}}));
  };
  renderMessageCell = ({item: messageId, index}: {item: string; index: number}) => {
    let {chatType} = this.props;
    // Neighboring ids depend on list direction (inverted lists are reversed).
    let prevMessageId = this.isInverted()
      ? this.props.messagesList[index + 1]
      : this.props.messagesList[index - 1];
    let nextMessageId = this.isInverted()
      ? this.props.messagesList[index - 1]
      : this.props.messagesList[index + 1];
    let isThreadMainMsg = chatType === 'thread' && index === 0;
    return (
      <Message
        messageId={messageId}
        prevMessageId={prevMessageId}
        nextMessageId={nextMessageId}
        inverted={chatType !== 'thread'}
        showDivider={isThreadMainMsg}
        hideReplies={chatType === 'thread'}
        hideAvatar={chatType === 'direct'}
      />
    );
  };
  // Spinner at the list end while older messages are being fetched.
  renderLoadingMore = () => {
    let {loading, messagesList} = this.props;
    if (loading && messagesList.length > 1)
      return (
        <View style={{width: '100%', alignItems: 'center'}}>
          <ActivityIndicator size="small" color="#fff" />
        </View>
      );
    return null;
  };
  keyExtractor = (messageId: string) => messageId.toString();
  renderList() {
    let {messagesList} = this.props;
    const inverted = this.isInverted();
    return (
      <FlatList
        ref={(ref) => (this._flatlistRef = ref)}
        data={messagesList}
        renderItem={this.renderMessageCell}
        bounces={false}
        initialNumToRender={6}
        inverted={inverted}
        keyExtractor={this.keyExtractor}
        onEndReachedThreshold={0.5}
        onEndReached={this.getOlderMessages}
        ListFooterComponent={this.renderLoadingMore}
        contentContainerStyle={{
          paddingTop: inverted ? 0 : px(10),
          paddingBottom: inverted ? px(10) : 0,
        }}
      />
    );
  }
  renderInputToolbar() {
    return <InputToolbar chatId={this.props.chatId} threadId={this.props.threadId} />;
  }
  // Online/offline indicator for direct chats (hidden while someone is typing).
  renderPresense() {
    let {chatType, chatId, typingUsersCount} = this.props;
    if (chatType === 'direct' && typingUsersCount === 0) {
      return <DirectPresense chatId={chatId} />;
    }
    return null;
  }
  renderMembersCount() {
    let {chatType, chatId, typingUsersCount, currentChat} = this.props;
    if (chatType === 'channel' && typingUsersCount === 0 && !currentChat.is_private) {
      return <ChannelMembersCount chatId={chatId} />;
    }
    return null;
  }
  renderChatName() {
    let {chatType, currentChat, currentUser} = this.props;
    let chatName = select(chatType, {
      channel: `#${currentChat?.name_normalized}`,
      direct:
        currentUser?.profile.display_name_normalized || currentUser?.profile.real_name_normalized,
      thread: 'Thread',
    });
    return <Text style={styles.chatName}>{chatName}</Text>;
  }
  renderTyping() {
    return <Typing chatId={this.props.chatId} />;
  }
  renderIsPrivate() {
    return <Text style={styles.chat_status}>private</Text>;
  }
  renderHeader() {
    let {chatId, chatType, typingUsersCount, currentChat} = this.props;
    // Header subtitle priority: typing > presence (direct) > private/member count (channel).
    let center = (
      <Touchable onPress={this.openChatDetails} style={{alignItems: 'center'}}>
        {this.renderChatName()}
        {typingUsersCount > 0
          ? this.renderTyping()
          : chatType === 'direct'
          ? this.renderPresense()
          : chatType === 'channel'
          ? currentChat.is_private
            ? this.renderIsPrivate()
            : this.renderMembersCount()
          : null}
      </Touchable>
    );
    let right = chatType === 'channel' && isLandscape() && (
      <Touchable onPress={this.toggleChannelDetailsPanel} style={{marginRight: px(5)}}>
        <ChannelDetailsIcon
          fill={this.state.isChannelDetailsOpen ? '#fff' : '#D3ABD0'}
          width={px(21)}
          height={px(21)}
        />
      </Touchable>
    );
    return <Header center={center} left={isLandscape() ? undefined : 'back'} right={right} />;
  }
  // Side panel with channel details; only on wide (>=1280px) channel layouts.
  renderChannelDetails() {
    let {chatId, chatType} = this.props;
    let {isChannelDetailsOpen} = this.state;
    return (
      chatType === 'channel' &&
      isChannelDetailsOpen && (
        <MediaQuery minWidth={1280}>
          <View style={{width: px(325)}}>
            <ChannelDetails chatId={chatId} onDismiss={this.toggleChannelDetailsPanel} />
          </View>
        </MediaQuery>
      )
    );
  }
  render() {
    let {currentChat, theme} = this.props;
    // Nothing to render until the chat entity is in the store.
    if (!currentChat) return null;
    return (
      <Screen>
        <ImageBackground
          style={[styles.container, {backgroundColor: theme.backgroundColorDarker1}]}
          resizeMode="repeat"
          // TODO: change background image feature
          // source={theme.isDark ? undefined : require('../../assets/img/wa1.png')}
        >
          <UploadDropZoneWeb onDrop={this.handleFileDropWeb}>
            {this.renderHeader()}
            {this.renderList()}
            {this.renderInputToolbar()}
          </UploadDropZoneWeb>
          {this.renderChannelDetails()}
        </ImageBackground>
      </Screen>
    );
  }
}
const styles = StyleSheet.create({
  // Root row layout: chat content beside the optional details side panel.
  container: {
    flex: 1,
    flexDirection: 'row',
  },
  // Bold header title (channel / user / thread name).
  chatName: {
    color: '#fff',
    fontSize: px(15.5),
    fontWeight: 'bold',
  },
  // Secondary status line under the title (e.g. the "private" badge).
  chat_status: {
    color: '#fff',
    marginTop: px(2.5),
    fontSize: px(13.5),
  },
});
// Stable empty-array fallback so mapStateToProps returns the same reference
// for chats with no messages (avoids needless re-renders on shallow compare).
let defaultList: any = [];
const mapStateToProps = (state: RootState, ownProps: any) => {
  // Props may arrive directly (embedded usage) or via react-navigation params.
  const chatId = ownProps.chatId ?? ownProps.navigation?.getParam('chatId');
  const threadId = ownProps.threadId ?? ownProps.navigation?.getParam('threadId');
  const chatType = ownProps.chatType ?? ownProps.navigation?.getParam('chatType');
  const currentChat = state.entities.chats.byId[chatId];
  const currentUser = currentChat && state.entities.users.byId[currentChat.user_id];
  const me = meSelector(state);
  // Thread messages take precedence over the chat's own list; fall back to
  // the shared `defaultList` reference when empty.
  const messagesList = state.messages.list[threadId || chatId] || defaultList;
  const lastMessageStatus = state.chats.lastMessages[chatId];
  return {
    chatId,
    threadId,
    currentChat,
    currentUser,
    messagesList,
    nextCursor: state.messages.nextCursor[chatId],
    loading: state.messages.loading[chatId],
    // Explicit chatType wins; otherwise infer from the chat entity.
    chatType: (chatType ? chatType : currentChat?.is_im ? 'direct' : 'channel') as ChatType,
    lastMessageStatus,
    // Simplified from `(s && s.messageId && s.messageId) || ''`; `||` (not ??)
    // keeps the original falsy-to-'' mapping.
    lastMessage: state.entities.messages.byId[lastMessageStatus?.messageId || ''],
    me,
    currentTeamToken: currentTeamTokenSelector(state),
    typingUsersCount: state.chats.typingsUsers[chatId]?.length ?? 0,
  };
};
// Connect to redux and wrap with the theme + navigation HOCs.
export default connect(mapStateToProps)(withTheme(withNavigation(ChatUI)));
import { execSync } from "child_process";
import os from "os";
import path from "path";
import logger from "@docusaurus/logger";
import fs from "fs-extra";
import { kebabCase } from "lodash";
import prompts from "prompts";
import shell from "shelljs";
import supportsColor from "supports-color";
// Template installed when the user does not request one explicitly.
const RecommendedTemplate = "openapi";
function hasYarn() {
  // Probe for the yarn binary; a throwing execSync means "not installed".
  try {
    execSync("yarnpkg --version", { stdio: "ignore" });
    return true;
  } catch {
    return false;
  }
}
function isValidGitRepoUrl(gitRepoUrl: string) {
  // Accept only HTTPS and SSH-style git URLs.
  const gitPrefixes = ["https://", "git@"];
  return gitPrefixes.some((prefix) => gitRepoUrl.startsWith(prefix));
}
async function updatePkg(pkgPath: string, obj: Record<string, unknown>) {
  // Read-modify-write: overlay `obj` on the existing package.json fields
  // and write the result back with a trailing newline.
  const pkg = JSON.parse(await fs.readFile(pkgPath, "utf-8"));
  const merged = Object.assign(pkg, obj);
  await fs.outputFile(pkgPath, `${JSON.stringify(merged, null, 2)}\n`);
}
function getTemplateInstallPackage(
  template: string,
  originalDirectory: string
) {
  // Resolve the user-supplied template spec to an installable package spec.
  // Falls back to the bare "docusaurus-template" package when none is given.
  let templateToInstall = "docusaurus-template";
  if (template) {
    const match = template.match(/^file:(.*)?$/);
    if (match) {
      // Local template: resolve relative to where the CLI was invoked.
      // FIX: match[1] is undefined for a bare "file:" spec; previously this
      // crashed in path.resolve — now it resolves to originalDirectory.
      templateToInstall = `file:${path.resolve(
        originalDirectory,
        match[1] ?? ""
      )}`;
    } else if (
      template.includes("://") ||
      template.match(/^.+\.(tgz|tar\.gz)$/)
    ) {
      // URLs and tarball paths are passed through untouched.
      templateToInstall = template;
    } else {
      // Add the 'docusaurus-template-' prefix to non-prefixed templates,
      // leaving any @scope/ and @version intact. (Comments previously
      // referred to 'cra-template' — a leftover from create-react-app.)
      const packageMatch = template.match(/^(@[^/]+\/)?([^@]+)?(@.+)?$/);
      const scope = packageMatch?.[1] || "";
      const templateName = packageMatch?.[2] || "";
      const version = packageMatch?.[3] || "";
      if (
        templateName === templateToInstall ||
        templateName.startsWith(`${templateToInstall}-`)
      ) {
        // Covers:
        // - docusaurus-template
        // - @SCOPE/docusaurus-template
        // - docusaurus-template-NAME
        // - @SCOPE/docusaurus-template-NAME
        templateToInstall = `${scope}${templateName}${version}`;
      } else if (version && !scope && !templateName) {
        // Covers using @SCOPE only: the regex captures "@SCOPE" in the
        // version group when no package name is present.
        templateToInstall = `${version}/${templateToInstall}`;
      } else {
        // Covers templates without the 'docusaurus-template' prefix:
        // - NAME
        // - @SCOPE/NAME
        templateToInstall = `${scope}${templateToInstall}-${templateName}${version}`;
      }
    }
  }
  return templateToInstall;
}
// Extract the package name (and, for local file templates, the version)
// from a tarball url, local path, or package spec.
function getPackageInfo(installPackage: string) {
  const fileMatch = installPackage.match(/^file:(.*)?$/);
  if (!fileMatch) {
    return { name: installPackage };
  }
  // Local template: read its package.json for the real name/version.
  const { name, version } = require(path.join(
    fileMatch[1],
    "package.json"
  ));
  return { name, version };
}
function createPackageJson(appPath: string, templateName: string) {
  // Merge the installed template's package metadata (from its optional
  // template.json) into the freshly scaffolded app's package.json, then
  // write the result back to disk.
  const appPkgPath = path.join(appPath, "package.json");
  const appPackage = require(appPkgPath);
  const templatePath = path.dirname(
    require.resolve(`${templateName}/package.json`, { paths: [appPath] })
  );
  const templateJsonPath = path.join(templatePath, "template.json");
  const templateJson: any = fs.existsSync(templateJsonPath)
    ? require(templateJsonPath)
    : {};
  const templatePackage = templateJson.package || {};
  // Keys we never copy from the template's package block.
  const ignoredKeys = new Set([
    "name",
    "version",
    "description",
    "keywords",
    "bugs",
    "license",
    "author",
    "contributors",
    "files",
    "browser",
    "bin",
    "man",
    "directories",
    "repository",
    "peerDependencies",
    "bundledDependencies",
    "optionalDependencies",
    "engineStrict",
    "os",
    "cpu",
    "preferGlobal",
    "private",
    "publishConfig",
  ]);
  // Keys merged key-by-key instead of replaced wholesale.
  const mergedKeys = ["dependencies"]; // "dependencies", "scripts"
  // Merge template dependencies over the app's existing ones.
  appPackage.dependencies = {
    ...(appPackage.dependencies || {}),
    ...(templatePackage.dependencies || {}),
  };
  // Every remaining template key replaces the app's entry outright.
  for (const key of Object.keys(templatePackage)) {
    if (!ignoredKeys.has(key) && !mergedKeys.includes(key)) {
      appPackage[key] = templatePackage[key];
    }
  }
  fs.writeFileSync(
    appPkgPath,
    JSON.stringify(appPackage, null, 2) + os.EOL
  );
}
export default async function init(
rootDir: string,
siteName?: string,
reqTemplate?: string,
cliOptions: Partial<{
useNpm: boolean;
skipInstall: boolean;
typescript: boolean;
}> = {}
): Promise<void> {
const useYarn = cliOptions.useNpm ? false : hasYarn();
let name = siteName;
// Prompt if siteName is not passed from CLI.
if (!name) {
const prompt = await prompts({
type: "text",
name: "name",
message: "What should we name this site?",
initial: "website",
});
name = prompt.name;
}
if (!name) {
logger.error("A website name is required.");
process.exit(1);
}
const dest = path.resolve(rootDir, name);
if (fs.existsSync(dest)) {
logger.error`Directory already exists at path=${dest}!`;
process.exit(1);
}
let template = reqTemplate ?? RecommendedTemplate;
logger.info("Creating new Docusaurus project...");
if (isValidGitRepoUrl(template)) {
logger.info`Cloning Git template path=${template}...`;
if (
shell.exec(`git clone --recursive ${template} ${dest}`, { silent: true })
.code !== 0
) {
logger.error`Cloning Git template name=${template} failed!`;
process.exit(1);
}
} else if (fs.existsSync(path.resolve(process.cwd(), template))) {
const templateDir = path.resolve(process.cwd(), template);
try {
await fs.copy(templateDir, dest);
} catch (err) {
logger.error`Copying local template path=${templateDir} failed!`;
throw err;
}
} else {
const appName = path.basename(dest);
fs.ensureDirSync(name);
const packageJson = {
name: appName,
version: "0.1.0",
private: true,
};
fs.writeFileSync(
path.join(dest, "package.json"),
JSON.stringify(packageJson, null, 2) + os.EOL
);
const originalDirectory = process.cwd();
const templatePackageName = getTemplateInstallPackage(
template,
originalDirectory
);
const templateInfo = getPackageInfo(templatePackageName);
const templateName = templateInfo.name;
shell.exec(
`cd "${name}" && ${
useYarn ? "yarn add" : "npm install --color always"
} ${templatePackageName}`,
{
env: {
...process.env,
// Force coloring the output, since the command is invoked by shelljs, which is not the interactive shell
...(supportsColor.stdout ? { FORCE_COLOR: "1" } : {}),
},
}
);
const templatePath = path.dirname(
require.resolve(`${templateName}/package.json`, { paths: [dest] })
);
createPackageJson(dest, templateName);
const templateDir = path.join(templatePath, "template");
if (fs.existsSync(templateDir)) {
fs.copySync(templateDir, dest);
} else {
logger.error("Could not locate supplied template.");
process.exit(1);
}
shell.exec(
`cd "${name}" && ${
useYarn ? "yarn remove" : "npm uninstall --color always"
} ${templateName}`,
{
env: {
...process.env,
// Force coloring the output, since the command is invoked by shelljs, which is not the interactive shell
...(supportsColor.stdout ? { FORCE_COLOR: "1" } : {}),
},
}
);
}
// Update package.json info.
try {
await updatePkg(path.join(dest, "package.json"), {
name: kebabCase(name),
version: "0.0.0",
private: true,
});
} catch (err) {
logger.error("Failed to update package.json.");
throw err;
}
// We need to rename the gitignore file to .gitignore
if (
!fs.pathExistsSync(path.join(dest, ".gitignore")) &&
fs.pathExistsSync(path.join(dest, "gitignore"))
) {
await fs.move(path.join(dest, "gitignore"), path.join(dest, ".gitignore"));
}
if (fs.pathExistsSync(path.join(dest, "gitignore"))) {
fs.removeSync(path.join(dest, "gitignore"));
}
const pkgManager = useYarn ? "yarn" : "npm";
// Display the most elegant way to cd.
const cdpath =
path.join(process.cwd(), name) === dest
? name
: path.relative(process.cwd(), name);
if (!cliOptions.skipInstall) {
logger.info`Installing dependencies with name=${pkgManager}...`;
if (
shell.exec(
`cd "${name}" && ${useYarn ? "yarn" : "npm install --color always"}`,
{
env: {
...process.env,
// Force coloring the output, since the command is invoked by shelljs, which is not the interactive shell
...(supportsColor.stdout ? { FORCE_COLOR: "1" } : {}),
},
}
).code !== 0
) {
logger.error("Dependency installation failed.");
logger.info`The site directory has already been created, and you can retry by typing:
code=${`cd ${cdpath}`}
code=${`${pkgManager} install`}`;
process.exit(0);
}
}
logger.success`Created path=${cdpath}.`;
logger.info`Inside that directory, you can run several commands:
code=${`${pkgManager} start`}
Starts the development server.
code=${`${pkgManager} ${useYarn ? "" : "run "}build`}
Bundles your website into static files for production.
code=${`${pkgManager} ${useYarn ? "" : "run "}serve`}
Serves the built website locally.
code=${`${pkgManager} deploy`}
Publishes the website to GitHub pages.
We recommend that you begin by typing:
code=${`cd ${cdpath}`}
code=${`${pkgManager} start`}
Happy building awesome websites!
`;
} | the_stack |
'use strict';
import * as React from 'react';
import { connect } from 'react-redux';
import { ORIGIN } from 'chord/music/common/origin';
import { IEpisode } from 'chord/sound/api/episode';
import { IPodcast } from 'chord/sound/api/podcast';
import { IRadio } from 'chord/sound/api/radio';
import { makeListKey } from 'chord/platform/utils/common/keys';
import { getUserProfileCount } from 'chord/workbench/api/utils/statistic';
import { ESize } from 'chord/music/common/size';
import { IStateGlobal } from 'chord/workbench/api/common/state/stateGlobal';
import EpisodeItemView from 'chord/workbench/parts/common/component/episodeItem';
import PodcastItemView from 'chord/workbench/parts/common/component/podcastItem';
import RadioItemView from 'chord/workbench/parts/common/component/radioItem';
import { NavMenu } from 'chord/workbench/parts/common/component/navMenu';
import PaginationView from 'chord/workbench/parts/common/component/pagination';
import { MenuButton } from 'chord/workbench/parts/common/component/buttons';
import { showRadioMenu } from 'chord/workbench/parts/menu/browser/action/menu';
import { soundApi } from 'chord/sound/core/api';
import { DEFAULT_ORDER, ORDERS, OFFSETS } from 'chord/sound/common/params';
// Tabs shown in the radio page header; `id` matches the state `view` values.
const NAV_MENU_ITEMS = [
    { id: 'overview', name: 'OVERVIEW' },
    { id: 'episodes', name: 'EPISODES' },
    { id: 'podcasts', name: 'PODCASTS' },
    { id: 'favoritePodcasts', name: 'FAVORITE PODCASTS' },
    { id: 'followings', name: 'FOLLOWINGS' },
];
// Per-origin factory producing a fresh pagination entry for one list type
// ('episodes' | 'podcasts' | 'favoritePodcasts' | 'followings').
const INIT_ENTRY = {
    [ORIGIN.ximalaya]: (type) => ({
        ...OFFSETS[ORIGIN.ximalaya], // default offset/limit for this origin
        total: null, // unknown until the corresponding count query returns
        order: DEFAULT_ORDER[ORIGIN.ximalaya].radio[type],
        keyword: '',
        items: [],
    }),
};
// Initial component state, keyed by music origin.
const INIT_STATES = {
    [ORIGIN.ximalaya]: {
        init: false, // flipped to true once init() has loaded all entries
        view: 'overview',
        episodesEntry: INIT_ENTRY[ORIGIN.ximalaya]('episodes'),
        podcastsEntry: INIT_ENTRY[ORIGIN.ximalaya]('podcasts'),
        favoritePodcastsEntry: INIT_ENTRY[ORIGIN.ximalaya]('favoritePodcasts'),
        followingsEntry: INIT_ENTRY[ORIGIN.ximalaya]('followings'),
    }
};
// Props supplied by redux (see mapStateToProps/mapDispatchToProps below).
interface IRadioViewProps {
    // 'overview' | 'episodes' | 'podcasts' | 'favoritePodcasts' | 'followings'
    view: string;
    // The radio (station/user) whose page is being displayed.
    radio: IRadio;
    // Opens the context menu for this radio (dispatched action).
    showRadioMenu: (e: React.MouseEvent<HTMLDivElement>, radio: IRadio) => void;
}
// One paginated, orderable, keyword-filterable list of items.
interface IEntry<T> {
    // Zero-based page offset.
    offset: number;
    // Page size.
    limit: number;
    // Total item count; NOTE(review): initialized to null until fetched.
    total: number;
    // Current sort-order id.
    order: string;
    // Current search keyword ('' for none).
    keyword: string;
    // Items of the current page.
    items: Array<T>;
}
interface IRadioViewState {
    // True once init() has loaded counts and first pages for every entry.
    init: boolean;
    // 'overview' | 'episodes' | 'podcasts' | 'favoritePodcasts' | 'followings'
    view: string;
    // Tracks which radio the state belongs to, for prop-change resets.
    radioId: string;
    episodesEntry: IEntry<IEpisode>;
    podcastsEntry: IEntry<IPodcast>;
    favoritePodcastsEntry: IEntry<IPodcast>;
    followingsEntry: IEntry<IRadio>;
}
class RadioView extends React.Component<IRadioViewProps, IRadioViewState> {
constructor(props: IRadioViewProps) {
super(props);
let { origin, radioId } = this.props.radio;
this.state = { ...INIT_STATES[origin], radioId };
this.init = this.init.bind(this);
this.changeRadioNavMenuView = this.changeRadioNavMenuView.bind(this);
this.getEpisodes = this.getEpisodes.bind(this);
this.getPodcasts = this.getPodcasts.bind(this);
this.getFavoritePodcasts = this.getFavoritePodcasts.bind(this);
this.getRadioHeader = this.getRadioHeader.bind(this);
this.getEpisodeItemsView = this.getEpisodeItemsView.bind(this);
this.getPodcastItemsView = this.getPodcastItemsView.bind(this);
this.getFavoritePodcastItemsView = this.getFavoritePodcastItemsView.bind(this);
this.getFollowingItemsView = this.getFollowingItemsView.bind(this);
this._itemsView = this._itemsView.bind(this);
this.changePage = this.changePage.bind(this);
this.getPaginationView = this.getPaginationView.bind(this);
this.getOrderView = this.getOrderView.bind(this);
this.overviewView = this.overviewView.bind(this);
this.episodesView = this.episodesView.bind(this);
this.podcastsView = this.podcastsView.bind(this);
this.favoritePodcastsView = this.favoritePodcastsView.bind(this);
this.followingsView = this.followingsView.bind(this);
}
init() {
let { radioId } = this.props.radio;
let { episodesEntry, podcastsEntry, favoritePodcastsEntry, followingsEntry } = this.state;
Promise.all([
soundApi.radioEpisodeCount(radioId, { keyword: '' }),
soundApi.radioPodcastCount(radioId, { keyword: '' }),
soundApi.radioFavoritePodcastCount(radioId, { keyword: '' }),
soundApi.radioFollowingCount(radioId),
this.getEpisodes(0, episodesEntry.limit, episodesEntry.order, ''),
this.getPodcasts(0, podcastsEntry.limit, podcastsEntry.order, ''),
this.getFavoritePodcasts(0, favoritePodcastsEntry.limit, favoritePodcastsEntry.order, ''),
this.getFollowings(0, followingsEntry.limit, followingsEntry.order, ''),
]).then(
([episodeCount, podcastCount, favoritePodcastCount, followingCount,
episodes, podcasts, favoritePodcasts, followings]) => {
this.setState((prevState) => {
let episodesEntry = { ...prevState.episodesEntry, total: episodeCount, items: episodes };
let podcastsEntry = { ...prevState.podcastsEntry, total: podcastCount, items: podcasts };
let favoritePodcastsEntry = { ...prevState.favoritePodcastsEntry, total: favoritePodcastCount, items: favoritePodcasts };
let followingsEntry = { ...prevState.followingsEntry, total: followingCount, items: followings };
return {
...prevState,
init: true,
episodesEntry,
podcastsEntry,
favoritePodcastsEntry,
followingsEntry,
};
});
}
);
}
componentDidMount() {
this.init();
// Scroll to document top
window.scroll(0, 0);
}
static getDerivedStateFromProps(nextProps: IRadioViewProps, prevState: IRadioViewState) {
if (nextProps.radio.radioId != prevState.radioId) {
let { origin, radioId } = nextProps.radio;
let state = { ...INIT_STATES[origin], radioId };
return state;
}
return null;
}
getSnapshotBeforeUpdate() {
if (!this.state.init) {
this.init();
}
return null;
}
componentDidUpdate() { }
changeRadioNavMenuView(view: string) {
let { episodesEntry, podcastsEntry, favoritePodcastsEntry, followingsEntry } = this.state;
switch (view) {
case 'episodes':
this.changePage('episodes', 1, episodesEntry.limit, episodesEntry.order, '');
break;
case 'podcasts':
this.changePage('podcasts', 1, podcastsEntry.limit, podcastsEntry.order, '');
break;
case 'favoritePodcasts':
this.changePage('favoritePodcasts', 1, favoritePodcastsEntry.limit, favoritePodcastsEntry.order, '');
break;
case 'followings':
this.changePage('followings', 1, followingsEntry.limit, followingsEntry.order, '');
break;
default:
break;
}
this.setState({ view });
}
async getEpisodes(offset: number, limit: number, order: string, keyword: string) {
let { radioId } = this.props.radio;
return soundApi.radioEpisodes(radioId, { keyword, order }, offset, limit);
}
async getPodcasts(offset: number, limit: number, order: string, keyword: string) {
let { radioId } = this.props.radio;
return soundApi.radioPodcasts(radioId, { keyword, order }, offset, limit);
}
async getFavoritePodcasts(offset: number, limit: number, order: string, keyword: string) {
let { radioId } = this.props.radio;
return soundApi.radioFavoritePodcasts(radioId, { keyword, order }, offset, limit);
}
async getFollowings(offset: number, limit: number, order: string, keyword: string) {
let { radioId } = this.props.radio;
return soundApi.radioFollowings(radioId, offset, limit);
}
/**
* offset begins from 1
*/
changePage(type: string, offset: number, limit: number, order: string, keyword: string) {
switch (type) {
case 'episodes':
this.getEpisodes(offset - 1, limit, order, keyword)
.then(episodes => {
this.setState((prevState) => {
let episodesEntry = { ...prevState.episodesEntry, items: episodes, offset: offset - 1, limit, order, keyword };
return { episodesEntry };
});
});
break;
case 'podcasts':
this.getPodcasts(offset - 1, limit, order, keyword)
.then(podcasts => {
this.setState((prevState) => {
let podcastsEntry = { ...prevState.podcastsEntry, items: podcasts, offset: offset - 1, limit, order, keyword };
return { podcastsEntry };
});
});
break;
case 'favoritePodcasts':
this.getFavoritePodcasts(offset - 1, limit, order, keyword)
.then(podcasts => {
this.setState((prevState) => {
let favoritePodcastsEntry = { ...prevState.favoritePodcastsEntry, items: podcasts, offset: offset - 1, limit, order, keyword };
return { favoritePodcastsEntry };
});
});
break;
case 'followings':
this.getFollowings(offset - 1, limit, order, keyword)
.then(radios => {
this.setState((prevState) => {
let followingsEntry = { ...prevState.followingsEntry, items: radios, offset: offset - 1, limit, order, keyword };
return { followingsEntry };
});
});
break;
default:
break;
}
}
getPaginationView<T>(type: string, entry: IEntry<T>) {
let totalPages = Math.ceil(entry.total / entry.limit);
return <PaginationView
page={entry.offset + 1}
total={totalPages}
size={11}
handleClick={(page) => this.changePage(type, page, entry.limit, entry.order, entry.keyword)} />;
}
getOrderView<T>(type: string, entry: IEntry<T>, orders: any, namespace: string) {
return <NavMenu
namespace={namespace}
thisView={entry.order}
views={orders.map(({ id, name }) => ({ name, value: id }))}
handleClick={(id) => this.changePage(type, 1, entry.limit, id, entry.keyword)} />;
}
getEpisodeItemsView() {
let entry = this.state.episodesEntry;
return entry.items.map(
(episode, index) =>
<EpisodeItemView
key={makeListKey(index, 'radio', 'episode')}
episode={episode}
active={false}
short={false}
thumb={true}
handlePlay={null} />
);
}
getPodcastItemsView() {
let entry = this.state.podcastsEntry;
let podcastsView = entry.items.map(
(podcast, index) => (
<div className='col-xs-6 col-sm-4 col-md-3 col-lg-2 col-xl-2'
key={makeListKey(index, 'radio', 'podcast')}>
<PodcastItemView podcast={podcast} />
</div>
)
);
return podcastsView;
}
getFavoritePodcastItemsView() {
let entry = this.state.favoritePodcastsEntry;
let favoritePodcastsView = entry.items.map(
(podcast, index) => (
<div className='col-xs-6 col-sm-4 col-md-3 col-lg-2 col-xl-2'
key={makeListKey(index, 'radio', 'favorite', 'podcast')}>
<PodcastItemView podcast={podcast} />
</div>
)
);
return favoritePodcastsView;
}
getFollowingItemsView() {
let entry = this.state.followingsEntry;
let followingsView = entry.items.map(
(radio, index) => (
<div className='col-xs-6 col-sm-4 col-md-3 col-lg-2 col-xl-2'
key={makeListKey(index, 'radio', 'followings')}>
<RadioItemView radio={radio} />
</div>
)
);
return followingsView;
}
getRadioHeader() {
// Maybe need to set background image
let radio = this.props.radio;
let cover = radio.radioCoverPath || soundApi.resizeImageUrl(radio.origin, radio.radioCoverUrl, ESize.Middle);
let userProfileCount = getUserProfileCount(this.props.radio);
let navMenuView = <NavMenu
namespace={'podcast-orders'}
thisView={this.state.view}
views={NAV_MENU_ITEMS.map(({ id, name }) => ({ name, value: id }))}
handleClick={(id) => this.changeRadioNavMenuView(id)} />;
return (
<header className='user-header user-info'>
<figure className="avatar user-avatar"
style={{ backgroundImage: `url("${cover}")`, width: '200px', height: '200px', margin: '10px auto' }}></figure>
<h1 className='user-name'>{radio.radioName}</h1>
{/* following, follower and episode count */}
<h1 className='small' style={{ opacity: 0.4 }}>{userProfileCount}</h1>
<div className='header-buttons'>
<MenuButton click={(e) => this.props.showRadioMenu(e, radio)} />
</div>
{navMenuView}
</header>
);
}
overviewView() {
let episodeItemsView = this.getEpisodeItemsView();
let podcastItemsView = this.getPodcastItemsView();
let favoritePodcastItemsView = this.getFavoritePodcastItemsView();
let followingItemsView = this.getFollowingItemsView();
let radio = this.props.radio;
let makeItemsView = (view, title) => (
<section className='artist-podcasts'>
<div className='contentSpacing'>
<h1 className='search-result-title' style={{ textAlign: 'center' }}>{title}</h1>
<div className='container-fluid container-fluid--noSpaceAround'>
<div className='align-row-wrap grid--limit row'>
{view}
</div>
</div>
</div>
</section>
);
return (
<div>
{/* Episodes */}
<section className='container-fluid'>
<div className='row'>
<div className='contentSpacing'>
<div className='col-xs-12 col-lg-9 col-xl-8 description'
dangerouslySetInnerHTML={{ __html: radio.description }}>
</div>
<section className='col-sm-12 col-md-10 col-md-push-1 artist-toptracks'>
<h1 className='search-result-title' style={{ textAlign: 'center' }}>Songs</h1>
<section className='tracklist-container full-width'>
<ol>{episodeItemsView}</ol>
</section>
</section>
</div>
</div>
</section>
{/* Podcasts */}
{makeItemsView(podcastItemsView, 'Podcasts')}
{/* Favorite Podcasts */}
{makeItemsView(favoritePodcastItemsView, 'Favorite Podcasts')}
{/* Followings */}
{makeItemsView(followingItemsView, 'Followings')}
</div>
);
}
episodesView() {
let entry = this.state.episodesEntry;
let episodeItemsView = this.getEpisodeItemsView();
let paginationView = this.getPaginationView('episodes', entry);
let orders = ORDERS[this.props.radio.origin].radio.episodes;
let orderView = this.getOrderView('episodes', entry, orders, 'radio-episodes-orders');
return (
<section className='container-fluid'>
<div className='row'>
<div className='contentSpacing'>
<section className='col-sm-12 col-md-10 col-md-push-1 artist-toptracks'>
{ /* No Show */}
<h1 className='search-result-title' style={{ textAlign: 'center', display: 'none' }}>Episodes</h1>
{orderView}
<section className='tracklist-container full-width'>
<ol>{episodeItemsView}</ol>
</section>
</section>
</div>
</div>
{paginationView}
</section>
);
}
_itemsView(itemsView, orderView, paginationView, title) {
return (
<section className='artist-podcasts'>
<div className='contentSpacing'>
{ /* No Show */}
<h1 className='search-result-title' style={{ textAlign: 'center', display: 'none' }}>{title}</h1>
{orderView}
<div className='container-fluid container-fluid--noSpaceAround'>
<div className='align-row-wrap grid--limit row'>
{itemsView}
</div>
</div>
{paginationView}
</div>
</section>
);
}
podcastsView() {
let entry = this.state.podcastsEntry;
let itemsView = this.getPodcastItemsView();
let paginationView = this.getPaginationView('podcasts', entry);
let orders = ORDERS[this.props.radio.origin].radio.podcasts;
let orderView = this.getOrderView('podcasts', entry, orders, 'radio-podcasts-orders');
return this._itemsView(itemsView, orderView, paginationView, 'ALBUMS');
}
favoritePodcastsView() {
let entry = this.state.favoritePodcastsEntry;
let itemsView = this.getFavoritePodcastItemsView();
let paginationView = this.getPaginationView('favoritePodcasts', entry);
let orders = ORDERS[this.props.radio.origin].radio.favoritePodcasts;
let orderView = this.getOrderView('favoritePodcasts', entry, orders, 'radio-favorite-podcasts-orders');
return this._itemsView(itemsView, orderView, paginationView, 'FAVORITE PLAYLISTS');
}
followingsView() {
let entry = this.state.favoritePodcastsEntry;
let itemsView = this.getFollowingItemsView();
let paginationView = this.getPaginationView('followings', entry);
return this._itemsView(itemsView, null, paginationView, 'FOLLOWINGS');
}
render() {
if (!this.state || !this.state.init) return null;
let radioHeaderView = this.getRadioHeader();
let view = this.state.view;
let contentView;
if (view == 'overview') {
contentView = this.overviewView();
} else if (view == 'episodes') {
contentView = this.episodesView();
} else if (view == 'podcasts') {
contentView = this.podcastsView();
} else if (view == 'favoritePodcasts') {
contentView = this.favoritePodcastsView();
} else if (view == 'followings') {
contentView = this.followingsView();
} else {
return null;
}
return (
<div className='hw-accelerate'>
{radioHeaderView}
{contentView}
</div>
);
}
}
function mapStateToProps(state: IStateGlobal) {
    // The view is fed directly from the radio-view slice of the store.
    const { radioView } = state.mainView;
    return radioView;
}
function mapDispatchToProps(dispatch) {
    // Expose the context-menu action as a prop.
    const openMenu = (e, radio) => dispatch(showRadioMenu(e, radio));
    return { showRadioMenu: openMenu };
}
// Wire the radio view into redux (state slice + menu dispatcher).
export default connect(mapStateToProps, mapDispatchToProps)(RadioView);
module android.graphics.drawable{
import Canvas = android.graphics.Canvas;
import Rect = android.graphics.Rect;
import PixelFormat = android.graphics.PixelFormat;
import WeakReference = java.lang.ref.WeakReference;
import Runnable = java.lang.Runnable;
import StateSet = android.util.StateSet;
import Log = android.util.Log;
import SparseArray = android.util.SparseArray;
import SystemClock = android.os.SystemClock;
    /**
     * A helper class that contains several {@link Drawable}s and selects which one to use.
     *
     * You can subclass it to create your own DrawableContainers or directly use one of its child classes.
     */
export class DrawableContainer extends Drawable implements Drawable.Callback {
        private static DEBUG = Log.DBG_DrawableContainer;
        private static TAG = "DrawableContainer";
        /**
         * To be proper, we should have a getter for dither (and alpha, etc.)
         * so that proxy classes like this can save/restore their delegates'
         * values, but we don't have getters. Since we do have setters
         * (e.g. setDither), which this proxy forwards on, we have to have some
         * default/initial setting.
         *
         * The initial setting for dither is now true, since it almost always seems
         * to improve the quality at negligible cost.
         */
        static DEFAULT_DITHER = true;
        // Shared constant state holding the child drawables and cached metrics.
        private mDrawableContainerState:DrawableContainer.DrawableContainerState;
        // The child currently being shown.
        private mCurrDrawable:Drawable;
        // Container-level alpha applied to the active child (0..255).
        private mAlpha = 0xFF;
        // Index of the selected child; -1 means none selected yet.
        private mCurIndex = -1;
        mMutated=false;
        // Animations.
        private mAnimationRunnable:Runnable;
        // End timestamps (uptime ms) of the enter/exit cross-fades; 0 = idle.
        private mEnterAnimationEnd=0;
        private mExitAnimationEnd=0;
        // The previous child, still drawn while fading out.
        private mLastDrawable:Drawable;
// overrides from Drawable
draw(canvas:Canvas) {
if (this.mCurrDrawable != null) {
this.mCurrDrawable.draw(canvas);
}
if (this.mLastDrawable != null) {
this.mLastDrawable.draw(canvas);
}
}
private needsMirroring():boolean{
return false && this.isAutoMirrored();
}
getPadding(padding:android.graphics.Rect):boolean {
const r = this.mDrawableContainerState.getConstantPadding();
let result;
if (r != null) {
padding.set(r);
result = (r.left | r.top | r.bottom | r.right) != 0;
} else {
if (this.mCurrDrawable != null) {
result = this.mCurrDrawable.getPadding(padding);
} else {
result = super.getPadding(padding);
}
}
if (this.needsMirroring()) {
const left = padding.left;
const right = padding.right;
padding.left = right;
padding.right = left;
}
return result;
}
setAlpha(alpha:number) {
if (this.mAlpha != alpha) {
this.mAlpha = alpha;
if (this.mCurrDrawable != null) {
if (this.mEnterAnimationEnd == 0) {
this.mCurrDrawable.mutate().setAlpha(alpha);
} else {
this.animate(false);
}
}
}
}
        // Container-level alpha (0..255) applied to the active child.
        getAlpha():number {
            return this.mAlpha;
        }
setDither(dither:boolean) {
if (this.mDrawableContainerState.mDither != dither) {
this.mDrawableContainerState.mDither = dither;
if (this.mCurrDrawable != null) {
this.mCurrDrawable.mutate().setDither(this.mDrawableContainerState.mDither);
}
}
}
        /**
         * Change the global fade duration when a new drawable is entering
         * the scene.
         * @param ms The amount of time to fade in milliseconds.
         */
        setEnterFadeDuration(ms:number) {
            this.mDrawableContainerState.mEnterFadeDuration = ms;
        }
        /**
         * Change the global fade duration when a new drawable is leaving
         * the scene.
         * @param ms The amount of time to fade in milliseconds.
         */
        setExitFadeDuration(ms:number) {
            this.mDrawableContainerState.mExitFadeDuration = ms;
        }
protected onBoundsChange(bounds:android.graphics.Rect):void {
if (this.mLastDrawable != null) {
this.mLastDrawable.setBounds(bounds);
}
if (this.mCurrDrawable != null) {
this.mCurrDrawable.setBounds(bounds);
}
}
        // True when any child reacts to state changes (delegated to shared state).
        isStateful():boolean {
            return this.mDrawableContainerState.isStateful();
        }
setAutoMirrored(mirrored:boolean) {
this.mDrawableContainerState.mAutoMirrored = mirrored;
if (this.mCurrDrawable != null) {
this.mCurrDrawable.mutate().setAutoMirrored(this.mDrawableContainerState.mAutoMirrored);
}
}
        // Whether children should be mirrored in RTL layouts (shared state flag).
        isAutoMirrored():boolean {
            return this.mDrawableContainerState.mAutoMirrored;
        }
jumpToCurrentState() {
let changed = false;
if (this.mLastDrawable != null) {
this.mLastDrawable.jumpToCurrentState();
this.mLastDrawable = null;
changed = true;
}
if (this.mCurrDrawable != null) {
this.mCurrDrawable.jumpToCurrentState();
this.mCurrDrawable.mutate().setAlpha(this.mAlpha);
}
if (this.mExitAnimationEnd != 0) {
this.mExitAnimationEnd = 0;
changed = true;
}
if (this.mEnterAnimationEnd != 0) {
this.mEnterAnimationEnd = 0;
changed = true;
}
if (changed) {
this.invalidateSelf();
}
}
protected onStateChange(state:Array<number>):boolean {
if (this.mLastDrawable != null) {
return this.mLastDrawable.setState(state);
}
if (this.mCurrDrawable != null) {
return this.mCurrDrawable.setState(state);
}
return false;
}
protected onLevelChange(level:number):boolean {
if (this.mLastDrawable != null) {
return this.mLastDrawable.setLevel(level);
}
if (this.mCurrDrawable != null) {
return this.mCurrDrawable.setLevel(level);
}
return false;
}
getIntrinsicWidth():number {
if (this.mDrawableContainerState.isConstantSize()) {
return this.mDrawableContainerState.getConstantWidth();
}
return this.mCurrDrawable != null ? this.mCurrDrawable.getIntrinsicWidth() : -1;
}
getIntrinsicHeight():number {
if (this.mDrawableContainerState.isConstantSize()) {
return this.mDrawableContainerState.getConstantHeight();
}
return this.mCurrDrawable != null ? this.mCurrDrawable.getIntrinsicHeight() : -1;
}
getMinimumWidth():number {
if (this.mDrawableContainerState.isConstantSize()) {
return this.mDrawableContainerState.getConstantMinimumWidth();
}
return this.mCurrDrawable != null ? this.mCurrDrawable.getMinimumWidth() : 0;
}
getMinimumHeight():number {
if (this.mDrawableContainerState.isConstantSize()) {
return this.mDrawableContainerState.getConstantMinimumHeight();
}
return this.mCurrDrawable != null ? this.mCurrDrawable.getMinimumHeight() : 0;
}
drawableSizeChange(who:android.graphics.drawable.Drawable):void {
let callback = this.getCallback();
if (who == this.mCurrDrawable && callback != null && callback.drawableSizeChange) {
callback.drawableSizeChange(this);
}
}
invalidateDrawable(who:android.graphics.drawable.Drawable):void {
if (who == this.mCurrDrawable && this.getCallback() != null) {
this.getCallback().invalidateDrawable(this);
}
}
scheduleDrawable(who:android.graphics.drawable.Drawable, what:java.lang.Runnable, when:number):void {
if (who == this.mCurrDrawable && this.getCallback() != null) {
this.getCallback().scheduleDrawable(this, what, when);
}
}
unscheduleDrawable(who:android.graphics.drawable.Drawable, what:java.lang.Runnable):void {
if (who == this.mCurrDrawable && this.getCallback() != null) {
this.getCallback().unscheduleDrawable(this, what);
}
}
setVisible(visible:boolean, restart:boolean):boolean {
let changed = super.setVisible(visible, restart);
if (this.mLastDrawable != null) {
this.mLastDrawable.setVisible(visible, restart);
}
if (this.mCurrDrawable != null) {
this.mCurrDrawable.setVisible(visible, restart);
}
return changed;
}
getOpacity():number {
return this.mCurrDrawable == null || !this.mCurrDrawable.isVisible() ? PixelFormat.TRANSPARENT :
this.mDrawableContainerState.getOpacity();
}
    /**
     * Switches the displayed child to the one at `idx`.
     *
     * Handles the optional cross-fade: the outgoing child becomes
     * mLastDrawable with an exit deadline, the incoming child gets an enter
     * deadline (or its final alpha immediately), and an animation runnable is
     * (re)armed when either deadline is pending.
     *
     * Returns false when `idx` is already the current index, true otherwise.
     * An out-of-range `idx` clears the current drawable (index becomes -1).
     */
    selectDrawable(idx:number):boolean {
        if (idx == this.mCurIndex) {
            // Already showing this child; nothing to do.
            return false;
        }
        const now = SystemClock.uptimeMillis();
        if (DrawableContainer.DEBUG) android.util.Log.i(DrawableContainer.TAG, toString() + " from " + this.mCurIndex + " to " + idx
            + ": exit=" + this.mDrawableContainerState.mExitFadeDuration
            + " enter=" + this.mDrawableContainerState.mEnterFadeDuration);
        if (this.mDrawableContainerState.mExitFadeDuration > 0) {
            // Exit fade enabled: any previously exiting child is hidden at once,
            // and the outgoing current child starts fading out.
            if (this.mLastDrawable != null) {
                this.mLastDrawable.setVisible(false, false);
            }
            if (this.mCurrDrawable != null) {
                this.mLastDrawable = this.mCurrDrawable;
                this.mExitAnimationEnd = now + this.mDrawableContainerState.mExitFadeDuration;
            } else {
                this.mLastDrawable = null;
                this.mExitAnimationEnd = 0;
            }
        } else if (this.mCurrDrawable != null) {
            // No exit fade: the outgoing child is simply hidden.
            this.mCurrDrawable.setVisible(false, false);
        }
        if (idx >= 0 && idx < this.mDrawableContainerState.mNumChildren) {
            // getChild materializes a deferred (future) drawable on demand.
            const d = this.mDrawableContainerState.getChild(idx);
            this.mCurrDrawable = d;
            this.mCurIndex = idx;
            if (d != null) {
                //this.mInsets = d.getOpticalInsets();
                d.mutate();
                if (this.mDrawableContainerState.mEnterFadeDuration > 0) {
                    // Alpha will be ramped up by animate(); just record the deadline.
                    this.mEnterAnimationEnd = now + this.mDrawableContainerState.mEnterFadeDuration;
                } else {
                    d.setAlpha(this.mAlpha);
                }
                // Copy all container-level display properties onto the new child.
                d.setVisible(this.isVisible(), true);
                d.setDither(this.mDrawableContainerState.mDither);
                //d.setColorFilter(this.mColorFilter);
                d.setState(this.getState());
                d.setLevel(this.getLevel());
                d.setBounds(this.getBounds());
                //d.setLayoutDirection(this.getLayoutDirection());
                d.setAutoMirrored(this.mDrawableContainerState.mAutoMirrored);
            } else {
                //this.mInsets = Insets.NONE;
            }
        } else {
            // Out-of-range index clears the selection entirely.
            this.mCurrDrawable = null;
            //this.mInsets = Insets.NONE;
            this.mCurIndex = -1;
        }
        if (this.mEnterAnimationEnd != 0 || this.mExitAnimationEnd != 0) {
            if (this.mAnimationRunnable == null) {
                // Lazily create the frame callback used to drive the fade.
                let t = this;
                this.mAnimationRunnable = {
                    run() {
                        t.animate(true);
                        t.invalidateSelf();
                    }
                };
            } else {
                // Cancel any pending frame before recomputing from scratch.
                this.unscheduleSelf(this.mAnimationRunnable);
            }
            // Compute first frame and schedule next animation.
            this.animate(true);
        }
        this.invalidateSelf();
        return true;
    }
    /**
     * Advances the enter/exit cross-fade by one frame.
     *
     * Interpolates the current child's alpha up toward mAlpha until
     * mEnterAnimationEnd, and the exiting child's alpha down toward 0 until
     * mExitAnimationEnd (at which point the exiting child is dropped).
     * When `schedule` is true and either fade is still running, re-schedules
     * itself at roughly 60 fps via mAnimationRunnable.
     */
    animate(schedule:boolean) {
        const now = SystemClock.uptimeMillis();
        let animating = false;
        if (this.mCurrDrawable != null) {
            if (this.mEnterAnimationEnd != 0) {
                if (this.mEnterAnimationEnd <= now) {
                    // Enter fade finished: snap to the final alpha.
                    this.mCurrDrawable.mutate().setAlpha(this.mAlpha);
                    this.mEnterAnimationEnd = 0;
                } else {
                    // animAlpha counts DOWN from 255 as the deadline approaches,
                    // so (255 - animAlpha) ramps the child's alpha UP.
                    let animAlpha = ((this.mEnterAnimationEnd-now)*255)
                        / this.mDrawableContainerState.mEnterFadeDuration;
                    if (DrawableContainer.DEBUG) android.util.Log.i(DrawableContainer.TAG, toString() + " cur alpha " + animAlpha);
                    this.mCurrDrawable.mutate().setAlpha(((255-animAlpha)*this.mAlpha)/255);
                    animating = true;
                }
            }
        } else {
            this.mEnterAnimationEnd = 0;
        }
        if (this.mLastDrawable != null) {
            if (this.mExitAnimationEnd != 0) {
                if (this.mExitAnimationEnd <= now) {
                    // Exit fade finished: hide and drop the outgoing child.
                    this.mLastDrawable.setVisible(false, false);
                    this.mLastDrawable = null;
                    this.mExitAnimationEnd = 0;
                } else {
                    // Remaining fraction of the exit window ramps alpha DOWN.
                    let animAlpha = ((this.mExitAnimationEnd-now)*255)
                        / this.mDrawableContainerState.mExitFadeDuration;
                    if (DrawableContainer.DEBUG) android.util.Log.i(DrawableContainer.TAG, toString() + " last alpha " + animAlpha);
                    this.mLastDrawable.mutate().setAlpha((animAlpha*this.mAlpha)/255);
                    animating = true;
                }
            }
        } else {
            this.mExitAnimationEnd = 0;
        }
        if (schedule && animating) {
            // ~16ms frame interval (60 fps).
            this.scheduleSelf(this.mAnimationRunnable, now + 1000/60);
        }
    }
    /** Returns the currently displayed child drawable (may be null when none is selected). */
    getCurrent():Drawable {
        return this.mCurrDrawable;
    }
getConstantState():Drawable.ConstantState {
if (this.mDrawableContainerState.canConstantState()) {
//this.mDrawableContainerState.mChangingConfigurations = this.getChangingConfigurations();
return this.mDrawableContainerState;
}
return null;
}
mutate():Drawable {
if (!this.mMutated && super.mutate() == this) {
this.mDrawableContainerState.mutate();
this.mMutated = true;
}
return this;
}
    /** Installs the shared container state; called by subclasses during construction. */
    setConstantState(state:DrawableContainer.DrawableContainerState) {
        this.mDrawableContainerState = state;
    }
}
export module DrawableContainer{
/**
* A ConstantState that can contain several {@link Drawable}s.
*
* This class was made public to enable testing, and its visibility may change in a future
* release.
*/
    /**
     * Shared constant state for a DrawableContainer: owns the child drawables
     * and lazily computed, cached aggregates over them (padding, constant
     * sizes, opacity, statefulness, cloneability).
     *
     * Children cloned from another state are held as ConstantStateFuture
     * entries in mDrawableFutures and only materialized on first access.
     * Mutating the child list (addChild) invalidates the cached aggregates.
     */
    export class DrawableContainerState implements Drawable.ConstantState{
        // Container that owns this state; installed as each child's callback.
        mOwner:DrawableContainer;
        // Deferred clones of children, keyed by child index; null once all are materialized.
        private mDrawableFutures:SparseArray<ConstantStateFuture>;
        mDrawables:Array<Drawable>;
        // Child count is derived from the backing array rather than tracked separately.
        get mNumChildren():number{
            return this.mDrawables.length;
        }
        // When true, getConstantPadding() is disabled and per-frame padding is used.
        mVariablePadding=false;
        // Caches and their validity flags; cleared by addChild().
        mPaddingChecked=false;
        mConstantPadding:Rect;
        mConstantSize=false;
        mComputedConstantSize=false;
        mConstantWidth=0;
        mConstantHeight=0;
        mConstantMinimumWidth=0;
        mConstantMinimumHeight=0;
        mCheckedOpacity=false;
        mOpacity=0;
        mCheckedStateful=false;
        mStateful=false;
        mCheckedConstantState=false;
        mCanConstantState=false;
        mDither = DrawableContainer.DEFAULT_DITHER;
        mMutated=false;
        mEnterFadeDuration=0;
        mExitFadeDuration=0;
        mAutoMirrored=false;
        /**
         * Clone-or-create constructor. When `orig` is given, copies its flags,
         * forces its cached aggregates to be computed and copies them, then
         * defers cloning of the actual children via ConstantStateFuture.
         */
        constructor(orig:DrawableContainerState, owner:DrawableContainer){
            this.mOwner = owner;
            //mRes = res;
            if (orig != null) {
                //mChangingConfigurations = orig.mChangingConfigurations;
                //mChildrenChangingConfigurations = orig.mChildrenChangingConfigurations;
                // A state reached via cloning is by definition constant-state capable.
                this.mCheckedConstantState = true;
                this.mCanConstantState = true;
                this.mVariablePadding = orig.mVariablePadding;
                this.mConstantSize = orig.mConstantSize;
                this.mDither = orig.mDither;
                this.mMutated = orig.mMutated;
                //this.mLayoutDirection = orig.mLayoutDirection;
                this.mEnterFadeDuration = orig.mEnterFadeDuration;
                this.mExitFadeDuration = orig.mExitFadeDuration;
                this.mAutoMirrored = orig.mAutoMirrored;
                // Cloning the following values may require creating futures.
                this.mConstantPadding = orig.getConstantPadding();
                this.mPaddingChecked = true;
                this.mConstantWidth = orig.getConstantWidth();
                this.mConstantHeight = orig.getConstantHeight();
                this.mConstantMinimumWidth = orig.getConstantMinimumWidth();
                this.mConstantMinimumHeight = orig.getConstantMinimumHeight();
                this.mComputedConstantSize = true;
                this.mOpacity = orig.getOpacity();
                this.mCheckedOpacity = true;
                this.mStateful = orig.isStateful();
                this.mCheckedStateful = true;
                // Postpone cloning children and futures until we're absolutely
                // sure that we're done computing values for the original state.
                const origDr = orig.mDrawables;
                this.mDrawables = new Array<Drawable>(0);
                //this.mNumChildren = orig.mNumChildren;
                const origDf = orig.mDrawableFutures;
                if (origDf != null) {
                    this.mDrawableFutures = origDf.clone();
                } else {
                    this.mDrawableFutures = new SparseArray<ConstantStateFuture>(this.mNumChildren);
                }
                // NOTE(review): mDrawables was just reset to length 0, so mNumChildren
                // is 0 here and this loop copies the futures from origDr's indices via
                // N = this.mNumChildren — confirm against the upstream Android source
                // whether N should be orig.mNumChildren instead.
                const N = this.mNumChildren;
                for (let i = 0; i < N; i++) {
                    if (origDr[i] != null) {
                        this.mDrawableFutures.put(i, new ConstantStateFuture(origDr[i]));
                    }
                }
            } else {
                this.mDrawables = new Array<Drawable>(0);
                //this.mNumChildren = 0;
            }
        }
        /**
         * Appends a child drawable, wiring it to the owner as callback and
         * invalidating every cached aggregate. Returns the child's index.
         */
        addChild(dr:Drawable):number {
            const pos = this.mNumChildren;
            //if (pos >= this.mDrawables.length) {
            //    this.growArray(pos, pos+10);
            //}
            // New children start hidden; the container shows them via selectDrawable.
            dr.setVisible(false, true);
            dr.setCallback(this.mOwner);
            //this.mDrawables[pos] = dr;
            //this.mNumChildren++;
            this.mDrawables.push(dr);
            //this.mChildrenChangingConfigurations |= dr.getChangingConfigurations();
            // Invalidate cached aggregates that depend on the child set.
            this.mCheckedStateful = false;
            this.mCheckedOpacity = false;
            this.mConstantPadding = null;
            this.mPaddingChecked = false;
            this.mComputedConstantSize = false;
            return pos;
        }
        /** Capacity of the backing array (equals the child count in this port). */
        getCapacity():number {
            return this.mDrawables.length;
        }
        /** Materializes every deferred child drawable and discards the futures. */
        private createAllFutures() {
            if (this.mDrawableFutures != null) {
                const futureCount = this.mDrawableFutures.size();
                for (let keyIndex = 0; keyIndex < futureCount; keyIndex++) {
                    const index = this.mDrawableFutures.keyAt(keyIndex);
                    this.mDrawables[index] = this.mDrawableFutures.valueAt(keyIndex).get(this);
                }
                this.mDrawableFutures = null;
            }
        }
        /** Number of child drawables. */
        getChildCount() {
            return this.mNumChildren;
        }
        /*
         * @deprecated Use {@link #getChild} instead.
         */
        getChildren():Array<Drawable> {
            // Create all futures for backwards compatibility.
            this.createAllFutures();
            return this.mDrawables;
        }
        /**
         * Returns the child at `index`, materializing it from its future on
         * first access. Returns null when no child exists at that index.
         */
        getChild(index:number):Drawable {
            const result = this.mDrawables[index];
            if (result != null) {
                return result;
            }
            // Prepare future drawable if necessary.
            if (this.mDrawableFutures != null) {
                const keyIndex = this.mDrawableFutures.indexOfKey(index);
                if (keyIndex >= 0) {
                    const prepared = this.mDrawableFutures.valueAt(keyIndex).get(this);
                    this.mDrawables[index] = prepared;
                    this.mDrawableFutures.removeAt(keyIndex);
                    return prepared;
                }
            }
            return null;
        }
        /** Marks the state mutated and mutates every already-materialized child. */
        mutate() {
            // No need to call createAllFutures, since future drawables will
            // mutate when they are prepared.
            const N = this.mNumChildren;
            const drawables = this.mDrawables;
            for (let i = 0; i < N; i++) {
                if (drawables[i] != null) {
                    drawables[i].mutate();
                }
            }
            this.mMutated = true;
        }
        /**
         * A boolean value indicating whether to use the maximum padding value
         * of all frames in the set (false), or to use the padding value of the
         * frame being shown (true). Default value is false.
         */
        setVariablePadding(variable:boolean) {
            this.mVariablePadding = variable;
        }
        /**
         * Computes (and caches) the per-edge maximum padding over all children,
         * or returns null when variable padding is enabled or no child has padding.
         */
        getConstantPadding():Rect {
            if (this.mVariablePadding) {
                return null;
            }
            if ((this.mConstantPadding != null) || this.mPaddingChecked) {
                return this.mConstantPadding;
            }
            this.createAllFutures();
            let r = null;
            const t = new Rect();
            const N = this.mNumChildren;
            const drawables = this.mDrawables;
            for (let i = 0; i < N; i++) {
                if (drawables[i].getPadding(t)) {
                    // Take the per-edge maximum across all children.
                    if (r == null) r = new Rect(0, 0, 0, 0);
                    if (t.left > r.left) r.left = t.left;
                    if (t.top > r.top) r.top = t.top;
                    if (t.right > r.right) r.right = t.right;
                    if (t.bottom > r.bottom) r.bottom = t.bottom;
                }
            }
            this.mPaddingChecked = true;
            return (this.mConstantPadding = r);
        }
        /** Enables/disables reporting a single constant size for all children. */
        setConstantSize(constant:boolean) {
            this.mConstantSize = constant;
        }
        isConstantSize():boolean {
            return this.mConstantSize;
        }
        /** Max intrinsic width over all children (computed lazily). */
        getConstantWidth():number {
            if (!this.mComputedConstantSize) {
                this.computeConstantSize();
            }
            return this.mConstantWidth;
        }
        /** Max intrinsic height over all children (computed lazily). */
        getConstantHeight():number {
            if (!this.mComputedConstantSize) {
                this.computeConstantSize();
            }
            return this.mConstantHeight;
        }
        /** Max minimum width over all children (computed lazily). */
        getConstantMinimumWidth():number {
            if (!this.mComputedConstantSize) {
                this.computeConstantSize();
            }
            return this.mConstantMinimumWidth;
        }
        /** Max minimum height over all children (computed lazily). */
        getConstantMinimumHeight():number {
            if (!this.mComputedConstantSize) {
                this.computeConstantSize();
            }
            return this.mConstantMinimumHeight;
        }
        /**
         * Materializes all children and records the maxima of their intrinsic
         * and minimum dimensions for the getConstant* accessors.
         */
        computeConstantSize() {
            this.mComputedConstantSize = true;
            this.createAllFutures();
            const N = this.mNumChildren;
            const drawables = this.mDrawables;
            // Intrinsic sizes start at -1 ("no intrinsic size"), minimums at 0.
            this.mConstantWidth = this.mConstantHeight = -1;
            this.mConstantMinimumWidth = this.mConstantMinimumHeight = 0;
            for (let i = 0; i < N; i++) {
                const dr = drawables[i];
                let s = dr.getIntrinsicWidth();
                if (s > this.mConstantWidth) this.mConstantWidth = s;
                s = dr.getIntrinsicHeight();
                if (s > this.mConstantHeight) this.mConstantHeight = s;
                s = dr.getMinimumWidth();
                if (s > this.mConstantMinimumWidth) this.mConstantMinimumWidth = s;
                s = dr.getMinimumHeight();
                if (s > this.mConstantMinimumHeight) this.mConstantMinimumHeight = s;
            }
        }
        /** Fade-in duration (ms) applied when a child becomes current. */
        setEnterFadeDuration(duration:number) {
            this.mEnterFadeDuration = duration;
        }
        getEnterFadeDuration():number {
            return this.mEnterFadeDuration;
        }
        /** Fade-out duration (ms) applied when a child stops being current. */
        setExitFadeDuration(duration:number) {
            this.mExitFadeDuration = duration;
        }
        getExitFadeDuration():number {
            return this.mExitFadeDuration;
        }
        /**
         * Aggregate opacity over all children via Drawable.resolveOpacity,
         * cached after the first computation. TRANSPARENT when childless.
         */
        getOpacity():number {
            if (this.mCheckedOpacity) {
                return this.mOpacity;
            }
            this.createAllFutures();
            this.mCheckedOpacity = true;
            const N = this.mNumChildren;
            const drawables = this.mDrawables;
            let op = (N > 0) ? drawables[0].getOpacity() : PixelFormat.TRANSPARENT;
            for (let i = 1; i < N; i++) {
                op = Drawable.resolveOpacity(op, drawables[i].getOpacity());
            }
            this.mOpacity = op;
            return op;
        }
        /** True when any child is stateful; cached after the first computation. */
        isStateful():boolean {
            if (this.mCheckedStateful) {
                return this.mStateful;
            }
            this.createAllFutures();
            this.mCheckedStateful = true;
            const N = this.mNumChildren;
            const drawables = this.mDrawables;
            for (let i = 0; i < N; i++) {
                if (drawables[i].isStateful()) {
                    this.mStateful = true;
                    return true;
                }
            }
            this.mStateful = false;
            return false;
        }
        /**
         * True when every child exposes a constant state (i.e. the whole
         * container can be cloned); cached after the first computation.
         */
        canConstantState():boolean {
            if (this.mCheckedConstantState) {
                return this.mCanConstantState;
            }
            this.createAllFutures();
            this.mCheckedConstantState = true;
            const N = this.mNumChildren;
            const drawables = this.mDrawables;
            for (let i = 0; i < N; i++) {
                if (drawables[i].getConstantState() == null) {
                    this.mCanConstantState = false;
                    return false;
                }
            }
            this.mCanConstantState = true;
            return true;
        }
        //abstract
        // Placeholder for the abstract factory; concrete subclasses override this.
        newDrawable():android.graphics.drawable.Drawable {
            return undefined;
        }
    }
/**
* Class capable of cloning a Drawable from another Drawable's
* ConstantState.
*/
class ConstantStateFuture{
private mConstantState:Drawable.ConstantState;
constructor(source:Drawable) {
this.mConstantState = source.getConstantState();
}
/**
* Obtains and prepares the Drawable represented by this future.
*
* @param state the container into which this future will be placed
* @return a prepared Drawable
*/
get(state:DrawableContainerState):Drawable {
const result = this.mConstantState.newDrawable();
//result.setLayoutDirection(state.mLayoutDirection);
result.setCallback(state.mOwner);
if (state.mMutated) {
result.mutate();
}
return result;
}
}
}
}
import * as singleLineString from "single-line-string";
import * as Web3 from "web3";
import { BigNumber } from "../../utils/bignumber";
// APIs
import { AdaptersAPI, ContractsAPI } from ".";
// Adapters
import { Adapter } from "../adapters";
// Wrappers
import {
DebtKernelContract,
DebtOrderDataWrapper,
DebtTokenContract,
TokenTransferProxyContract,
} from "../wrappers";
// Types
import { DebtOrderData, IssuanceCommitment, TxData } from "../types";
// Utils
import { NULL_ADDRESS } from "../../utils/constants";
import { applyNetworkDefaults, generateTxOptions } from "../../utils/transaction_utils";
import { Assertions } from "../invariants";
// Gas ceilings passed to generateTxOptions for the two transaction families.
// NOTE(review): these look like generous upper bounds rather than estimates — confirm.
const ORDER_FILL_GAS_MAXIMUM = 600000;
const ACCEPT_GAS_MAXIMUM = 800000;
// Human-readable error messages thrown by OrderAPI's invariant assertions.
// Each entry is a factory so messages are built lazily at throw time.
export const OrderAPIErrors = {
    EXPIRED: () =>
        singleLineString`Unable to fill debt order because
                         the order has expired`,
    INVALID_UNDERWRITER_FEE: () =>
        singleLineString`Debt order has an underwriter
                         fee but has no assigned underwriter `,
    INVALID_RELAYER_FEE: () =>
        singleLineString`Debt order has a relayer fee
                         but has no assigned relayer`,
    INVALID_DEBTOR_FEE: () =>
        singleLineString`Debt order cannot have a debtor fee
                         that is greater than the total principal`,
    INVALID_FEES: () =>
        singleLineString`Debt order creditor + debtor fee
                         does not equal underwriter + relayer fee`,
    ORDER_CANCELLED: () => singleLineString`Debt order was cancelled`,
    ORDER_ALREADY_CANCELLED: () => singleLineString`Debt order has already been cancelled`,
    UNAUTHORIZED_ORDER_CANCELLATION: () => singleLineString`Debt order can only be cancelled
                                                            by the specified order's debtor`,
    UNAUTHORIZED_ISSUANCE_CANCELLATION: () => singleLineString`Debt issuance can only be cancelled
                                                               by either the specified issuance's debtor,
                                                               or by the underwriter attesting to the
                                                               issuance's default risk`,
    CREDITOR_BALANCE_INSUFFICIENT: () => singleLineString`Creditor balance is insufficient`,
    CREDITOR_ALLOWANCE_INSUFFICIENT: () => singleLineString`Creditor allowance is insufficient`,
    ISSUANCE_CANCELLED: () => singleLineString`Issuance was cancelled`,
    ISSUANCE_ALREADY_CANCELLED: () => singleLineString`Issuance has already been cancelled`,
    DEBT_ORDER_ALREADY_FILLED: () => singleLineString`Debt order has already been filled`,
    INVALID_DEBTOR_SIGNATURE: () => singleLineString`Debtor signature is not valid for debt order`,
    INVALID_CREDITOR_SIGNATURE: () =>
        singleLineString`Creditor signature is not valid for debt order`,
    INVALID_UNDERWRITER_SIGNATURE: () =>
        singleLineString`Underwriter signature is not valid for debt order`,
    ADAPTER_DOES_NOT_CONFORM_TO_INTERFACE: () =>
        singleLineString`Supplied adapter does not conform to the
                         base adapter interface.`,
    INSUFFICIENT_COLLATERALIZER_ALLOWANCE: () =>
        singleLineString`Debtor has not granted sufficient allowance for collateral transfer.`,
    INSUFFICIENT_COLLATERALIZER_BALANCE: () =>
        singleLineString`Debtor does not have sufficient balance required for collateral transfer.`,
};
/**
 * High-level API for creating, validating, filling, and cancelling Dharma
 * debt orders. Validation is performed client-side via the Assertions
 * helpers before any transaction is submitted on-chain.
 */
export class OrderAPI {
    protected web3: Web3;
    private assert: Assertions;
    protected contracts: ContractsAPI;
    private adapters: AdaptersAPI;

    /**
     * @param web3      Connected web3 instance used to send transactions.
     * @param contracts API for loading Dharma contract wrappers.
     * @param adapters  API for resolving loan adapters by terms contract address.
     */
    public constructor(web3: Web3, contracts: ContractsAPI, adapters: AdaptersAPI) {
        this.web3 = web3;
        this.contracts = contracts;
        this.adapters = adapters;
        this.assert = new Assertions(web3, this.contracts);
    }

    /**
     * Asynchronously fills a signed debt order.
     *
     * If the order fills successfully, the creditor will be debited the
     * principal amount, the debtor will receive the principal, and the
     * underwriter and the relayer will receive their transaction fees
     * (if applicable).
     *
     * The debt order must be signed by all relevant parties and the associated
     * data must be valid in order for the order to be fulfilled.
     *
     * @param debtOrderData a valid, signed debt order.
     * @param options any params needed to modify the Ethereum transaction.
     * @return the hash of the ethereum transaction that fulfilled the debt order.
     */
    public async fillAsync(debtOrderData: DebtOrderData, options?: TxData): Promise<string> {
        const txOptions = await generateTxOptions(this.web3, ORDER_FILL_GAS_MAXIMUM, options);
        // Fill in kernel/router addresses etc. for the connected network.
        debtOrderData = await applyNetworkDefaults(debtOrderData, this.contracts);
        // Throws (rejects) with a descriptive OrderAPIErrors message if unfillable.
        await this.assertFillableAsync(debtOrderData, options);
        const { debtKernel } = await this.contracts.loadDharmaContractsAsync(txOptions);
        const debtOrderDataWrapped = new DebtOrderDataWrapper(debtOrderData);
        return debtKernel.fillDebtOrder.sendTransactionAsync(
            debtOrderDataWrapped.getCreditor(),
            debtOrderDataWrapped.getOrderAddresses(),
            debtOrderDataWrapped.getOrderValues(),
            debtOrderDataWrapped.getOrderBytes32(),
            debtOrderDataWrapped.getSignaturesV(),
            debtOrderDataWrapped.getSignaturesR(),
            debtOrderDataWrapped.getSignaturesS(),
            txOptions,
        );
    }

    /**
     * Throws with error message if a given order is not able to be filled.
     *
     * @param {DebtOrderData} debtOrderData
     * @param {TxData} txOptions
     * @returns {Promise<void>}
     */
    public async assertFillableAsync(
        debtOrderData: DebtOrderData,
        txOptions?: TxData,
    ): Promise<void> {
        const {
            debtKernel,
            debtToken,
            tokenTransferProxy,
        } = await this.contracts.loadDharmaContractsAsync(txOptions);
        // Validity (fees, expiry, cancellation), signatures, creditor funds, then terms.
        await this.assertValidityInvariantsAsync(debtOrderData, debtKernel, debtToken);
        await this.assertConsensualityInvariants(debtOrderData, txOptions);
        await this.assertCreditorBalanceAndAllowanceInvariantsAsync(
            debtOrderData,
            tokenTransferProxy,
            txOptions,
        );
        await this.assertValidLoanTerms(debtOrderData);
    }

    /**
     * Asynchronously fills a debt offer via the creditor proxy.
     *
     * NOTE(review): unlike fillAsync, this path applies no network defaults and
     * performs no client-side assertions — confirm the creditor proxy contract
     * is expected to validate the offer on-chain.
     *
     * @param {DebtOrderData} debtOrderData
     * @param {TxData} options
     * @returns {Promise<string>}
     */
    public async acceptOffer(debtOrderData: DebtOrderData, options?: TxData): Promise<string> {
        const txOptions = await generateTxOptions(this.web3, ACCEPT_GAS_MAXIMUM, options);
        const creditorProxy = await this.contracts.loadCreditorProxyContract();
        const debtOrderDataWrapper = new DebtOrderDataWrapper(debtOrderData);
        return creditorProxy.fillDebtOffer.sendTransactionAsync(
            debtOrderDataWrapper.getCreditor(),
            debtOrderDataWrapper.getOrderAddresses(),
            debtOrderDataWrapper.getOrderValues(),
            debtOrderDataWrapper.getOrderBytes32(),
            debtOrderDataWrapper.getSignaturesV(),
            debtOrderDataWrapper.getSignaturesR(),
            debtOrderDataWrapper.getSignaturesS(),
            txOptions,
        );
    }

    /**
     * Asserts that a debt order is ready to be filled by any creditor, by validating
     * invariants except for those pertaining to the creditor.
     *
     * @param {DebtOrderData} debtOrderData
     * @param {TxData} txOptions
     * @returns {Promise<void>}
     */
    public async assertReadyToFill(debtOrderData: DebtOrderData, txOptions?: TxData) {
        const { debtKernel, debtToken } = await this.contracts.loadDharmaContractsAsync(txOptions);
        await this.assertValidityInvariantsAsync(debtOrderData, debtKernel, debtToken);
        await this.assert.order.validDebtorSignature(
            debtOrderData,
            txOptions,
            OrderAPIErrors.INVALID_DEBTOR_SIGNATURE(),
        );
        // The underwriter signature is only required when an underwriter is assigned.
        if (debtOrderData.underwriter && debtOrderData.underwriter !== NULL_ADDRESS) {
            await this.assert.order.validUnderwriterSignature(
                debtOrderData,
                txOptions,
                OrderAPIErrors.INVALID_UNDERWRITER_SIGNATURE(),
            );
        }
        await this.assertValidLoanTerms(debtOrderData);
    }

    /**
     * Asynchronously cancel a debt order if it has yet to be filled.
     *
     * @param debtOrderData the debt order to be canceled.
     * @param options any params needed to modify the Ethereum transaction.
     * @return the hash of the resulting Ethereum transaction.
     */
    public async cancelOrderAsync(debtOrderData: DebtOrderData, options?: TxData): Promise<string> {
        const txOptions = await generateTxOptions(this.web3, ORDER_FILL_GAS_MAXIMUM, options);
        const { debtKernel } = await this.contracts.loadDharmaContractsAsync(txOptions);
        debtOrderData = await applyNetworkDefaults(debtOrderData, this.contracts);
        const debtOrderDataWrapped = new DebtOrderDataWrapper(debtOrderData);
        // Guard against double-cancellation of the order or its issuance.
        await this.assert.order.debtOrderNotCancelledAsync(
            debtOrderData,
            debtKernel,
            OrderAPIErrors.ORDER_ALREADY_CANCELLED(),
        );
        await this.assert.order.issuanceNotCancelledAsync(
            debtOrderDataWrapped.getIssuanceCommitment(),
            debtKernel,
            OrderAPIErrors.ISSUANCE_ALREADY_CANCELLED(),
        );
        // Only the debtor may cancel an order (synchronous check; throws on failure).
        this.assert.order.senderAuthorizedToCancelOrder(
            debtOrderData,
            txOptions,
            OrderAPIErrors.UNAUTHORIZED_ORDER_CANCELLATION(),
        );
        return debtKernel.cancelDebtOrder.sendTransactionAsync(
            debtOrderDataWrapped.getOrderAddresses(),
            debtOrderDataWrapped.getOrderValues(),
            debtOrderDataWrapped.getOrderBytes32(),
            txOptions,
        );
    }

    /**
     * Given a DebtOrder instance, eventually returns true if that DebtOrder has
     * been cancelled. Returns false otherwise.
     *
     * @example
     * await dharma.order.isCancelled(debtOrder);
     * => false
     *
     * @param {DebtOrder} debtOrder
     * @param {TxData} txOptions
     * @returns {Promise<boolean>}
     */
    public async isCancelled(debtOrder: DebtOrderData, txOptions?: TxData): Promise<boolean> {
        const { debtKernel } = await this.contracts.loadDharmaContractsAsync(txOptions);
        const debtOrderWrapped = new DebtOrderDataWrapper(debtOrder);
        const commitmentHash = debtOrderWrapped.getDebtorCommitmentHash();
        return debtKernel.debtOrderCancelled.callAsync(commitmentHash);
    }

    /**
     * Asynchronously checks whether the order is filled.
     *
     * @param debtOrderData a debt order.
     * @param options any params needed to modify the Ethereum transaction.
     * @return boolean representing whether the debt order is filled or not.
     */
    public async checkOrderFilledAsync(
        debtOrderData: DebtOrderData,
        options?: TxData,
    ): Promise<boolean> {
        const txOptions = await generateTxOptions(this.web3, ORDER_FILL_GAS_MAXIMUM, options);
        const { debtToken } = await this.contracts.loadDharmaContractsAsync(txOptions);
        // A filled order corresponds to an existing debt token with the issuance hash as id.
        const issuanceHash = await this.getIssuanceHash(debtOrderData);
        return debtToken.exists.callAsync(new BigNumber(issuanceHash));
    }

    /**
     * Given a complete debt order, asynchronously computes the issuanceHash
     * (alias of debtAgreementId) of the debt order.
     *
     * Note: If the kernelVersion or issuanceVersion are not specified, the
     * current DebtKernel and RepaymentRouter's addresses will be used
     * respectively.
     *
     * @param debtOrderData Debt order for which we'd like to compute the issuance hash
     * @return The debt order's issuanceHash (alias of debtAgreementId).
     */
    public async getIssuanceHash(debtOrderData: DebtOrderData): Promise<string> {
        debtOrderData = await applyNetworkDefaults(debtOrderData, this.contracts);
        const debtOrderDataWrapped = new DebtOrderDataWrapper(debtOrderData);
        return debtOrderDataWrapped.getIssuanceCommitmentHash();
    }

    /**
     * Inverse of serialize(): parses a JSON string into a DebtOrderData,
     * re-hydrating the numeric fields into BigNumber instances.
     *
     * NOTE(review): the parsed JSON is not validated against a schema, and a
     * missing numeric key would be passed as undefined to the BigNumber
     * constructor — confirm callers always serialize complete orders.
     *
     * @param debtOrderDataAsString JSON produced by serialize().
     * @returns the reconstructed DebtOrderData.
     */
    public deserialize(debtOrderDataAsString: string): DebtOrderData {
        const debtOrderData = JSON.parse(debtOrderDataAsString);
        // Fields that JSON round-trips as strings but the API expects as BigNumber.
        const bigNumberKeys = [
            "creditorFee",
            "debtorFee",
            "expirationTimestampInSec",
            "principalAmount",
            "relayerFee",
            "underwriterFee",
            "underwriterRiskRating",
            "salt",
        ];
        bigNumberKeys.forEach((key) => {
            debtOrderData[key] = new BigNumber(debtOrderData[key]);
        });
        return debtOrderData;
    }

    /**
     * Serializes a debt order to JSON (BigNumber fields become strings;
     * see deserialize() for the inverse).
     */
    public serialize(debtOrderData: DebtOrderData): string {
        return JSON.stringify(debtOrderData);
    }

    /**
     * Generate a Dharma debt order, given the specified adapter and its associated
     * parameters object.
     *
     * @param adapter The adapter to be leveraged in generating this particular debt
     *                order.
     * @param params  The parameters that will be used by the aforementioned adapter
     *                to generate the debt order.
     * @return Newly generated debt order.
     */
    public async generate(adapter: Adapter, params: object): Promise<DebtOrderData> {
        this.assert.adapter.conformsToInterface(
            adapter,
            OrderAPIErrors.ADAPTER_DOES_NOT_CONFORM_TO_INTERFACE(),
        );
        return adapter.toDebtOrder(params);
    }

    /**
     * Decode tightly-packed representation of debt agreement's terms in a
     * given debt order into an object with human-interpretable keys and values.
     *
     * NOTE: If the terms contract in the given debt order does not correspond
     * to any of the built-in adapters bundled into dharma.js, this method
     * will throw.
     *
     * @param debtOrderData A Dharma debt order
     * @return An object containing human-interpretable terms for the loan
     */
    public async unpackTerms(debtOrderData: DebtOrderData): Promise<object> {
        const { termsContract, termsContractParameters } = debtOrderData;
        // Will throw if adapter cannot be found for given terms contract
        const adapter = await this.adapters.getAdapterByTermsContractAddress(termsContract);
        return adapter.unpackParameters(termsContractParameters);
    }

    /**
     * Cancels a debt issuance (as opposed to a single order over it), after
     * asserting it is not already cancelled and that the sender is authorized
     * (debtor or underwriter, per UNAUTHORIZED_ISSUANCE_CANCELLATION).
     *
     * @param issuanceCommitment the issuance to cancel.
     * @param txOptions transaction parameters (sender, gas, etc.).
     * @returns the hash of the resulting Ethereum transaction.
     */
    public async cancelIssuanceAsync(
        issuanceCommitment: IssuanceCommitment,
        txOptions: TxData,
    ): Promise<string> {
        const { debtKernel } = await this.contracts.loadDharmaContractsAsync(txOptions);
        await this.assert.order.issuanceNotCancelledAsync(
            issuanceCommitment,
            debtKernel,
            OrderAPIErrors.ISSUANCE_ALREADY_CANCELLED(),
        );
        this.assert.order.senderAuthorizedToCancelIssuance(
            issuanceCommitment,
            txOptions,
            OrderAPIErrors.UNAUTHORIZED_ISSUANCE_CANCELLATION(),
        );
        return debtKernel.cancelIssuance.sendTransactionAsync(
            issuanceCommitment.issuanceVersion,
            issuanceCommitment.debtor,
            issuanceCommitment.termsContract,
            issuanceCommitment.termsContractParameters,
            issuanceCommitment.underwriter,
            issuanceCommitment.underwriterRiskRating,
            issuanceCommitment.salt,
            txOptions,
        );
    }

    /**
     * Throws if the debt order is not fillable by the prospective creditor.
     *
     * NOTE(review): mutates the caller's debtOrderData by assigning
     * debtOrderData.creditor — confirm callers do not rely on the original value.
     *
     * @param debtOrderData
     * @param prospectiveCreditor
     * @param txOptions
     * @returns {Promise<void>}
     */
    public async assertFillableBy(
        debtOrderData: DebtOrderData,
        prospectiveCreditor: string,
        txOptions?: TxData,
    ): Promise<void> {
        debtOrderData.creditor = prospectiveCreditor;
        const tokenTransferProxy = await this.contracts.loadTokenTransferProxyAsync(txOptions);
        // The two assertion groups are independent, so run them concurrently.
        await Promise.all([
            this.assertReadyToFill(debtOrderData, txOptions),
            this.assertCreditorBalanceAndAllowanceInvariantsAsync(
                debtOrderData,
                tokenTransferProxy,
                txOptions,
            ),
        ]);
    }

    /**
     * Determines if the debt order is fillable by the prospective creditor.
     *
     * @param debtOrderData
     * @param prospectiveCreditor
     * @param txOptions
     * @returns {Promise<boolean>}
     */
    public async isFillableBy(
        debtOrderData: DebtOrderData,
        prospectiveCreditor: string,
        txOptions?: TxData,
    ): Promise<boolean> {
        // Boolean wrapper over assertFillableBy: any assertion failure means "not fillable".
        try {
            await this.assertFillableBy(debtOrderData, prospectiveCreditor, txOptions);
            return true;
        } catch (e) {
            return false;
        }
    }

    /**
     * Validates a given debt order's terms against the appropriate loan order adapter.
     *
     * @param {DebtOrderData} debtOrderData
     * @returns {Promise<void>}
     */
    private async assertValidLoanTerms(debtOrderData: DebtOrderData) {
        const adapter = await this.adapters.getAdapterByTermsContractAddress(
            debtOrderData.termsContract,
        );
        const loanOrder = await adapter.fromDebtOrder(debtOrderData);
        await adapter.validateAsync(loanOrder);
    }

    /**
     * Validates the order's intrinsic invariants: fee structure, expiry,
     * cancellation status (order and issuance), and that it is not already filled.
     */
    private async assertValidityInvariantsAsync(
        debtOrderData: DebtOrderData,
        debtKernel: DebtKernelContract,
        debtToken: DebtTokenContract,
    ): Promise<void> {
        this.assert.order.validDebtorFee(debtOrderData, OrderAPIErrors.INVALID_DEBTOR_FEE());
        this.assert.order.validUnderwriterFee(
            debtOrderData,
            OrderAPIErrors.INVALID_UNDERWRITER_FEE(),
        );
        this.assert.order.validRelayerFee(debtOrderData, OrderAPIErrors.INVALID_RELAYER_FEE());
        this.assert.order.validFees(debtOrderData, OrderAPIErrors.INVALID_FEES());
        await this.assert.order.notExpired(debtOrderData, OrderAPIErrors.EXPIRED());
        await this.assert.order.debtOrderNotCancelledAsync(
            debtOrderData,
            debtKernel,
            OrderAPIErrors.ORDER_CANCELLED(),
        );
        await this.assert.order.issuanceNotCancelledAsync(
            debtOrderData,
            debtKernel,
            OrderAPIErrors.ISSUANCE_CANCELLED(),
        );
        await this.assert.order.notAlreadyIssuedAsync(
            debtOrderData,
            debtToken,
            OrderAPIErrors.DEBT_ORDER_ALREADY_FILLED(),
        );
    }

    /**
     * Validates that all required parties have signed the order: debtor,
     * creditor, and — only when one is assigned — the underwriter.
     */
    private async assertConsensualityInvariants(debtOrderData: DebtOrderData, txOptions: object) {
        await this.assert.order.validDebtorSignature(
            debtOrderData,
            txOptions,
            OrderAPIErrors.INVALID_DEBTOR_SIGNATURE(),
        );
        await this.assert.order.validCreditorSignature(
            debtOrderData,
            txOptions,
            OrderAPIErrors.INVALID_CREDITOR_SIGNATURE(),
        );
        if (debtOrderData.underwriter && debtOrderData.underwriter !== NULL_ADDRESS) {
            await this.assert.order.validUnderwriterSignature(
                debtOrderData,
                txOptions,
                OrderAPIErrors.INVALID_UNDERWRITER_SIGNATURE(),
            );
        }
    }

    /**
     * Validates that the creditor holds enough of the principal token and has
     * approved the token transfer proxy for at least the required amount.
     */
    private async assertCreditorBalanceAndAllowanceInvariantsAsync(
        debtOrderData: DebtOrderData,
        tokenTransferProxy: TokenTransferProxyContract,
        txOptions: object,
    ): Promise<void> {
        const principalToken = await this.contracts.loadERC20TokenAsync(
            debtOrderData.principalToken,
            txOptions,
        );
        await this.assert.order.sufficientCreditorBalanceAsync(
            debtOrderData,
            principalToken,
            OrderAPIErrors.CREDITOR_BALANCE_INSUFFICIENT(),
        );
        await this.assert.order.sufficientCreditorAllowanceAsync(
            debtOrderData,
            principalToken,
            tokenTransferProxy,
            OrderAPIErrors.CREDITOR_ALLOWANCE_INSUFFICIENT(),
        );
    }
}
'use strict';
import * as vscode from 'vscode';
import * as ts from 'typescript';
import throttle = require('lodash.throttle');
// TODO Clean up defensive development guards (DEBUG, most try-catches, etc), as the extension seems to work without errors,
// and the vscode extension platform can mostly be trusted to do the right thing.
// TODO Clean up artificial delays to only those that are really needed, but testing those seems flaky.
// Minimum interval between subdocument <-> document sync passes (lodash.throttle).
const SYNC_THROTTLE_MS = 100;
// Extra diagnostics during development; keep false for release builds.
const DEBUG = false;
if (DEBUG) {
    // Surface swallowed promise rejections while developing.
    process.on('unhandledRejection', (reason: any, _p: Promise<any>) => {
        console.warn('UNHANDLED: %s', reason && reason.stack || reason);
    });
}
// Tracks all documents with open subdocuments
const activeDocuments = new Map<vscode.TextDocument, {
    subdoc: vscode.TextDocument,
    closeSubdocumentWithReason(reason: string): Promise<void>,
}>();
// Remembers the previously picked language for convenience
let previouslyPickedLanguage = 'html';
// Guard against keeping Ctrl+Enter pressed
let opening = false;
// NOTE: One gets "TextEditor disposed" warnings if kept pressed, possibly indicative of some places needing a delay.
export function activate(_context: vscode.ExtensionContext) {
vscode.commands.registerTextEditorCommand('editor.openSubdocument', async editor => {
if (opening) {
return;
}
opening = true;
try {
// NOTE: Ctrl+Enter now toggles the subdocument when called on a subdocument, and so nested cases
// like string literal > markdown > html block won't work. But they wouldn't work in any case due to
// sync edits needing all three editors visible at the same time, and only two viewcolumns are currently used.
for (let handle of activeDocuments.values()) {
if (handle.subdoc === editor.document) {
// If called on a subdocument, close it, focus on original document, and sync cursor for convenience.
await handle.closeSubdocumentWithReason('Closed via toggling shortcut. This virtual document can be closed.');
// Early return
return;
}
}
await findAndOpenLiteralUnderCursor(editor);
} catch (err) {
if (DEBUG) {
console.error('openSubdocument error: %s', err && err.stack || err);
}
} finally {
opening = false;
}
});
vscode.commands.registerTextEditorCommand('editor.closeSubdocuments', async _editor => {
try {
for (let handle of activeDocuments.values()) {
// Alternatively could close only the document/subdocument that is open in the current editor, but let's close
// them all for now.
await handle.closeSubdocumentWithReason('Closed via shortcut. This virtual document can be closed.');
}
} catch (err) {
if (DEBUG) {
console.error('closeSubdocuments error: %s', err && err.stack || err);
}
}
});
/**
 * Finds the literal under the cursor and opens it as a synced subdocument.
 *
 * Two detection strategies:
 * - A configured regex (templateLiteralEditor.regexes.<languageId>) with three
 *   capture groups: opener, content, closer; only the content becomes the template.
 * - For typescript/javascript, the TypeScript compiler API is used to find the
 *   outermost template literal containing the cursor (backticks excluded).
 *
 * On success asks the user for a language mode and delegates to activateSubdocument;
 * otherwise shows a modal warning.
 */
async function findAndOpenLiteralUnderCursor(editor: vscode.TextEditor) {
    try {
        const doc = editor.document;
        const cursorOffset = doc.offsetAt(editor.selection.active);
        // templateStart/templateEnd stay 0 when no literal is found (0 is treated as "not found").
        let templateStart = 0;
        let templateEnd = 0;
        const config = vscode.workspace.getConfiguration('templateLiteralEditor.regexes');
        if (config.has(doc.languageId) && typeof config.get(doc.languageId) === 'string') {
            // Just iterates from the top of the document with a regexp. Could have a plugin system of some sort,
            // enabling custom parsers or expanding from the cursor, or some other scheme.
            const text = doc.getText();
            let matcher: RegExp;
            try {
                matcher = new RegExp(config.get(doc.languageId) as string, 'g');
            } catch (err) {
                console.error(
                    'INVALID REGEX in templateLiteralEditor.regexes.%s: %s\n%s',
                    doc.languageId, config.get(doc.languageId), err && err.stack || err
                );
                await vscode.window.showErrorMessage(
                    `Invalid regex in templateLiteralEditor.regexes.${doc.languageId}: ${config.get(doc.languageId)}\n${err && err.stack || err}`
                );
                throw err;
            }
            let match: RegExpExecArray | null;
            while ((match = matcher.exec(text)) !== null) {
                // Require all three groups (opener, content, closer) to be present.
                if (typeof match[1] === 'string' && typeof match[2] === 'string' && typeof match[3] === 'string') {
                    // Cursor at boundaries is ok, but only inner content is used as a template
                    if (match.index <= cursorOffset && cursorOffset <= matcher.lastIndex) {
                        // NOTE also surrogates work ok, as vscode column counter uses the same measurement as str.length
                        templateStart = match.index + match[1].length;
                        templateEnd = match.index + match[1].length + match[2].length;
                        break;
                    } else if (matcher.lastIndex > cursorOffset) {
                        // Don't bother iterating the rest of the doc
                        break;
                    }
                }
            }
        } else if (doc.languageId === 'typescript' || doc.languageId === 'javascript') {
            // Default JS and TS to proper tokenizing instead of regexp matching
            const source = ts.createSourceFile(doc.fileName, doc.getText(), ts.ScriptTarget.Latest, /*setParentNodes*/ true);
            // Find the outermost template literal by walking parents from the token at the cursor.
            let template: ts.TemplateLiteral | undefined;
            // getTokenAtPosition is not really public but widely used. May break in a future version.
            let token = (ts as any).getTokenAtPosition(source, cursorOffset);
            while (token) {
                if (
                    token.kind === ts.SyntaxKind.NoSubstitutionTemplateLiteral ||
                    token.kind === ts.SyntaxKind.TemplateExpression
                ) {
                    // Keep overwriting: the last assignment on the way up is the outermost literal.
                    template = token;
                }
                token = token.parent;
            }
            if (template) {
                // Exclude the surrounding backticks from the template content.
                templateStart = template.getStart() + 1;
                templateEnd = template.getEnd() - 1;
            }
        } else {
            // Omitted
        }
        if (templateStart !== 0) {
            const languages = await vscode.languages.getLanguages();
            // How to get proper language list, with icons etc?
            // Not possible yet I guess:
            // https://github.com/Microsoft/vscode/blob/5aea732/src/vs/workbench/browser/parts/editor/editorStatus.ts#L747-L763
            // Offer the previously picked language first for quick re-use.
            const sorted = [previouslyPickedLanguage].concat(languages.filter(lang => lang !== previouslyPickedLanguage));
            const pickedLanguage = await vscode.window.showQuickPick(sorted, { placeHolder: 'Open in Language Mode' });
            if (pickedLanguage) {
                previouslyPickedLanguage = pickedLanguage;
                try {
                    await activateSubdocument(
                        pickedLanguage,
                        editor,
                        doc.positionAt(templateStart),
                        doc.positionAt(templateEnd),
                    );
                } catch (err) {
                    if (DEBUG) {
                        console.error('ACTIVATION ERROR: %s', err && err.stack || err);
                    }
                    throw err;
                };
            }
        } else {
            console.warn(
                'Literal not found under cursor. If in error, please modify the source or templateLiteralEditor.regexes.%s configuration for your needs',
                doc.languageId,
            );
            await vscode.window.showWarningMessage(
                `Literal not found under cursor. If in error, please modify the source or templateLiteralEditor.regexes.${doc.languageId} configuration for your needs. Please also consider submitting your improved regexes to the vscode-template-literal-editor repository.`,
                // Open as modal, so that next enter closes the message quickly without needing a mouse.
                { modal: true }
            );
        }
    } catch (err) {
        if (DEBUG) {
            console.error('findAndOpenLiteralUnderCursor error: %s', err && err.stack || err);
        }
        throw err;
    }
}
async function activateSubdocument(
language: string,
editor: vscode.TextEditor,
start: vscode.Position,
end: vscode.Position,
) {
const doc = editor.document;
// Keep track of document range where template literal resides
let templateRange = new vscode.Range(start, end);
// // Calculate cursor position relative to viewport top for subdocument scroll to match
// const cursorPosition = editor.selection.active;
// await vscode.commands.executeCommand('cursorMove', {
// to: 'viewPortTop',
// select: false,
// });
// await vscode.commands.executeCommand('cursorMove', {
// to: 'wrappedLineStart',
// select: false,
// });
// const viewPortTopPosition = editor.selection.active;
// // Move cursor back to where it was
// await vscode.commands.executeCommand('cursorMove', {
// to: 'down',
// by: 'line',
// value: cursorPosition.line - viewPortTopPosition.line
// });
// editor.selection = new vscode.Selection(cursorPosition, cursorPosition);
// Only one active subdocument per document allowed for simplicity.
if (activeDocuments.has(doc)) {
await activeDocuments.get(doc)!.closeSubdocumentWithReason('Reloading.');
// TODO test if editor reference could be lost due to focus shifting if subdocument happens to be in the same group
}
// Create subdocument with chosen language.
// Could be made configurable depending on template tag, keybinding, etc.
// Always creates a new untitled file.
// NOTE: setting content here to fix undo history including the initially empty doc. v1.11 api only
const subdoc = await vscode.workspace.openTextDocument({ language, content: doc.getText(templateRange) });
activeDocuments.set(doc, { subdoc, async closeSubdocumentWithReason() { } });
// Open subeditor in side by side view. Note that editor arrangement is fixed for simplicity.
// NOTE: use these, as editor objects will be stale when refocused in tabs, and won't reflect group changes in any case.
const editorViewColumn = editor.viewColumn;
const subeditorViewColumn = editorViewColumn === vscode.ViewColumn.One ? vscode.ViewColumn.Two : vscode.ViewColumn.One;
const subeditor = await vscode.window.showTextDocument(subdoc, subeditorViewColumn);
// Artificial delay, seems to fix original editor scrolling half a line sometimes. Perhaps due to revealLine (below).
await shortDelay();
// Move cursor to proper position
const targetPos = new vscode.Position(
Math.max(editor.selection.active.line - templateRange.start.line, 0),
Math.max(editor.selection.active.character - (editor.selection.active.line === templateRange.start.line ? templateRange.start.character : 0), 0)
);
await moveActiveCursorTo(targetPos);
// // How to scroll subdocument to match document viewport, and keep them in sync?
// // Would need to measure viewport width to calculate wrapping lines, etc...
// await vscode.commands.executeCommand('revealLine', {
// lineNumber: cursorSubposition.line,
// at: 'top'
// });
// // Proper implementation would leave dead space at top, so that lines would be matched even for small documents
// await vscode.commands.executeCommand('editorScroll', {
// to: 'up',
// by: 'line',
// value: cursorPosition.line - viewPortTopPosition.line,
// });
// Center viewport if possible, for now, until line sync is possible
await vscode.commands.executeCommand('revealLine', {
lineNumber: subeditor.selection.active.line,
at: 'center'
});
// await vscode.commands.executeCommand('editorScroll', {
// to: 'down',
// revealCursor: true,
// });
// const decorationType = vscode.window.createTextEditorDecorationType({
// isWholeLine: true,
// backgroundColor: '#222'
// })
// Experiment with cursor syncing
// vscode.window.onDidChangeTextEditorSelection(event => {
// // NOTE should not use subeditor, but editor.document === subdoc
// if (event.textEditor === subeditor) {
//
// (async() => {
//
// // Experimental line highlighter (won't be native-like)
// // editor.setDecorations(
// // decorationType, [
// // new vscode.Range(
// // templateRange.start.line + subeditor.selection.active.line,
// // 0,
// // templateRange.start.line + subeditor.selection.active.line,
// // 1,
// // )
// // ]
// // )
//
// // Experimental cursor sync (flickers)
// // await vscode.window.showTextDocument(doc, editor.viewColumn, /*preserveFocus*/ false);
// // await vscode.commands.executeCommand('cursorMove', {
// // to: 'down',
// // value: (templateRange.start.line + subeditor.selection.active.line) - editor.selection.active.line
// // });
// // await vscode.commands.executeCommand('cursorMove', {
// // to: 'right',
// // value: (subeditor.selection.active.line === 0 ? templateRange.start.character : 0) +
// // subeditor.selection.active.character - editor.selection.active.character
// // });
// // await vscode.window.showTextDocument(subdoc, subeditor.viewColumn, /*preserveFocus*/ false);
//
// })().catch(err => {
// if (DEBUG) {
// console.error('didChangeSelection error: %s', err && err.stack || err);
// }
// throw err;
// });
// }
// })
/**
* Handlers
*/
// If the original document is closed anywhere, tear down the pairing and its subdocument.
const documentCloseListener = vscode.workspace.onDidCloseTextDocument(async closedDoc => {
    if (closedDoc === doc) {
        try {
            await closeSubdocumentWithReason('Source document closed. This virtual document can be closed.');
        } catch (err) {
            if (DEBUG) {
                console.error('documentCloseListener error: %s', err && err.stack || err);
            }
        }
    }
});
// Likewise, if the subdocument itself is closed, dispose all listeners and state.
const subdocumentCloseListener = vscode.workspace.onDidCloseTextDocument(async closedDoc => {
    if (closedDoc === subdoc) {
        try {
            await closeSubdocumentWithReason('Subdocument closed. This virtual document can be closed.');
        } catch (err) {
            if (DEBUG) {
                console.error('subdocumentCloseListener error: %s', err && err.stack || err);
            }
        }
    }
});
// These may prevent some sync issues, but may also annoy the user if they are unnecessary.
// Changing e.g. encodings and line endings are mostly untested.
// const configChangeListener = vscode.workspace.onDidChangeConfiguration(() => {
// disposeSubdocument('Workspace configuration changed. This virtual document can be closed.');
// });
// const optionsChangeListener = vscode.window.onDidChangeTextEditorOptions(({textEditor}) => {
// if (textEditor.document === doc || textEditor.document === subdoc) {
// disposeSubdocument('Document options changed. This virtual document can be closed.');
// }
// });
// Override ordinary save with saving of the original document.
// Overrides the built-in save command so that saving while the subdocument has
// focus saves the original document instead (the untitled subdocument itself is
// never meant to be saved). Returns the registration for later disposal.
function newSaveOverride(): vscode.Disposable {
    const saveOriginalDocument = async () => {
        try {
            await doc.save();
        } catch (err) {
            if (DEBUG) {
                console.error('Saving of document failed: %s', err && err.stack || err);
            }
            // On failure, stop intercepting so the ordinary save command keeps working.
            saveOverride.dispose();
        }
    };
    return vscode.commands.registerTextEditorCommand('workbench.action.files.save', saveOriginalDocument);
}
// The subdocument starts focused, so install the override immediately.
let saveOverride = newSaveOverride();
// Always remove saveOverride when active editor changes, and set it again if focus is restored
// NOTE: disposing saveOverride is very important, as otherwise nothing can be saved in vscode, for any document.
// So it is important to not fail setting this handler.
const activeTextEditorChangeListener = vscode.window.onDidChangeActiveTextEditor(newEditor => {
    saveOverride.dispose();
    if (newEditor && newEditor.document === subdoc) {
        saveOverride = newSaveOverride();
    }
});
/**
 * Sync logic
 */

// Keep track of change origins to avoid circular edits: an edit this extension makes to
// one side is tagged with the *other* side's name, so the resulting change event is
// recognized and not synced back again.
let changeOrigin: 'document' | 'subdocument' | 'dispose' | null = null;
const contentChangeListener = vscode.workspace.onDidChangeTextDocument(change => {
    // Suppress possible late edits
    if (changeOrigin === 'dispose') {
        return;
    }
    if (change.document === subdoc) {
        if (changeOrigin === 'document') {
            // Document sync received, mark further edits as ordinary/unknown
            changeOrigin = null;
        } else {
            // We don't care about actual edits and partial templateRange synchronization,
            // just copy everything in case there are changes
            throttledSyncToDocument();
        }
    } else if (change.document === doc) {
        if (changeOrigin === 'subdocument') {
            // Subdocument sync received, mark further edits as ordinary/unknown
            changeOrigin = null;
        } else {
            // Track only simple changes in original document (does not touch template boundaries):
            // each change must be entirely before, entirely after, or entirely inside the template.
            const isValid = change.contentChanges.every(({ range: changeRange }) => {
                return (
                    changeRange.end.isBefore(templateRange.start) ||
                    changeRange.start.isAfter(templateRange.end) ||
                    templateRange.contains(changeRange)
                );
            });
            if (!isValid) {
                // We don't track complex edits in original document, let's close
                // subdocument for safety. We don't want to retokenize the document and
                // try to infer which template is which.
                closeSubdocumentWithReason(
                    'Source document has been modified. This virtual editor can be closed.'
                ).catch(err => {
                    if (DEBUG) {
                        console.error('onDidChangeTextDocument error: %s', err && err.stack || err);
                    }
                });
            } else {
                // Defer sync until all contentChanges are processed, so that changes, content and templateRange match
                let needsSync = false;
                change.contentChanges.forEach(({ range: changeRange, text: changeText }) => {
                    if (changeRange.start.isAfter(templateRange.end)) {
                        // Simplest case: No templateRange update needed for changes below template
                        if (DEBUG) {
                            // Not actually needed, but can be enabled to see problems earlier
                            needsSync = true;
                        }
                    } else if (changeRange.end.isBefore(templateRange.start)) {
                        // General case before template, a bit complex due to depending on both changeRange and
                        // changeText line count etc
                        // TODO experiment with doc.eol from vscode 1.11
                        const insertedLines = changeText.split(/\r\n|\r|\n/);
                        const lineDiff = insertedLines.length - (changeRange.end.line - changeRange.start.line + 1);
                        let charDiff = 0;
                        if (changeRange.end.line < templateRange.start.line) {
                            // Simple change above template, just count lines and move the templateRange if needed
                        } else {
                            // Change touches the template start line
                            // first remove changeRange chars, it does not matter if there are multiple lines
                            charDiff -= (changeRange.end.character - changeRange.start.character);
                            // then add new changeText chars, only last line counts
                            // NOTE also surrogates work ok, as vscode column counter uses the same measurement as str.length
                            charDiff += insertedLines[insertedLines.length - 1].length;
                            if (insertedLines.length > 1) {
                                // If a line break is introduced, push to beginning of line
                                charDiff -= changeRange.start.character;
                            }
                        }
                        if (lineDiff || charDiff) {
                            // Move templateRange accordingly
                            templateRange = new vscode.Range(
                                // Start row and col may change
                                templateRange.start.line + lineDiff,
                                templateRange.start.character + charDiff,
                                // End row may change
                                templateRange.end.line + lineDiff,
                                // End col changes only if the templateRange is a single line
                                templateRange.isSingleLine ?
                                    templateRange.end.character + charDiff :
                                    templateRange.end.character
                            );
                            if (DEBUG) {
                                // Not actually needed, but can be enabled to see problems earlier
                                needsSync = true;
                            }
                        }
                    } else if (templateRange.contains(changeRange)) {
                        // General case inside template, also a bit complex due to depending on both changeRange and
                        // changeText line count etc
                        // TODO experiment with doc.eol from vscode 1.11
                        const insertedLines = changeText.split(/\r\n|\r|\n/);
                        const lineDiff = insertedLines.length - (changeRange.end.line - changeRange.start.line + 1);
                        let charDiff = 0;
                        if (changeRange.end.line < templateRange.end.line) {
                            // Simple change above template end, just count lines and move the templateRange end
                            // if needed
                        } else {
                            // Change touches the template end line
                            // first remove changeRange chars, it does not matter if there are multiple lines
                            charDiff -= (changeRange.end.character - changeRange.start.character);
                            // then add new changeText chars, only last line counts
                            // NOTE also surrogates work ok, as vscode column counter uses the same measurement as str.length
                            charDiff += insertedLines[insertedLines.length - 1].length;
                            if (insertedLines.length > 1) {
                                // If a line break is introduced, the last line starts at the beginning of line
                                charDiff -= changeRange.start.character;
                            }
                        }
                        // Move templateRange accordingly
                        templateRange = new vscode.Range(
                            // Start row and col stay the same
                            templateRange.start.line,
                            templateRange.start.character,
                            // End row and col may change
                            templateRange.end.line + lineDiff,
                            templateRange.end.character + charDiff
                        );
                        needsSync = true;
                    }
                });
                if (needsSync) {
                    throttledSyncToSubdocument();
                }
            }
        }
    }
});
// Throttle sync document edits, so that editing the subdocument stays quick.
// As there are async functions involved which may have a delay, may need guarding transactions if errors start to appear.
// NOTE: latest vscode edits (v1.22->) are slower than previously, so guards against re-entrancy instead of
// increasing throttling, to keep it snappy
// Copies the whole subdocument text over the template range in the original document,
// then recomputes templateRange from the subdocument's line count.
let isSyncingToDocument = false;
const throttledSyncToDocument = throttle(async () => {
    if (isSyncingToDocument) {
        if (DEBUG) {
            console.warn('throttledSyncToDocument overlap, will defer');
        }
        // Calls function again to not miss edits in case this is the last invocation in this stall.
        throttledSyncToDocument();
        return;
    }
    isSyncingToDocument = true;
    try {
        // We have to always take a new reference to the editor, as it may have been hidden
        // and a new editor may need to be created.
        const newEditor = await vscode.window.showTextDocument(doc, editorViewColumn, /*preserveFocus*/ true);
        const editOk = await newEditor.edit(editBuilder => {
            // We don't care about actual edits and partial templateRange synchronization,
            // just copy everything in case there are changes
            // Mark next edit as originating from subdocument. Does not consider multiple edits
            // at the same time to both documents.
            changeOrigin = 'subdocument';
            editBuilder.replace(templateRange, subdoc.getText());
            // We calculate new range based on subdoc size. Depends on both documents having the same config.
            templateRange = new vscode.Range(
                // Start row and col stay the same
                templateRange.start.line,
                templateRange.start.character,
                // End row depends on subdoc line count
                templateRange.start.line + subdoc.lineCount - 1,
                // End col depends on whether there is only single line or more
                (subdoc.lineCount === 1 ? templateRange.start.character : 0) +
                    subdoc.lineAt(subdoc.lineCount - 1).range.end.character
            )
        });
        if (!editOk) {
            // If there are multiple edits, they may not succeed, and then templateRange will be out of sync. Better to fail then.
            throw new Error('Sync to document did not succeed');
        }
    } catch (err) {
        if (DEBUG) {
            console.error('DOC SYNC ERROR %s', err && err.stack || err);
        }
        try {
            await closeSubdocumentWithReason(
                'Source document could not be synced with subdocument. This virtual editor can be closed.'
            );
        } catch (err2) {
            if (DEBUG) {
                console.error('throttledSyncToDocument error: %s', err2 && err2.stack || err2);
            }
        }
    } finally {
        isSyncingToDocument = false;
    }
}, SYNC_THROTTLE_MS);
// Throttle sync subdocument edits, so that editing document stays snappy
// This might be a bit more costly due to enabled language services in subdocument, so increase
// delay if needed. Delay could be made configurable.
// NOTE: If a large delay is needed, everything here may need to be guarded against subdocument
// closing before or in the middle of execution. But let's keep this simple and quick for now.
// NOTE: latest vscode edits (v1.22->) are slower than previously, so guards against re-entrancy instead of
// increasing throttling, to keep it snappy
// Replaces the entire subdocument content with the current template text from the document.
let isSyncingToSubdocument = false;
const throttledSyncToSubdocument = throttle(async () => {
    if (isSyncingToSubdocument) {
        if (DEBUG) {
            console.warn('throttledSyncToSubdocument overlap, will defer');
        }
        // Calls function again to not miss edits in case this is the last invocation in this stall.
        throttledSyncToSubdocument();
        return;
    }
    isSyncingToSubdocument = true;
    try {
        // We have to always take a new reference to the editor, as it may have been hidden
        // and a new editor may need to be created.
        const newSubeditor = await vscode.window.showTextDocument(
            subdoc, subeditorViewColumn, /*preserveFocus*/ true
        );
        const editOk = await newSubeditor.edit(editBuilder => {
            // We don't care about actual edits and partial templateRange synchronization,
            // just copy everything in case there are changes. This may have a cost of
            // calculating decorations etc again, but can be revisited if a need arises.
            // Mark next edit as originating from document. Does not consider multiple edits
            // at the same time to both documents.
            changeOrigin = 'document';
            // validateRange clamps the oversized range to the actual end of the subdocument.
            const totalRange = subdoc.validateRange(new vscode.Range(0, 0, 100000, 100000));
            // We copy whole literal to subdoc. Depends on both documents having the same config.
            editBuilder.replace(totalRange, doc.getText(templateRange));
        });
        if (!editOk) {
            // If there are multiple edits, they may not succeed, and then templateRange will be out of sync.
            // Better to fail then.
            throw new Error('Sync to subdocument did not succeed');
        }
    } catch (err) {
        if (DEBUG) {
            console.error('SUBDOC SYNC ERROR %s', err && err.stack || err);
        }
        try {
            await closeSubdocumentWithReason(
                'Subdocument could not be synced with original document. This virtual editor can be closed.'
            );
        } catch (err2) {
            if (DEBUG) {
                console.error('throttledSyncToSubdocument error: %s', err2 && err2.stack || err2);
            }
        }
    } finally {
        isSyncingToSubdocument = false;
    }
}, SYNC_THROTTLE_MS);
/**
 * Tears down this document/subdocument pairing: suppresses any late sync events
 * (changeOrigin = 'dispose'), disposes every listener and the save override,
 * removes the pairing from activeDocuments, and closes the virtual editor.
 *
 * @param reason human-readable explanation, only logged when DEBUG is set
 */
async function closeSubdocumentWithReason(reason: string) {
    try {
        if (DEBUG) {
            console.log('DISPOSING: %s', reason);
        }
        changeOrigin = 'dispose';
        contentChangeListener.dispose();
        documentCloseListener.dispose();
        subdocumentCloseListener.dispose();
        saveOverride.dispose();
        activeTextEditorChangeListener.dispose();
        activeDocuments.delete(doc);
        // Close untitled subdocs via action, moves focus so may pipe quick keypresses to wrong doc unfortunately
        await closeSubeditor();
    } catch (err) {
        if (DEBUG) {
            console.error('closeSubdocumentWithReason error: %s', err && err.stack || err);
        }
        throw err;
    }
}
// async function markSubdocumentAsTainted(reason: string) {
// if (vscode.workspace.textDocuments.indexOf(subdoc) >= 0) {
// try {
// let newSubeditor = await vscode.window.showTextDocument(
// subdoc, subeditorColumn, /*preserveFocus*/ true
// );
// let ok = await newSubeditor.edit(builder => {
// const totalRange = subdoc.validateRange(new vscode.Range(0, 0, 100000, 100000));
// builder.replace(totalRange, reason || 'This virtual editor can be closed.');
// });
// if (!ok) {
// throw new Error('Dispose edit could not succeed');
// }
// } catch (err) {
// if (DEBUG) {
// console.error('DISPOSE ERR %s', err && err.stack || err);
// }
// }
// }
// }
/**
 * Closes the visible subdocument editor(s) by reverting the untitled document,
 * then restores focus (and, when coming from the subeditor, the cursor position
 * mapped back into the original document). Relies on short delays to work around
 * focus races in the editor; statement order matters here.
 */
async function closeSubeditor() {
    if (vscode.workspace.textDocuments.indexOf(subdoc) >= 0) {
        // Note: subdocument may be visible in multiple editors, but luckily reverting seems to close all of them.
        try {
            // Save current focus, if available and valid
            let returnDoc: vscode.TextDocument | undefined;
            let returnViewColumn: vscode.ViewColumn | undefined;
            let returnPos: vscode.Position | undefined;
            // Artificial delay, trying to ensure correct editor is got when closing the subdoc.
            await shortDelay();
            const activeTextEditor = vscode.window.activeTextEditor;
            if (activeTextEditor) {
                if (activeTextEditor.document === subdoc) {
                    // Common case: closing subeditor via Ctrl+Enter or Ctrl+Shift+Backspace when subeditor is in focus.
                    // Focus on original document afterwards.
                    returnDoc = doc;
                    returnViewColumn = editorViewColumn;
                    // Sync also cursor in this case: map subdocument position back into the
                    // original document via templateRange (column offset applies on the first line only).
                    returnPos = new vscode.Position(
                        templateRange.start.line + activeTextEditor.selection.active.line,
                        (activeTextEditor.selection.active.line === 0 ? templateRange.start.character : 0) +
                            activeTextEditor.selection.active.character
                    );
                } else {
                    // Move focus otherwise back to where it was, if available
                    returnDoc = activeTextEditor.document;
                    returnViewColumn = activeTextEditor.viewColumn;
                }
            }
            // Move focus temporarily to subdocument. Try to minimize time for the focus to be in wrong doc as the
            // user may be typing.
            await vscode.window.showTextDocument(subdoc, subeditorViewColumn, /*preserveFocus*/ false);
            // Artificial delay, to prevent "TextEditor disposed" warning (in Extension Development Host only).
            await shortDelay();
            if (vscode.window.activeTextEditor && vscode.window.activeTextEditor.document === subdoc) {
                await vscode.commands.executeCommand('workbench.action.revertAndCloseActiveEditor');
            }
            await shortDelay();
            // May need a bit longer delay for larger documents in some environments, as the revealed editor is initializing?
            // Get rid of these when VS Code race conditions are sorted out and focus is shifted
            // reliably to original document on larger subdocuments.
            // await new Promise(resolve => {
            //     setTimeout(() => {
            //         resolve();
            //     }, 100);
            // });
            // Move focus back to where it was, if available
            if (returnDoc && returnViewColumn) {
                await vscode.window.showTextDocument(
                    returnDoc, returnViewColumn, /*preserveFocus*/ false
                );
                // Artificial delay, to prevent "TextEditor disposed" warning (in Extension Development Host only).
                await shortDelay();
                if (returnPos) {
                    if (vscode.window.activeTextEditor && vscode.window.activeTextEditor.document === doc) {
                        await moveActiveCursorTo(returnPos);
                        // Don't center viewport for now, until line sync is possible
                        if (vscode.window.activeTextEditor) {
                            // await vscode.commands.executeCommand('revealLine', {
                            //     lineNumber: vscode.window.activeTextEditor.selection.active.line,
                            //     at: 'center'
                            // });
                            // await vscode.commands.executeCommand('editorScroll', {
                            //     to: 'down',
                            //     revealCursor: true,
                            // });
                            await shortDelay();
                        }
                    }
                }
            }
        } catch (err) {
            if (DEBUG) {
                console.error('DISPOSE ERR %s', err && err.stack || err);
            }
        }
    }
}
// We are ready, update document disposer to the proper one
activeDocuments.set(doc, { subdoc, closeSubdocumentWithReason });
}
}
/**
 * Moves the active editor's cursor to the given position using the built-in
 * "cursorMove" commands: one vertical move, then iterated horizontal moves
 * (horizontal moves can be capped at wrapped-line boundaries, so one command
 * is not always enough).
 */
async function moveActiveCursorTo(targetPosition: vscode.Position) {
    const activeEditor = vscode.window.activeTextEditor;
    if (!activeEditor || !activeEditor.document) {
        return;
    }
    const target = activeEditor.document.validatePosition(targetPosition);

    // Vertical movement in a single command (skipped when already on the right line).
    const lineDelta = target.line - activeEditor.selection.active.line;
    if (lineDelta) {
        await vscode.commands.executeCommand('cursorMove', {
            to: 'down',
            by: 'line',
            value: lineDelta
        });
    }

    // Horizontal movement, iterated with an upper bound in case the target
    // cannot be reached for some reason.
    let charDelta = target.character - activeEditor.selection.active.character;
    let iteration = 0;
    while (charDelta && iteration < 100) {
        if (DEBUG && iteration === 90) {
            console.warn('moveActiveCursorTo too many iterations, giving up.');
        }
        // Note: Revisit this in case VS Code behavior changes in the future.
        await vscode.commands.executeCommand('cursorMove', {
            to: 'left', // 'left' works better than 'right' when going across wrapped lines for some reason
            by: 'character',
            value: -charDelta // Capped at wrapped line start and end for some reason? So iterate when needed.
        });
        charDelta = target.character - activeEditor.selection.active.character;
        iteration++;
    }
}
// Yields for one macrotask tick (setTimeout 0), letting the editor settle between
// focus/edit operations. A microtask-level deferral (setImmediate) proved too short
// for the extension api implementation.
// There should be a ping of some sort, or should this delay be configurable, to please slow environments?
async function shortDelay() {
    await new Promise<void>(resolve => setTimeout(resolve, 0));
}
// Cleanup on exit, to avoid stale editors on reload. Earlier this wouldn't work, and still cannot be tested on Extension
// Development Host, but now seems to usually work ok (at least something clears the editors).
export async function deactivate(_context: vscode.ExtensionContext) {
try {
for (let handle of activeDocuments.values()) {
await handle.closeSubdocumentWithReason('Extension deactivated. This virtual document can be closed.');
}
} catch (err) {
if (DEBUG) {
console.error('DEACTIVATE error: %s', err && err.stack || err);
}
}
} | the_stack |
module fng.services {
/**
* Operations on a whole record
*
* All methods should be state-less
*
*/
/*@ngInject*/
export function recordHandler($location, $window, $filter, $timeout, routingService, cssFrameworkService, SubmissionsService, SchemasService): fng.IRecordHandler {
// TODO: Put this in a service
// Generates a 24-hex-char Mongo-style ObjectId: 8 hex chars of unix-time seconds
// followed by 16 random hex digits. `rnd` floors its argument and renders it in
// hexadecimal; it is a default parameter so it can be injected for testing.
const makeMongoId = (rnd = (r16: number) => Math.floor(r16).toString(16)) => {
    let hex = rnd(Date.now() / 1000);
    for (let i = 0; i < 16; i++) {
        hex += rnd(Math.random() * 16);
    }
    return hex;
};
// Ignores the benign dismissal reasons a modal dialog rejects with when the user
// simply cancels it; any other rejection reason is rethrown unchanged.
function _handleCancel(resp: string) {
    const benignDismissals = ["cancel", "backdrop click", "escape key press"];
    if (!benignDismissals.includes(resp)) {
        throw resp;
    }
}
// Builds a DOM-safe element id for a form control: use the instance's own id,
// or fall back to "f_" + name; dots become underscores; the suffix is appended.
var suffixCleanId = function suffixCleanId(inst, suffix) {
    const base = inst.id ? inst.id : "f_" + inst.name;
    return base.replace(/\./g, "_") + suffix;
};
// Walks through subdocs to find the required key of a dotted path,
// for instance walkTree(master, 'address.street.number', element).
// Called by getData and setData.
// `element` is used when accessing in the context of an input, as the id (like exams-2-grader)
// gives us the element of an array (one level down only for now). Leaving element blank returns the whole array.
// `insertIntermediateObjects` creates missing intermediate objects along the path (used when setting a value).
// Returns { lastObject, key }: the object (or array of objects) holding the leaf plus the leaf key,
// with key === undefined when the path could not be fully resolved.
var walkTree = function(object, fieldname, element? , insertIntermediateObjects = false) {
    var parts = fieldname.split("."),
        higherLevels = parts.length - 1,
        workingRec = object;
    for (var i = 0; i < higherLevels; i++) {
        if (!workingRec) {
            throw new Error(`walkTree failed: Object = ${object}, fieldname = ${fieldname}, i = ${i}`);
        }
        if (angular.isArray(workingRec)) {
            // Fan out: collect this path segment from every member of the array
            workingRec = _.map(workingRec, function(obj) {
                return obj[parts[i]];
            });
        } else {
            if (insertIntermediateObjects && !workingRec[parts[i]]) {
                workingRec[parts[i]] = {};
            }
            workingRec = workingRec[parts[i]];
        }
        if (angular.isArray(workingRec) && typeof element !== "undefined") {
            if (element.scope && typeof element.scope === "function") {
                // If we come across an array we need to find the correct position, if we have an element
                // (an angular element exposes its repeat index via scope().$index)
                workingRec = workingRec[element.scope().$index];
            } else if (typeof element === "number") {
                workingRec = workingRec[element];
            } else {
                throw new Error("Unsupported element type in walkTree " + fieldname);
            }
        }
        if (!workingRec) {
            // Path petered out (e.g. optional subdoc missing) - stop and report no key
            break;
        }
    }
    return {
        lastObject: workingRec,
        key: workingRec ? parts[higherLevels] : undefined
    };
};
// Writes (or deletes) a possibly-nested value on `object` at dotted path `fieldname`.
// A truthy `value` is assigned (element-wise when the walk landed on an array of
// parent objects); a falsy `value` deletes the leaf key instead. Intermediate
// objects are only created while setting.
var setData = function setData(object, fieldname, element?, value?) {
    const leaf = walkTree(object, fieldname, element, !!value);
    if (!leaf.lastObject || !leaf.key) {
        return;
    }
    if (!value) {
        // NOTE(review): falsy values (0, "", false) also take this delete path - confirm intended.
        delete leaf.lastObject[leaf.key];
    } else if (angular.isArray(leaf.lastObject)) {
        // Parallel assignment: value[i] goes to the i-th parent object
        for (let i = 0; i < leaf.lastObject.length; i++) {
            leaf.lastObject[i][leaf.key] = value[i];
        }
    } else {
        leaf.lastObject[leaf.key] = value;
    }
};
// Reads a possibly-nested value from `object` at dotted path `fieldname`.
// When the walk lands on an array of parent objects, returns the leaf value of
// every member; returns undefined when the path cannot be resolved.
var getData = function(object, fieldname, element?: any) {
    const leaf = walkTree(object, fieldname, element);
    if (!leaf.lastObject || !leaf.key) {
        return undefined;
    }
    return angular.isArray(leaf.lastObject)
        ? _.map(leaf.lastObject, function(obj) {
            return obj[leaf.key];
        })
        : leaf.lastObject[leaf.key];
};
// Update the master and the record with the lookup values, master first.
// Only runs when there is no top-level form, the form is pristine, or ignoreDirty
// is set - so user edits are not clobbered by late-arriving lookup data.
var updateRecordWithLookupValues = function(schemaElement, $scope, ctrlState: IFngCtrlState, ignoreDirty = false) {
if (!$scope.topLevelFormName || ($scope[$scope.topLevelFormName] && (ignoreDirty || $scope[$scope.topLevelFormName].$pristine))) {
updateObject(schemaElement.name, ctrlState.master, function(value) {
// Values that are already {id: ...} objects are left alone (already converted)
if (typeof value == "object" && value.id) {
return value;
} else {
return convertForeignKeys(schemaElement, value, $scope[suffixCleanId(schemaElement, "Options")], $scope[suffixCleanId(schemaElement, "_ids")]);
}
});
// Then copy the converted keys from master into record
var newVal = getData(ctrlState.master, schemaElement.name);
if (newVal) {
setData($scope.record, schemaElement.name, undefined, newVal);
}
}
};
// Split a dotted field name into [head] or [head, rest]: the first path
// segment, plus (when nesting is present) the remaining segments rejoined
// with dots. e.g. "a.b.c" -> ["a", "b.c"], "a" -> ["a"].
function splitFieldName(aFieldName) {
  const segments = aFieldName.split(".");
  const head = segments.shift();
  const result = [head];
  if (segments.length > 0) {
    result.push(segments.join("."));
  }
  return result;
}
// Read a list field from record, optionally post-processing it according to the
// matching listSchema entry's params: "timestamp" treats the first 8 hex chars of
// the value (a Mongo ObjectId's creation time) as a seconds timestamp; any other
// params value names a $scope.dataEventFunctions entry to call with the record.
// NOTE(review): $scope is a required parameter declared after a defaulted one -
// works in JS/TS but all callers must pass listSchema (even as null) explicitly.
var getListData = function getListData(record, fieldName, listSchema = null, $scope) {
let retVal = getData(record, fieldName) || "";
if (retVal && listSchema) {
// Convert list fields as per instructions in params (ideally should be the same as what is found in data_form getListFields
var schemaElm = _.find(listSchema, elm => (elm["name"] === fieldName));
if (schemaElm) {
switch (schemaElm["params"]) {
case undefined :
break;
case "timestamp" :
var timestamp = retVal.toString().substring(0, 8);
var date = new Date(parseInt(timestamp, 16) * 1000);
retVal = date.toLocaleDateString() + " " + date.toLocaleTimeString();
break;
default:
retVal = $scope.dataEventFunctions[schemaElm["params"]](record);
}
}
}
return retVal;
};
// Apply fn to the value at aFieldName within portion, recursing through
// nesting (via updateArrayOrObject) one dot-segment at a time. Before calling
// fn on an array value, prunes undefined/empty-object members.
function updateObject(aFieldName, portion, fn) {
var fieldDetails = splitFieldName(aFieldName);
if (fieldDetails.length > 1) {
// Nested name: descend into the head segment with the remainder
updateArrayOrObject(fieldDetails[1], portion[fieldDetails[0]], fn);
} else if (portion[fieldDetails[0]]) {
var theValue = portion[fieldDetails[0]];
// Strip out empty objects here (in case anyone added to an array and didn't populate it)
if (angular.isArray(theValue)) {
// Iterate backwards so splicing doesn't skip members
for (var i = theValue.length - 1; i >= 0; i--) {
var type = typeof theValue[i];
if (type === "undefined" || (type === "object" && Object.keys(theValue[i]).length === 0)) {
theValue.splice(i, 1);
}
}
}
portion[fieldDetails[0]] = fn(theValue);
}
}
// Apply updateObject at aFieldName to each member when portion is an array,
// or directly when it is a single object. Undefined portions are skipped.
function updateArrayOrObject(aFieldName, portion, fn) {
  if (portion === undefined) {
    return;
  }
  const targets = angular.isArray(portion) ? portion : [portion];
  for (const target of targets) {
    updateObject(aFieldName, target, fn);
  }
}
// Set up the lookup lists (value and id) on the scope for an internal lookup. Called by convertToAngularModel and $watch.
// options / ids may be scope property names (strings) or the arrays themselves.
// The arrays are emptied and refilled IN PLACE (length = 0) so any existing
// references to them - e.g. from select widgets - stay valid.
function setUpInternalLookupLists($scope: fng.IFormScope, options: string[] | string, ids: string[] | string, newVal, valueAttrib) {
let optionsArray = (typeof options === "string" ? $scope[options] : options);
let idsArray = (typeof ids === "string" ? $scope[ids] : ids);
optionsArray.length = 0;
idsArray.length = 0;
if (!!newVal && (newVal.length > 0)) {
newVal.forEach(a => {
let value = a[valueAttrib];
if (value && value.length > 0) {
optionsArray.push(value);
// Entries that don't yet have an id are assigned one so they can be referenced
if (!a._id) {
a._id = makeMongoId();
}
idsArray.push(a._id);
}
});
}
}
// An array of simple values needs each member wrapping as {x: value} when the
// schema asks for it explicitly (needsX), or when there is no custom directive
// and the field is a plain text input or an ids-less select.
var simpleArrayNeedsX = function(aSchema) {
  if (aSchema.needsX) {
    return true;
  }
  if (aSchema.directive) {
    return false;
  }
  return aSchema.type === "text" || (aSchema.type === "select" && !aSchema.ids);
};
/* Look up a conversion set up by a plugin. When schemaName is given the
   conversions for that (sub)schema are consulted; otherwise the top level. */
function getConversionObject(scope: any, entryName: string, schemaName?: string): any {
  const conversions = schemaName
    ? (getData(scope.conversions, schemaName) || {})
    : scope.conversions;
  return conversions[entryName];
}
// Convert mongodb json to what we use in the browser, for example {_id:'xxx', array:['item 1'], lookup:'012abcde'} to {_id:'xxx', array:[{x:'item 1'}], lookup:'List description for 012abcde'}
// This will currently only work for a single level of nesting (conversionObject will not go down further without amendment, and offset needs to be an array, at least)
// Recurses into sub-schemas; master/offset carry the top-level record and the
// array index of the current sub-document for async (fngajax) conversions.
var convertToAngularModel = function(schema: IFormInstruction[], anObject, prefixLength, $scope, schemaName?: string, master?, offset?: number) {
master = master || anObject;
for (var i = 0; i < schema.length; i++) {
var schemaEntry = schema[i];
var fieldName = schemaEntry.name.slice(prefixLength);
if (!fieldName.length) {
// prefix stripping consumed the whole name - fall back to the last segment
fieldName = schemaEntry.name.split('.').pop();
}
var fieldValue = getData(anObject, fieldName);
// ISO date strings from the wire become real Date objects
if (schemaEntry.intType === 'date' && typeof fieldValue === 'string') {
setData(anObject, fieldName, null, new Date(fieldValue))
}
if (schemaEntry.schema) {
// Sub-schema: recurse into each member of the sub-document array
if (fieldValue) {
for (var j = 0; j < fieldValue.length; j++) {
fieldValue[j] = convertToAngularModel(schemaEntry.schema, fieldValue[j], 1 + fieldName.length, $scope, fieldName, master, j);
}
}
} else {
if (schemaEntry.internalRef) {
setUpInternalLookupLists($scope, schemaEntry.options, schemaEntry.ids, master[schemaEntry.internalRef.property], schemaEntry.internalRef.value);
}
// Convert {array:['item 1']} to {array:[{x:'item 1'}]}
var thisField = getListData(anObject, fieldName, null, $scope);
if (
schemaEntry.array &&
simpleArrayNeedsX(schemaEntry) &&
thisField &&
!(thisField.length > 0 && thisField[0].x) // Don't keep on converting
) {
for (var k = 0; k < thisField.length; k++) {
thisField[k] = { x: thisField[k] };
}
}
// Convert {lookup:'012abcde'} to {lookup:'List description for 012abcde'}
var idList = $scope[suffixCleanId(schemaEntry, "_ids")];
let thisConversion: any;
if (fieldValue && idList && idList.length > 0) {
if (fieldName.indexOf(".") !== -1) {
throw new Error("Trying to directly assign to a nested field 332");
} // Not sure that this can happen, but put in a runtime test
if (
/*
Check we are starting with an ObjectId (ie not being called because of $watch on conversion, with a
converted value, which would cause an exception)
*/
fieldValue.toString().match(/^[a-f0-9]{24}$/) &&
/*
We are not suppressing conversions
*/
(!schemaEntry.internalRef || !schemaEntry.internalRef.noConvert)
) {
anObject[fieldName] = convertForeignKeys(schemaEntry, fieldValue, $scope[suffixCleanId(schemaEntry, "Options")], idList);
}
} else if (schemaEntry.select2) {
// Do nothing with these - handled elsewhere (and deprecated)
console.log("fng-select2 is deprecated - use fng-ui-select instead");
void (schemaEntry.select2);
} else if (fieldValue && (thisConversion = getConversionObject($scope, fieldName, schemaName)) &&
thisConversion.fngajax &&
!thisConversion.noconvert
) {
// Plugin-supplied async conversion: result arrives later via the callback
thisConversion.fngajax(fieldValue, schemaEntry, function(updateEntry, value) {
// Update the master and (preserving pristine if appropriate) the record
setData(master, updateEntry.name, offset, value);
preservePristine(angular.element("#" + updateEntry.id), function() {
setData($scope.record, updateEntry.name, offset, value);
});
});
}
}
}
return anObject;
};
// Convert foreign keys into their display for selects
// Called when the model is read and when the lookups are read
// No support for nested schemas here as it is called from convertToAngularModel which does that
// ids and values are parallel arrays; select2 fields get {id, text} objects,
// simple arrays get each member wrapped as {x: display} when needed.
function convertForeignKeys(schemaElement, input, values, ids) {
if (schemaElement.array) {
var returnArray = [];
var needsX = !schemaElement.directive || simpleArrayNeedsX(schemaElement);
for (var j = 0; j < input.length; j++) {
var val = input[j];
// Unwrap any existing {x: ...} wrapper before conversion
if (val && val.x) {
val = val.x;
}
var lookup = convertIdToListValue(val, ids, values, schemaElement.name);
if (needsX) {
lookup = { x: lookup };
}
returnArray.push(lookup);
}
return returnArray;
} else if (schemaElement.select2) {
return { id: input, text: convertIdToListValue(input, ids, values, schemaElement.name) };
} else {
return convertIdToListValue(input, ids, values, schemaElement.name);
}
}
// Convert display values back into their ids.
// Called when saving the model.
// No support for nested schemas here as it is called from convertToMongoModel which does that.
function convertToForeignKeys(schemaElement, input, values, ids) {
  if (!schemaElement.array) {
    return convertListValueToId(input, values, ids, schemaElement.name);
  }
  const converted = [];
  for (let j = 0; j < input.length; j++) {
    converted.push(convertListValueToId(input[j], values, ids, schemaElement.name));
  }
  return converted;
}
// Map a display value (possibly wrapped as {x: ...} or {text: ...}) back to
// its id via the parallel valuesArray/idsArray. A value that already looks
// like a 24-hex-char ObjectId is passed through untouched.
var convertListValueToId = function(value, valuesArray, idsArray, fname) {
  const textToConvert = _.isObject(value) ? ((<any>value).x || (<any>value).text) : value;
  if (textToConvert && textToConvert.match(/^[0-9a-f]{24}$/)) {
    return textToConvert; // a plugin probably added this
  }
  const pos = valuesArray.indexOf(textToConvert);
  if (pos === -1) {
    throw new Error("convertListValueToId: Invalid data - value " + textToConvert + " not found in " + valuesArray + " processing " + fname);
  }
  return idsArray[pos];
};
// stop the form being set to dirty when a fn is called
// Use when the record (and master) need to be updated by lookup values displayed asynchronously.
// Works by temporarily faking the model controller to dirty so angular's
// dirty-propagation is a no-op, then restoring $pristine afterwards.
var preservePristine = function preservePristine(element, fn) {
var modelController = element.inheritedData("$ngModelController");
var isClean = (modelController && modelController.$pristine);
if (isClean) {
// fake it to dirty here and reset after call to fn
modelController.$pristine = false;
}
fn();
if (isClean) {
modelController.$pristine = true;
}
};
// Map an id (or an {id: ...} wrapper, e.g. from select2) to its display value
// via the parallel idsArray/valuesArray. Tolerates being handed an
// already-converted display value by also searching valuesArray.
var convertIdToListValue = function convertIdToListValue(id, idsArray, valuesArray, fname) {
  if (typeof (id) === "object") {
    id = id.id;
  }
  let pos = idsArray.indexOf(id);
  if (pos === -1) {
    // This can get called twice - second time with converted value (not sure how atm) so protect against that...
    pos = valuesArray.indexOf(id);
    if (pos === -1) {
      throw new Error("convertIdToListValue: Invalid data - id " + id + " not found in " + idsArray + " processing " + fname);
    }
  }
  return valuesArray[pos];
};
// Build the browser-side model from a server record and install it as the
// master copy. Order matters: master must be assigned before $scope.cancel(),
// which copies master back into $scope.record (see decorateScope).
var processServerData = function processServerData(recordFromServer, $scope, ctrlState: IFngCtrlState) {
ctrlState.master = convertToAngularModel($scope.formSchema, recordFromServer, 0, $scope);
$scope.phase = "ready";
$scope.cancel();
};
// When an internal lookup list changes, migrate a looked-up value in place:
// locate val[attrib] in oldVals (matching on ref.value) and replace it with
// the entry at the same position in newVals. Only applies to edits of
// existing entries - the two lists must be the same non-zero length.
function convertOldToNew(ref, val, attrib, newVals, oldVals) {
  const current = val[attrib];
  if (!oldVals || oldVals.length === 0 || oldVals.length !== newVals.length || !current) {
    return;
  }
  const pos = oldVals.findIndex((entry) => entry[ref.value] === current);
  if (pos === -1) {
    return;
  }
  const replacement = newVals[pos][ref.value];
  if (replacement) {
    val[attrib] = replacement;
  }
}
// Main entry point after a schema arrives from the backend: generates the form,
// wires up watches on conversions and the record, then either reads the record
// ($scope.id set) or initialises a new one. listOnly requests skip form work.
function fillFormFromBackendCustomSchema(schema, $scope: fng.IFormScope, formGeneratorInstance, recordHandlerInstance, ctrlState: IFngCtrlState) {
var listOnly = (!$scope.id && !$scope.newRecord);
// passing null for formSchema parameter prevents all the work being done when we are just after the list data,
// but should be removed when/if formschemas are cached
formGeneratorInstance.handleSchema("Main " + $scope.modelName, schema, listOnly ? null : $scope.formSchema, $scope.listSchema, "", true, $scope, ctrlState);
// React to record changes: refresh internal lookup lists and re-read any
// list-lookup collections whose pointer ($-prefixed id) has changed.
function processLookupHandlers(newValue, oldValue) {
// If we have any internal lookups then update the references
$scope.internalLookups.forEach((lkp: fng.IFngInternalLookupHandlerInfo) => {
let newVal = newValue[lkp.ref.property];
let oldVal = oldValue[lkp.ref.property];
setUpInternalLookupLists($scope, lkp.lookupOptions, lkp.lookupIds, newVal, lkp.ref.value);
// now change the looked-up values that matched the old to the new
if ((newVal && newVal.length > 0) || (oldVal && oldVal.length > 0)) {
lkp.handlers.forEach((h) => {
if (h.possibleArray) {
let arr = getData($scope.record, h.possibleArray, null);
if (arr && arr.length > 0) {
arr.forEach(a => convertOldToNew(lkp.ref, a, h.lastPart, newVal, oldVal));
}
} else if (angular.isArray($scope.record[h.lastPart])) {
$scope.record[h.lastPart].forEach(a => {
convertOldToNew(lkp.ref, a, "x", newVal, oldVal);
});
} else {
convertOldToNew(lkp.ref, $scope.record, h.lastPart, newVal, oldVal);
}
});
}
});
// If we have any list lookups then update the references
$scope.listLookups.forEach((lkp: fng.IFngLookupListHandlerInfo) => {
// Pull the id out, unwrapping {id: ...} objects (e.g. from select widgets)
function extractIdVal(obj: any, idString: string): any {
let retVal = obj[idString];
if (retVal && retVal.id) {
retVal = retVal.id;
}
return retVal;
}
function blankListLookup(inst: IFormInstruction) {
setData($scope.record, inst.name);
}
let idString = lkp.ref.id.slice(1);
if (idString.includes(".")) {
throw new Error(`No support for nested list lookups yet - ${JSON.stringify(lkp.ref)}`);
}
let newVal = extractIdVal(newValue, idString);
let oldVal = extractIdVal(oldValue, idString);
if (newVal !== oldVal) {
// Pointer changed: clear the option/id lists before repopulating
lkp.handlers.forEach((h) => {
$scope[h.formInstructions.options].length = 0;
$scope[h.formInstructions.ids].length = 0;
});
if (newVal) {
SubmissionsService.readRecord(lkp.ref.collection, newVal).then(
(response) => {
lkp.handlers.forEach((h) => {
let optionsList = $scope[h.formInstructions.options];
let idList = $scope[h.formInstructions.ids];
let data = response.data[lkp.ref.property] || [];
// Insert options in sorted order, keeping idList parallel
for (var i = 0; i < data.length; i++) {
var option = data[i][lkp.ref.value];
var pos = _.sortedIndex(optionsList, option);
// handle dupes
if (optionsList[pos] === option) {
option = option + " (" + data[i]._id + ")";
pos = _.sortedIndex(optionsList, option);
}
optionsList.splice(pos, 0, option);
idList.splice(pos, 0, data[i]._id);
}
if (Object.keys(oldValue).length === 0) {
// Not sure how safe this is, but the record is fresh so I think it's OK...
updateRecordWithLookupValues(h.formInstructions, $scope, ctrlState, true);
} else {
// Here we are reacting to a change in the lookup pointer in the record.
// We need to blank our lookup field as it will not exist
blankListLookup(h.formInstructions)
}
});
}
);
} else {
lkp.handlers.forEach((h) => {
blankListLookup(h.formInstructions);
});
}
}
});
}
// Mark the form ready and run the lookup handlers against a fresh record
function notifyReady() {
$scope.phase = "ready";
$scope.cancel();
processLookupHandlers($scope.record, {});
}
if (listOnly) {
ctrlState.allowLocationChange = true;
} else {
var force = true;
if (!$scope.newRecord) {
// Re-convert the original server data whenever a plugin registers a conversion
$scope.dropConversionWatcher = $scope.$watchCollection("conversions", function(newValue, oldValue) {
if (newValue !== oldValue && $scope.originalData) {
processServerData($scope.originalData, $scope, ctrlState);
}
});
}
$scope.$watch("record", function(newValue, oldValue) {
if (newValue !== oldValue) {
if (Object.keys(oldValue).length > 0 && $scope.dropConversionWatcher) {
$scope.dropConversionWatcher(); // Don't want to convert changed data
$scope.dropConversionWatcher = null;
}
force = formGeneratorInstance.updateDataDependentDisplay(newValue, oldValue, force, $scope);
processLookupHandlers(newValue, oldValue);
}
}, true);
if ($scope.id) {
// Going to read a record
if (typeof $scope.dataEventFunctions.onBeforeRead === "function") {
$scope.dataEventFunctions.onBeforeRead($scope.id, function(err) {
if (err) {
$scope.showError(err);
} else {
recordHandlerInstance.readRecord($scope, ctrlState);
}
});
} else {
recordHandlerInstance.readRecord($scope, ctrlState);
}
} else {
// New record
ctrlState.allowLocationChange = false;
ctrlState.master = $scope.setDefaults($scope.formSchema);
// A record may be passed in directly or via the ?r= query parameter
let passedRecord = $scope.initialiseNewRecord || $location.$$search.r;
if (passedRecord) {
try {
Object.assign(ctrlState.master, JSON.parse(passedRecord));
if (!$scope["newRecordsStartPristine"]) {
// Although this is a new record we are making it dirty from the url so we need to $setDirty
$scope.$on("fngCancel", () => {
$timeout(() => {
if ($scope[$scope.topLevelFormName]) {
$scope[$scope.topLevelFormName].$setDirty();
}
}, 1000); // Has to fire after the setPristine timeout.
});
}
} catch (e) {
console.log("Error parsing specified record : " + e.message);
}
}
if (typeof $scope.dataEventFunctions.onInitialiseNewRecord === "function") {
console.log("onInitialiseNewRecord is deprecated - use the async version - onNewRecordInit(data,cb)");
$scope.dataEventFunctions.onInitialiseNewRecord(ctrlState.master);
}
if (typeof $scope.dataEventFunctions.onNewRecordInit === "function") {
$scope.dataEventFunctions.onNewRecordInit(ctrlState.master, function(err) {
if (err) {
$scope.showError(err);
} else {
notifyReady();
}
});
} else {
notifyReady();
}
}
}
}
// Build an HTTP error handler bound to $scope. Responses with a validation
// errors payload are formatted as an HTML list; anything else is shown raw.
// NOTE(review): status 200 is treated like 400 here - presumably the server
// can return 200 with an errors payload; confirm against the backend.
function handleError($scope: fng.IFormScope) {
return function(response: any): void {
if ([200, 400].indexOf(response.status) !== -1) {
var errorMessage = "";
for (var errorField in response.data.errors) {
if (response.data.errors.hasOwnProperty(errorField)) {
errorMessage += "<li><b>" + $filter("titleCase")(errorField) + ": </b> ";
switch (response.data.errors[errorField].type) {
case "enum" :
errorMessage += "You need to select from the list of values";
break;
default:
errorMessage += response.data.errors[errorField].message;
break;
}
errorMessage += "</li>";
}
}
if (errorMessage.length > 0) {
errorMessage = response.data.message + "<br /><ul>" + errorMessage + "</ul>";
} else {
// No field-level errors - fall back to whatever message the server sent
errorMessage = response.data.message || response.data.err || "Error! Sorry - No further details available.";
}
$scope.showError(errorMessage);
} else {
$scope.showError(response.status + " " + JSON.stringify(response.data));
}
};
}
// Common path for a record arriving from the server: fire the onAfterRead
// hook, keep the raw data (for re-conversion by the conversions watcher),
// then convert and install it via processServerData.
function handleIncomingData(data, $scope, ctrlState: IFngCtrlState) {
ctrlState.allowLocationChange = false;
$scope.phase = "reading";
if (typeof $scope.dataEventFunctions.onAfterRead === "function") {
$scope.dataEventFunctions.onAfterRead(data);
}
$scope.originalData = data;
processServerData(data, $scope, ctrlState);
}
// Register formInstructions against the lookup reference in lookups (internal
// or list lookups), creating the shared handler entry - with its shared
// options/ids arrays - on first sight of a given property/value pair.
// The scope's options/ids properties alias the shared arrays so every field
// bound to the same reference sees the same lists.
function addArrayLookupToLookupList($scope: IFormScope, formInstructions: IFormInstruction, ref: IBaseArrayLookupReference,
lookups: (IFngInternalLookupHandlerInfo | IFngLookupListHandlerInfo)[]) {
let nameElements = formInstructions.name.split(".");
let refHandler: IFngInternalLookupHandlerInfo | IFngLookupListHandlerInfo = lookups.find((lkp) => {
return lkp.ref.property === ref.property && lkp.ref.value === ref.value;
});
// lastPart is the leaf field name; possibleArray is the (possibly empty) parent path
let thisHandler: IFngSingleLookupHandler = {
formInstructions: formInstructions,
lastPart: nameElements.pop(),
possibleArray: nameElements.join(".")
};
if (!refHandler) {
refHandler = {
ref: ref,
lookupOptions: [],
lookupIds: [],
handlers: []
};
lookups.push(refHandler);
}
refHandler.handlers.push(thisHandler);
$scope[formInstructions.options] = refHandler.lookupOptions;
$scope[formInstructions.ids] = refHandler.lookupIds;
}
return {
// Fetch the record identified by $scope.id and feed it to handleIncomingData.
// The in-flight promise is kept on $scope.readingRecord so lookup loaders can
// chain on it. A 404 routes to the not-found page.
readRecord: function readRecord($scope, ctrlState: IFngCtrlState) {
$scope.readingRecord = SubmissionsService.readRecord($scope.modelName, $scope.id);
$scope.readingRecord
.then(function(response) {
let data: any = angular.copy(response.data);
handleIncomingData(data, $scope, ctrlState);
}, function(error) {
if (error.status === 404) {
$location.path("/404");
} else {
$scope.handleHttpError(error);
}
});
},
// Infinite-scroll support: fetch the next page of the (filtered) list and
// append it to $scope.recordList, guarding against duplicate page requests.
scrollTheList: function scrollTheList($scope) {
var pagesLoaded = $scope.pagesLoaded;
SubmissionsService.getPagedAndFilteredList($scope.modelName, {
aggregate: $location.$$search.a,
find: $location.$$search.f,
limit: $scope.pageSize,
skip: pagesLoaded * $scope.pageSize,
order: $location.$$search.o
})
.then(function(response) {
let data: any = response.data;
if (angular.isArray(data)) {
// I have seen an intermittent problem where a page is requested twice
if (pagesLoaded === $scope.pagesLoaded) {
$scope.pagesLoaded++;
$scope.recordList = $scope.recordList.concat(data);
} else {
console.log("DEBUG: infinite scroll component asked for a page twice - the model was " + $scope.modelName);
}
} else {
$scope.showError(data, "Invalid query");
}
}, $scope.handleHttpError);
},
// Delete a record then redirect per the routing service's onDelete rule.
// A 404 means someone already deleted it, which we treat as success.
deleteRecord: function deleteRecord(id, $scope, ctrlState) {
SubmissionsService.deleteRecord($scope.modelName, id)
.then(function() {
if (typeof $scope.dataEventFunctions.onAfterDelete === "function") {
$scope.dataEventFunctions.onAfterDelete(ctrlState.master);
}
routingService.redirectTo()("onDelete", $scope, $location);
}, (err) => {
if (err.status === 404) {
// Someone already deleted it
routingService.redirectTo()("onDelete", $scope, $location);
} else {
$scope.showError(`${err.statusText} (${err.status}) while deleting record<br />${err.data}`, 'Error deleting record');
}
});
},
// Save changes to an existing record. On success either redirects
// (options.redirect, optionally releasing the location-change guard via
// options.allowChange) or re-installs the returned record and cleans the form.
updateDocument: function updateDocument(dataToSave, options, $scope: fng.IFormScope, ctrlState: IFngCtrlState) {
$scope.phase = "updating";
SubmissionsService.updateRecord($scope.modelName, $scope.id, dataToSave)
.then(function(response) {
let data: any = response.data;
if (data.success !== false) {
if (typeof $scope.dataEventFunctions.onAfterUpdate === "function") {
$scope.dataEventFunctions.onAfterUpdate(data, ctrlState.master);
}
if (options.redirect) {
if (options.allowChange) {
ctrlState.allowLocationChange = true;
}
$window.location = options.redirect;
} else {
handleIncomingData(data, $scope, ctrlState);
$scope.setPristine(false);
}
} else {
$scope.showError(data);
}
}, $scope.handleHttpError);
},
// Create a new record. On success either redirects to options.redirect or
// navigates to the edit view of the newly created record.
createNew: function createNew(dataToSave, options, $scope: fng.IFormScope, ctrlState: IFngCtrlState) {
SubmissionsService.createRecord($scope.modelName, dataToSave)
.then(function(response) {
let data: any = response.data;
if (data.success !== false) {
ctrlState.allowLocationChange = true;
if (typeof $scope.dataEventFunctions.onAfterCreate === "function") {
$scope.dataEventFunctions.onAfterCreate(data);
}
if (options.redirect) {
$window.location = options.redirect;
} else {
routingService.redirectTo()("edit", $scope, $location, data._id);
}
} else {
$scope.showError(data);
}
}, $scope.handleHttpError);
},
getListData: getListData,
suffixCleanId: suffixCleanId,
setData: setData,
// Populate the options/ids scope arrays for a lookup into another collection:
// fetch that collection's schema (to learn its list fields), then fetch its
// records (filtered if schemaElement.filter is set) and build a sorted options
// list with a parallel id list. Once any in-flight record read completes, the
// record is updated with the freshly available display values.
setUpLookupOptions: function setUpLookupOptions(lookupCollection, schemaElement, $scope, ctrlState, handleSchema) {
var optionsList = $scope[schemaElement.options] = [];
var idList = $scope[schemaElement.ids] = [];
SchemasService.getSchema(lookupCollection)
.then(function(response) {
let data: any = response.data;
var listInstructions = [];
handleSchema("Lookup " + lookupCollection, data, null, listInstructions, "", false, $scope, ctrlState);
var dataRequest;
if (typeof schemaElement.filter !== "undefined" && schemaElement.filter) {
dataRequest = SubmissionsService.getPagedAndFilteredList(lookupCollection, schemaElement.filter);
} else {
dataRequest = SubmissionsService.getAll(lookupCollection);
}
dataRequest
.then(function(response) {
let data: any = angular.copy(response.data);
if (data) {
for (var i = 0; i < data.length; i++) {
// Build the display string by concatenating the list fields
var option = "";
for (var j = 0; j < listInstructions.length; j++) {
let thisVal: string = data[i][listInstructions[j].name];
option += thisVal ? thisVal + " " : "";
}
option = option.trim();
var pos = _.sortedIndex(optionsList, option);
// handle dupes (ideally people will use unique indexes to stop them but...)
if (optionsList[pos] === option) {
option = option + " (" + data[i]._id + ")";
pos = _.sortedIndex(optionsList, option);
}
optionsList.splice(pos, 0, option);
idList.splice(pos, 0, data[i]._id);
}
if ($scope.readingRecord) {
$scope.readingRecord
.then(() => {
updateRecordWithLookupValues(schemaElement, $scope, ctrlState);
})
}
}
});
});
},
// Populate the options/ids scope arrays for a lookup whose list lives inside
// a single document of another collection. A "$"-prefixed ref.id means the
// containing document's id comes from the record itself, so resolution is
// deferred to the record $watch (via listLookups); otherwise we read it now.
setUpLookupListOptions: function setUpLookupListOptions(ref: IFngLookupListReference, formInstructions: IFormInstruction, $scope: IFormScope, ctrlState: IFngCtrlState) {
let optionsList = $scope[formInstructions.options] = [];
let idList = $scope[formInstructions.ids] = [];
if (ref.id[0] === "$") {
// id of document that contains our lookup list comes from record, so we need to deal with in $watch by adding it to listLookups
addArrayLookupToLookupList($scope, formInstructions, ref, $scope.listLookups)
} else {
// we can do it now
SubmissionsService.readRecord(ref.collection, $scope.$eval(ref.id)).then(
(response) => {
let data = response.data[ref.property];
// Insert options in sorted order, keeping idList parallel
for (var i = 0; i < data.length; i++) {
var option = data[i][ref.value];
var pos = _.sortedIndex(optionsList, option);
// handle dupes
if (optionsList[pos] === option) {
option = option + " (" + data[i]._id + ")";
pos = _.sortedIndex(optionsList, option);
}
optionsList.splice(pos, 0, option);
idList.splice(pos, 0, data[i]._id);
}
updateRecordWithLookupValues(formInstructions, $scope, ctrlState);
}
);
}
},
// Register an internal lookup (list data held elsewhere in this same record)
// so the record $watch keeps its options/ids lists up to date.
handleInternalLookup: function handleInternalLookup($scope: IFormScope, formInstructions: IFormInstruction, ref: IFngInternalLookupReference) {
addArrayLookupToLookupList($scope, formInstructions, ref, $scope.internalLookups);
},
preservePristine: preservePristine,
// Reverse the process of convertToAngularModel: unwrap {x: ...} members of
// simple arrays, convert display values back to ids, and undo plugin
// conversions, producing a record suitable for sending to the server.
convertToMongoModel: function convertToMongoModel(schema, anObject, prefixLength, $scope, schemaName?: string) {
// Undo a single plugin conversion: fngajax conversions store the raw id,
// other conversions store the display text (possibly wrapped).
function convertLookup(lookup, conversionInst) {
var retVal;
if (conversionInst && conversionInst.fngajax) {
if (lookup) {
retVal = lookup.id || lookup;
}
} else if (lookup) {
retVal = lookup.text || (lookup.x ? lookup.x.text : lookup);
}
return retVal;
}
for (var i = 0; i < schema.length; i++) {
const schemaI = schema[i];
const fieldname = schemaI.name.slice(prefixLength);
const thisField = getListData(anObject, fieldname, null, $scope);
if (schemaI.schema) {
// Sub-schema: recurse into each sub-document
if (thisField) {
for (let j = 0; j < thisField.length; j++) {
thisField[j] = convertToMongoModel(schemaI.schema, thisField[j], 1 + fieldname.length, $scope, fieldname);
}
}
} else {
// Convert {array:[{x:'item 1'}]} to {array:['item 1']}
if (schemaI.array && simpleArrayNeedsX(schemaI) && thisField) {
for (let k = 0; k < thisField.length; k++) {
thisField[k] = thisField[k].x;
}
}
// Convert {lookup:'List description for 012abcde'} to {lookup:'012abcde'}
const idList = $scope[suffixCleanId(schemaI, "_ids")];
if (idList && idList.length > 0) {
updateObject(fieldname, anObject, function(value) {
return convertToForeignKeys(schemaI, value, $scope[suffixCleanId(schemaI, "Options")], idList);
});
} else {
let thisConversion = getConversionObject($scope, fieldname, schemaName);
if (thisConversion) {
const lookup = getData(anObject, fieldname, null);
let newVal;
if (schemaI.array) {
newVal = [];
if (lookup) {
for (let n = 0; n < lookup.length; n++) {
newVal[n] = convertLookup(lookup[n], thisConversion);
}
}
} else {
newVal = convertLookup(lookup, thisConversion);
}
setData(anObject, fieldname, null, newVal);
}
}
}
}
return anObject;
},
convertIdToListValue: convertIdToListValue,
handleError: handleError,
decorateScope: function decorateScope($scope: fng.IFormScope, $uibModal, recordHandlerInstance: fng.IRecordHandler, ctrlState: IFngCtrlState) {
$scope.handleHttpError = handleError($scope);
$scope.cancel = function() {
angular.copy(ctrlState.master, $scope.record);
$scope.$broadcast("fngCancel", $scope);
// Let call backs etc resolve in case they dirty form, then clean it
$timeout($scope.setPristine);
};
//listener for any child scopes to display messages
// pass like this:
// scope.$emit('showErrorMessage', {title: 'Your error Title', body: 'The body of the error message'});
// or
// scope.$broadcast('showErrorMessage', {title: 'Your error Title', body: 'The body of the error message'});
$scope.$on("showErrorMessage", function(event, args) {
if (!event.defaultPrevented) {
event.defaultPrevented = true;
$scope.showError(args.body, args.title);
}
});
$scope.showError = function(error: any, alertTitle?: string) {
$scope.alertTitle = alertTitle ? alertTitle : "Error!";
if (typeof error === "string") {
$scope.errorMessage = error;
} else if (!error) {
$scope.errorMessage = `An error occurred - that's all we got. Sorry.`;
} else if (error.message && typeof error.message === "string") {
$scope.errorMessage = error.message;
} else if (error.data && error.data.message) {
$scope.errorMessage = error.data.message;
} else {
try {
$scope.errorMessage = JSON.stringify(error);
} catch (e) {
$scope.errorMessage = error;
}
}
$scope.errorHideTimer = window.setTimeout(function() {
$scope.dismissError();
$scope.$digest();
}, 3500 + (1000 * ($scope.alertTitle + $scope.errorMessage).length / 50));
$scope.errorVisible = true;
window.setTimeout(() => {
$scope.$digest();
})
};
$scope.clearTimeout = function() {
if ($scope.errorHideTimer) {
clearTimeout($scope.errorHideTimer);
delete $scope.errorHideTimer;
}
};
// Hide the error alert and clear any pending auto-hide timer.
// Bug fix: this previously read `$scope.clearTimeout;` (no call), a no-op
// that left the timer to fire later and call dismissError/$digest again.
$scope.dismissError = function() {
$scope.clearTimeout();
$scope.errorVisible = false;
delete $scope.errorMessage;
delete $scope.alertTitle;
};
$scope.stickError = function() {
clearTimeout($scope.errorHideTimer);
};
$scope.prepareForSave = function(cb: (error: string, dataToSave?: any) => void): void {
//Convert the lookup values into ids
let dataToSave = recordHandlerInstance.convertToMongoModel($scope.formSchema, angular.copy($scope.record), 0, $scope);
if ($scope.id) {
if (typeof $scope.dataEventFunctions.onBeforeUpdate === "function") {
$scope.dataEventFunctions.onBeforeUpdate(dataToSave, ctrlState.master, function(err) {
if (err) {
cb(err);
} else {
cb(null, dataToSave);
}
});
} else {
cb(null, dataToSave);
}
} else {
if (typeof $scope.dataEventFunctions.onBeforeCreate === "function") {
$scope.dataEventFunctions.onBeforeCreate(dataToSave, function(err) {
if (err) {
cb(err);
} else {
cb(null, dataToSave);
}
});
} else {
cb(null, dataToSave);
}
}
};
$scope.save = function(options) {
options = options || {};
$scope.prepareForSave((err, dataToSave) => {
if (err) {
if (err !== "_update_handled_") {
$timeout(() => {
$scope.showError(err);
});
}
} else if ($scope.id) {
recordHandlerInstance.updateDocument(dataToSave, options, $scope, ctrlState);
} else {
recordHandlerInstance.createNew(dataToSave, options, $scope, ctrlState);
}
});
};
$scope.newClick = function() {
routingService.redirectTo()("new", $scope, $location);
};
$scope.$on("$locationChangeStart", function(event, next) {
// let changed = !$scope.isCancelDisabled();
// let curPath = window.location.href.split('/');
// let nextPath = next.split('/');
// let tabChangeOnly = true;
// let i = 0;
// do {
// i += 1;
// if (curPath[i] !== nextPath[i]) {
// tabChangeOnly = false;
// }
// } while (tabChangeOnly && curPath[i] !== 'edit');
// if (tabChangeOnly) {
// // let dataToReturn = recordHandlerInstance.convertToMongoModel($scope.formSchema, angular.copy($scope.record), 0, $scope);
// SubmissionsService.setUpForTabChange($scope.modelName, $scope.id, $scope.record, ctrlState.master, changed);
// } else if (!ctrlState.allowLocationChange && changed) {
if (!ctrlState.allowLocationChange && !$scope.isCancelDisabled()) {
event.preventDefault();
const modalInstance = $uibModal.open({
template:
`<div class="modal-header">
<h3>Record modified</h3>
</div>
<div class="modal-body">
<p>Would you like to save your changes?</p>
</div>
<div class="modal-footer">
<button class="btn btn-primary dlg-yes" ng-click="yes()">Yes</button>
<button class="btn btn-warning dlg-no" ng-click="no()">No</button>
<button class="btn dlg-cancel" ng-click="cancel()">Cancel</button>
</div>`,
controller: "SaveChangesModalCtrl",
backdrop: "static"
});
modalInstance.result
.then(function(result) {
if (result) {
$scope.save({ redirect: next, allowChange: true }); // save changes
} else {
ctrlState.allowLocationChange = true;
$window.location = next;
}
}
)
.catch(_handleCancel);
}
});
    // Handles the Delete button: optionally confirms via a modal, runs the
    // application's onBeforeDelete hook (if any), then deletes the record.
    $scope.deleteClick = function() {
      // Only a persisted record (one with an _id) can be deleted.
      if ($scope.record._id) {
        let confirmDelete: Promise<boolean>;
        if ($scope.unconfirmedDelete) {
          // Caller opted out of confirmation — proceed immediately.
          confirmDelete = Promise.resolve(true);
        } else {
          // Ask the user first; the modal resolves true/false, or rejects on dismiss.
          let modalInstance = $uibModal.open({
            template:
              `<div class="modal-header">
                    <h3>Delete Item</h3>
                </div>
                <div class="modal-body">
                    <p>Are you sure you want to delete this record?</p>
                </div>
                <div class="modal-footer">
                     <button class="btn btn-primary dlg-no" ng-click="cancel()">No</button>
                     <button class="btn btn-warning dlg-yes" ng-click="yes()">Yes</button>
                </div>`,
            controller: "SaveChangesModalCtrl",
            backdrop: "static"
          });
          confirmDelete = modalInstance.result;
        }
        confirmDelete.then(
          function(result) {
            function doTheDeletion() {
              recordHandlerInstance.deleteRecord($scope.id, $scope, ctrlState);
            }
            if (result) {
              if (typeof $scope.dataEventFunctions.onBeforeDelete === "function") {
                $scope.dataEventFunctions.onBeforeDelete(ctrlState.master, function(err) {
                  if (err) {
                    // "_delete_handled_" is a sentinel meaning the hook already
                    // reported/handled the problem itself — don't double-report.
                    if (err !== "_delete_handled_") {
                      $scope.showError(err);
                    }
                  } else {
                    doTheDeletion();
                  }
                });
              } else {
                // No hook registered — delete straight away.
                doTheDeletion();
              }
            }
          }
        )
        // Modal dismissal (Esc / backdrop) rejects the promise; treat as cancel.
        .catch(_handleCancel);
      }
    };
$scope.isCancelDisabled = function() {
if ($scope[$scope.topLevelFormName] && $scope[$scope.topLevelFormName].$pristine) {
return true;
} else if (typeof $scope.disableFunctions.isCancelDisabled === "function") {
return $scope.disableFunctions.isCancelDisabled($scope.record, ctrlState.master, $scope[$scope.topLevelFormName]);
} else {
return false;
}
};
    // Decides whether the Save button is disabled and, as a side effect,
    // builds $scope.whyDisabled — an HTML explanation shown to the user.
    // Returns true when the form is pristine, invalid, missing, or when an
    // application-level disableFunctions.isSaveDisabled override says so.
    $scope.isSaveDisabled = function() {
      $scope.whyDisabled = undefined;
      let pristine = false;
      // Walks the (possibly nested) form's controls and appends a line to
      // $scope.whyDisabled for every invalid control. NOTE: assumes
      // $scope.whyDisabled has already been seeded with a string by the
      // caller before this runs (see the $invalid branch below).
      function generateWhyDisabledMessage(form, subFormName?: string) {
        form.$$controls.forEach(c => {
          if (c.$invalid) {
            if (c.$$controls) {
              // nested form
              generateWhyDisabledMessage(c, c.$name)
            } else {
              $scope.whyDisabled += "<br /><strong>";
              if (subFormName) {
                $scope.whyDisabled += subFormName + ' ';
              }
              // Prefer the control's human-readable <label> text; the DOM
              // nesting depth differs between Bootstrap 2 and Bootstrap 3
              // markup, hence the two traversal variants below.
              if (
                cssFrameworkService.framework() === "bs2" &&
                c.$$element &&
                c.$$element.parent() &&
                c.$$element.parent().parent() &&
                c.$$element.parent().parent().find("label") &&
                c.$$element.parent().parent().find("label").text()
              ) {
                $scope.whyDisabled += c.$$element.parent().parent().find("label").text();
              } else if (
                cssFrameworkService.framework() === "bs3" &&
                c.$$element &&
                c.$$element.parent() &&
                c.$$element.parent().parent() &&
                c.$$element.parent().parent().parent() &&
                c.$$element.parent().parent().parent().find("label") &&
                c.$$element.parent().parent().parent().find("label").text()
              ) {
                $scope.whyDisabled += c.$$element.parent().parent().parent().find("label").text();
              } else {
                // Fall back to the control's internal name.
                $scope.whyDisabled += c.$name;
              }
              $scope.whyDisabled += "</strong>: ";
              // Translate angular validator keys into friendly messages.
              if (c.$error) {
                for (let type in c.$error) {
                  if (c.$error.hasOwnProperty(type)) {
                    switch (type) {
                      case "required":
                        $scope.whyDisabled += "Field missing required value. ";
                        break;
                      case "pattern":
                        $scope.whyDisabled += "Field does not match required pattern. ";
                        break;
                      default:
                        $scope.whyDisabled += type + ". ";
                    }
                  }
                }
              }
            }
          }
        });
      }
      if ($scope[$scope.topLevelFormName]) {
        if ($scope[$scope.topLevelFormName].$invalid) {
          // Seed the message before the recursive walk appends to it.
          $scope.whyDisabled = 'The form data is invalid:';
          generateWhyDisabledMessage($scope[$scope.topLevelFormName]);
        } else if ($scope[$scope.topLevelFormName].$pristine) {
          // Don't have disabled message - should be obvious from Cancel being disabled,
          // and the message comes up when the Save button is clicked.
          pristine = true;
        }
      } else {
        $scope.whyDisabled = "Top level form name invalid";
      }
      if (pristine || !!$scope.whyDisabled) {
        return true;
      } else if (typeof $scope.disableFunctions.isSaveDisabled !== "function") {
        return false;
      } else {
        // The override may return a boolean, or a string explaining why
        // saving is disabled (any truthy value disables Save).
        let retVal = $scope.disableFunctions.isSaveDisabled($scope.record, ctrlState.master, $scope[$scope.topLevelFormName]);
        if (typeof retVal === "string") {
          $scope.whyDisabled = retVal;
        } else {
          $scope.whyDisabled = "An application level user-specified function is inhibiting saving the record";
        }
        return !!retVal;
      }
    };
$scope.isDeleteDisabled = function() {
if (!$scope.id) {
return true;
} else if (typeof $scope.disableFunctions.isDeleteDisabled === "function") {
return $scope.disableFunctions.isDeleteDisabled($scope.record, ctrlState.master, $scope[$scope.topLevelFormName]);
} else {
return false;
}
};
$scope.isNewDisabled = function() {
if (typeof $scope.disableFunctions.isNewDisabled === "function") {
return $scope.disableFunctions.isNewDisabled($scope.record, ctrlState.master, $scope[$scope.topLevelFormName]);
} else {
return false;
}
};
$scope.setDefaults = function(formSchema: IFormInstruction[], base = ''): any {
const retVal = {};
formSchema.forEach(s => {
if (s.defaultValue !== undefined) {
retVal[s.name.replace(base, '')] = s.defaultValue;
}
});
return retVal;
};
$scope.getVal = function(expression, index) {
if (expression.indexOf("$index") === -1 || typeof index !== "undefined") {
expression = expression.replace(/\$index/g, index);
return $scope.$eval("record." + expression);
}
//else {
// Used to show error here, but angular seems to call before record is populated sometimes
// throw new Error('Invalid expression in getVal(): ' + expression);
//}
};
$scope.sortableOptions = {
update: function() {
if ($scope.topLevelFormName) {
$scope[$scope.topLevelFormName].$setDirty();
}
}
};
    // Wires up a custom lookup field: publishes its id/option arrays on the
    // relevant scopes and converts any id values already in the record into
    // their display (option) values, in place.
    $scope.setUpCustomLookupOptions = function (schemaElement: IFormInstruction, ids: string[], options: string[], baseScope: any): void {
      for (const scope of [$scope, baseScope]) {
        if (scope) {
          // need to be accessible on our scope for generation of the select options, and - for nested schemas -
          // on baseScope for the conversion back to ids done by prepareForSave
          scope[schemaElement.ids] = ids;
          scope[schemaElement.options] = options;
        }
      }
      let data = getData($scope.record, schemaElement.name);
      if (!data) {
        // Nothing stored for this field yet — nothing to convert.
        return;
      }
      if (angular.isArray(data)) {
        // Multi-valued lookup: convert each stored id in place.
        for (let i = 0; i < data.length; i++) {
          data[i] = convertIdToListValue(data[i], ids, options, schemaElement.name);
        }
      } else {
        data = convertIdToListValue(data, ids, options, schemaElement.name);
      }
      // Write the converted value(s) back onto the record.
      setData($scope.record, schemaElement.name, undefined, data);
    }
},
fillFormFromBackendCustomSchema: fillFormFromBackendCustomSchema,
fillFormWithBackendSchema: function fillFormWithBackendSchema($scope, formGeneratorInstance, recordHandlerInstance, ctrlState: IFngCtrlState) {
SchemasService.getSchema($scope.modelName, $scope.formName)
.then(function(response) {
let schema: any = response.data;
fillFormFromBackendCustomSchema(schema, $scope, formGeneratorInstance, recordHandlerInstance, ctrlState);
}, $scope.handleHttpError);
}
};
}
} | the_stack |
import {
NullVisitor,
ParsedVisitor,
ParseMethod,
ParseResult,
ParseIntersectionType,
ParseTypeAliasDeclaration,
ParseGeneric,
ParseInterfaceDeclaration,
ParseProperty,
ParseVariableDeclaration,
ParseClassDeclaration,
ParseUnionType,
ParseValueType,
ParsePrimitiveType,
ParseTypeLiteral,
ParseNode,
ParseDecorator,
ParseObjectLiteral,
ParseArrayLiteral,
Primitives,
ParseEmpty,
} from '../parsed-nodes';
import { flatten } from 'lodash';
import { NodeTags, mergeClassMembers } from '../utils';
/**
 * @description
 * The object properties that should be used from the
 * `@Component` decorator of an angular component.
 */
const COMPONENT_DECORATOR_ITEMS = ['selector', 'exportAs', 'inputs'];
/**
 * @description
 * A list of NodeTags for properties that should not be exported.
 * Those properties where only used to resolve types.
 */
const INTERNAL_MEMBERS: NodeTags[] = ['hasUnderscore', 'private', 'protected', 'internal', 'unrelated'];
/**
 * @description
 * Angular lifecycle hooks — framework plumbing rather than component state,
 * so `visitMethod` drops them from the resolved output.
 */
const ANGULAR_LIFE_CYCLE_METHODS = [
  'ngOnChanges',
  'ngOnInit',
  'ngDoCheck',
  'ngAfterContentInit',
  'ngAfterContentChecked',
  'ngAfterViewInit',
  'ngAfterViewChecked',
  'ngOnDestroy',
];
/**
 * @description
 * Resolve primitive values like booleans to true/false in case that
 * we need the possible variants.
 */
function resolvePrimitiveType(nodeType: ParsePrimitiveType): string[] | null {
  // Booleans are the only primitive we expand into their variants;
  // undefined and null carry no useful state, so everything else is null.
  return nodeType.type === Primitives.Boolean ? ['true', 'false'] : null;
}
/**
* @class
* @classdesc
* The meta resolver is responsible to take the parsed abstract syntax tree that was
* generated with the parsed nodes classes and resolves it to a more human readable object
* format. So any nodes that we do not need in the output will be dropped by returning null
* and the nodes where we need the information are getting visited and returned as custom
* objects or arrays.
* We try to reduce the complex AST to the angular components that can have properties as members.
* This Resolver needs the resolved references from the `ReferenceResolver` to generate a meaningful
* output.
*
* The output is going to be consumed by the `@sketchmine/app-builder` to generate the different variants
* of the angular components.
*/
export class MetaResolver extends NullVisitor implements ParsedVisitor {
/**
* @description
* returns a visited array of the literal values
*/
visitArrayLiteral(node: ParseArrayLiteral): any {
return this.visitAllWithParent(node.values, node);
}
/**
* @description
* Visits a class declaration and determines if it is an angular component
* or just a normal es6 class.
*/
visitClassDeclaration(node: ParseClassDeclaration): any {
// if the class is marked as design unrelated we don't care anymore
if (node.tags.includes('unrelated')) {
return undefined;
}
const members = this.visitAllWithParent(node.members, node);
const extending = this.visitWithParent(node.extending, node);
const mergedMembers = mergeClassMembers(extending, ...members);
// if it is not an angular component we do not need the class information and
// the decorator information we only want to know the extends and members
// of this class
if (!node.isAngularComponent() || node._parentNode.constructor !== ParseResult) {
// return the members array merged with the extending
return mergedMembers;
}
const decorator = this.visitWithParent(node.decorators[0], node);
const selector = decorator && decorator.selector
? decorator.selector
.split(',')
.map(s => s.replace(/[\`\s\'\"]/gm, ''))
: null;
const componentName = /.+?\/([^\/]+?).ts$/.exec(node.location.path);
// add the keys of the members to the allowed members set this set is used to check if
// only @Inputs and inputs from the component decorator are used as members.
const allowedMembers = new Set<string>(members.map(member => member.key));
if (decorator.inputs && decorator.inputs.length) {
for (let i = 0, max = decorator.inputs.length; i < max; i += 1) {
const input = decorator.inputs[i].replace(/[\`\s\'\"]/gm, '');
allowedMembers.add(input);
}
}
return {
name: node.name,
/** @example https://regex101.com/r/YduQlF/1 */
component: componentName && componentName.length && componentName.length > 0 ? componentName[1] : node.name,
selector,
angularComponent: node.isAngularComponent(),
decorator,
combinedVariants: !node.tags.includes('noCombinations'),
members: mergedMembers.filter(m => allowedMembers.has(m.key)),
};
}
/**
* @description
* visits a decorator, when it is a component decorator on a class declaration
* we know it is an angular component, otherwise we ignore it at the moment
* can be used later on to identify host listeners for click events!
*/
visitDecorator(node: ParseDecorator): any {
if (
node._parentNode &&
node._parentNode.constructor === ParseClassDeclaration &&
(node._parentNode as ParseClassDeclaration).isAngularComponent()
) {
const properties = {};
node.args.forEach((arg: ParseObjectLiteral) =>
arg.properties.forEach((prop: ParseProperty) => {
// if the
if (COMPONENT_DECORATOR_ITEMS.includes(prop.name)) {
properties[prop.name] = this.propertyVisitStrategy(prop);
}
}));
return properties;
}
// TODO: lukas.holzer visitDecorator in method and property as well
// currently not reachable! – we check for angular components in visit class declaration
// log.warning(`Only @Component decorators are handled yet!
// Not @${node.name}
// `);
}
/**
* @description
* returns an array of the values from the properties.
*/
visitObjectLiteral(node: ParseObjectLiteral): any {
return this.visitAll(node.properties);
}
/**
* @description
* visits the extending reference and the members of an interface declaration.
*/
visitInterfaceDeclaration(node: ParseInterfaceDeclaration): any {
const members = this.visitAllWithParent(node.members, node);
const extending = this.visitWithParent(node.extending, node);
if (!!extending) {
// return the merged and flattened values from the extending and members
// and filter all falsy values (undefined, null)
return flatten([extending, members]).filter(v => !!v);
}
return members;
}
/**
* @description
* return a flattened array of the visited types of an intersection type.
* An intersection type combines multiple types to one.
*/
visitIntersectionType(node: ParseIntersectionType): any {
const types = this.visitAllWithParent(node.types, node);
return flatten(types);
}
/**
* @description
* visits the method and if the parent node is a class it will return undefined
* if the methods parent is not a class declaration we want to know the return type
* in case that the method is used to assign data.
*
* @todo lukas.holzer@dynatrace.com check for method parameters later
* if there is the need to call methods on a component to get a state!
*/
visitMethod(node: ParseMethod): any {
const returnType = this.visitWithParent(node.returnType, node);
// if the parent node is not a class declaration we do not need the parameters only
// the return type.
// if the parent type is a property then the property was a method like
// `compareWith(fn: (v1: T, v2: T) => boolean)` – In this case we want to know that the property is a method!
if (
node._parentNode &&
node._parentNode.constructor !== ParseClassDeclaration &&
node._parentNode.constructor !== ParseProperty
) {
return returnType;
}
const isLifeCycleHook = ANGULAR_LIFE_CYCLE_METHODS.includes(node.name);
const isInternal = node.tags.some((tag: NodeTags) =>
INTERNAL_MEMBERS.includes(tag));
// if the method is private or internal or an angular life cycle hook we can skip
// it in the resulting object.
if (isInternal || isLifeCycleHook) {
return;
}
return {
type: 'method',
key: node.name,
parameters: this.visitAllWithParent(node.parameters, node),
returnType,
};
}
/**
* @description
* returns a visited array of all values of a union type
*/
visitUnionType(node: ParseUnionType): any {
return this.visitAllWithParent(node.types, node);
}
visitProperty(node: ParseProperty): any {
// we only want to parse @Input's of an Angular component
// so when a property is a child of a class declaration we need to
// check if it has an input decorator if it is not we can ignore it
if (
node._parentNode &&
node._parentNode.constructor === ParseClassDeclaration &&
(<ParseClassDeclaration>node._parentNode).isAngularComponent() &&
!node.isAngularInput()
) {
return;
}
// If a property includes some tags like private or internal we do not want
// to use this properties so return undefined instead.
const isInternal = node.tags.some((tag: NodeTags) =>
INTERNAL_MEMBERS.includes(tag));
const value = this.propertyVisitStrategy(node as ParseProperty);
if (isInternal || !value) {
return;
}
const propertyValue = Array.isArray(value) ? value : [value];
// only string values are allowed in the property Values array every value has to be escaped as string!
if (typeof propertyValue[0] !== 'string') {
return;
}
return {
type: 'property',
key: node.name,
value: propertyValue,
};
}
visitPrimitiveType(node: ParsePrimitiveType) {
return resolvePrimitiveType(node);
}
visitTypeLiteral(node: ParseTypeLiteral) {
return this.visitAllWithParent(node.members, node);
}
visitValueType(node: ParseValueType) {
if (typeof node.value === 'number') {
return `${node.value}`;
}
// if we have an empty string return undefined
if (
typeof node.value === 'string' &&
node.value.replace(/\"/gm, '').trim().length === 0
) {
return;
}
return node.value;
}
/**
* @description
* The visit result function is used to visit files. Every file has its ParseResult.
* In case that we want to get the information from our angular components we can filter
* all root nodes for class declarations and then for angular components.
*/
visitResult(node: ParseResult): any[] {
const rootNodes = [];
for (let i = 0, max = node.nodes.length; i < max; i += 1) {
const rootNode = node.nodes[i];
if (
rootNode &&
rootNode.constructor === ParseClassDeclaration &&
(<ParseClassDeclaration>rootNode).isAngularComponent()
) {
const visitedNode = this.visitWithParent(rootNode, node);
if (visitedNode) {
rootNodes.push(visitedNode);
}
}
}
return rootNodes;
}
/**
* @description
* returns the visited type of the type alias declaration.
*/
visitTypeAliasDeclaration(node: ParseTypeAliasDeclaration): any {
return this.visitWithParent(node.type, node);
}
/**
* @description
* returns the visited value of the variable declaration.
*/
visitVariableDeclaration(node: ParseVariableDeclaration): any {
return this.propertyVisitStrategy(node as ParseVariableDeclaration);
}
/**
* @description
* uses the visiting strategy `propertyVisitStrategy` to return the passed type or value.
*/
visitGeneric(node: ParseGeneric): any {
const constraint = this.visitWithParent(node.constraint, node);
const value = this.propertyVisitStrategy(node as ParseGeneric);
if (!!constraint) {
// return the merged and flattened values from the constraints and values
// and filter all falsy values (undefined, null)
return flatten([constraint, value]).filter(v => !!v);
}
// if we have no constraints return only the value.
return value;
}
/**
* @description
* Uses the visit function from the NullVisitor but modifies the node so that it
* has an internal property `_parentNode` on it to identify later in which context
* the node is used.
*/
visitWithParent(node: ParseNode, parent: ParseNode): any {
// if we have no node return undefined.
if (!node) { return; }
// add the internal _parentNode reference to the node itself.
node._parentNode = parent;
// call the visit function from the NullVisitor with the modified node.
return super.visit(node);
}
/**
* @description
* Uses the same principles like the `visitAll` function from the NullVisitor,
* with the only difference that it passes a parent node to the wrapping `visitWithParent` function.
*/
visitAllWithParent(nodes: ParseNode[], parent: ParseNode): any[] {
// if we have no nodes return an empty array like the `visitAll` function in the NullVisitor.
if (!nodes) { return []; }
const result = [];
for (let i = 0, max = nodes.length; i < max; i += 1) {
const node = nodes[i];
const visited = this.visitWithParent(node, parent);
if (visited) {
result.push(visited);
}
}
return result;
}
/**
* @description
* this function is used to define a strategy how values and types should be handled
* in case of which information should be prioritized. In case that we often have no type
* but maybe the value can be used as type.
*/
private propertyVisitStrategy(node: ParseProperty | ParseGeneric) {
if (!!node.type && node.type && node.type.constructor !== ParseEmpty) {
return this.visitWithParent(node.type, node);
}
if (!!node.value) {
return this.visitWithParent(node.value, node);
}
}
} | the_stack |
import * as React from "react"
import {
/*forceCenter,*/ forceSimulation,
forceX,
forceY,
/*forceCollide,*/ forceLink,
forceManyBody
} from "d3-force"
import { bboxCollide } from "d3-bboxCollide"
import { scaleLinear } from "d3-scale"
import { min, max } from "d3-array"
import AnnotationLabel from "react-annotation/lib/Types/AnnotationLabel"
import Frame from "./Frame"
import DownloadButton from "./DownloadButton"
import {
calculateMargin,
adjustedPositionSize,
TitleType
} from "./svg/frameFunctions"
import { pointOnArcAtAngle } from "./svg/pieceDrawing"
import {
drawNodes,
drawEdges,
topologicalSort,
hierarchicalRectNodeGenerator,
matrixNodeGenerator,
radialRectNodeGenerator,
chordNodeGenerator,
chordEdgeGenerator,
matrixEdgeGenerator,
arcEdgeGenerator,
sankeyNodeGenerator,
wordcloudNodeGenerator,
circleNodeGenerator,
areaLink,
ribbonLink,
circularAreaLink,
radialLabelGenerator,
dagreEdgeGenerator,
softStack
} from "./svg/networkDrawing"
import { stringToFn } from "./data/dataFunctions"
import {
networkNodeDownloadMapping,
networkEdgeDownloadMapping
} from "./downloadDataMapping"
import {
sankeyLeft,
sankeyRight,
sankeyCenter,
sankeyJustify,
sankeyCircular
} from "d3-sankey-circular"
import { chord, ribbon } from "d3-chord"
import { arc } from "d3-shape"
import {
tree,
hierarchy,
pack,
cluster,
treemap,
partition,
packSiblings
} from "d3-hierarchy"
import {
networkFrameChangeProps,
xyframeproptypes,
ordinalframeproptypes,
networkframeproptypes
} from "./constants/frame_props"
import {
htmlFrameHoverRule,
svgNodeRule,
svgReactAnnotationRule,
svgEncloseRule,
svgRectEncloseRule,
svgHullEncloseRule,
svgHighlightRule
} from "./annotationRules/networkframeRules"
import { desaturationLayer } from "./annotationRules/baseRules"
import { genericFunction } from "./generic_utilities/functions"
import pathBounds from "svg-path-bounding-box"
import { nodesEdgesFromHierarchy } from "./processing/network"
import { AnnotationType } from "./types/annotationTypes"
// Shared immutable empty array — reused to keep referential equality checks cheap.
const emptyArray = []

// Matrix layouts draw nodes (row/column headers) before edges; every other
// layout draws edges underneath nodes.
const matrixRenderOrder: ReadonlyArray<"nodes" | "edges"> = ["nodes", "edges"]
const generalRenderOrder: ReadonlyArray<"nodes" | "edges"> = ["edges", "nodes"]

// Template of every projected-node property the layouts may read or write;
// component: -99 marks "not yet assigned to a connected component".
const baseNodeProps = {
  id: undefined,
  degree: 0,
  inDegree: 0,
  outDegree: 0,
  x: 0,
  y: 0,
  x1: 0,
  x0: 0,
  y1: 0,
  y0: 0,
  height: 0,
  width: 0,
  radius: 0,
  r: 0,
  direction: undefined,
  textHeight: 0,
  textWidth: 0,
  fontSize: 0,
  scale: 1,
  nodeSize: 0,
  component: -99,
  shapeNode: false
}

// Defaults merged into whatever networkType settings the caller provides.
const baseNetworkSettings = {
  iterations: 500,
  hierarchicalNetwork: false
}

// Fresh graph bookkeeping used when no previous graph state exists.
const baseGraphSettings = {
  nodeHash: new Map(),
  edgeHash: new Map(),
  nodes: [],
  edges: [],
  hierarchicalNetwork: false,
  type: "force"
}

// Midpoint of an edge — used to position hover/annotation targets.
const basicMiddle = d => ({
  edge: d,
  x: (d.source.x + d.target.x) / 2,
  y: (d.source.y + d.target.y) / 2
})

// Per-layout-type strategy for where an edge's interaction point sits;
// sankey uses the gap between the source's right and target's left edge.
const edgePointHash = {
  sankey: d => ({
    edge: d,
    x: (d.source.x1 + d.target.x0) / 2,
    y: d.circularPathData
      ? d.circularPathData.verticalFullExtent
      : ((d.y0 + d.y1) / 2 + (d.y0 + d.y1) / 2) / 2
  }),
  force: basicMiddle,
  tree: basicMiddle,
  cluster: basicMiddle
}

// Maps NetworkFrame's hierarchical type names onto d3-hierarchy layouts.
const hierarchicalTypeHash = {
  dendrogram: tree,
  tree,
  circlepack: pack,
  cluster,
  treemap,
  partition
}

// Which hierarchical types support horizontal/vertical projection …
const hierarchicalProjectable = {
  partition: true,
  cluster: true,
  tree: true,
  dendrogram: true
}

// … and which support radial projection.
const radialProjectable = {
  partition: true,
  cluster: true,
  tree: true,
  dendrogram: true
}

/*
const customEdgeHashProject = {
  offset: glyphProject.offset,
  parallel: glyphProject.parallel
}
const customEdgeHashMutate = {
  particle: glyphMutate.particle
}
*/
/**
 * Picks the node-drawing generator for the configured network type.
 * A caller-supplied custom icon always wins; otherwise the type selects a
 * specialized generator, falling back to plain circles.
 */
function determineNodeIcon(baseCustomNodeIcon, networkSettings, size, nodes) {
  if (baseCustomNodeIcon) return baseCustomNodeIcon
  const center = [size[0] / 2, size[1] / 2]
  const { type, projection } = networkSettings
  if (type === "sankey") return sankeyNodeGenerator
  if (type === "partition" || type === "treemap") {
    // Radial projections of rect hierarchies need an arc-aware generator.
    return projection === "radial"
      ? radialRectNodeGenerator(size, center, networkSettings)
      : hierarchicalRectNodeGenerator
  }
  if (type === "circlepack") return circleNodeGenerator
  if (type === "wordcloud") return wordcloudNodeGenerator
  if (type === "chord") return chordNodeGenerator(size)
  if (type === "dagre") return hierarchicalRectNodeGenerator
  if (type === "matrix") return matrixNodeGenerator(size, nodes)
  // force / arc / everything else: simple circles.
  return circleNodeGenerator
}
/**
 * Picks the edge-drawing generator for the configured network type.
 * A caller-supplied custom icon always wins. Hierarchical layouts express
 * containment instead of explicit edges, so they render nothing.
 */
function determineEdgeIcon({
  baseCustomEdgeIcon,
  networkSettings,
  size,
  graph,
  nodes
}) {
  if (baseCustomEdgeIcon) return baseCustomEdgeIcon
  const { type } = networkSettings
  if (
    type === "partition" ||
    type === "treemap" ||
    type === "circlepack" ||
    type === "wordcloud"
  ) {
    return () => null
  }
  if (type === "chord") return chordEdgeGenerator(size)
  if (type === "matrix") return matrixEdgeGenerator(size, nodes)
  if (type === "arc") return arcEdgeGenerator(size)
  if (type === "dagre" && graph) {
    return dagreEdgeGenerator(graph.graph().rankdir)
  }
  return undefined
}
/**
 * Partitions `baseNodes` into connected components via breadth-first
 * traversal. Nodes missing from `hash` are collected into a catch-all
 * component ("0"); every other unvisited node (component === -99) opens a
 * new component that absorbs everything reachable from it.
 * Returns components sorted by node count, largest first.
 */
function breadthFirstCompontents(baseNodes, hash) {
  const componentHash = {
    "0": { componentNodes: [], componentEdges: [] }
  }
  const components = [componentHash["0"]]
  let componentID = 0
  traverseNodesBF(baseNodes, true)
  function traverseNodesBF(nodes, top) {
    nodes.forEach(node => {
      const hashNode = hash.get(node)
      if (!hashNode) {
        // Unknown nodes fall into the shared "0" component.
        componentHash["0"].componentNodes.push(node)
      } else if (hashNode.component === -99) {
        // Only top-level visits open a new component; recursive visits
        // join the component currently being built.
        if (top === true) {
          componentID++
          componentHash[componentID] = {
            componentNodes: [],
            componentEdges: []
          }
          components.push(componentHash[componentID])
        }
        hashNode.component = componentID
        componentHash[componentID].componentNodes.push(node)
        componentHash[componentID].componentEdges.push(...hashNode.edges)
        const traversibleNodes = [...hashNode.connectedNodes]
        // Bug fix: the second argument is the boolean `top` flag. The
        // original passed `hash` here, which only behaved correctly by
        // accident of the strict `top === true` comparison above.
        traverseNodesBF(traversibleNodes, false)
      }
    })
  }
  return components.sort(
    (a, b) => b.componentNodes.length - a.componentNodes.length
  )
}
// Names of the projected coordinate keys used by the shared Frame plumbing.
const projectedCoordinateNames = { y: "y", x: "x" }

// Maps the sankey `orient` setting onto d3-sankey node-alignment functions.
const sankeyOrientHash = {
  left: sankeyLeft,
  right: sankeyRight,
  center: sankeyCenter,
  justify: sankeyJustify
}

// Shared linear scales, re-domained/re-ranged on each layout pass.
const xScale = scaleLinear()
const yScale = scaleLinear()
/**
 * Builds an adjacency matrix from the edge hash: one row per source node,
 * one column per target node; a cell holds the edge width for the
 * source->target pair, or 0 when no such edge exists.
 */
const matrixify = ({ edgeHash, nodes, edgeWidthAccessor, nodeIDAccessor }) => {
  return nodes.map(sourceNode => {
    const sourceID = nodeIDAccessor(sourceNode)
    return nodes.map(targetNode => {
      const targetID = nodeIDAccessor(targetNode)
      const theEdge = edgeHash.get(`${sourceID}|${targetID}`)
      return theEdge ? edgeWidthAccessor(theEdge) : 0
    })
  })
}
import { GenericObject } from "./types/generalTypes"
import {
NodeType,
NetworkFrameProps,
NetworkFrameState,
NetworkSettingsType
} from "./types/networkTypes"
import { AnnotationLayerProps } from "./AnnotationLayer"
class NetworkFrame extends React.Component<
NetworkFrameProps,
NetworkFrameState
> {
  // Default props applied when the consumer omits them.
  static defaultProps = {
    annotations: [],
    foregroundGraphics: [],
    annotationSettings: {},
    size: [500, 500],
    className: "",
    name: "networkframe",
    networkType: { type: "force", iterations: 500 },
    // Hides the synthetic root node injected for soft-stacked hierarchies.
    filterRenderedNodes: (d: NodeType) => d.id !== "root-generated"
  }
  static displayName = "NetworkFrame"
  // Seeds every piece of derived state with an empty/neutral value; the
  // real values are computed by calculateNetworkFrame on mount/update.
  constructor(props: NetworkFrameProps) {
    super(props)
    this.state = {
      dataVersion: undefined,
      nodeData: [],
      edgeData: [],
      adjustedPosition: [],
      adjustedSize: [],
      backgroundGraphics: null,
      foregroundGraphics: null,
      projectedNodes: [],
      projectedEdges: [],
      renderNumber: 0,
      nodeLabelAnnotations: [],
      // "empty-start" marks that no layout has been computed yet.
      graphSettings: {
        type: "empty-start",
        nodes: [],
        edges: [],
        nodeHash: new Map(),
        edgeHash: new Map(),
        hierarchicalNetwork: false
      },
      edgeWidthAccessor: stringToFn<number>("weight"),
      legendSettings: {},
      margin: { top: 0, left: 0, right: 0, bottom: 0 },
      networkFrameRender: {},
      nodeIDAccessor: stringToFn<string>("id"),
      nodeSizeAccessor: genericFunction(5),
      overlay: [],
      projectedXYPoints: [],
      sourceAccessor: stringToFn<string | GenericObject>("source"),
      targetAccessor: stringToFn<string | GenericObject>("target"),
      title: { title: undefined }
    }
  }
componentWillUnmount() {
if (this.props.onUnmount) {
this.props.onUnmount(this.props, this.state)
}
}
  // NOTE(review): componentWillMount is a legacy React lifecycle method
  // (deprecated since React 16.3) — consider migrating to the constructor /
  // componentDidMount. Validates prop names against the frame's prop list
  // and runs the initial layout.
  componentWillMount() {
    Object.keys(this.props).forEach(d => {
      if (!networkframeproptypes[d]) {
        // Help users who picked the wrong frame type by naming the frame
        // their prop actually belongs to.
        if (xyframeproptypes[d]) {
          console.error(
            `${d} is an XYFrame prop are you sure you're using the right frame?`
          )
        } else if (ordinalframeproptypes[d]) {
          console.error(
            `${d} is an OrdinalFrame prop are you sure you're using the right frame?`
          )
        } else {
          console.error(`${d} is not a valid NetworkFrame prop`)
        }
      }
    })
    this.calculateNetworkFrame(this.props)
  }
  // NOTE(review): legacy lifecycle (deprecated since React 16.3).
  // Recomputes the layout only when it could actually change: an explicit
  // dataVersion bump, missing projections, a resize, or (when no
  // dataVersion is used) any change to a layout-relevant prop.
  componentWillReceiveProps(nextProps: NetworkFrameProps) {
    if (
      (this.state.dataVersion &&
        this.state.dataVersion !== nextProps.dataVersion) ||
      (!this.state.projectedNodes && !this.state.projectedEdges)
    ) {
      this.calculateNetworkFrame(nextProps)
    } else if (
      this.props.size[0] !== nextProps.size[0] ||
      this.props.size[1] !== nextProps.size[1] ||
      (!this.state.dataVersion &&
        // networkFrameChangeProps lists the props whose identity change
        // requires a re-layout.
        networkFrameChangeProps.find(d => {
          return this.props[d] !== nextProps[d]
        }))
    ) {
      this.calculateNetworkFrame(nextProps)
    }
  }
onNodeClick(d: Object, i: number) {
if (this.props.onNodeClick) {
this.props.onNodeClick(d, i)
}
}
onNodeEnter(d: Object, i: number) {
if (this.props.onNodeEnter) {
this.props.onNodeEnter(d, i)
}
}
onNodeOut(d: Object, i: number) {
if (this.props.onNodeOut) {
this.props.onNodeOut(d, i)
}
}
calculateNetworkFrame(currentProps: NetworkFrameProps) {
const {
graph,
nodes = Array.isArray(graph) || typeof graph === "function"
? emptyArray
: (graph && graph.nodes) || emptyArray,
edges = typeof graph === "function"
? emptyArray
: Array.isArray(graph)
? graph
: (graph && graph.edges) || emptyArray,
networkType,
size,
nodeStyle,
nodeClass,
canvasNodes,
edgeStyle,
edgeClass,
canvasEdges,
nodeRenderMode,
edgeRenderMode,
nodeLabels,
title: baseTitle,
margin: baseMargin,
hoverAnnotation,
customNodeIcon: baseCustomNodeIcon,
customEdgeIcon: baseCustomEdgeIcon,
filterRenderedNodes
} = currentProps
let { edgeType } = currentProps
let networkSettings: NetworkSettingsType
const nodeHierarchicalIDFill = {}
let networkSettingsKeys = ["type"]
if (typeof networkType === "string") {
networkSettings = {
type: networkType,
...baseNetworkSettings,
graphSettings: baseGraphSettings
}
} else {
if (networkType) networkSettingsKeys = Object.keys(networkType)
networkSettings = {
type: "force",
...baseNetworkSettings,
...networkType,
graphSettings: baseGraphSettings
}
}
if (
networkSettings.projection === "vertical" &&
networkSettings.type === "sankey"
) {
networkSettings.direction = "down"
}
networkSettingsKeys.push("height", "width")
const title =
typeof baseTitle === "object" &&
!React.isValidElement(baseTitle) &&
baseTitle !== null
? (baseTitle as TitleType)
: ({ title: baseTitle, orient: "top" } as TitleType)
const margin = calculateMargin({
margin: baseMargin,
title
})
const { adjustedPosition, adjustedSize } = adjustedPositionSize({
size,
margin
})
networkSettings.graphSettings.nodes = nodes
networkSettings.graphSettings.edges = edges
let { edgeHash, nodeHash } = networkSettings.graphSettings
const createPointLayer =
networkSettings.type === "treemap" ||
networkSettings.type === "partition" ||
networkSettings.type === "sankey"
const nodeIDAccessor = stringToFn<string>(
currentProps.nodeIDAccessor,
d => d.id
)
const sourceAccessor = stringToFn<string | GenericObject>(
currentProps.sourceAccessor,
d => d.source
)
const targetAccessor = stringToFn<string | GenericObject>(
currentProps.targetAccessor,
d => d.target
)
const nodeSizeAccessor: (args?: GenericObject) => number =
typeof currentProps.nodeSizeAccessor === "number"
? genericFunction(currentProps.nodeSizeAccessor)
: stringToFn<number>(currentProps.nodeSizeAccessor, d => d.r || 5)
const edgeWidthAccessor = stringToFn<number>(
currentProps.edgeWidthAccessor,
d => d.weight || 1
)
const nodeStyleFn = stringToFn<GenericObject>(nodeStyle, () => ({}), true)
const nodeClassFn = stringToFn<string>(nodeClass, () => "", true)
const nodeRenderModeFn = stringToFn<string | GenericObject>(
nodeRenderMode,
undefined,
true
)
const nodeCanvasRenderFn =
canvasNodes && stringToFn<boolean>(canvasNodes, undefined, true)
let { projectedNodes, projectedEdges } = this.state
const isHierarchical =
typeof networkSettings.type === "string" &&
hierarchicalTypeHash[networkSettings.type]
const changedData =
!this.state.projectedNodes ||
!this.state.projectedEdges ||
this.state.graphSettings.nodes !== nodes ||
this.state.graphSettings.edges !== edges ||
isHierarchical
if (networkSettings.type === "dagre") {
const dagreGraph = graph as {
nodes: Function
edges: Function
node: Function
edge: Function
}
const dagreNodeHash = {}
projectedNodes = dagreGraph.nodes().map(n => {
const baseNode = dagreGraph.node(n)
dagreNodeHash[n] = {
...baseNode,
x0: baseNode.x - baseNode.width / 2,
x1: baseNode.x + baseNode.width / 2,
y0: baseNode.y - baseNode.height / 2,
y1: baseNode.y + baseNode.height / 2,
id: n,
shapeNode: true,
sourceLinks: [],
targetLinks: []
}
return dagreNodeHash[n]
})
projectedEdges = dagreGraph.edges().map(e => {
const dagreEdge = dagreGraph.edge(e)
const baseEdge = {
...dagreEdge,
points: dagreEdge.points.map(d => ({ ...d }))
}
baseEdge.source = projectedNodes.find(p => p.id === e.v)
baseEdge.target = projectedNodes.find(p => p.id === e.w)
baseEdge.points.unshift({ x: baseEdge.source.x, y: baseEdge.source.y })
baseEdge.points.push({ x: baseEdge.target.x, y: baseEdge.target.y })
dagreNodeHash[e.v].targetLinks.push(baseEdge)
dagreNodeHash[e.w].sourceLinks.push(baseEdge)
return baseEdge
})
} else if (changedData) {
edgeHash = new Map()
nodeHash = new Map()
networkSettings.graphSettings.edgeHash = edgeHash
networkSettings.graphSettings.nodeHash = nodeHash
projectedNodes = []
projectedEdges = []
nodes.forEach(node => {
const projectedNode = { ...node }
const id = nodeIDAccessor(projectedNode)
nodeHash.set(id, projectedNode)
nodeHash.set(node, projectedNode)
projectedNodes.push(projectedNode)
projectedNode.id = id
projectedNode.inDegree = 0
projectedNode.outDegree = 0
projectedNode.degree = 0
})
let operationalEdges = edges
let baseEdges = edges
if (isHierarchical && Array.isArray(edges)) {
const createdHierarchicalData = softStack(
edges,
projectedNodes,
sourceAccessor,
targetAccessor,
nodeIDAccessor
)
if (createdHierarchicalData.isHierarchical) {
baseEdges = createdHierarchicalData.hierarchy
projectedNodes = []
} else {
console.error(
"You've sent an edge list that is not strictly hierarchical (there are nodes with multiple parents) defaulting to force-directed network layout"
)
networkSettings.type = "force"
}
}
if (!Array.isArray(baseEdges)) {
networkSettings.hierarchicalNetwork = true
const rootNode = hierarchy(baseEdges, networkSettings.hierarchyChildren)
rootNode.sum(networkSettings.hierarchySum || (d => d.value))
if (isHierarchical) {
const layout = networkSettings.layout || isHierarchical
const hierarchicalLayout = layout()
const networkSettingKeys = Object.keys(networkSettings)
if (
(networkSettings.type === "dendrogram" ||
networkSettings.type === "tree" ||
networkSettings.type === "cluster") &&
hierarchicalLayout.separation
) {
hierarchicalLayout.separation(
(a, b) =>
(nodeSizeAccessor({ ...a, ...a.data }) || 1) +
(networkSettings.nodePadding || 0) +
(nodeSizeAccessor({ ...b, ...b.data }) || 1)
)
}
networkSettingKeys.forEach(key => {
if (hierarchicalLayout[key]) {
hierarchicalLayout[key](networkSettings[key])
}
})
const layoutSize =
networkSettings.projection === "horizontal" && isHierarchical
? [adjustedSize[1], adjustedSize[0]]
: adjustedSize
if (!networkSettings.nodeSize && hierarchicalLayout.size) {
hierarchicalLayout.size(layoutSize)
}
hierarchicalLayout(rootNode)
}
operationalEdges = nodesEdgesFromHierarchy(rootNode, nodeIDAccessor)
.edges
}
baseNodeProps.shapeNode = createPointLayer
if (Array.isArray(operationalEdges)) {
operationalEdges.forEach(edge => {
const source = sourceAccessor(edge)
const target = targetAccessor(edge)
const sourceTarget = [source, target]
sourceTarget.forEach(nodeDirection => {
if (!nodeHash.get(nodeDirection)) {
const nodeObject: NodeType =
typeof nodeDirection === "object"
? {
...baseNodeProps,
...nodeDirection
}
: {
...baseNodeProps,
id: nodeDirection,
createdByFrame: true
}
const nodeIDValue = nodeObject.id || nodeIDAccessor(nodeObject)
nodeHierarchicalIDFill[nodeIDValue]
? (nodeHierarchicalIDFill[nodeIDValue] += 1)
: (nodeHierarchicalIDFill[nodeIDValue] = 1)
if (!nodeObject.id) {
const nodeSuffix =
nodeHierarchicalIDFill[nodeIDValue] === 1
? ""
: `-${nodeHierarchicalIDFill[nodeIDValue]}`
nodeObject.id = `${nodeIDValue}${nodeSuffix}`
}
nodeHash.set(nodeDirection, nodeObject)
projectedNodes.push(nodeObject)
}
})
const edgeWeight = edge.weight || 1
const sourceNode = nodeHash.get(source)
const targetNode = nodeHash.get(target)
targetNode.inDegree += edgeWeight
sourceNode.outDegree += edgeWeight
targetNode.degree += edgeWeight
sourceNode.degree += edgeWeight
const edgeKey = `${nodeIDAccessor(sourceNode) ||
source}|${nodeIDAccessor(targetNode) || target}`
const newEdge = Object.assign({}, edge, {
source: nodeHash.get(source),
target: nodeHash.get(target)
})
edgeHash.set(edgeKey, newEdge)
projectedEdges.push(newEdge)
})
}
} else {
edgeHash = new Map()
networkSettings.graphSettings.edgeHash = edgeHash
projectedEdges.forEach(edge => {
const edgeSource =
typeof edge.source === "string"
? edge.source
: nodeIDAccessor(edge.source)
const edgeTarget =
typeof edge.target === "string"
? edge.target
: nodeIDAccessor(edge.target)
const edgeKey = `${edgeSource}|${edgeTarget}`
edgeHash.set(edgeKey, edge)
})
}
const customNodeIcon = determineNodeIcon(
baseCustomNodeIcon,
networkSettings,
adjustedSize,
projectedNodes
)
const customEdgeIcon = determineEdgeIcon({
baseCustomEdgeIcon,
networkSettings,
size: adjustedSize,
nodes: projectedNodes,
graph
})
if (
(networkSettings.type === "sankey" ||
networkSettings.type === "flowchart") &&
topologicalSort(projectedNodes, projectedEdges) === null
) {
networkSettings.customSankey = sankeyCircular
}
networkSettings.width = size[0]
networkSettings.height = size[1]
let networkSettingsChanged = false
networkSettingsKeys.forEach(key => {
if (
key !== "edgeType" &&
key !== "graphSettings" &&
networkSettings[key] !== this.state.graphSettings[key]
) {
networkSettingsChanged = true
}
})
//Support bubble chart with circle pack and with force
if (networkSettings.type === "sankey") {
edgeType = d =>
d.circular
? circularAreaLink(d)
: edgeType === "angled"
? ribbonLink(d)
: areaLink(d)
} else if (isHierarchical) {
projectedNodes.forEach(node => {
if (createPointLayer) {
node.x = (node.x0 + node.x1) / 2
node.y = (node.y0 + node.y1) / 2
}
if (
typeof networkSettings.type === "string" &&
hierarchicalProjectable[networkSettings.type] &&
networkSettings.projection === "horizontal"
) {
const ox = node.x
node.x = node.y
node.y = ox
if (createPointLayer) {
const ox0 = node.x0
const ox1 = node.x1
node.x0 = node.y0
node.x1 = node.y1
node.y0 = ox0
node.y1 = ox1
}
} else if (
typeof networkSettings.type === "string" &&
radialProjectable[networkSettings.type] &&
networkSettings.projection === "radial"
) {
const radialPoint =
node.depth === 0
? [adjustedSize[0] / 2, adjustedSize[1] / 2]
: pointOnArcAtAngle(
[adjustedSize[0] / 2, adjustedSize[1] / 2],
node.x / adjustedSize[0],
node.y / 2
)
node.x = radialPoint[0]
node.y = radialPoint[1]
} else {
node.x = node.x
node.y = node.y
if (createPointLayer) {
node.x0 = node.x0
node.x1 = node.x1
node.y0 = node.y0
node.y1 = node.y1
}
}
})
}
if (
networkSettings.type !== "static" &&
(changedData || networkSettingsChanged)
) {
let components = [
{
componentNodes: projectedNodes,
componentEdges: projectedEdges
}
]
if (networkSettings.type === "chord") {
const radius = adjustedSize[1] / 2
const { groupWidth = 20, padAngle = 0.01, sortGroups } = networkSettings
const arcGenerator = arc()
.innerRadius(radius - groupWidth)
.outerRadius(radius)
const ribbonGenerator = ribbon().radius(radius - groupWidth)
const matrixifiedNetwork = matrixify({
edgeHash: edgeHash,
nodes: projectedNodes,
edgeWidthAccessor,
nodeIDAccessor
})
const chordLayout = chord().padAngle(padAngle)
if (sortGroups) {
chordLayout.sortGroups(sortGroups)
}
const chords = chordLayout(matrixifiedNetwork)
const groups = chords.groups
groups.forEach(group => {
const groupCentroid = arcGenerator.centroid(group)
const groupD = arcGenerator(group)
const groupNode = projectedNodes[group.index]
groupNode.d = groupD
groupNode.index = group.index
groupNode.x = groupCentroid[0] + adjustedSize[0] / 2
groupNode.y = groupCentroid[1] + adjustedSize[1] / 2
})
chords.forEach(generatedChord => {
const chordD = ribbonGenerator(generatedChord)
//this is incorrect should use edgeHash
const nodeSourceID = nodeIDAccessor(
projectedNodes[generatedChord.source.index]
)
const nodeTargetID = nodeIDAccessor(
projectedNodes[generatedChord.target.index]
)
const chordEdge = edgeHash.get(`${nodeSourceID}|${nodeTargetID}`)
chordEdge.d = chordD
const chordBounds = pathBounds(chordD)
chordEdge.x =
adjustedSize[0] / 2 + (chordBounds.x1 + chordBounds.x2) / 2
chordEdge.y =
adjustedSize[1] / 2 + (chordBounds.y1 + chordBounds.y2) / 2
})
} else if (
networkSettings.type === "sankey" ||
networkSettings.type === "flowchart"
) {
const {
orient = "center",
iterations = 100,
nodePadding,
nodePaddingRatio = nodePadding ? undefined : 0.5,
nodeWidth = networkSettings.type === "flowchart" ? 2 : 24,
customSankey,
direction = "right"
} = networkSettings
const sankeyOrient = sankeyOrientHash[orient]
const actualSankey = customSankey || sankeyCircular
let frameExtent = [[0, 0], adjustedSize]
if (
networkSettings.direction === "up" ||
networkSettings.direction === "down"
) {
frameExtent = [[0, 0], [adjustedSize[1], adjustedSize[0]]]
}
const frameSankey = actualSankey()
.extent(frameExtent)
.links(projectedEdges)
.nodes(projectedNodes)
.nodeAlign(sankeyOrient)
.nodeId(nodeIDAccessor)
.nodeWidth(nodeWidth)
.iterations(iterations)
if (frameSankey.nodePaddingRatio && nodePaddingRatio) {
frameSankey.nodePaddingRatio(nodePaddingRatio)
} else if (nodePadding) {
frameSankey.nodePadding(nodePadding)
}
frameSankey()
projectedNodes.forEach(d => {
d.height = d.y1 - d.y0
d.width = d.x1 - d.x0
d.x = d.x0 + d.width / 2
d.y = d.y0 + d.height / 2
d.radius = d.height / 2
d.direction = direction
})
projectedEdges.forEach(d => {
d.sankeyWidth = d.width
d.direction = direction
d.width = undefined
})
} else if (networkSettings.type === "wordcloud") {
const {
iterations = 500,
fontSize = 18,
rotate,
fontWeight = 300,
textAccessor = d => d.text
} = networkSettings
const fontWeightMod = (fontWeight / 300 - 1) / 5 + 1
const fontWidth = (fontSize / 1.5) * fontWeightMod
projectedNodes.forEach((d, i) => {
const calcualatedNodeSize = nodeSizeAccessor(d)
d._NWFText = textAccessor(d) || ""
const textWidth =
fontWidth * d._NWFText.length * calcualatedNodeSize * 1.4
const textHeight = fontSize * calcualatedNodeSize
d.textHeight = textHeight + 4
d.textWidth = textWidth + 4
d.rotate = rotate ? rotate(d, i) : 0
d.fontSize = fontSize * calcualatedNodeSize
d.fontWeight = fontWeight
d.radius = d.r = textWidth / 2
})
projectedNodes.sort((a, b) => b.textWidth - a.textWidth)
//bubblepack for initial position
packSiblings(projectedNodes)
// if (rotate) {
const collide = bboxCollide(d => {
if (d.rotate) {
return [
[-d.textHeight / 2, -d.textWidth / 2],
[d.textHeight / 2, d.textWidth / 2]
]
}
return [
[-d.textWidth / 2, -d.textHeight / 2],
[d.textWidth / 2, d.textHeight / 2]
]
}).iterations(1)
const xCenter = size[0] / 2
const yCenter = size[1] / 2
const simulation = forceSimulation(projectedNodes)
.velocityDecay(0.6)
.force("x", forceX(xCenter).strength(1.2))
.force("y", forceY(yCenter).strength(1.2))
.force("collide", collide)
simulation.stop()
for (let i = 0; i < iterations; ++i) simulation.tick()
// }
const xMin = min(
projectedNodes.map(
p => p.x - (p.rotate ? p.textHeight / 2 : p.textWidth / 2)
)
)
const xMax = max(
projectedNodes.map(
p => p.x + (p.rotate ? p.textHeight / 2 : p.textWidth / 2)
)
)
const yMin = min(
projectedNodes.map(
p => p.y - (p.rotate ? p.textWidth / 2 : p.textHeight / 2)
)
)
const yMax = max(
projectedNodes.map(
p => p.y + (p.rotate ? p.textWidth / 2 : p.textHeight / 2)
)
)
const projectionScaleX = scaleLinear()
.domain([xMin, xMax])
.range([0, adjustedSize[0]])
const projectionScaleY = scaleLinear()
.domain([yMin, yMax])
.range([0, adjustedSize[1]])
const xMod = adjustedSize[0] / xMax
const yMod = adjustedSize[1] / yMax
const sizeMod = Math.min(xMod, yMod) * 1.2
projectedNodes.forEach(node => {
node.x = projectionScaleX(node.x)
node.y = projectionScaleY(node.y)
node.fontSize = node.fontSize * sizeMod
node.scale = 1
node.radius = node.r = Math.max(
(node.textHeight / 4) * yMod,
(node.textWidth / 4) * xMod
)
// node.textHeight = projectionScaleY(node.textHeight)
// node.textWidth = projectionScaleY(node.textWidth)
})
} else if (networkSettings.type === "force") {
const {
iterations = 500,
edgeStrength = 0.1,
distanceMax = Infinity,
edgeDistance
} = networkSettings
const linkForce = forceLink().strength(d =>
Math.min(2.5, d.weight ? d.weight * edgeStrength : edgeStrength)
)
if (edgeDistance) {
linkForce.distance(edgeDistance)
}
const simulation =
networkSettings.simulation ||
forceSimulation().force(
"charge",
forceManyBody()
.distanceMax(distanceMax)
.strength(
networkSettings.forceManyBody ||
(d => -25 * nodeSizeAccessor(d))
)
)
// simulation.force("link", linkForce).nodes(projectedNodes)
simulation.nodes(projectedNodes)
const forceMod = adjustedSize[1] / adjustedSize[0]
if (!simulation.force("x")) {
simulation.force(
"x",
forceX(adjustedSize[0] / 2).strength(forceMod * 0.1)
)
}
if (!simulation.force("y")) {
simulation.force("y", forceY(adjustedSize[1] / 2).strength(0.1))
}
if (projectedEdges.length !== 0 && !simulation.force("link")) {
simulation.force("link", linkForce)
simulation.force("link").links(projectedEdges)
}
//reset alpha if it's too cold
if (simulation.alpha() < 0.1) {
simulation.alpha(1)
}
simulation.stop()
for (let i = 0; i < iterations; ++i) simulation.tick()
} else if (networkSettings.type === "motifs") {
const componentHash = new Map()
projectedEdges.forEach(edge => {
;[edge.source, edge.target].forEach(node => {
if (!componentHash.get(node)) {
componentHash.set(node, {
node,
component: -99,
connectedNodes: [],
edges: []
})
}
})
componentHash.get(edge.source).connectedNodes.push(edge.target)
componentHash.get(edge.target).connectedNodes.push(edge.source)
componentHash.get(edge.source).edges.push(edge)
})
components = breadthFirstCompontents(projectedNodes, componentHash)
const largestComponent = Math.max(
projectedNodes.length / 3,
components[0].componentNodes.length
)
const layoutSize = size[0] > size[1] ? size[1] : size[0]
const layoutDirection = size[0] > size[1] ? "horizontal" : "vertical"
const {
iterations = 500,
edgeStrength = 0.1,
edgeDistance,
padding = 0
} = networkSettings
let currentX = padding
let currentY = padding
components.forEach(({ componentNodes, componentEdges }) => {
const linkForce = forceLink().strength(d =>
Math.min(2.5, d.weight ? d.weight * edgeStrength : edgeStrength)
)
if (edgeDistance) {
linkForce.distance(edgeDistance)
}
const componentLayoutSize =
Math.max(componentNodes.length / largestComponent, 0.2) * layoutSize
const xBound = componentLayoutSize + currentX
const yBound = componentLayoutSize + currentY
if (layoutDirection === "horizontal") {
if (yBound > size[1]) {
currentX = componentLayoutSize + currentX + padding
currentY = componentLayoutSize + padding
} else {
currentY = componentLayoutSize + currentY + padding
}
} else {
if (xBound > size[0]) {
currentY = componentLayoutSize + currentY + padding
currentX = componentLayoutSize + padding
} else {
currentX = componentLayoutSize + currentX + padding
}
}
const xCenter = currentX - componentLayoutSize / 2
const yCenter = currentY - componentLayoutSize / 2
const simulation = forceSimulation()
.force(
"charge",
forceManyBody().strength(
networkSettings.forceManyBody ||
(d => -25 * nodeSizeAccessor(d))
)
)
.force("link", linkForce)
simulation
.force("x", forceX(xCenter))
.force("y", forceY(yCenter))
.nodes(componentNodes)
simulation.force("link").links(componentEdges)
simulation.stop()
for (let i = 0; i < iterations; ++i) simulation.tick()
const maxX = max(componentNodes.map(d => d.x))
const maxY = max(componentNodes.map(d => d.y))
const minX = min(componentNodes.map(d => d.x))
const minY = min(componentNodes.map(d => d.y))
const resetX = scaleLinear()
.domain([minX, maxX])
.range([currentX - componentLayoutSize, currentX - 20])
const resetY = scaleLinear()
.domain([minY, maxY])
.range([currentY - componentLayoutSize, currentY - 20])
componentNodes.forEach(node => {
node.x = resetX(node.x)
node.y = resetY(node.y)
})
})
} else if (networkSettings.type === "matrix") {
if (networkSettings.sort) {
projectedNodes = projectedNodes.sort(networkSettings.sort)
}
const gridSize = Math.min(...adjustedSize)
const stepSize = gridSize / (projectedNodes.length + 1)
projectedNodes.forEach((node, index) => {
node.x = 0
node.y = (index + 1) * stepSize
})
} else if (networkSettings.type === "arc") {
if (networkSettings.sort) {
projectedNodes = projectedNodes.sort(networkSettings.sort)
}
const stepSize = adjustedSize[0] / (projectedNodes.length + 2)
projectedNodes.forEach((node, index) => {
node.x = (index + 1) * stepSize
node.y = adjustedSize[1] / 2
})
} else if (typeof networkSettings.type === "function") {
networkSettings.type({
nodes: projectedNodes,
edges: projectedEdges
})
} else {
projectedNodes.forEach(node => {
node.x = node.x === undefined ? (node.x0 + node.x1) / 2 : node.x
node.y = node.y === undefined ? node.y0 : node.y
})
}
this.state.graphSettings.nodes = currentProps.nodes
this.state.graphSettings.edges = currentProps.edges
}
//filter out user-defined nodes
projectedNodes = projectedNodes.filter(filterRenderedNodes)
projectedEdges = projectedEdges.filter(
d =>
projectedNodes.indexOf(d.target) !== -1 &&
projectedNodes.indexOf(d.source) !== -1
)
if (networkSettings.direction === "flip") {
projectedNodes.forEach(node => {
// const ox = node.x
// const oy = node.y
node.x = adjustedSize[0] - node.x
node.y = adjustedSize[1] - node.y
})
} else if (
networkSettings.direction === "up" ||
networkSettings.direction === "down"
) {
const mod =
networkSettings.direction === "up"
? value => adjustedSize[1] - value
: value => value
projectedNodes.forEach(node => {
const ox = node.x
const ox0 = node.x0
const ox1 = node.x1
node.x = mod(node.y)
node.x0 = mod(node.y0)
node.x1 = mod(node.y1)
node.y = ox
node.y0 = ox0
node.y1 = ox1
})
} else if (networkSettings.direction === "left") {
projectedNodes.forEach(node => {
node.x = adjustedSize[0] - node.x
node.x0 = adjustedSize[0] - node.x0
node.x1 = adjustedSize[0] - node.x1
})
}
if (typeof networkSettings.zoom === "function") {
networkSettings.zoom(projectedNodes, adjustedSize)
} else if (
networkSettings.zoom !== false &&
networkSettings.type !== "matrix" &&
networkSettings.type !== "wordcloud" &&
networkSettings.type !== "chord" &&
networkSettings.type !== "sankey" &&
networkSettings.type !== "partition" &&
networkSettings.type !== "treemap" &&
networkSettings.type !== "circlepack" &&
networkSettings.type !== "dagre"
) {
// ZOOM SHOULD MAINTAIN ASPECT RATIO, ADD "stretch" to fill whole area
const xMin = min(projectedNodes.map(p => p.x - nodeSizeAccessor(p)))
const xMax = max(projectedNodes.map(p => p.x + nodeSizeAccessor(p)))
const yMin = min(projectedNodes.map(p => p.y - nodeSizeAccessor(p)))
const yMax = max(projectedNodes.map(p => p.y + nodeSizeAccessor(p)))
const xSize = Math.abs(xMax - xMin)
const ySize = Math.abs(yMax - yMin)
const networkAspectRatio = xSize / ySize
const baseAspectRatio = adjustedSize[0] / adjustedSize[1]
let yMod, xMod
if (networkSettings.zoom === "stretch") {
yMod = 0
xMod = 0
} else if (xSize > ySize) {
if (networkAspectRatio > baseAspectRatio) {
xMod = 0
yMod = (adjustedSize[1] - (adjustedSize[0] / xSize) * ySize) / 2
} else {
yMod = 0
xMod = (adjustedSize[0] - (adjustedSize[1] / ySize) * xSize) / 2
}
} else {
if (networkAspectRatio > baseAspectRatio) {
xMod = 0
yMod = (adjustedSize[1] - (adjustedSize[0] / xSize) * ySize) / 2
} else {
yMod = 0
xMod = (adjustedSize[0] - (adjustedSize[1] / ySize) * xSize) / 2
}
}
const projectionScaleX = scaleLinear()
.domain([xMin, xMax])
.range([xMod, adjustedSize[0] - xMod])
const projectionScaleY = scaleLinear()
.domain([yMin, yMax])
.range([yMod, adjustedSize[1] - yMod])
projectedNodes.forEach(node => {
node.x = projectionScaleX(node.x)
node.y = projectionScaleY(node.y)
})
} else if (
networkSettings.zoom !== false &&
networkSettings.projection !== "radial" &&
(networkSettings.type === "partition" ||
networkSettings.type === "treemap" ||
networkSettings.type === "dagre")
) {
const xMin = min(projectedNodes.map(p => p.x0))
const xMax = max(projectedNodes.map(p => p.x1))
const yMin = min(projectedNodes.map(p => p.y0))
const yMax = max(projectedNodes.map(p => p.y1))
const projectionScaleX = scaleLinear()
.domain([xMin, xMax])
.range([margin.left, adjustedSize[0] - margin.right])
const projectionScaleY = scaleLinear()
.domain([yMin, yMax])
.range([margin.top, adjustedSize[1] - margin.bottom])
projectedNodes.forEach(node => {
node.x = projectionScaleX(node.x)
node.y = projectionScaleY(node.y)
node.x0 = projectionScaleX(node.x0)
node.y0 = projectionScaleY(node.y0)
node.x1 = projectionScaleX(node.x1)
node.y1 = projectionScaleY(node.y1)
node.zoomedHeight = node.y1 - node.y0
node.zoomedWidth = node.x1 - node.x0
})
projectedEdges.forEach(edge => {
if (edge.points) {
edge.points.forEach(p => {
p.x = projectionScaleX(p.x)
p.y = projectionScaleY(p.y)
})
}
})
}
projectedNodes.forEach(node => {
node.nodeSize = nodeSizeAccessor(node)
})
projectedEdges.forEach(edge => {
edge.width = edgeWidthAccessor(edge)
})
let legendSettings
if (currentProps.legend) {
legendSettings = currentProps.legend
if (!legendSettings.legendGroups) {
///Something auto for networks
const legendGroups = [
{
styleFn: currentProps.nodeStyle,
type: "fill",
items: []
}
]
legendSettings.legendGroups = legendGroups
}
}
const networkFrameRender = {
edges: {
accessibleTransform: (data, i) => {
const edgeX = (data[i].source.x + data[i].target.x) / 2
const edgeY = (data[i].source.y + data[i].target.y) / 2
return { type: "frame-hover", ...data[i], x: edgeX, y: edgeY }
},
data: projectedEdges,
styleFn: stringToFn<GenericObject>(edgeStyle, () => ({}), true),
classFn: stringToFn<string>(edgeClass, () => "", true),
renderMode: stringToFn<string | GenericObject>(
edgeRenderMode,
undefined,
true
),
canvasRenderFn:
canvasEdges && stringToFn<boolean>(canvasEdges, undefined, true),
renderKeyFn: currentProps.edgeRenderKey
? currentProps.edgeRenderKey
: d => d._NWFEdgeKey || `${d.source.id}-${d.target.id}`,
behavior: drawEdges,
projection: networkSettings.projection,
type: edgeType,
customMark: customEdgeIcon,
networkType: networkSettings.type,
direction: networkSettings.direction
},
nodes: {
accessibleTransform: (data, i) => ({
type: "frame-hover",
...data[i],
...(data[i].data || {})
}),
data: projectedNodes,
styleFn: nodeStyleFn,
classFn: nodeClassFn,
renderMode: nodeRenderModeFn,
canvasRenderFn: nodeCanvasRenderFn,
customMark: customNodeIcon,
behavior: drawNodes,
renderKeyFn: currentProps.nodeRenderKey
}
}
const nodeLabelAnnotations = []
if (currentProps.nodeLabels && projectedNodes) {
projectedNodes.forEach((node, nodei) => {
const feasibleLabel =
nodeLabels && nodeLabels !== true && nodeLabels(node)
if (nodeLabels === true || feasibleLabel) {
const actualLabel =
networkSettings.projection === "radial" && node.depth !== 0
? radialLabelGenerator(
node,
nodei,
nodeLabels === true ? nodeIDAccessor : nodeLabels,
adjustedSize
)
: nodeLabels === true
? nodeIDAccessor(node, nodei)
: feasibleLabel
let nodeLabel
if (React.isValidElement(actualLabel)) {
nodeLabel = {
key: `node-label-${nodei}`,
type: "basic-node-label",
x: node.x,
y: node.y,
element: actualLabel
}
} else {
nodeLabel = {
key: `node-label-${nodei}`,
className: "node-label",
dx: 0,
dy: 0,
x: node.x,
y: node.y,
note: { label: actualLabel },
connector: { end: "none" },
type: AnnotationLabel,
subject: { radius: nodeSizeAccessor(node) + 2 }
}
}
nodeLabelAnnotations.push(nodeLabel)
}
})
}
let projectedXYPoints
const overlay = []
const areaBasedTypes = ["circlepack", "treemap", "partition", "chord"]
if (
(hoverAnnotation &&
areaBasedTypes.find(d => d === networkSettings.type)) ||
hoverAnnotation === "area"
) {
if (hoverAnnotation !== "edge") {
const renderedNodeOverlays = projectedNodes.map((d, i) => ({
overlayData: d,
...customNodeIcon({
d,
i,
transform: `translate(${d.x},${d.y})`,
styleFn: () => ({ fill: "pink", opacity: 0 })
}).props
}))
overlay.push(...renderedNodeOverlays)
}
if (hoverAnnotation !== "node") {
projectedEdges.forEach((d, i) => {
const generatedIcon = customEdgeIcon({
d,
i,
transform: `translate(${d.x},${d.y})`,
styleFn: () => ({ fill: "pink", opacity: 0 })
})
if (generatedIcon) {
overlay.push({
overlayData: {
...d,
x: d.x || (d.source.x + d.target.x) / 2,
y: d.y || (d.source.y + d.target.y) / 2,
edge: true
},
...generatedIcon.props
})
}
})
}
} else if (
hoverAnnotation === "edge" &&
typeof networkSettings.type === "string" &&
edgePointHash[networkSettings.type]
) {
projectedXYPoints = projectedEdges.map(
edgePointHash[networkSettings.type]
)
} else if (
Array.isArray(hoverAnnotation) ||
hoverAnnotation === true ||
hoverAnnotation === "node"
) {
projectedXYPoints = projectedNodes
if (changedData || networkSettingsChanged)
projectedXYPoints = [...projectedNodes]
} else if (
hoverAnnotation === "all" &&
typeof networkSettings.type === "string"
) {
projectedXYPoints = [
...projectedEdges.map(edgePointHash[networkSettings.type]),
...projectedNodes
]
}
this.setState({
adjustedPosition: adjustedPosition,
adjustedSize: adjustedSize,
backgroundGraphics: currentProps.backgroundGraphics,
foregroundGraphics: currentProps.foregroundGraphics,
title,
renderNumber: this.state.renderNumber + 1,
projectedNodes,
projectedEdges,
projectedXYPoints,
overlay,
nodeIDAccessor,
sourceAccessor,
targetAccessor,
nodeSizeAccessor,
edgeWidthAccessor,
margin,
legendSettings,
networkFrameRender,
nodeLabelAnnotations,
graphSettings: {
...networkSettings.graphSettings,
...networkSettings
}
})
}
// Default renderer for SVG-layer annotations.
// Hydrates the incoming annotation datum with its projected node/edge
// counterpart, gives user-supplied `svgAnnotationRules` first refusal,
// then dispatches on the annotation's `type`. Returns null for
// unrecognized types so the frame draws nothing.
defaultNetworkSVGRule = ({
  d: baseD,
  i,
  annotationLayer
}: {
  d: AnnotationType
  i: number
  annotationLayer: AnnotationLayerProps
}) => {
  const {
    projectedNodes,
    projectedEdges,
    nodeIDAccessor,
    nodeSizeAccessor,
    networkFrameRender,
    adjustedSize,
    adjustedPosition
  } = this.state
  //TODO PASS FRAME STYLE FNs TO HIGHLIGHT
  const { svgAnnotationRules } = this.props
  // Resolve the working datum:
  //  - annotations carrying `ids` are used as-is;
  //  - edge annotations are merged over the projected edge whose
  //    source/target ids match;
  //  - node annotations are merged over the projected node with the same id.
  // Spread order keeps baseD's own fields authoritative; a failed lookup
  // merges over `{}` so the annotation still renders with its own fields.
  const d = baseD.ids
    ? baseD
    : baseD.edge
    ? {
        ...(projectedEdges.find(
          p =>
            nodeIDAccessor(p.source) === nodeIDAccessor(baseD.source) &&
            nodeIDAccessor(p.target) === nodeIDAccessor(baseD.target)
        ) || {}),
        ...baseD
      }
    : {
        ...(projectedNodes.find(p => nodeIDAccessor(p) === baseD.id) || {}),
        ...baseD
      }
  const { voronoiHover } = annotationLayer
  // Custom rules win. Note only a literal null falls through to the
  // built-in handlers below; any other return value (including undefined)
  // is handed back to the frame as-is.
  if (svgAnnotationRules) {
    const customAnnotation = svgAnnotationRules({
      d,
      i,
      networkFrameProps: this.props,
      networkFrameState: this.state,
      nodes: projectedNodes,
      edges: projectedEdges,
      voronoiHover,
      screenCoordinates: [d.x, d.y],
      adjustedPosition,
      adjustedSize,
      annotationLayer
    })
    if (customAnnotation !== null) {
      return customAnnotation
    }
  }
  // Built-in annotation types, dispatched on d.type.
  if (d.type === "node") {
    // Circle/marker drawn around a single node.
    return svgNodeRule({
      d,
      i,
      nodeSizeAccessor
    })
  } else if (d.type === "desaturation-layer") {
    // Full-frame wash; style may be a function of (d, i) or a plain object.
    return desaturationLayer({
      style: d.style instanceof Function ? d.style(d, i) : d.style,
      size: adjustedSize,
      i,
      key: d.key
    })
  } else if (d.type === "basic-node-label") {
    // Pre-built React element (or raw label) positioned at the node.
    return (
      <g key={d.key || `basic-${i}`} transform={`translate(${d.x},${d.y})`}>
        {baseD.element || baseD.label}
      </g>
    )
  } else if (d.type === "react-annotation" || typeof d.type === "function") {
    // react-annotation components, including user-provided annotation types.
    return svgReactAnnotationRule({
      d,
      i,
      projectedNodes,
      nodeIDAccessor
    })
  } else if (d.type === "enclose") {
    // Circle enclosing a set of nodes referenced by d.ids.
    return svgEncloseRule({
      d,
      i,
      projectedNodes,
      nodeIDAccessor,
      nodeSizeAccessor
    })
  } else if (d.type === "enclose-rect") {
    // Rectangle enclosing a set of nodes.
    return svgRectEncloseRule({
      d,
      i,
      projectedNodes,
      nodeIDAccessor,
      nodeSizeAccessor
    })
  } else if (d.type === "enclose-hull") {
    // Convex hull around a set of nodes.
    return svgHullEncloseRule({
      d,
      i,
      projectedNodes,
      nodeIDAccessor,
      nodeSizeAccessor
    })
  } else if (d.type === "highlight") {
    // Re-draws the matched mark(s) with highlight styling.
    return svgHighlightRule({
      d,
      networkFrameRender
    })
  }
  return null
}
// Default renderer for HTML-layer annotations (tooltips).
// User-supplied `htmlAnnotationRules` take precedence; otherwise only
// "frame-hover" tooltips are handled here, and anything else yields null.
defaultNetworkHTMLRule = ({
  d: baseD,
  i,
  annotationLayer
}: {
  d: AnnotationType
  i: number
  annotationLayer: AnnotationLayerProps
}) => {
  const {
    tooltipContent,
    optimizeCustomTooltipPosition,
    size,
    useSpans,
    htmlAnnotationRules
  } = this.props
  const {
    projectedNodes,
    projectedEdges,
    nodeIDAccessor,
    adjustedSize,
    adjustedPosition
  } = this.state
  const { voronoiHover } = annotationLayer
  // Hydrate the annotation datum with its projected counterpart. The
  // annotation's own fields win on conflict, and a failed lookup merges
  // over an empty object so rendering still proceeds.
  let matchedDatum
  if (baseD.ids) {
    matchedDatum = baseD
  } else if (baseD.edge) {
    const projectedEdge = projectedEdges.find(
      p =>
        nodeIDAccessor(p.source) === nodeIDAccessor(baseD.source) &&
        nodeIDAccessor(p.target) === nodeIDAccessor(baseD.target)
    )
    matchedDatum = { ...(projectedEdge || {}), ...baseD }
  } else {
    const projectedNode = projectedNodes.find(
      p => nodeIDAccessor(p) === baseD.id
    )
    matchedDatum = { ...(projectedNode || {}), ...baseD }
  }
  // Custom rules short-circuit the default handling; only a literal null
  // return falls through to the built-in tooltip below.
  if (htmlAnnotationRules) {
    const customAnnotation = htmlAnnotationRules({
      d: matchedDatum,
      i,
      networkFrameProps: this.props,
      networkFrameState: this.state,
      nodes: projectedNodes,
      edges: projectedEdges,
      voronoiHover,
      screenCoordinates: [matchedDatum.x, matchedDatum.y],
      adjustedPosition,
      adjustedSize,
      annotationLayer
    })
    if (customAnnotation !== null) {
      return customAnnotation
    }
  }
  if (matchedDatum.type !== "frame-hover") {
    return null
  }
  return htmlFrameHoverRule({
    d: matchedDatum,
    i,
    tooltipContent,
    optimizeCustomTooltipPosition,
    useSpans,
    nodes: projectedNodes,
    edges: projectedEdges,
    nodeIDAccessor
  })
}
// Assembles the shared <Frame> with the network-specific render pipeline
// plus optional CSV download buttons, legend settings, hover overlay and
// node-label annotations, all computed earlier into component state.
render() {
  const {
    annotations,
    annotationSettings,
    className,
    customClickBehavior,
    customDoubleClickBehavior,
    customHoverBehavior,
    size,
    matte,
    hoverAnnotation,
    beforeElements,
    afterElements,
    interaction,
    disableContext,
    canvasPostProcess,
    baseMarkProps,
    useSpans,
    canvasNodes,
    canvasEdges,
    name,
    downloadFields,
    download,
    additionalDefs,
    // Matrix layouts use a dedicated draw order; everything else falls
    // back to the general ordering. Only applies when the prop is unset.
    renderOrder = this.state.graphSettings &&
    this.state.graphSettings.type === "matrix"
      ? matrixRenderOrder
      : generalRenderOrder
  } = this.props
  const {
    backgroundGraphics,
    foregroundGraphics,
    projectedXYPoints,
    margin,
    legendSettings,
    adjustedPosition,
    adjustedSize,
    networkFrameRender,
    nodeLabelAnnotations,
    overlay,
    projectedNodes,
    projectedEdges,
    title
  } = this.state
  // One CSV download button per non-empty collection: nodes first, then
  // edges. Filenames embed the frame name and the current timestamp.
  const downloadButton = []
  if (download && projectedNodes.length > 0) {
    downloadButton.push(
      <DownloadButton
        key="network-download-nodes"
        csvName={`${name}-${new Date().toJSON()}`}
        width={size[0]}
        label={"Download Node List"}
        data={networkNodeDownloadMapping({
          data: projectedNodes,
          fields: downloadFields
        })}
      />
    )
  }
  if (download && projectedEdges.length > 0) {
    downloadButton.push(
      <DownloadButton
        key="network-download-edges"
        csvName={`${name}-${new Date().toJSON()}`}
        width={size[0]}
        label={"Download Edge List"}
        data={networkEdgeDownloadMapping({
          data: projectedEdges,
          fields: downloadFields
        })}
      />
    )
  }
  // Pass the overlay only when it has content; otherwise leave undefined.
  let formattedOverlay
  if (overlay && overlay.length > 0) {
    formattedOverlay = overlay
  }
  // hoverAnnotation: arrays pass through untouched, anything else is
  // coerced to a boolean. Node-label annotations are appended after the
  // user's annotations so they draw on top.
  return (
    <Frame
      name="networkframe"
      renderPipeline={networkFrameRender}
      adjustedPosition={adjustedPosition}
      adjustedSize={adjustedSize}
      size={size}
      xScale={xScale}
      yScale={yScale}
      title={title}
      matte={matte}
      className={className}
      additionalDefs={additionalDefs}
      frameKey={"none"}
      projectedCoordinateNames={projectedCoordinateNames}
      defaultSVGRule={this.defaultNetworkSVGRule}
      defaultHTMLRule={this.defaultNetworkHTMLRule}
      hoverAnnotation={
        Array.isArray(hoverAnnotation) ? hoverAnnotation : !!hoverAnnotation
      }
      annotations={[...annotations, ...nodeLabelAnnotations]}
      annotationSettings={annotationSettings}
      legendSettings={legendSettings}
      interaction={interaction}
      customClickBehavior={customClickBehavior}
      customHoverBehavior={customHoverBehavior}
      customDoubleClickBehavior={customDoubleClickBehavior}
      points={projectedXYPoints}
      margin={margin}
      overlay={formattedOverlay}
      backgroundGraphics={backgroundGraphics}
      foregroundGraphics={foregroundGraphics}
      beforeElements={beforeElements}
      afterElements={afterElements}
      downloadButton={downloadButton}
      disableContext={disableContext}
      canvasPostProcess={canvasPostProcess}
      baseMarkProps={baseMarkProps}
      useSpans={!!useSpans}
      canvasRendering={!!(canvasNodes || canvasEdges)}
      renderOrder={renderOrder}
      disableCanvasInteraction={true}
    />
  )
}
}
export default NetworkFrame
import { Chart } from 'phaser3-rex-plugins/templates/ui/ui-components.js';
import { GoldGraphDisplayConfig, GraphDisplayConfig } from "~/data/config/overlayConfig";
import GoldEntry from '~/data/goldEntry';
import IngameScene from "~/scenes/IngameScene";
import ColorUtils from '~/util/ColorUtils';
import variables from '~/variables';
import { VisualElement } from "./VisualElement";
export default class GraphVisual extends VisualElement {
// Background is one of three mutually exclusive forms — flat rectangle,
// static image, or video — chosen by the overlay config (see constructor).
BackgroundRect: Phaser.GameObjects.Rectangle | null = null;
BackgroundImage: Phaser.GameObjects.Image | null = null;
BackgroundVideo: Phaser.GameObjects.Video | null = null;
// Two alternative masking strategies: an alpha-aware bitmap mask backed by
// MaskImage, or a plain rectangular geometry mask backed by MaskGeo. Only
// the pair matching cfg.Background.UseAlpha is ever initialized.
ImgMask!: Phaser.Display.Masks.BitmapMask;
GeoMask!: Phaser.Display.Masks.GeometryMask;
MaskImage!: Phaser.GameObjects.Sprite;
MaskGeo!: Phaser.GameObjects.Graphics;
// Optional heading text above the graph (cfg.Title.Enabled).
Title: Phaser.GameObjects.Text | null = null;
// rex-plugins Chart component; its inner `chart` exposes a Chart.js-style
// config (datasets, scales) that UpdateValues mutates directly.
Graph: Chart;
// Most recent gold data series pushed in via the constructor/UpdateValues.
CurrentData: GoldEntry[] = [];
/**
 * Builds the gold-graph overlay element: mask, background (rect, image or
 * video), optional title, then kicks off asset loading.
 *
 * @param scene owning ingame scene; all game objects are created on it
 * @param cfg   position/size/background/title settings for this graph
 * @param data  initial gold series shown before any live update arrives
 */
constructor(scene: IngameScene, cfg: GoldGraphDisplayConfig, data: GoldEntry[]) {
  super(scene, cfg.Position, "scoreboard");
  this.CurrentData = data;
  this.CreateTextureListeners();
  //Mask
  // Alpha configs mask through a sprite's alpha channel; otherwise a plain
  // rectangle geometry mask is used. Both start offset by 1.5x the width to
  // the left of the target position — presumably the slide-in animation
  // origin; TODO confirm against the show/hide logic (not in this view).
  if (cfg.Background.UseAlpha) {
    this.MaskImage = scene.make.sprite({ x: cfg.Position.X - cfg.Size.X * 1.5, y: cfg.Position.Y, key: 'graphMask', add: true});
    this.MaskImage.setDisplaySize(cfg.Size.X, cfg.Size.Y);
    this.ImgMask = this.MaskImage.createBitmapMask();
  } else {
    this.MaskGeo = scene.make.graphics({add: false});
    this.MaskGeo.fillStyle(0xffffff);
    this.MaskGeo.fillRect(0, 0, cfg.Size.X, cfg.Size.Y);
    this.GeoMask = this.MaskGeo.createGeometryMask();
    this.MaskGeo.setPosition(cfg.Position.X - cfg.Size.X * 1.5, cfg.Position.Y);
  }
  //Background
  // Video/image backgrounds are only queued for loading here; the loaded
  // asset is presumably attached by the texture listeners registered above
  // — TODO confirm in CreateTextureListeners. The fallback is an immediate
  // solid-color rectangle drawn behind everything (depth -1).
  if (cfg.Background.UseVideo) {
    this.scene.load.video('graphBgVideo', 'frontend/backgrounds/GoldGraph.mp4');
  } else if (cfg.Background.UseImage) {
    this.scene.load.image('graphBg', 'frontend/backgrounds/GoldGraph.png');
  } else {
    this.BackgroundRect = this.scene.add.rectangle(cfg.Position.X, cfg.Position.Y, cfg.Size.X, cfg.Size.Y, Phaser.Display.Color.RGBStringToColor(cfg.Background.FallbackColor).color);
    this.BackgroundRect.setOrigin(0.5,0);
    this.BackgroundRect.depth = -1;
    this.AddVisualComponent(this.BackgroundRect);
  }
  //Title
  if(cfg.Title.Enabled) {
    this.Title = scene.add.text(cfg.Position.X + cfg.Title.Position.X, cfg.Position.Y + cfg.Title.Position.Y, cfg.Title.Text, {
      fontFamily: cfg.Title.Font.Name,
      fontSize: cfg.Title.Font.Size,
      color: cfg.Title.Font.Color,
      fontStyle: cfg.Title.Font.Style,
      align: cfg.Title.Font.Align
    });
    this.Title.setOrigin(0.5,0);
    this.AddVisualComponent(this.Title);
  }
  //Set Mask
  // NOTE(review): reads the static GetConfig() here rather than the local
  // cfg parameter — presumably they resolve to the same config; verify.
  this.GetActiveVisualComponents().forEach(vc => {
    vc.setMask(GraphVisual.GetConfig().Background.UseAlpha ? this.ImgMask : this.GeoMask);
  });
  //Load Resources
  // Only start the loader when something was actually queued above.
  if (cfg.Background.UseImage || cfg.Background.UseVideo) {
    this.scene.load.start();
  }
  this.Load();
}
UpdateValues(newValues: GoldEntry[]): void {
if(newValues.length === 0) {
if (this.isActive && !this.isHiding) {
this.Stop();
}
return;
}
if (newValues.length !== 0 && !this.isActive && !this.isShowing) {
//this.Start();
this.scene.displayRegions[10].AddToAnimationQueue(this);
}
this.CurrentData = newValues;
let goldValues = this.CurrentData.map(a => a.y);
this.Graph.chart.config.data.datasets[0].data = this.CurrentData;
let ctx = this;
this.Graph.chart.config.options.scales.y.ticks = {
min: Math.trunc(Math.min(...goldValues)),
max: Math.trunc(Math.max(...goldValues)),
callback: function (value, index, values) {
if (index === values.length - 1) return GraphVisual.FormatGold(Math.trunc(Math.max(...goldValues)));
else if (index === 0) return GraphVisual.FormatGold(Math.trunc(Math.min(...goldValues)));
else if (value == 0) return 0;
else return null;
},
beginAtZero: true,
font: {
family: GraphVisual.GetConfig().Graph.InfoFont.Name,
},
color: GraphVisual.GetConfig().Graph.InfoFont.Color,
drawBorder: true,
maxRotation: 0,
minRotation: 0
}
this.Graph.chart.update();
}
UpdateConfig(cfg: GoldGraphDisplayConfig): void {
//Delete the Graph and make a new one
if(this.Graph !== null && this.Graph !== undefined)
this.Graph.destroy();
this.RemoveVisualComponent(this.Graph);
//Position
this.position = cfg.Position;
//Background
if (!cfg.Background.UseAlpha) {
this.MaskGeo.clear();
this.MaskGeo.fillStyle(0xffffff);
this.MaskGeo.fillRect(0, 0, cfg.Size.X, cfg.Size.Y);
this.MaskGeo.setPosition(cfg.Position.X - cfg.Size.X * 1.5, cfg.Position.Y);
} else {
this.MaskImage.setDisplaySize(cfg.Size.X, cfg.Size.Y);
this.MaskImage.setPosition(cfg.Position.X - cfg.Size.X * 1.5, cfg.Position.Y)
}
//Background Image
if (cfg.Background.UseImage) {
if (this.BackgroundVideo !== undefined && this.BackgroundVideo !== null) {
this.RemoveVisualComponent(this.BackgroundVideo);
this.BackgroundVideo.destroy();
}
if (this.BackgroundRect !== undefined && this.BackgroundRect !== null) {
this.RemoveVisualComponent(this.BackgroundRect);
this.BackgroundRect.destroy();
this.BackgroundRect = null;
}
//Reset old Texture
if (this.scene.textures.exists('graphBg')) {
this.RemoveVisualComponent(this.BackgroundImage);
this.BackgroundImage?.destroy();
this.BackgroundImage = null;
this.scene.textures.remove('graphBg');
}
this.scene.load.image('graphBg', 'frontend/backgrounds/GoldGraph.png');
}
//Background Video
else if (cfg.Background.UseVideo) {
if (this.BackgroundRect !== undefined && this.BackgroundRect !== null) {
this.RemoveVisualComponent(this.BackgroundRect);
this.BackgroundRect.destroy();
this.BackgroundRect = null;
}
if (this.BackgroundImage !== undefined && this.BackgroundImage !== null) {
this.RemoveVisualComponent(this.BackgroundImage);
this.BackgroundImage.destroy();
this.BackgroundImage = null;
}
//Reset old Video
if(!this.scene.cache.video.has('graphBgVideo')) {
this.RemoveVisualComponent(this.BackgroundVideo);
this.BackgroundVideo?.destroy(),
this.BackgroundVideo = null;
this.scene.cache.video.remove('graphBgVideo');
}
this.scene.load.video('graphBgVideo', 'frontend/backgrounds/GoldGraph.mp4');
}
//Background Color
else {
if (this.BackgroundImage !== undefined && this.BackgroundImage !== null) {
this.RemoveVisualComponent(this.BackgroundImage);
this.BackgroundImage.destroy();
}
if (this.BackgroundVideo !== undefined && this.BackgroundVideo !== null) {
this.RemoveVisualComponent(this.BackgroundVideo);
this.BackgroundVideo.destroy();
this.BackgroundVideo = null;
}
if (this.BackgroundRect === null || this.BackgroundRect === undefined) {
this.BackgroundRect = this.scene.add.rectangle(cfg.Position.X, cfg.Position.Y, cfg.Size.X, cfg.Size.Y, Phaser.Display.Color.RGBStringToColor(cfg.Background.FallbackColor).color32);
this.BackgroundRect.setOrigin(0, 0);
this.BackgroundRect.depth = -1;
this.BackgroundRect.setMask(GraphVisual.GetConfig().Background.UseAlpha ? this.ImgMask : this.GeoMask);
this.AddVisualComponent(this.BackgroundRect);
}
this.BackgroundRect.setPosition(cfg.Position.X, cfg.Position.Y);
this.BackgroundRect.setDisplaySize(cfg.Size.X, cfg.Size.Y);
this.BackgroundRect.setFillStyle(Phaser.Display.Color.RGBStringToColor(cfg.Background.FallbackColor).color, 1);
}
//Graph Colors
var BorderBlueColor = Phaser.Display.Color.IntegerToColor(variables.fallbackBlue);
if (cfg.Graph.BorderUseTeamColors && this.scene.state?.blueColor !== undefined && this.scene.state.blueColor !== '') {
BorderBlueColor = Phaser.Display.Color.RGBStringToColor(this.scene.state?.blueColor);
} else {
BorderBlueColor = Phaser.Display.Color.RGBStringToColor(cfg.Graph.BorderOrderColor);
}
var FillBlueColor = Phaser.Display.Color.IntegerToColor(variables.fallbackBlue);
if (cfg.Graph.BorderUseTeamColors && this.scene.state?.blueColor !== undefined && this.scene.state.blueColor !== '') {
FillBlueColor = Phaser.Display.Color.RGBStringToColor(this.scene.state?.blueColor);
} else {
FillBlueColor = Phaser.Display.Color.RGBStringToColor(cfg.Graph.FillOrderColor);
}
var BorderRedColor = Phaser.Display.Color.IntegerToColor(variables.fallbackRed);
if (cfg.Graph.BorderUseTeamColors && this.scene.state?.redColor !== undefined && this.scene.state.redColor !== '') {
BorderRedColor = Phaser.Display.Color.RGBStringToColor(this.scene.state?.redColor);
} else {
BorderRedColor = Phaser.Display.Color.RGBStringToColor(cfg.Graph.BorderChaosColor);
}
var FillRedColor = Phaser.Display.Color.IntegerToColor(variables.fallbackRed);
if (cfg.Graph.BorderUseTeamColors && this.scene.state?.redColor !== undefined && this.scene.state.redColor !== '') {
FillRedColor = Phaser.Display.Color.RGBStringToColor(this.scene.state?.redColor);
} else {
FillRedColor = Phaser.Display.Color.RGBStringToColor(cfg.Graph.BorderChaosColor);
}
let goldValues = this.CurrentData.map(a => a.y);
//Graph
this.Graph = this.scene.rexUI.add.chart(cfg.Position.X + cfg.Graph.Position.X, cfg.Position.Y + cfg.Graph.Position.Y, cfg.Graph.Size.X, cfg.Graph.Size.Y, {
type: 'line',
data: {
labels: this.CurrentData.map(entry => entry.x),
datasets: [
{
tension: cfg.Graph.LineTension,
fill: {
target: 'origin',
above: ColorUtils.GetRGBAString(FillBlueColor, 1), // Area will be red above the origin
below: ColorUtils.GetRGBAString(FillRedColor, 1) // And blue below the origin
},
data: this.CurrentData
},
]
},
options: {
animation: {
duration: 0
},
plugins: {
legend: {
display: false
}
},
cubicInterpolationMode: 'monotone',
elements: {
point: {
radius: 0
}
},
scales: {
x: {
display: cfg.Graph.ShowHorizontalGrid,
alignToPixels: true,
type: 'linear',
position: 'bottom',
grid: {
color: cfg.Graph.GridColor,
lineWidth: 1,
drawTicks: false,
display: cfg.Graph.ShowVerticalGrid
},
ticks: {
callback: function (value, index, values) {
var minute = Math.floor(parseFloat(value) / 60);
var second = parseFloat(value) % 60;
if (minute % 5 == 0 && second === 0 && index !== values.length - 2) return (minute).toFixed(0);
else if(index === values.length - 1) return (Math.round(parseFloat(value) / 60)).toFixed(0);
if(cfg.Graph.ShowTimeStepIndicators && value % cfg.Graph.TimeStepSize < 1) return '';
return null;
},
autoSkip: true,
stepSize: 1,
beginAtZero: false,
font: {
family: cfg.Graph.InfoFont.Name,
},
color: cfg.Graph.InfoFont.Color,
maxRotation: 0,
minRotation: 0
}
},
y: {
display: true,
alignToPixels: true,
grid: {
color: cfg.Graph.GridColor,
lineWidth: 1,
drawTicks: false,
display: cfg.Graph.ShowHorizontalGrid
},
ticks: {
min: Math.trunc(Math.min(...goldValues)),
max: Math.trunc(Math.max(...goldValues)),
callback: function (value, index, values) {
if (index === values.length - 1) return GraphVisual.FormatGold(Math.trunc(Math.max(...goldValues)));
else if (index === 0) return GraphVisual.FormatGold(Math.trunc(Math.min(...goldValues)));
else if (value == 0) return 0;
else return null;
},
beginAtZero: true,
font: {
family: cfg.Graph.InfoFont.Name,
},
color: cfg.Graph.InfoFont.Color,
drawBorder: true,
maxRotation: 0,
minRotation: 0
}
}
}
}
});
this.Graph.setOrigin(0.5,0);
this.Graph.setMask(GraphVisual.GetConfig().Background.UseAlpha ? this.ImgMask : this.GeoMask);
this.AddVisualComponent(this.Graph);
//Title
if (cfg.Title.Enabled) {
if (GraphVisual.GetConfig().Title.Enabled) {
this.Title?.setPosition(cfg.Position.X + cfg.Title.Position.X, cfg.Position.Y + cfg.Title.Position.Y);
this.UpdateTextStyle(this.Title!, cfg.Title.Font);
} else {
this.Title = this.scene.add.text(cfg.Position.X + cfg.Title.Position.X, cfg.Position.Y + cfg.Title.Position.Y, cfg.Title.Text, {
fontFamily: cfg.Title.Font.Name,
fontSize: cfg.Title.Font.Size,
color: cfg.Title.Font.Color,
fontStyle: cfg.Title.Font.Style,
align: 'center'
});
this.Title.setOrigin(0.5, 0);
this.Title.setMask(GraphVisual.GetConfig().Background.UseAlpha ? this.ImgMask : this.GeoMask);
this.AddVisualComponent(this.Title);
}
}
else if (!(this.Title === null || this.Title === undefined)) {
this.RemoveVisualComponent(this.Title);
this.Title.destroy();
this.Title = null;
}
}
Load(): void {
//Fully loaded in start
}
Start(): void {
if (this.isActive || this.isShowing)
return;
this.isShowing = true;
var ctx = this;
this.UpdateConfig(GraphVisual.GetConfig());
this.currentAnimation[0] = this.scene.tweens.add({
targets: [ctx.MaskGeo, ctx.MaskImage],
props: {
x: { from: GraphVisual.GetConfig().Position.X - GraphVisual.GetConfig().Size.X * 1.5, to: GraphVisual.GetConfig().Position.X - GraphVisual.GetConfig().Size.X * 0.5, duration: 1000, ease: 'Cubic.easeInOut' }
},
paused: false,
yoyo: false,
duration: 1000,
onComplete: function() {
ctx.isActive = true;
ctx.isShowing = false;
}
});
}
Stop(): void {
if (!this.isActive || this.isHiding)
return;
this.isActive = false;
this.isHiding = true;
var ctx = this;
this.currentAnimation[0] = this.scene.tweens.add({
targets: [ctx.MaskGeo, ctx.MaskImage],
props: {
x: { from: GraphVisual.GetConfig().Position.X - GraphVisual.GetConfig().Size.X * 0.5, to: GraphVisual.GetConfig().Position.X - GraphVisual.GetConfig().Size.X * 1.5, duration: 1000, ease: 'Cubic.easeInOut' }
},
paused: false,
yoyo: false,
duration: 1000,
onComplete: function() {
ctx.isHiding = false;
ctx.AnimationComplete.dispatch();
}
});
}
static GetConfig(): GoldGraphDisplayConfig {
return IngameScene.Instance.overlayCfg!.GoldGraph;
}
static FormatGold(num: number): string {
let res = Math.abs(num) > 999 ? ((Math.abs(num) / 1000).toFixed(1)) + 'k' : Math.abs(num) + ''
return res;
}
Average(v) {
return v.reduce((a, b) => a + b, 0) / v.length;
}
CreateTextureListeners(): void {
//Background Image support
this.scene.load.on(`filecomplete-image-graphBg`, () => {
this.BackgroundImage = this.scene.make.sprite({ x: GraphVisual.GetConfig().Position.X, y: GraphVisual.GetConfig().Position.Y, key: 'graphBg', add: true });
this.BackgroundImage.setOrigin(0.5,0);
this.BackgroundImage.setDisplaySize(GraphVisual.GetConfig().Size.X, GraphVisual.GetConfig().Size.Y);
this.BackgroundImage.setDepth(-1);
this.BackgroundImage.setMask(GraphVisual.GetConfig()?.Background.UseAlpha ? this.ImgMask : this.GeoMask);
this.AddVisualComponent(this.BackgroundImage);
});
//Background Video support
this.scene.load.on(`filecomplete-video-graphBgVideo`, () => {
if (this.BackgroundVideo !== undefined && this.BackgroundVideo !== null) {
this.RemoveVisualComponent(this.BackgroundVideo);
this.BackgroundVideo.destroy();
}
// @ts-ignore
this.BackgroundVideo = this.scene.add.video(GraphVisual.GetConfig().Position.X, GraphVisual.GetConfig()!.Position.Y, 'graphBgVideo', false, true);
this.BackgroundVideo.setDisplaySize(GraphVisual.GetConfig().Size.X, GraphVisual.GetConfig().Size.Y);
this.BackgroundVideo.setOrigin(0.5,0);
this.BackgroundVideo.setMask(GraphVisual.GetConfig().Background.UseAlpha ? this.ImgMask : this.GeoMask);
this.BackgroundVideo.setLoop(true);
this.BackgroundVideo.setDepth(-1);
this.BackgroundVideo.play();
this.AddVisualComponent(this.BackgroundVideo);
});
}
} | the_stack |
export = WPAPI;
/**
 * REST API Client for WordPress
 *
 * @see http://wp-api.org/node-wpapi/api-reference/wpapi/1.1.2/WPAPI.html
 */
declare class WPAPI {
  /**
   * Construct a REST API client instance object
   *
   * @param options An options hash to configure the instance
   */
  constructor(options?: WPAPI.WPAPIOptions);
  /**
   * Take an arbitrary WordPress site, deduce the WP REST API root endpoint,
   * query that endpoint, and parse the response JSON. Use the returned JSON
   * response to instantiate a WPAPI instance bound to the provided site.
   *
   * @param url A URL within a REST API-enabled WordPress website
   */
  static discover(url: string): Promise<WPAPI>;
  /** Start a request against /categories endpoint */
  categories(): WPAPI.WPRequest;
  /** Start a request against /comments endpoints */
  comments(): WPAPI.WPRequest;
  /** Start a request against /media endpoints */
  media(): WPAPI.WPRequest;
  /** Start a request against /pages endpoints */
  pages(): WPAPI.WPRequest;
  /** Start a request against /posts endpoints */
  posts(): WPAPI.WPRequest;
  /** Start a request against /settings endpoints */
  settings(): WPAPI.WPRequest;
  /** Start a request against /statuses endpoints */
  statuses(): WPAPI.WPRequest;
  /** Start a request against /tags endpoints */
  tags(): WPAPI.WPRequest;
  /** Start a request against /taxonomies endpoints */
  taxonomies(): WPAPI.WPRequest;
  /** Start a request against /types endpoints */
  types(): WPAPI.WPRequest;
  /** Start a request against /users endpoints */
  users(): WPAPI.WPRequest;
  /**
   * Set the authentication to use for a WPAPI site handler instance. Accepts
   * basic HTTP authentication credentials (string username & password) or a
   * Nonce (for cookie authentication) by default; may be overloaded to accept
   * OAuth credentials in the future.
   *
   * @param credentials An authentication credentials object
   */
  auth(credentials?: WPAPI.Credentials): WPAPI;
  /**
   * Deduce request methods from a provided API root JSON response object's
   * routes dictionary, and assign those methods to the current instance. If
   * no routes dictionary is provided then the instance will be bootstrapped
   * with route handlers for the default API endpoints only.
   *
   * This method is called automatically during WPAPI instance creation.
   *
   * @param routes The "routes" object from the JSON object returned from the
   * root API endpoint of a WP site, which should be a dictionary of route
   * definition objects keyed by the route's regex pattern
   */
  bootstrap(routes: WPAPI.Routes): WPAPI;
  /**
   * Access API endpoint handlers from a particular API namespace object
   *
   * @param namespace A namespace string
   */
  namespace(namespace: string): WPAPI;
  /**
   * Create and return a handler for an arbitrary WP REST API endpoint.
   *
   * @param namespace A namespace string, e.g. 'myplugin/v1'
   * @param restBase A REST route string, e.g. '/author/(?P<id>\d+)'
   * @param options An (optional) options object
   */
  registerRoute(
    namespace: string,
    restBase: string,
    options?: WPAPI.RegisterRouteOptions
  ): WPAPI.WPRequestFactory;
  /**
   * Set the default headers to use for all HTTP requests created from this
   * WPAPI site instance. Accepts a header name and its associated value as
   * two strings, or multiple headers as an object of name-value pairs.
   *
   * @param headers
   */
  setHeaders(headers: WPAPI.HTTPHeaders): WPAPI;
  /**
   * Convenience method for making a new WPAPI instance
   *
   * @param endpoint The URI for a WP-API endpoint
   * @param routes The "routes" object from the JSON object returned from the
   * root API endpoint of a WP site, which should be a dictionary of route
   * definition objects keyed by the route's regex pattern
   */
  site(endpoint: string, routes: WPAPI.Routes): WPAPI;
  /**
   * Set custom transport methods to use when making HTTP requests against the
   * API.
   *
   * Pass an object with a function for one or many of "get", "post", "put",
   * "delete" and "head" and that function will be called when making that
   * type of request. The provided transport functions should take a WPRequest
   * handler instance (e.g. the result of a wp.posts()... chain or any other
   * chaining request handler) as their first argument; a data object as their
   * second argument (for POST, PUT and DELETE requests); and an optional
   * callback as their final argument. Transport methods should invoke the
   * callback with the response data (or error, as appropriate), and should
   * also return a Promise.
   *
   * @param transport A dictionary of HTTP transport methods
   */
  transport(transport: WPAPI.Transport): WPAPI;
  /**
   * Generate a query against an arbitrary path on the current endpoint. This
   * is useful for requesting resources at custom WP-API endpoints, such as
   * WooCommerce's /products.
   *
   * @param relativePath An endpoint-relative path to which to bind the request
   */
  root(relativePath?: string): WPAPI.WPRequest;
  /**
   * Generate a request against a completely arbitrary endpoint, with no
   * assumptions about or mutation of path, filtering, or query parameters.
   * This request is not restricted to the endpoint specified during WPAPI
   * object instantiation.
   *
   * @param url The URL to request
   */
  url(url: string): WPAPI.WPRequest;
  /**
   * An API client can define its parameter methods, like .authors(), .cart(),
   * .products(). They are usually decided by WPAPI namespaces configuration
   * object. They have WPRequest return type.
   */
  [customRoutesMethod: string]: any;
}
/*
 * Types exposed alongside the WPAPI export (request handler, options,
 * credentials, route/endpoint descriptors).
 */
declare namespace WPAPI {
  /**
   * The base WordPress API request
   *
   * @see http://wp-api.org/node-wpapi/api-reference/wpapi/1.1.2/WPRequest.html
   */
  class WPRequest {
    /**
     * WPRequest is the base API request object constructor
     *
     * NOTE(review): the parameter is typed WPAPIOptions even though a
     * separate WPRequestOptions interface is declared below — confirm
     * against the upstream library before changing.
     *
     * @param options A hash of options for the WPRequest instance
     */
    constructor(options: WPAPIOptions);
    /**
     * Set a request to use authentication, and optionally provide auth
     * credentials. If auth credentials were already specified when the WPAPI
     * instance was created, calling .auth on the request chain will set
     * that request to use the existing credentials.
     *
     * @param credentials An authentication credentials object
     */
    auth(credentials?: Credentials): WPRequest;
    /**
     * Set the context of the request. Used primarily to expose private
     * values on a request object by setting the context to "edit".
     *
     * @param context The context to set on the request
     */
    context(context: string): WPRequest;
    /**
     * Create the specified resource with the provided data
     *
     * This is the public interface for creating POST requests
     *
     * @param data The data for the POST request
     * @param callback A callback to invoke with the results of the POST
     * request
     */
    create(data: any, callback?: WPRequestCallback): Promise<any>;
    /**
     * Delete the specified resource
     *
     * @param data Data to send along with the DELETE request
     * @param callback A callback to invoke with the results of the DELETE
     * request
     */
    delete(data?: any, callback?: WPRequestCallback): Promise<any>;
    /**
     * Convenience wrapper for .context( 'edit' )
     */
    edit(): WPRequest;
    /**
     * Return embedded resources as part of the response payload.
     */
    embed(): WPRequest;
    /**
     * Exclude specific resource IDs in the response collection.
     *
     * @param ids An ID or array of IDs to exclude
     */
    exclude(ids: number | number[]): WPRequest;
    /**
     * Specify a file or a file buffer to attach to the request, for use
     * when creating a new Media item
     *
     * @param file A path to a file (in Node) or an file object (Node or
     * Browser) to attach to the request
     * @param name An (optional) filename to use for the file
     */
    file(file: string | File, name?: string): WPRequest;
    /**
     * Get the specified resource
     *
     * @param callback A callback to invoke with the results of the GET
     * request
     */
    get(callback?: WPRequestCallback): Promise<any>;
    /**
     * Set the id of resource.
     *
     * @param id An ID of item
     */
    id(id: number): WPRequest;
    /**
     * Include specific resource IDs in the response collection.
     *
     * @param ids An ID or array of IDs to include
     */
    include(ids: number | number[]): WPRequest;
    /**
     * Set the namespace of the request, e.g. to specify the API root for
     * routes registered by wp core v2 ("wp/v2") or by any given plugin. Any
     * previously- set namespace will be overwritten by subsequent calls to
     * the method.
     *
     * @param namespace A namespace string, e.g. "wp/v2"
     */
    namespace(namespace: string): WPRequest;
    /**
     * Set an arbitrary offset to retrieve items from a specific point in a
     * collection.
     *
     * @param offsetNumber The number of items by which to offset the response
     */
    offset(offsetNumber: number): WPRequest;
    /**
     * Change the sort direction of a returned collection
     *
     * @param direction The order to use when sorting the response
     */
    order(direction: "asc" | "desc"): WPRequest;
    /**
     * Order a collection by a specific field
     *
     * @param field The field by which to order the response
     */
    orderby(field: string): WPRequest;
    /**
     * Set the pagination of a request. Use in conjunction with .perPage()
     * for explicit pagination handling. (The number of pages in a response
     * can be retrieved from the response's _paging.totalPages property.)
     *
     * @param pageNumber The page number of results to retrieve
     */
    page(pageNumber: number): WPRequest;
    /**
     * Set a parameter to render into the final query URI.
     *
     * @param props The name of the parameter to set, or an object containing
     * parameter keys and their corresponding values
     * @param value The value of the parameter being set
     */
    param(
      props: string | { [name: string]: string | number | any[] },
      value?: string | number | any[]
    ): WPRequest;
    /**
     * Set the number of items to be returned in a page of responses.
     *
     * @param itemsPerPage The number of items to return in one page of
     * results
     */
    perPage(itemsPerPage: number): WPRequest;
    /**
     * Filter results to those matching the specified search terms.
     *
     * @param searchString A string to search for within post content
     */
    search(searchString: string): WPRequest;
    /**
     * Specify one or more headers to send with the dispatched HTTP request.
     *
     * @param headers The name of the header to set, or an object of header
     * names and their associated string values
     * @param value The value of the header being set
     */
    setHeaders(
      headers: string | { [name: string]: string },
      value?: string
    ): WPRequest;
    /**
     * Set a component of the resource URL itself (as opposed to a query
     * parameter)
     *
     * If a path component has already been set at this level, throw an
     * error: requests are meant to be transient, so any re-writing of a
     * previously-set path part value is likely to be a mistake.
     *
     * @param level A "level" of the path to set, e.g. "1" or "2"
     * @param value The value to set at that path part level
     */
    setPathPart(level: number | string, value: number | string): WPRequest;
    /**
     * Query a collection for members with a specific slug.
     *
     * @param slug A post slug, e.g. "hello-world"
     */
    slug(slug: string): WPRequest;
    /**
     * Calling .then on a query chain will invoke the query as a GET and
     * return a promise
     *
     * @param successCallback A callback to handle the data returned from
     * the GET request
     * @param failureCallback A callback to handle any errors encountered
     * by the request
     */
    then(
      successCallback?: (data: any) => void,
      failureCallback?: (error: Error) => void
    ): Promise<any>;
    /**
     * Parse the request into a WordPress API request URI string
     */
    toString(): string;
    /**
     * Update the specified resource with the provided data
     *
     * This is the public interface for creating PATCH requests
     *
     * @param data The data for the PATCH request
     * @param callback A callback to invoke with the results of the PATCH
     * request
     */
    update(data: any, callback?: WPRequestCallback): Promise<any>;
    /**
     * Validate whether the specified path parts are valid for this endpoint
     *
     * "Path parts" are non-query-string URL segments, like "some" "path" in
     * the URL mydomain.com/some/path?and=a&query=string&too. Because a well
     * -formed path is necessary to execute a successful API request, we
     * throw an error if the user has omitted a value (such as /some/[missing
     * component]/url) or has provided a path part value that does not match
     * the regular expression the API uses to govern that segment.
     */
    validatePath(): WPRequest;
    /**
     * A request can define its parameter methods, like .id(), .date(),
     * .author(). They are usually decided by WPAPI routes configuration
     * object.
     */
    [customParamsMethod: string]: any;
  }
  interface WPAPIOptions extends Credentials {
    /** The URI for a WP-API endpoint */
    endpoint: string;
    /**
     * A dictionary of API routes with which to bootstrap the WPAPI instance:
     * the instance will be initialized with default routes only if this
     * property is omitted
     */
    routes?: Routes | undefined;
    /**
     * An optional dictionary of HTTP transport methods (.get, .post, .put,
     * .delete, .head) to use instead of the defaults, e.g. to use a
     * different HTTP library than superagent
     */
    transport?: Transport | undefined;
  }
  interface WPRequestOptions extends Credentials {
    /** The URI for a WP-API endpoint */
    endpoint: string;
    /**
     * A dictionary of HTTP transport methods (.get, .post, .put,
     * .delete, .head) to use instead of the defaults, e.g. to use a
     * different HTTP library than superagent
     */
    transport?: Transport | undefined;
  }
  type WPRequestFactory = () => WPRequest;
  type WPRequestCallback = (error: Error, data: any) => void;
  /** Authentication credentials */
  interface Credentials {
    /** A WP-API Basic HTTP Authentication username */
    username?: string | undefined;
    /** A WP-API Basic HTTP Authentication password */
    password?: string | undefined;
    /** A WP nonce for use with cookie authentication */
    nonce?: string | undefined;
  }
  interface Transport {
    get?: TransportFunction | undefined;
    post?: TransportFunction | undefined;
    put?: TransportFunction | undefined;
    delete?: TransportFunction | undefined;
    head?: TransportFunction | undefined;
  }
  type TransportFunction = (
    wpreq: WPRequest,
    cb?: WPRequestCallback
  ) => Promise<any>;
  /** Route definition objects keyed by the route's regex pattern */
  interface Routes {
    [path: string]: Route;
  }
  interface Route {
    namespace: string;
    methods: HTTPMethod[];
    endpoints: HTTPEndpoint[];
    _links?: {
      self: string;
    } | undefined;
  }
  type HTTPMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE";
  interface HTTPEndpoint {
    methods: HTTPMethod[];
    args: {
      [arg: string]: HTTPArgument;
    };
  }
  interface HTTPArgument {
    required: boolean;
    default?: string | number | undefined;
    enum?: string[] | undefined;
    description?: string | undefined;
    type?: HTTPArgumentType | undefined;
    items?: {
      type: HTTPArgumentType;
    } | undefined;
  }
  type HTTPArgumentType =
    | "string"
    | "integer"
    | "number"
    | "boolean"
    | "object"
    | "array";
  interface HTTPHeaders {
    [key: string]: string;
  }
  interface RegisterRouteOptions {
    params?: string[] | undefined;
    methods?: HTTPMethod[] | undefined;
    mixins?: {
      [key: string]: (val: any) => any;
    } | undefined;
  }
}
import doctrine from 'doctrine'
import fs from 'fs-extra'
import { version } from 'fts-core'
import path from 'path'
import tempy from 'tempy'
import * as TS from 'ts-morph'
import * as TJS from 'typescript-json-schema'
import * as FTS from './types'
// Names of the synthetic classes injected into a copy of the user's source
// file to carry the main function's parameter and return types for JSON
// schema extraction (FTSParams is created by addParamsDeclaration below;
// FTSReturns presumably by addReturnTypeDeclaration — defined elsewhere).
const FTSReturns = 'FTSReturns'
const FTSParams = 'FTSParams'

// Matches a `Promise<T>` type text so the awaited inner type can be captured.
const promiseTypeRe = /^Promise<(.*)>$/

// Supported source-file extensions mapped to the definition's language tag.
const supportedExtensions = {
  js: 'javascript',
  jsx: 'javascript',
  ts: 'typescript',
  tsx: 'typescript'
}
/**
 * Analyze a TypeScript/JavaScript source file and generate an FTS Definition
 * for its main exported function: title, description (from JSDoc), export
 * metadata, and (via mutation of a temporary copy of the file) JSON schemas
 * for its parameters and return type.
 *
 * Works on a temporary `.{name}-fts.ts` sibling copy of the source file,
 * which is always deleted afterwards (see the try/finally at the end).
 *
 * @param file Path to the source file to analyze
 * @param options Optional compiler, JSON-schema and emit options
 * @throws If the file extension is unsupported or no main function export
 * can be inferred
 */
export async function generateDefinition(
  file: string,
  options: FTS.PartialDefinitionOptions = {}
): Promise<FTS.Definition> {
  file = path.resolve(file)
  const fileInfo = path.parse(file)
  // Language is keyed off the extension without its leading dot.
  const language = supportedExtensions[fileInfo.ext.substr(1)]
  if (!language) {
    throw new Error(`File type "${fileInfo.ext}" not supported. "${file}"`)
  }
  const outDir = tempy.directory()
  // initialize and compile TS program
  const compilerOptions = {
    allowJs: true,
    ignoreCompilerErrors: true,
    esModuleInterop: true,
    // TODO: why do we need to specify the full filename for these lib definition files?
    lib: ['lib.es2018.d.ts', 'lib.dom.d.ts'],
    target: TS.ScriptTarget.ES5,
    outDir,
    ...(options.compilerOptions || {})
  }
  const jsonSchemaOptions = {
    noExtraProps: true,
    required: true,
    // Custom keywords recognized in JSDoc for type coercion hints.
    validationKeywords: ['coerceTo', 'coerceFrom'],
    ...(options.jsonSchemaOptions || {})
  }
  // Skeleton definition; schemas and flags are filled in by the add* helpers.
  const definition: Partial<FTS.Definition> = {
    config: {
      defaultExport: true,
      language
    },
    params: {
      http: false,
      context: false,
      order: [],
      schema: null
    },
    returns: {
      async: false,
      http: false,
      schema: null
    },
    version
  }
  const project = new TS.Project({ compilerOptions })
  project.addExistingSourceFile(file)
  project.resolveSourceFileDependencies()
  // Diagnostics are only logged, not fatal (see TODO below).
  const diagnostics = project.getPreEmitDiagnostics()
  if (diagnostics.length > 0) {
    console.log(project.formatDiagnosticsWithColorAndContext(diagnostics))
    // TODO: throw error?
  }
  const sourceFile = project.getSourceFileOrThrow(file)
  const main = extractMainFunction(sourceFile, definition)
  if (!main) {
    throw new Error(`Unable to infer a main function export "${file}"`)
  }
  // extract main function type and documentation info
  const title = main.getName ? main.getName() : path.parse(file).name
  const mainTypeParams = main.getTypeParameters()
  definition.title = title
  if (mainTypeParams.length > 0) {
    throw new Error(
      `Generic Type Parameters are not supported for function "${title}"`
    )
  }
  // Only the first JSDoc block is consulted; docs stays undefined otherwise.
  const doc = main.getJsDocs()[0]
  let docs: doctrine.Annotation
  if (doc) {
    const { description } = doc.getStructure()
    docs = doctrine.parse(description as string)
    if (docs.description) {
      definition.description = docs.description
    }
  }
  const builder: FTS.DefinitionBuilder = {
    definition,
    docs,
    main,
    sourceFile,
    title
  }
  if (options.emit) {
    const result = project.emit(options.emitOptions)
    if (result.getEmitSkipped()) {
      throw new Error('emit skipped')
    }
  }
  // These mutate sourceFile by adding the synthetic FTSParams/FTSReturns
  // declarations, which is why schema extraction works on a saved copy.
  addParamsDeclaration(builder)
  addReturnTypeDeclaration(builder)
  // TODO: figure out a better workaround than mutating the source file directly
  // TODO: fix support for JS files since you can't save TS in JS
  const tempSourceFilePath = path.format({
    dir: fileInfo.dir,
    ext: '.ts',
    name: `.${fileInfo.name}-fts`
  })
  const tempSourceFile = sourceFile.copy(tempSourceFilePath, {
    overwrite: true
  })
  await tempSourceFile.save()
  try {
    extractJSONSchemas(builder, tempSourceFilePath, jsonSchemaOptions)
  } finally {
    // Always clean up the temporary mutated copy, even on failure.
    await fs.remove(tempSourceFilePath)
  }
  return builder.definition as FTS.Definition
}
/**
 * Find the main exported function declaration of a source file, recording
 * on `definition.config` whether it is a default export and, for named
 * exports, the export name. Returns undefined when no unambiguous main
 * function can be determined.
 */
function extractMainFunction(
  sourceFile: TS.SourceFile,
  definition: Partial<FTS.Definition>
): TS.FunctionDeclaration | undefined {
  // Case 1: exactly one `export default function ...`
  const defaultFunctionExports = sourceFile
    .getFunctions()
    .filter((fn) => fn.isDefaultExport())
  if (defaultFunctionExports.length === 1) {
    definition.config.defaultExport = true
    return defaultFunctionExports[0]
  }
  definition.config.defaultExport = false

  // Case 2: exactly one exported (named) function
  const exportedFunctions = sourceFile
    .getFunctions()
    .filter((fn) => fn.isExported())
  if (exportedFunctions.length === 1) {
    const candidate = exportedFunctions[0]
    definition.config.namedExport = candidate.getName()
    return candidate
  }

  // Case 3: several named exports — disambiguate via an @external/@public
  // tag in the first JSDoc block
  if (exportedFunctions.length > 1) {
    const taggedFunctions = exportedFunctions.filter((fn) => {
      const jsdoc = fn.getJsDocs()[0]
      return (
        jsdoc &&
        jsdoc.getTags().find((tag) => {
          const tagName = tag.getTagName()
          return tagName === 'external' || tagName === 'public'
        })
      )
    })
    if (taggedFunctions.length === 1) {
      const candidate = taggedFunctions[0]
      definition.config.namedExport = candidate.getName()
      return candidate
    }
  }

  // Case 4: `export default () => ...` arrow expression
  // TODO: arrow function exports are a lil hacky
  const exportedArrows = sourceFile
    .getDescendantsOfKind(TS.SyntaxKind.ArrowFunction)
    .filter((fn) => TS.TypeGuards.isExportAssignment(fn.getParent()))
  if (exportedArrows.length === 1) {
    const arrow = exportedArrows[0]
    const assignment = arrow.getParent() as TS.ExportAssignment
    const defaultExportSymbol = sourceFile.getDefaultExportSymbol()
    // TODO: handle named exports `export const foo = () => 'bar'`
    if (defaultExportSymbol) {
      // TODO: better way of comparing nodes
      const isDefaultExport =
        defaultExportSymbol.getValueDeclarationOrThrow().getPos() ===
        assignment.getPos()
      if (isDefaultExport) {
        definition.config.defaultExport = true
        return (arrow as unknown) as TS.FunctionDeclaration
      }
    }
  }

  return undefined
}
/**
 * Adds an `FTSParams` class to the source file with one property per
 * parameter of the main function, so a JSON schema can later be generated
 * from it (see `extractJSONSchemas`).
 *
 * Side effects on `builder.definition.params`:
 *   - `order` lists the positional parameter names
 *   - `context` / `http` flags are set for the special parameters below
 *   - a rest parameter replaces the schema with `{ additionalProperties: true }`
 *
 * @throws when parameter placement rules are violated (`context` not last,
 *         or extra parameters alongside a raw `Buffer` body).
 */
function addParamsDeclaration(
  builder: FTS.DefinitionBuilder
): TS.ClassDeclaration {
  const mainParams = builder.main.getParameters()
  const paramsDeclaration = builder.sourceFile.addClass({
    name: FTSParams
  })
  // Map @param JSDoc tags to their descriptions, keyed by parameter name.
  const paramComments = {}
  if (builder.docs) {
    const paramTags = builder.docs.tags.filter((tag) => tag.title === 'param')
    for (const tag of paramTags) {
      paramComments[tag.name] = tag.description
    }
  }
  // Name of the parameter that enabled http mode; only used in the final
  // error message (it is always assigned before that path can be reached).
  let httpParameterName: string
  for (let i = 0; i < mainParams.length; ++i) {
    const param = mainParams[i]
    const name = param.getName()
    const structure = param.getStructure()
    if (structure.isRestParameter) {
      builder.definition.params.schema = { additionalProperties: true }
      continue
    }
    // TODO: this handles alias type resolution i think...
    // need to test multiple levels of aliasing
    structure.type = param.getType().getText()
    if (name === 'context') {
      if (i !== mainParams.length - 1) {
        throw new Error(
          `Function parameter "context" must be last parameter to main function "${
            builder.title
          }"`
        )
      }
      // TODO: ensure context has valid type `FTS.Context`
      builder.definition.params.context = true
      // A lone `context` parameter means the function wants raw http access.
      if (mainParams.length === 1) {
        builder.definition.params.http = true
        httpParameterName = name
      }
      // ignore context in parameter aggregation
      continue
    } else if (structure.type === 'Buffer') {
      // A Buffer parameter consumes the raw request body; it must be first
      // and may only be followed by an optional trailing `context`.
      if (mainParams.length > 2 || i > 0) {
        throw new Error(
          `Function parameter "${name}" of type "Buffer" must not include additional parameters to main function "${
            builder.title
          }"`
        )
      }
      builder.definition.params.http = true
      httpParameterName = name
      builder.definition.params.order.push(name)
      // ignore Buffer in parameter aggregation
      continue
    } else {
      // TODO: ensure that type is valid:
      // not `FTS.Context`
      // not Promise<T>
      // not Function or ArrowFunction
      // not RegExp
    }
    // Promise-valued parameters have no JSON representation — reject them.
    const promiseReMatch = structure.type.match(promiseTypeRe)
    if (promiseReMatch) {
      throw new Error(
        `Parameter "${name}" has unsupported type "${structure.type}"`
      )
    }
    addPropertyToDeclaration(
      paramsDeclaration,
      structure as TS.PropertyDeclarationStructure,
      paramComments[name]
    )
    builder.definition.params.order.push(name)
  }
  // In http mode the Buffer must be the only aggregated parameter.
  if (
    builder.definition.params.http &&
    builder.definition.params.order.length > 1
  ) {
    throw new Error(
      `Function parameter "${httpParameterName}" of type "Buffer" must not include additional parameters to main function "${
        builder.title
      }"`
    )
  }
  return paramsDeclaration
}
/**
 * Adds an `FTSReturns` interface to the source file describing the main
 * function's return value (unwrapping `Promise<T>` for async results) and
 * attaches the @returns JSDoc description, when present.
 */
function addReturnTypeDeclaration(builder: FTS.DefinitionBuilder) {
  const returnType = builder.main.getReturnType()
  let typeText = returnType.getText()

  // A Promise<T> return marks the function async even without the `async`
  // keyword; unwrap to the resolved type T.
  const promiseMatch = typeText.match(promiseTypeRe)
  const isAsync = !!promiseMatch
  builder.definition.returns.async = builder.main.isAsync()
  if (isAsync) {
    typeText = promiseMatch[1]
    builder.definition.returns.async = true
  }

  // `void` has no JSON representation; widen to `any`.
  if (typeText === 'void') {
    typeText = 'any'
  }

  // Returning an HttpResponse (directly or via Promise) flags raw HTTP mode.
  if (
    typeText.endsWith('HttpResponse') &&
    (isAsync || returnType.isInterface())
  ) {
    builder.definition.returns.http = true
  }

  const returnsInterface = builder.sourceFile.addInterface({
    name: FTSReturns
  })

  const returnsTag = builder.docs
    ? builder.docs.tags.find(
        (tag) => tag.title === 'returns' || tag.title === 'return'
      )
    : undefined

  addPropertyToDeclaration(
    returnsInterface,
    { name: 'result', type: typeText },
    returnsTag && returnsTag.description
  )
}
/**
 * Adds a property to the given class / interface declaration, attaching the
 * provided JSDoc description when present.
 *
 * Non-JSON primitives (`Date`, `Buffer`) are recorded via a `@coerceTo`
 * JSDoc tag so the runtime can convert the wire value back to the original
 * type. A Buffer travels as a string, so its declared type is rewritten;
 * Date is left intact — presumably the JSON schema generator already maps
 * it to a date-time string (TODO confirm). The previous code contained a
 * dead `structure.type = 'Date'` self-assignment here, which this removes.
 */
function addPropertyToDeclaration(
  declaration: TS.ClassDeclaration | TS.InterfaceDeclaration,
  structure: TS.PropertyDeclarationStructure,
  jsdoc?: string
): TS.PropertyDeclaration | TS.PropertySignature {
  const isDate = structure.type === 'Date'
  const isBuffer = structure.type === 'Buffer'

  // Type coercion for non-JSON primitives like Date and Buffer
  if (isDate || isBuffer) {
    const coercionType = structure.type
    if (isBuffer) {
      structure.type = 'string'
    }
    // Record the original type so the runtime can coerce the JSON value back.
    jsdoc = `${jsdoc ? jsdoc + '\n' : ''}@coerceTo ${coercionType}`
  }

  const property = declaration.addProperty(structure)
  if (jsdoc) {
    property.addJsDoc(jsdoc)
  }
  return property
}
/**
 * Generates JSON schemas for the `FTSParams` / `FTSReturns` declarations
 * previously added to the (temp) source file and stores them on
 * `builder.definition`.
 *
 * @throws if a schema cannot be generated for the given TS file.
 */
function extractJSONSchemas(
  builder: FTS.DefinitionBuilder,
  file: string,
  jsonSchemaOptions: TJS.PartialArgs = {},
  jsonCompilerOptions: any = {}
) {
  const compilerOptions = {
    allowJs: true,
    lib: ['es2018', 'dom'],
    target: 'es5',
    esModuleInterop: true,
    ...jsonCompilerOptions
  }

  const program = TJS.getProgramFromFiles(
    [file],
    compilerOptions,
    process.cwd()
  )

  // Check the generator's result *before* merging: TJS.generateSchema
  // returns null on failure, and spreading null yields `{}`, so the old
  // post-merge null check could never fire. Only fail hard when there is
  // also no pre-existing schema (e.g. from a rest parameter) to fall back on.
  const existingParamsSchema = builder.definition.params.schema
  const generatedParamsSchema = TJS.generateSchema(
    program,
    FTSParams,
    jsonSchemaOptions
  )
  if (!generatedParamsSchema && !existingParamsSchema) {
    throw new Error(`Error generating params JSON schema for TS file "${file}"`)
  }

  builder.definition.params.schema = {
    ...generatedParamsSchema,
    ...(existingParamsSchema || {}) // Spread any existing schema params
  }

  // fix required parameters to only be those which do not have default values
  const { schema } = builder.definition.params
  schema.required = (schema.required || []).filter((k) => {
    // Guard: a `required` key may have no matching `properties` entry.
    const prop = schema.properties && schema.properties[k]
    return !prop || prop.default === undefined
  })
  if (!schema.required.length) {
    delete schema.required
  }

  builder.definition.returns.schema = TJS.generateSchema(program, FTSReturns, {
    ...jsonSchemaOptions,
    required: false
  })
  if (!builder.definition.returns.schema) {
    throw new Error(
      `Error generating returns JSON schema for TS file "${file}"`
    )
  }
}
/*
// useful for quick testing purposes
if (!module.parent) {
generateDefinition('./fixtures/http-request.ts')
.then((definition) => {
console.log(JSON.stringify(definition, null, 2))
})
.catch((err) => {
console.error(err)
process.exit(1)
})
}
*/ | the_stack |
import createValidator = require('../')
/**
 * Static (compile-time) assertion that `value` has type `T`.
 * The body is intentionally empty — only the type check matters; nothing
 * happens at runtime.
 */
// Disable tslint here b/c the generic is used to let us do a type coercion and
// validate that coercion works for the type value "passed into" the function.
// tslint:disable-next-line:no-unnecessary-generics
function assertType<T>(value: T): void {}
// The checks below are compile-time "type tests": each validator built from a
// JSON schema is invoked as a type guard, and `assertType` verifies the
// narrowed TypeScript type the schema should produce. Nothing is asserted at
// runtime — passing `tsc` IS the test.
const input = null as unknown
// --- primitive schemas narrow to the matching primitive type ---
const nullValidator = createValidator({ type: 'null' })
assertType<{ type: 'null' }>(nullValidator.toJSON())
if (nullValidator(input)) {
  assertType<null>(input)
}
// Validation errors are exposed as a typed array on the validator itself.
assertType<createValidator.ValidationError[]>(nullValidator.errors)
assertType<createValidator.ValidationError>(nullValidator.errors[0])
assertType<string>(nullValidator.errors[0].field)
assertType<string>(nullValidator.errors[0].message)
assertType<string>(nullValidator.errors[0].type)
assertType<unknown>(nullValidator.errors[0].value)
const numberValidator = createValidator({ type: 'number' })
assertType<{ type: 'number' }>(numberValidator.toJSON())
if (numberValidator(input)) {
  assertType<number>(input)
}
const stringValidator = createValidator({ type: 'string' })
assertType<{ type: 'string' }>(stringValidator.toJSON())
if (stringValidator(input)) {
  assertType<string>(input)
}
// --- object schemas: `required` keys are non-optional, others may be undefined ---
const personValidator = createValidator({
  type: 'object',
  properties: {
    name: { type: 'string' },
    age: { type: 'number' },
  },
  required: [
    'name'
  ]
})
if (personValidator(input)) {
  assertType<string>(input.name)
  if (typeof input.age !== 'undefined') assertType<number>(input.age)
  if (typeof input.age !== 'number') assertType<undefined>(input.age)
}
// --- array schemas narrow both the array and its element type ---
const namesValidator = createValidator({
  type: 'array',
  items: { type: 'string' }
})
if (namesValidator(input)) {
  assertType<number>(input.length)
  assertType<string>(input[0])
}
// Object containing a required array property.
const boxValidator = createValidator({
  type: 'object',
  properties: {
    name: { type: 'string' },
    items: { type: 'array', items: { type: 'boolean' } },
  },
  required: [
    'name',
    'items',
  ]
})
if (boxValidator(input)) {
  assertType<string>(input.name)
  assertType<number>(input.items.length)
  assertType<boolean>(input.items[0])
}
// Nested arrays produce nested element types.
const matrixValidator = createValidator({
  type: 'array',
  items: {
    type: 'array',
    items: {
      type: 'number'
    }
  }
})
if (matrixValidator(input)) {
  assertType<number>(input[0][0])
}
const userValidator = createValidator({
  type: 'object',
  properties: {
    name: { type: 'string' },
    items: { type: 'array', items: { type: 'string' } },
  },
  required: [
    'name',
    'items',
  ]
})
if (userValidator(input)) {
  assertType<string>(input.name)
  assertType<number>(input.items.length)
  assertType<string>(input.items[0])
}
// Nested object schemas: inner `required` lists need literal types
// (hence the `as 'last'` casts) for precise narrowing.
const user2Validator = createValidator({
  type: 'object',
  properties: {
    name: {
      type: 'object',
      properties: {
        first: { type: 'string' },
        last: { type: 'string' },
      },
      required: [
        'last' as 'last'
      ]
    },
    items: {
      type: 'array',
      items: { type: 'string' },
    }
  },
  required: [
    'name'
  ]
})
if (user2Validator(input)) {
  assertType<{ first: string | undefined, last: string }>(input.name)
  if (typeof input.name.first !== 'undefined') assertType<string>(input.name.first)
  if (typeof input.name.first !== 'string') assertType<undefined>(input.name.first)
  assertType<string>(input.name.last)
  if (input.items !== undefined) {
    assertType<number>(input.items.length)
    assertType<string>(input.items[0])
  }
}
// --- enum schemas narrow to a union of the listed values ---
const booleanValidator = createValidator({
  enum: [true, false]
})
if (booleanValidator(input)) {
  assertType<boolean>(input)
}
const specificValuesValidator = createValidator({
  enum: [
    true as true,
    1000 as 1000,
    'XX' as 'XX'
  ]
})
if (specificValuesValidator(input)) {
  if (input !== true && input !== 1000) assertType<'XX'>(input)
  if (input !== 1000 && input !== 'XX') assertType<true>(input)
  if (input !== 'XX' && input !== true) assertType<1000>(input)
}
// Single-value enum inside an object property narrows to that literal.
const metricValidator = createValidator({
  type: 'object',
  properties: {
    name: { type: 'string', enum: ['page-view' as 'page-view'] },
    page: { type: 'string', minLength: 0 }
  },
  required: [
    'name',
    'page'
  ]
})
if (metricValidator(input)) {
  assertType<'page-view'>(input.name)
  assertType<string>(input.page)
}
// With no `required` list, every property is `T | undefined`.
const noRequiredFieldsValidator = createValidator({
  type: 'object',
  properties: {
    a: { type: 'string' },
    b: { type: 'string' },
    c: { type: 'string' }
  }
})
if (noRequiredFieldsValidator(input)) {
  if (typeof input.a !== 'string') assertType<undefined>(input.a)
  if (typeof input.b !== 'string') assertType<undefined>(input.b)
  if (typeof input.c !== 'string') assertType<undefined>(input.c)
  if (typeof input.a !== 'undefined') assertType<string>(input.a)
  if (typeof input.b !== 'undefined') assertType<string>(input.b)
  if (typeof input.c !== 'undefined') assertType<string>(input.c)
}
// Optional outer property with a required nested object.
const signupValidator = createValidator({
  type: 'object',
  properties: {
    email: {
      type: 'string'
    },
    paymentInformation: {
      type: 'object',
      properties: {
        plan: { type: 'string' },
        token: { type: 'string' }
      },
      required: [
        'plan' as 'plan',
        'token' as 'token'
      ]
    }
  },
  required: [
    'paymentInformation'
  ]
})
if (signupValidator(input)) {
  if (typeof input.email !== 'string') assertType<undefined>(input.email)
  if (typeof input.email !== 'undefined') assertType<string>(input.email)
  assertType<string>(input.paymentInformation.plan)
  assertType<string>(input.paymentInformation.token)
}
// --- oneOf schemas produce discriminated unions ---
const animalValidator = createValidator({
  oneOf: [
    {
      type: 'object',
      properties: {
        type: { enum: ['cat' as 'cat'] },
        name: { type: 'string' }
      },
      required: [
        'type',
        'name'
      ]
    },
    {
      type: 'object',
      properties: {
        type: { enum: ['dog' as 'dog'] },
        name: { type: 'string' }
      },
      required: [
        'type',
        'name'
      ]
    }
  ]
})
if (animalValidator(input)) {
  if (input.type !== 'cat') assertType<'dog'>(input.type)
  if (input.type !== 'dog') assertType<'cat'>(input.type)
  assertType<string>(input.name)
}
// Three-way discriminated union on a `kind` tag.
const shapeValidator = createValidator({
  oneOf: [
    { type: 'object', properties: { kind: { enum: ['triangle' as 'triangle'] } }, required: ['kind'] },
    { type: 'object', properties: { kind: { enum: ['rectangle' as 'rectangle'] } }, required: ['kind'] },
    { type: 'object', properties: { kind: { enum: ['circle' as 'circle'] } }, required: ['kind'] },
  ]
})
if (shapeValidator(input)) {
  if (input.kind !== 'triangle' && input.kind !== 'rectangle') assertType<'circle'>(input.kind)
  if (input.kind !== 'rectangle' && input.kind !== 'circle') assertType<'triangle'>(input.kind)
  if (input.kind !== 'circle' && input.kind !== 'triangle') assertType<'rectangle'>(input.kind)
}
// Union members with disjoint required keys narrow via the `in` operator.
const foobar = createValidator({
  oneOf: [
    { type: 'object', properties: { a: { type: 'string' } }, required: ['a'] },
    { type: 'object', properties: { b: { type: 'number' } }, required: ['b'] },
    { type: 'object', properties: { c: { type: 'boolean' } }, required: ['c'] },
    { type: 'object', properties: { d: { type: 'null' } }, required: ['d'] },
  ]
})
if (foobar(input)) {
  if ('a' in input) assertType<string>(input.a)
  if ('b' in input) assertType<number>(input.b)
  if ('c' in input) assertType<boolean>(input.c)
  if ('d' in input) assertType<null>(input.d)
}
// oneOf over primitives behaves like a plain union type.
const stringOrNullValidator = createValidator({
  oneOf: [
    { type: 'string' },
    { type: 'null' }
  ]
})
if (stringOrNullValidator(input)) {
  if (typeof input !== 'object') assertType<string>(input)
  if (typeof input !== 'string') assertType<null>(input)
}
const primitiveValidator = createValidator({
  oneOf: [
    { type: 'string' },
    { type: 'number' },
    { type: 'boolean' }
  ]
})
if (primitiveValidator(input)) {
  if (typeof input !== 'string' && typeof input !== 'number') assertType<boolean>(input)
  if (typeof input !== 'number' && typeof input !== 'boolean') assertType<string>(input)
  if (typeof input !== 'boolean' && typeof input !== 'string') assertType<number>(input)
}
// oneOf of enums flattens into one big literal union; each check eliminates
// all but one member.
const overengineeredColorValidator = createValidator({
  oneOf: [
    { enum: ['red' as 'red', 'pink' as 'pink'] },
    { enum: ['green' as 'green', 'olive' as 'olive'] },
    { enum: ['blue' as 'blue', 'teal' as 'teal'] },
    { enum: ['yellow' as 'yellow', 'cream' as 'cream'] }
  ]
})
if (overengineeredColorValidator(input)) {
  if (input !== 'red' && input !== 'pink' && input !== 'green' && input !== 'olive' && input !== 'blue' && input !== 'teal' && input !== 'yellow') assertType<'cream'>(input)
  if (input !== 'pink' && input !== 'green' && input !== 'olive' && input !== 'blue' && input !== 'teal' && input !== 'yellow' && input !== 'cream') assertType<'red'>(input)
  if (input !== 'green' && input !== 'olive' && input !== 'blue' && input !== 'teal' && input !== 'yellow' && input !== 'cream' && input !== 'red') assertType<'pink'>(input)
  if (input !== 'olive' && input !== 'blue' && input !== 'teal' && input !== 'yellow' && input !== 'cream' && input !== 'red' && input !== 'pink') assertType<'green'>(input)
  if (input !== 'blue' && input !== 'teal' && input !== 'yellow' && input !== 'cream' && input !== 'red' && input !== 'pink' && input !== 'green') assertType<'olive'>(input)
  if (input !== 'teal' && input !== 'yellow' && input !== 'cream' && input !== 'red' && input !== 'pink' && input !== 'green' && input !== 'olive') assertType<'blue'>(input)
  if (input !== 'yellow' && input !== 'cream' && input !== 'red' && input !== 'pink' && input !== 'green' && input !== 'olive' && input !== 'blue') assertType<'teal'>(input)
  if (input !== 'cream' && input !== 'red' && input !== 'pink' && input !== 'green' && input !== 'olive' && input !== 'blue' && input !== 'teal') assertType<'yellow'>(input)
}
// --- type arrays (e.g. ['string', 'null']) produce nullable types ---
const nullableStringValidator = createValidator({
  type: ['string', 'null']
})
if (nullableStringValidator(input)) {
  if (typeof input !== 'object') assertType<string>(input)
  if (typeof input !== 'string') assertType<null>(input)
}
const nullableNameValidator = createValidator({
  type: 'object',
  properties: {
    name: { type: ['string', 'null'] }
  },
  required: [
    'name'
  ]
})
if (nullableNameValidator(input)) {
  if (typeof input.name !== 'object') assertType<string>(input.name)
  if (typeof input.name !== 'string') assertType<null>(input.name)
}
// Nullable array property.
const nullableInventoryValidator = createValidator({
  type: 'object',
  properties: {
    inventory: {
      type: ['array', 'null'],
      items: { type: 'string' }
    }
  },
  required: [
    'inventory'
  ]
})
if (nullableInventoryValidator(input)) {
  if (input.inventory === null) assertType<null>(input.inventory)
  if (input.inventory !== null) assertType<string[]>(input.inventory)
}
// Nullable nested object property.
const nullableParentValidator = createValidator({
  type: 'object',
  properties: {
    parent: {
      type: ['object', 'null'],
      properties: {
        name: { type: 'string' }
      },
      required: [
        'name' as 'name'
      ]
    }
  },
  required: [
    'parent'
  ]
})
if (nullableParentValidator(input)) {
  if (input.parent === null) assertType<null>(input.parent)
  if (input.parent !== null) assertType<string>(input.parent.name)
}
export interface paths {
"/": {
get: {
responses: {
/** OK */
200: unknown;
};
};
};
"/comment_with_author": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.comment_with_author.id"];
slug?: parameters["rowFilter.comment_with_author.slug"];
createdAt?: parameters["rowFilter.comment_with_author.createdAt"];
updatedAt?: parameters["rowFilter.comment_with_author.updatedAt"];
title?: parameters["rowFilter.comment_with_author.title"];
content?: parameters["rowFilter.comment_with_author.content"];
isPublished?: parameters["rowFilter.comment_with_author.isPublished"];
authorId?: parameters["rowFilter.comment_with_author.authorId"];
parentId?: parameters["rowFilter.comment_with_author.parentId"];
live?: parameters["rowFilter.comment_with_author.live"];
siteId?: parameters["rowFilter.comment_with_author.siteId"];
isPinned?: parameters["rowFilter.comment_with_author.isPinned"];
isDeleted?: parameters["rowFilter.comment_with_author.isDeleted"];
isApproved?: parameters["rowFilter.comment_with_author.isApproved"];
author?: parameters["rowFilter.comment_with_author.author"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["comment_with_author"][];
};
/** Partial Content */
206: unknown;
};
};
};
"/comments_linear_view": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.comments_linear_view.id"];
slug?: parameters["rowFilter.comments_linear_view.slug"];
createdAt?: parameters["rowFilter.comments_linear_view.createdAt"];
updatedAt?: parameters["rowFilter.comments_linear_view.updatedAt"];
title?: parameters["rowFilter.comments_linear_view.title"];
content?: parameters["rowFilter.comments_linear_view.content"];
isPublished?: parameters["rowFilter.comments_linear_view.isPublished"];
authorId?: parameters["rowFilter.comments_linear_view.authorId"];
parentId?: parameters["rowFilter.comments_linear_view.parentId"];
live?: parameters["rowFilter.comments_linear_view.live"];
siteId?: parameters["rowFilter.comments_linear_view.siteId"];
isPinned?: parameters["rowFilter.comments_linear_view.isPinned"];
isDeleted?: parameters["rowFilter.comments_linear_view.isDeleted"];
isApproved?: parameters["rowFilter.comments_linear_view.isApproved"];
author?: parameters["rowFilter.comments_linear_view.author"];
parent?: parameters["rowFilter.comments_linear_view.parent"];
responses?: parameters["rowFilter.comments_linear_view.responses"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["comments_linear_view"][];
};
/** Partial Content */
206: unknown;
};
};
};
"/comments_thread": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.comments_thread.id"];
slug?: parameters["rowFilter.comments_thread.slug"];
createdAt?: parameters["rowFilter.comments_thread.createdAt"];
updatedAt?: parameters["rowFilter.comments_thread.updatedAt"];
title?: parameters["rowFilter.comments_thread.title"];
content?: parameters["rowFilter.comments_thread.content"];
isPublished?: parameters["rowFilter.comments_thread.isPublished"];
authorId?: parameters["rowFilter.comments_thread.authorId"];
parentId?: parameters["rowFilter.comments_thread.parentId"];
live?: parameters["rowFilter.comments_thread.live"];
siteId?: parameters["rowFilter.comments_thread.siteId"];
isPinned?: parameters["rowFilter.comments_thread.isPinned"];
isDeleted?: parameters["rowFilter.comments_thread.isDeleted"];
isApproved?: parameters["rowFilter.comments_thread.isApproved"];
author?: parameters["rowFilter.comments_thread.author"];
votes?: parameters["rowFilter.comments_thread.votes"];
upvotes?: parameters["rowFilter.comments_thread.upvotes"];
downvotes?: parameters["rowFilter.comments_thread.downvotes"];
depth?: parameters["rowFilter.comments_thread.depth"];
path?: parameters["rowFilter.comments_thread.path"];
pathVotesRecent?: parameters["rowFilter.comments_thread.pathVotesRecent"];
pathLeastRecent?: parameters["rowFilter.comments_thread.pathLeastRecent"];
pathMostRecent?: parameters["rowFilter.comments_thread.pathMostRecent"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["comments_thread"][];
};
/** Partial Content */
206: unknown;
};
};
};
"/comments_thread_with_user_vote": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.comments_thread_with_user_vote.id"];
slug?: parameters["rowFilter.comments_thread_with_user_vote.slug"];
createdAt?: parameters["rowFilter.comments_thread_with_user_vote.createdAt"];
updatedAt?: parameters["rowFilter.comments_thread_with_user_vote.updatedAt"];
title?: parameters["rowFilter.comments_thread_with_user_vote.title"];
content?: parameters["rowFilter.comments_thread_with_user_vote.content"];
isPublished?: parameters["rowFilter.comments_thread_with_user_vote.isPublished"];
authorId?: parameters["rowFilter.comments_thread_with_user_vote.authorId"];
parentId?: parameters["rowFilter.comments_thread_with_user_vote.parentId"];
live?: parameters["rowFilter.comments_thread_with_user_vote.live"];
siteId?: parameters["rowFilter.comments_thread_with_user_vote.siteId"];
isPinned?: parameters["rowFilter.comments_thread_with_user_vote.isPinned"];
isDeleted?: parameters["rowFilter.comments_thread_with_user_vote.isDeleted"];
isApproved?: parameters["rowFilter.comments_thread_with_user_vote.isApproved"];
author?: parameters["rowFilter.comments_thread_with_user_vote.author"];
votes?: parameters["rowFilter.comments_thread_with_user_vote.votes"];
upvotes?: parameters["rowFilter.comments_thread_with_user_vote.upvotes"];
downvotes?: parameters["rowFilter.comments_thread_with_user_vote.downvotes"];
depth?: parameters["rowFilter.comments_thread_with_user_vote.depth"];
path?: parameters["rowFilter.comments_thread_with_user_vote.path"];
pathVotesRecent?: parameters["rowFilter.comments_thread_with_user_vote.pathVotesRecent"];
pathLeastRecent?: parameters["rowFilter.comments_thread_with_user_vote.pathLeastRecent"];
pathMostRecent?: parameters["rowFilter.comments_thread_with_user_vote.pathMostRecent"];
userVoteValue?: parameters["rowFilter.comments_thread_with_user_vote.userVoteValue"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["comments_thread_with_user_vote"][];
};
/** Partial Content */
206: unknown;
};
};
};
"/comments_with_author_votes": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.comments_with_author_votes.id"];
slug?: parameters["rowFilter.comments_with_author_votes.slug"];
createdAt?: parameters["rowFilter.comments_with_author_votes.createdAt"];
updatedAt?: parameters["rowFilter.comments_with_author_votes.updatedAt"];
title?: parameters["rowFilter.comments_with_author_votes.title"];
content?: parameters["rowFilter.comments_with_author_votes.content"];
isPublished?: parameters["rowFilter.comments_with_author_votes.isPublished"];
authorId?: parameters["rowFilter.comments_with_author_votes.authorId"];
parentId?: parameters["rowFilter.comments_with_author_votes.parentId"];
live?: parameters["rowFilter.comments_with_author_votes.live"];
siteId?: parameters["rowFilter.comments_with_author_votes.siteId"];
isPinned?: parameters["rowFilter.comments_with_author_votes.isPinned"];
isDeleted?: parameters["rowFilter.comments_with_author_votes.isDeleted"];
isApproved?: parameters["rowFilter.comments_with_author_votes.isApproved"];
author?: parameters["rowFilter.comments_with_author_votes.author"];
votes?: parameters["rowFilter.comments_with_author_votes.votes"];
upvotes?: parameters["rowFilter.comments_with_author_votes.upvotes"];
downvotes?: parameters["rowFilter.comments_with_author_votes.downvotes"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["comments_with_author_votes"][];
};
/** Partial Content */
206: unknown;
};
};
};
"/posts": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.posts.id"];
slug?: parameters["rowFilter.posts.slug"];
createdAt?: parameters["rowFilter.posts.createdAt"];
updatedAt?: parameters["rowFilter.posts.updatedAt"];
title?: parameters["rowFilter.posts.title"];
content?: parameters["rowFilter.posts.content"];
isPublished?: parameters["rowFilter.posts.isPublished"];
authorId?: parameters["rowFilter.posts.authorId"];
parentId?: parameters["rowFilter.posts.parentId"];
live?: parameters["rowFilter.posts.live"];
siteId?: parameters["rowFilter.posts.siteId"];
isPinned?: parameters["rowFilter.posts.isPinned"];
isDeleted?: parameters["rowFilter.posts.isDeleted"];
isApproved?: parameters["rowFilter.posts.isApproved"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["posts"][];
};
/** Partial Content */
206: unknown;
};
};
post: {
parameters: {
body: {
/** posts */
posts?: definitions["posts"];
};
query: {
/** Filtering Columns */
select?: parameters["select"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** Created */
201: unknown;
};
};
delete: {
parameters: {
query: {
id?: parameters["rowFilter.posts.id"];
slug?: parameters["rowFilter.posts.slug"];
createdAt?: parameters["rowFilter.posts.createdAt"];
updatedAt?: parameters["rowFilter.posts.updatedAt"];
title?: parameters["rowFilter.posts.title"];
content?: parameters["rowFilter.posts.content"];
isPublished?: parameters["rowFilter.posts.isPublished"];
authorId?: parameters["rowFilter.posts.authorId"];
parentId?: parameters["rowFilter.posts.parentId"];
live?: parameters["rowFilter.posts.live"];
siteId?: parameters["rowFilter.posts.siteId"];
isPinned?: parameters["rowFilter.posts.isPinned"];
isDeleted?: parameters["rowFilter.posts.isDeleted"];
isApproved?: parameters["rowFilter.posts.isApproved"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
patch: {
parameters: {
query: {
id?: parameters["rowFilter.posts.id"];
slug?: parameters["rowFilter.posts.slug"];
createdAt?: parameters["rowFilter.posts.createdAt"];
updatedAt?: parameters["rowFilter.posts.updatedAt"];
title?: parameters["rowFilter.posts.title"];
content?: parameters["rowFilter.posts.content"];
isPublished?: parameters["rowFilter.posts.isPublished"];
authorId?: parameters["rowFilter.posts.authorId"];
parentId?: parameters["rowFilter.posts.parentId"];
live?: parameters["rowFilter.posts.live"];
siteId?: parameters["rowFilter.posts.siteId"];
isPinned?: parameters["rowFilter.posts.isPinned"];
isDeleted?: parameters["rowFilter.posts.isDeleted"];
isApproved?: parameters["rowFilter.posts.isApproved"];
};
body: {
/** posts */
posts?: definitions["posts"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
};
"/profiles": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.profiles.id"];
updated_at?: parameters["rowFilter.profiles.updated_at"];
username?: parameters["rowFilter.profiles.username"];
avatar_url?: parameters["rowFilter.profiles.avatar_url"];
website?: parameters["rowFilter.profiles.website"];
user_metadata?: parameters["rowFilter.profiles.user_metadata"];
full_name?: parameters["rowFilter.profiles.full_name"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["profiles"][];
};
/** Partial Content */
206: unknown;
};
};
post: {
parameters: {
body: {
/** profiles */
profiles?: definitions["profiles"];
};
query: {
/** Filtering Columns */
select?: parameters["select"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** Created */
201: unknown;
};
};
delete: {
parameters: {
query: {
id?: parameters["rowFilter.profiles.id"];
updated_at?: parameters["rowFilter.profiles.updated_at"];
username?: parameters["rowFilter.profiles.username"];
avatar_url?: parameters["rowFilter.profiles.avatar_url"];
website?: parameters["rowFilter.profiles.website"];
user_metadata?: parameters["rowFilter.profiles.user_metadata"];
full_name?: parameters["rowFilter.profiles.full_name"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
patch: {
parameters: {
query: {
id?: parameters["rowFilter.profiles.id"];
updated_at?: parameters["rowFilter.profiles.updated_at"];
username?: parameters["rowFilter.profiles.username"];
avatar_url?: parameters["rowFilter.profiles.avatar_url"];
website?: parameters["rowFilter.profiles.website"];
user_metadata?: parameters["rowFilter.profiles.user_metadata"];
full_name?: parameters["rowFilter.profiles.full_name"];
};
body: {
/** profiles */
profiles?: definitions["profiles"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
};
"/sites": {
get: {
parameters: {
query: {
id?: parameters["rowFilter.sites.id"];
siteDomain?: parameters["rowFilter.sites.siteDomain"];
ownerId?: parameters["rowFilter.sites.ownerId"];
name?: parameters["rowFilter.sites.name"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["sites"][];
};
/** Partial Content */
206: unknown;
};
};
post: {
parameters: {
body: {
/** sites */
sites?: definitions["sites"];
};
query: {
/** Filtering Columns */
select?: parameters["select"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** Created */
201: unknown;
};
};
delete: {
parameters: {
query: {
id?: parameters["rowFilter.sites.id"];
siteDomain?: parameters["rowFilter.sites.siteDomain"];
ownerId?: parameters["rowFilter.sites.ownerId"];
name?: parameters["rowFilter.sites.name"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
patch: {
parameters: {
query: {
id?: parameters["rowFilter.sites.id"];
siteDomain?: parameters["rowFilter.sites.siteDomain"];
ownerId?: parameters["rowFilter.sites.ownerId"];
name?: parameters["rowFilter.sites.name"];
};
body: {
/** sites */
sites?: definitions["sites"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
};
"/votes": {
get: {
parameters: {
query: {
postId?: parameters["rowFilter.votes.postId"];
userId?: parameters["rowFilter.votes.userId"];
value?: parameters["rowFilter.votes.value"];
/** Filtering Columns */
select?: parameters["select"];
/** Ordering */
order?: parameters["order"];
/** Limiting and Pagination */
offset?: parameters["offset"];
/** Limiting and Pagination */
limit?: parameters["limit"];
};
header: {
/** Limiting and Pagination */
Range?: parameters["range"];
/** Limiting and Pagination */
"Range-Unit"?: parameters["rangeUnit"];
/** Preference */
Prefer?: parameters["preferCount"];
};
};
responses: {
/** OK */
200: {
schema: definitions["votes"][];
};
/** Partial Content */
206: unknown;
};
};
post: {
parameters: {
body: {
/** votes */
votes?: definitions["votes"];
};
query: {
/** Filtering Columns */
select?: parameters["select"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** Created */
201: unknown;
};
};
delete: {
parameters: {
query: {
postId?: parameters["rowFilter.votes.postId"];
userId?: parameters["rowFilter.votes.userId"];
value?: parameters["rowFilter.votes.value"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
patch: {
parameters: {
query: {
postId?: parameters["rowFilter.votes.postId"];
userId?: parameters["rowFilter.votes.userId"];
value?: parameters["rowFilter.votes.value"];
};
body: {
/** votes */
votes?: definitions["votes"];
};
header: {
/** Preference */
Prefer?: parameters["preferReturn"];
};
};
responses: {
/** No Content */
204: never;
};
};
};
"/rpc/get_post_by_slug": {
post: {
parameters: {
body: {
args: {
root_slug: string;
max_depth?: number;
responses_limit?: number;
responses_offset?: number;
current_depth?: number;
user_id?: string;
};
};
header: {
/** Preference */
Prefer?: parameters["preferParams"];
};
};
responses: {
/** OK */
200: unknown;
};
};
};
"/rpc/handle_new_user": {
post: {
parameters: {
body: {
args: { [key: string]: any };
};
header: {
/** Preference */
Prefer?: parameters["preferParams"];
};
};
responses: {
/** OK */
200: unknown;
};
};
};
"/rpc/build_post_responses": {
post: {
parameters: {
body: {
args: {
max_depth?: number;
current_depth?: number;
user_id?: string;
parent_id: number;
};
};
header: {
/** Preference */
Prefer?: parameters["preferParams"];
};
};
responses: {
/** OK */
200: unknown;
};
};
};
}
/**
 * Row shapes for every table and view exposed through the REST API.
 *
 * Optional properties correspond to columns that may be absent from a
 * response (nullable or defaulted columns); required properties are
 * mandatory columns. The `<pk/>` / `<fk .../>` notes are emitted by the
 * schema-to-types generator.
 *
 * NOTE(review): this looks like generated output — regenerate from the
 * schema instead of hand-editing if columns change.
 */
export interface definitions {
  /** Row shape for the `comment_with_author` relation. */
  comment_with_author: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     */
    id?: number;
    slug?: string;
    createdAt?: string;
    updatedAt?: string;
    title?: string;
    content?: string;
    isPublished?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `profiles.id`.<fk table='profiles' column='id'/>
     */
    authorId?: string;
    /**
     * Note:
     * This is a Foreign Key to `posts.id`.<fk table='posts' column='id'/>
     */
    parentId?: number;
    live?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `sites.id`.<fk table='sites' column='id'/>
     */
    siteId?: number;
    isPinned?: boolean;
    isDeleted?: boolean;
    isApproved?: boolean;
    author?: string;
  };
  /** Row shape for the `comments_linear_view` relation (adds `parent`/`responses`). */
  comments_linear_view: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     */
    id?: number;
    slug?: string;
    createdAt?: string;
    updatedAt?: string;
    title?: string;
    content?: string;
    isPublished?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `profiles.id`.<fk table='profiles' column='id'/>
     */
    authorId?: string;
    /**
     * Note:
     * This is a Foreign Key to `posts.id`.<fk table='posts' column='id'/>
     */
    parentId?: number;
    live?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `sites.id`.<fk table='sites' column='id'/>
     */
    siteId?: number;
    isPinned?: boolean;
    isDeleted?: boolean;
    isApproved?: boolean;
    author?: string;
    parent?: string;
    responses?: string;
  };
  /** Row shape for `comments_thread` (vote totals plus materialized sort paths). */
  comments_thread: {
    id?: number;
    slug?: string;
    createdAt?: string;
    updatedAt?: string;
    title?: string;
    content?: string;
    isPublished?: boolean;
    authorId?: string;
    parentId?: number;
    live?: boolean;
    siteId?: number;
    isPinned?: boolean;
    isDeleted?: boolean;
    isApproved?: boolean;
    author?: string;
    votes?: number;
    upvotes?: number;
    downvotes?: number;
    depth?: number;
    path?: string;
    pathVotesRecent?: string;
    pathLeastRecent?: string;
    pathMostRecent?: string;
  };
  /** `comments_thread` extended with the requesting user's vote value. */
  comments_thread_with_user_vote: {
    id?: number;
    slug?: string;
    createdAt?: string;
    updatedAt?: string;
    title?: string;
    content?: string;
    isPublished?: boolean;
    authorId?: string;
    parentId?: number;
    live?: boolean;
    siteId?: number;
    isPinned?: boolean;
    isDeleted?: boolean;
    isApproved?: boolean;
    author?: string;
    votes?: number;
    upvotes?: number;
    downvotes?: number;
    depth?: number;
    path?: string;
    pathVotesRecent?: string;
    pathLeastRecent?: string;
    pathMostRecent?: string;
    userVoteValue?: number;
  };
  /** Row shape for `comments_with_author_votes` (author name plus vote totals). */
  comments_with_author_votes: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     */
    id?: number;
    slug?: string;
    createdAt?: string;
    updatedAt?: string;
    title?: string;
    content?: string;
    isPublished?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `profiles.id`.<fk table='profiles' column='id'/>
     */
    authorId?: string;
    /**
     * Note:
     * This is a Foreign Key to `posts.id`.<fk table='posts' column='id'/>
     */
    parentId?: number;
    live?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `sites.id`.<fk table='sites' column='id'/>
     */
    siteId?: number;
    isPinned?: boolean;
    isDeleted?: boolean;
    isApproved?: boolean;
    author?: string;
    votes?: number;
    upvotes?: number;
    downvotes?: number;
  };
  /** Row shape for the `posts` table; rows may nest under a parent via `parentId`. */
  posts: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     */
    id: number;
    slug: string;
    createdAt: string;
    updatedAt?: string;
    title?: string;
    content?: string;
    isPublished: boolean;
    /**
     * Note:
     * This is a Foreign Key to `profiles.id`.<fk table='profiles' column='id'/>
     */
    authorId: string;
    /**
     * Note:
     * This is a Foreign Key to `posts.id`.<fk table='posts' column='id'/>
     */
    parentId?: number;
    live?: boolean;
    /**
     * Note:
     * This is a Foreign Key to `sites.id`.<fk table='sites' column='id'/>
     */
    siteId: number;
    isPinned: boolean;
    isDeleted: boolean;
    isApproved: boolean;
  };
  /** Row shape for the `profiles` table (keyed 1:1 to `users.id`). */
  profiles: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     * This is a Foreign Key to `users.id`.<fk table='users' column='id'/>
     */
    id: string;
    updated_at?: string;
    username?: string;
    avatar_url?: string;
    website?: string;
    user_metadata?: string;
    full_name?: string;
  };
  /** Row shape for the `sites` table. */
  sites: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     */
    id: number;
    siteDomain: string;
    /**
     * Note:
     * This is a Foreign Key to `profiles.id`.<fk table='profiles' column='id'/>
     */
    ownerId: string;
    name: string;
  };
  /** Row shape for the `votes` table (composite key: `postId` + `userId`). */
  votes: {
    /**
     * Note:
     * This is a Primary Key.<pk/>
     * This is a Foreign Key to `posts.id`.<fk table='posts' column='id'/>
     */
    postId: number;
    /**
     * Note:
     * This is a Primary Key.<pk/>
     * This is a Foreign Key to `profiles.id`.<fk table='profiles' column='id'/>
     */
    userId: string;
    value: number;
  };
}
/**
 * Shared parameter types referenced by the path operations above.
 *
 * Keys prefixed `rowFilter.<relation>.<column>` are per-column query-string
 * filters (used by the GET/PATCH/DELETE operations); keys prefixed
 * `body.<relation>` are request payload shapes. The remaining entries are
 * common query/header parameters (selection, ordering, pagination, `Prefer`).
 */
export interface parameters {
  /** Preference */
  preferParams: "params=single-object";
  /** Preference */
  preferReturn: "return=representation" | "return=minimal" | "return=none";
  /** Preference */
  preferCount: "count=none";
  /** Filtering Columns */
  select: string;
  /** On Conflict */
  on_conflict: string;
  /** Ordering */
  order: string;
  /** Limiting and Pagination */
  range: string;
  /** Limiting and Pagination */
  rangeUnit: string;
  /** Limiting and Pagination */
  offset: string;
  /** Limiting and Pagination */
  limit: string;
  /** comment_with_author */
  "body.comment_with_author": definitions["comment_with_author"];
  "rowFilter.comment_with_author.id": string;
  "rowFilter.comment_with_author.slug": string;
  "rowFilter.comment_with_author.createdAt": string;
  "rowFilter.comment_with_author.updatedAt": string;
  "rowFilter.comment_with_author.title": string;
  "rowFilter.comment_with_author.content": string;
  "rowFilter.comment_with_author.isPublished": string;
  "rowFilter.comment_with_author.authorId": string;
  "rowFilter.comment_with_author.parentId": string;
  "rowFilter.comment_with_author.live": string;
  "rowFilter.comment_with_author.siteId": string;
  "rowFilter.comment_with_author.isPinned": string;
  "rowFilter.comment_with_author.isDeleted": string;
  "rowFilter.comment_with_author.isApproved": string;
  "rowFilter.comment_with_author.author": string;
  /** comments_linear_view */
  "body.comments_linear_view": definitions["comments_linear_view"];
  "rowFilter.comments_linear_view.id": string;
  "rowFilter.comments_linear_view.slug": string;
  "rowFilter.comments_linear_view.createdAt": string;
  "rowFilter.comments_linear_view.updatedAt": string;
  "rowFilter.comments_linear_view.title": string;
  "rowFilter.comments_linear_view.content": string;
  "rowFilter.comments_linear_view.isPublished": string;
  "rowFilter.comments_linear_view.authorId": string;
  "rowFilter.comments_linear_view.parentId": string;
  "rowFilter.comments_linear_view.live": string;
  "rowFilter.comments_linear_view.siteId": string;
  "rowFilter.comments_linear_view.isPinned": string;
  "rowFilter.comments_linear_view.isDeleted": string;
  "rowFilter.comments_linear_view.isApproved": string;
  "rowFilter.comments_linear_view.author": string;
  "rowFilter.comments_linear_view.parent": string;
  "rowFilter.comments_linear_view.responses": string;
  /** comments_thread */
  "body.comments_thread": definitions["comments_thread"];
  "rowFilter.comments_thread.id": string;
  "rowFilter.comments_thread.slug": string;
  "rowFilter.comments_thread.createdAt": string;
  "rowFilter.comments_thread.updatedAt": string;
  "rowFilter.comments_thread.title": string;
  "rowFilter.comments_thread.content": string;
  "rowFilter.comments_thread.isPublished": string;
  "rowFilter.comments_thread.authorId": string;
  "rowFilter.comments_thread.parentId": string;
  "rowFilter.comments_thread.live": string;
  "rowFilter.comments_thread.siteId": string;
  "rowFilter.comments_thread.isPinned": string;
  "rowFilter.comments_thread.isDeleted": string;
  "rowFilter.comments_thread.isApproved": string;
  "rowFilter.comments_thread.author": string;
  "rowFilter.comments_thread.votes": string;
  "rowFilter.comments_thread.upvotes": string;
  "rowFilter.comments_thread.downvotes": string;
  "rowFilter.comments_thread.depth": string;
  "rowFilter.comments_thread.path": string;
  "rowFilter.comments_thread.pathVotesRecent": string;
  "rowFilter.comments_thread.pathLeastRecent": string;
  "rowFilter.comments_thread.pathMostRecent": string;
  /** comments_thread_with_user_vote */
  "body.comments_thread_with_user_vote": definitions["comments_thread_with_user_vote"];
  "rowFilter.comments_thread_with_user_vote.id": string;
  "rowFilter.comments_thread_with_user_vote.slug": string;
  "rowFilter.comments_thread_with_user_vote.createdAt": string;
  "rowFilter.comments_thread_with_user_vote.updatedAt": string;
  "rowFilter.comments_thread_with_user_vote.title": string;
  "rowFilter.comments_thread_with_user_vote.content": string;
  "rowFilter.comments_thread_with_user_vote.isPublished": string;
  "rowFilter.comments_thread_with_user_vote.authorId": string;
  "rowFilter.comments_thread_with_user_vote.parentId": string;
  "rowFilter.comments_thread_with_user_vote.live": string;
  "rowFilter.comments_thread_with_user_vote.siteId": string;
  "rowFilter.comments_thread_with_user_vote.isPinned": string;
  "rowFilter.comments_thread_with_user_vote.isDeleted": string;
  "rowFilter.comments_thread_with_user_vote.isApproved": string;
  "rowFilter.comments_thread_with_user_vote.author": string;
  "rowFilter.comments_thread_with_user_vote.votes": string;
  "rowFilter.comments_thread_with_user_vote.upvotes": string;
  "rowFilter.comments_thread_with_user_vote.downvotes": string;
  "rowFilter.comments_thread_with_user_vote.depth": string;
  "rowFilter.comments_thread_with_user_vote.path": string;
  "rowFilter.comments_thread_with_user_vote.pathVotesRecent": string;
  "rowFilter.comments_thread_with_user_vote.pathLeastRecent": string;
  "rowFilter.comments_thread_with_user_vote.pathMostRecent": string;
  "rowFilter.comments_thread_with_user_vote.userVoteValue": string;
  /** comments_with_author_votes */
  "body.comments_with_author_votes": definitions["comments_with_author_votes"];
  "rowFilter.comments_with_author_votes.id": string;
  "rowFilter.comments_with_author_votes.slug": string;
  "rowFilter.comments_with_author_votes.createdAt": string;
  "rowFilter.comments_with_author_votes.updatedAt": string;
  "rowFilter.comments_with_author_votes.title": string;
  "rowFilter.comments_with_author_votes.content": string;
  "rowFilter.comments_with_author_votes.isPublished": string;
  "rowFilter.comments_with_author_votes.authorId": string;
  "rowFilter.comments_with_author_votes.parentId": string;
  "rowFilter.comments_with_author_votes.live": string;
  "rowFilter.comments_with_author_votes.siteId": string;
  "rowFilter.comments_with_author_votes.isPinned": string;
  "rowFilter.comments_with_author_votes.isDeleted": string;
  "rowFilter.comments_with_author_votes.isApproved": string;
  "rowFilter.comments_with_author_votes.author": string;
  "rowFilter.comments_with_author_votes.votes": string;
  "rowFilter.comments_with_author_votes.upvotes": string;
  "rowFilter.comments_with_author_votes.downvotes": string;
  /** posts */
  "body.posts": definitions["posts"];
  "rowFilter.posts.id": string;
  "rowFilter.posts.slug": string;
  "rowFilter.posts.createdAt": string;
  "rowFilter.posts.updatedAt": string;
  "rowFilter.posts.title": string;
  "rowFilter.posts.content": string;
  "rowFilter.posts.isPublished": string;
  "rowFilter.posts.authorId": string;
  "rowFilter.posts.parentId": string;
  "rowFilter.posts.live": string;
  "rowFilter.posts.siteId": string;
  "rowFilter.posts.isPinned": string;
  "rowFilter.posts.isDeleted": string;
  "rowFilter.posts.isApproved": string;
  /** profiles */
  "body.profiles": definitions["profiles"];
  "rowFilter.profiles.id": string;
  "rowFilter.profiles.updated_at": string;
  "rowFilter.profiles.username": string;
  "rowFilter.profiles.avatar_url": string;
  "rowFilter.profiles.website": string;
  "rowFilter.profiles.user_metadata": string;
  "rowFilter.profiles.full_name": string;
  /** sites */
  "body.sites": definitions["sites"];
  "rowFilter.sites.id": string;
  "rowFilter.sites.siteDomain": string;
  "rowFilter.sites.ownerId": string;
  "rowFilter.sites.name": string;
  /** votes */
  "body.votes": definitions["votes"];
  "rowFilter.votes.postId": string;
  "rowFilter.votes.userId": string;
  "rowFilter.votes.value": string;
}
export interface operations {} | the_stack |
'use strict'
import dotenv from 'dotenv'
import FormData from 'form-data'
import fs from 'fs'
import app from '../app'
import { getConfig } from '../utils/config'
import { signJWT } from '../utils/index'
import { S3Backend } from '../backend/s3'
// Load the test-specific environment before any config is read.
dotenv.config({ path: '.env.test' })
// Snapshot of the environment at module load; restored in beforeEach so tests
// that mutate process.env (e.g. FILE_SIZE_LIMIT below) do not leak into others.
const ENV = process.env
// NOTE(review): jwtSecret and serviceKey are unused in this portion of the
// file — confirm they are referenced further down before removing them.
const { anonKey, jwtSecret, serviceKey } = getConfig()
beforeEach(() => {
  // Start every test from the pristine environment captured at module load.
  process.env = { ...ENV }

  // Factory for the canned success metadata shared by the happy-path stubs.
  // A fresh object per spy keeps identity semantics identical to inlining.
  const okMetadata = () => ({
    httpStatusCode: 200,
    size: 3746,
    mimetype: 'image/png',
  })

  // Stub every S3Backend operation so no test reaches real object storage.
  jest.spyOn(S3Backend.prototype, 'getObject').mockResolvedValue({
    metadata: okMetadata(),
    body: Buffer.from(''),
  })
  jest.spyOn(S3Backend.prototype, 'uploadObject').mockResolvedValue(okMetadata())
  jest.spyOn(S3Backend.prototype, 'copyObject').mockResolvedValue(okMetadata())
  jest.spyOn(S3Backend.prototype, 'headObject').mockResolvedValue(okMetadata())
  jest.spyOn(S3Backend.prototype, 'deleteObject').mockResolvedValue({})
  jest.spyOn(S3Backend.prototype, 'deleteObjects').mockResolvedValue({})
})
// Clear recorded calls on every mock between tests; the spies themselves are
// re-created with fresh implementations by the beforeEach registered above.
beforeEach(() => {
  jest.clearAllMocks()
})
/*
* GET /object/:id
*/
// GET /object/:id — download behaviour and RLS (row-level security) enforcement.
// Fix: use the canonical matcher `toHaveBeenCalled()` instead of the deprecated
// Jest alias `toBeCalled()`, matching the rest of this file.
describe('testing GET object', () => {
  test('check if RLS policies are respected: authenticated user is able to read authenticated resource', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/authenticated/bucket2/authenticated/casestudy.png',
      headers: {
        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      },
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.getObject).toHaveBeenCalled()
  })
  test('forward 304 and If-Modified-Since/If-None-Match headers', async () => {
    // The backend rejects with a 304 metadata object; the route must surface
    // the 304 and forward the conditional headers to the storage call.
    const mockGetObject = jest.spyOn(S3Backend.prototype, 'getObject')
    mockGetObject.mockRejectedValue({
      $metadata: {
        httpStatusCode: 304,
      },
    })
    const response = await app().inject({
      method: 'GET',
      url: '/object/authenticated/bucket2/authenticated/casestudy.png',
      headers: {
        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
        'if-modified-since': 'Fri Aug 13 2021 00:00:00 GMT+0800 (Singapore Standard Time)',
        'if-none-match': 'abc',
      },
    })
    expect(response.statusCode).toBe(304)
    // Third argument of getObject carries the conditional-request options.
    expect(mockGetObject.mock.calls[0][2]).toMatchObject({
      ifModifiedSince: 'Fri Aug 13 2021 00:00:00 GMT+0800 (Singapore Standard Time)',
      ifNoneMatch: 'abc',
    })
  })
  test('check if RLS policies are respected: anon user is not able to read authenticated resource', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/authenticated/bucket2/authenticated/casestudy.png',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.getObject).not.toHaveBeenCalled()
  })
  test('user is not able to read a resource without Auth header', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/authenticated/bucket2/authenticated/casestudy.png',
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.getObject).not.toHaveBeenCalled()
  })
  test('return 400 when reading a non existent object', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/authenticated/bucket2/authenticated/notfound',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.getObject).not.toHaveBeenCalled()
  })
  test('return 400 when reading a non existent bucket', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/authenticated/notfound/authenticated/casestudy.png',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.getObject).not.toHaveBeenCalled()
  })
})
/*
* POST /object/:id
* multipart upload
*/
// POST /object/:id (multipart upload) — RLS enforcement, duplicate handling,
// upsert, and size limits. Fix: replace the deprecated Jest alias
// `toBeCalled()` with `toHaveBeenCalled()` for consistency with the file.
describe('testing POST object via multipart upload', () => {
  test('check if RLS policies are respected: authenticated user is able to upload authenticated resource', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
    })
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/authenticated/casestudy1.png',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
    expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy1.png"}`)
  })
  test('check if RLS policies are respected: anon user is not able to upload authenticated resource', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/authenticated/casestudy.png',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
    // Postgres RLS violation is surfaced verbatim (SQLSTATE 42501).
    expect(response.body).toBe(
      JSON.stringify({
        statusCode: '42501',
        error: '',
        message: 'new row violates row-level security policy for table "objects"',
      })
    )
  })
  test('check if RLS policies are respected: user is not able to upload a resource without Auth header', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/authenticated/casestudy.png',
      headers: form.getHeaders(),
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when uploading to a non existent bucket', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    const response = await app().inject({
      method: 'POST',
      url: '/object/notfound/authenticated/casestudy.png',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when uploading to duplicate object', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/public/sadcat-upload23.png',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 200 when upserting duplicate object', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
      // x-upsert turns the duplicate-key conflict into an overwrite.
      'x-upsert': 'true',
    })
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/public/sadcat-upload23.png',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
  })
  test('return 400 when exceeding file size limit', async () => {
    // Mutates process.env; the outer beforeEach restores it for later tests.
    process.env.FILE_SIZE_LIMIT = '1'
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/public/sadcat.jpg',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(response.body).toBe(
      JSON.stringify({
        statusCode: '413',
        error: 'Payload too large',
        message: 'The object exceeded the maximum allowed size',
      })
    )
  })
})
/*
* POST /object/:id
* binary upload
*/
// POST /object/:id (raw binary upload with explicit Content-Length/Type).
// Fix: replace the deprecated Jest alias `toBeCalled()` with
// `toHaveBeenCalled()` for consistency with the file.
describe('testing POST object via binary upload', () => {
  test('check if RLS policies are respected: authenticated user is able to upload authenticated resource', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/authenticated/binary-casestudy1.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
    expect(response.body).toBe(`{"Key":"bucket2/authenticated/binary-casestudy1.png"}`)
  })
  test('check if RLS policies are respected: anon user is not able to upload authenticated resource', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${anonKey}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/authenticated/binary-casestudy.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
    // Postgres RLS violation is surfaced verbatim (SQLSTATE 42501).
    expect(response.body).toBe(
      JSON.stringify({
        statusCode: '42501',
        error: '',
        message: 'new row violates row-level security policy for table "objects"',
      })
    )
  })
  test('check if RLS policies are respected: user is not able to upload a resource without Auth header', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/authenticated/binary-casestudy1.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when uploading to a non existent bucket', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/notfound/authenticated/binary-casestudy1.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when uploading to duplicate object', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/public/sadcat-upload23.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 200 when upserting duplicate object', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
      // x-upsert turns the duplicate-key conflict into an overwrite.
      'x-upsert': 'true',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/public/sadcat-upload23.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
  })
  test('return 400 when exceeding file size limit', async () => {
    // Mutates process.env; the outer beforeEach restores it for later tests.
    process.env.FILE_SIZE_LIMIT = '1'
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'POST',
      url: '/object/bucket2/public/sadcat.jpg',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(response.body).toBe(
      JSON.stringify({
        statusCode: '413',
        error: 'Payload too large',
        message: 'The object exceeded the maximum allowed size',
      })
    )
  })
})
/**
* PUT /object/:id
*/
// PUT /object/:id (multipart update of an existing object).
// Fix: replace the deprecated Jest alias `toBeCalled()` with
// `toHaveBeenCalled()` for consistency with the file.
describe('testing PUT object', () => {
  test('check if RLS policies are respected: authenticated user is able to update authenticated resource', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
    })
    const response = await app().inject({
      method: 'PUT',
      url: '/object/bucket2/authenticated/cat.jpg',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
    expect(response.body).toBe(`{"Key":"bucket2/authenticated/cat.jpg"}`)
  })
  test('check if RLS policies are respected: anon user is not able to update authenticated resource', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    const response = await app().inject({
      method: 'PUT',
      url: '/object/bucket2/authenticated/cat.jpg',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
    // expect(response.body).toBe(`new row violates row-level security policy for table "objects"`)
  })
  test('user is not able to update a resource without Auth header', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const response = await app().inject({
      method: 'PUT',
      url: '/object/bucket2/authenticated/cat.jpg',
      headers: form.getHeaders(),
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when update to a non existent bucket', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    const response = await app().inject({
      method: 'PUT',
      url: '/object/notfound/authenticated/cat.jpg',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when updating a non existent key', async () => {
    const form = new FormData()
    form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
    const headers = Object.assign({}, form.getHeaders(), {
      authorization: `Bearer ${anonKey}`,
    })
    // NOTE(review): this URL targets bucket `notfound`, so it duplicates the
    // previous test; a missing *key* in an existing bucket would be
    // `/object/bucket2/authenticated/notfound.jpg` — confirm intent.
    const response = await app().inject({
      method: 'PUT',
      url: '/object/notfound/authenticated/notfound.jpg',
      headers,
      payload: form,
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
})
/*
* PUT /object/:id
* binary upload
*/
// PUT /object/:id (raw binary update with explicit Content-Length/Type).
// Fix: replace the deprecated Jest alias `toBeCalled()` with
// `toHaveBeenCalled()` for consistency with the file.
describe('testing PUT object via binary upload', () => {
  test('check if RLS policies are respected: authenticated user is able to update authenticated resource', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'PUT',
      url: '/object/bucket2/authenticated/cat.jpg',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
    expect(response.body).toBe(`{"Key":"bucket2/authenticated/cat.jpg"}`)
  })
  test('check if RLS policies are respected: anon user is not able to update authenticated resource', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${anonKey}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'PUT',
      url: '/object/bucket2/authenticated/cat.jpg',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('check if RLS policies are respected: user is not able to upload a resource without Auth header', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'PUT',
      url: '/object/bucket2/authenticated/cat.jpg',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when updating an object in a non existent bucket', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    const response = await app().inject({
      method: 'PUT',
      url: '/object/notfound/authenticated/binary-casestudy1.png',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
  test('return 400 when updating an object in a non existent key', async () => {
    const path = './src/test/assets/sadcat.jpg'
    const { size } = fs.statSync(path)
    const headers = {
      authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      'Content-Length': size,
      'Content-Type': 'image/jpeg',
    }
    // NOTE(review): this URL targets bucket `notfound`, so it duplicates the
    // previous test; a missing *key* in an existing bucket would be
    // `/object/bucket2/authenticated/notfound.jpg` — confirm intent.
    const response = await app().inject({
      method: 'PUT',
      url: '/object/notfound/authenticated/notfound.jpg',
      headers,
      payload: fs.createReadStream(path),
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
  })
})
/**
* POST /copy
*/
describe('testing copy object', () => {
  test('check if RLS policies are respected: authenticated user is able to copy authenticated resource', async () => {
    const response = await app().inject({
      method: 'POST',
      url: '/object/copy',
      headers: {
        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      },
      payload: {
        bucketId: 'bucket2',
        sourceKey: 'authenticated/casestudy.png',
        destinationKey: 'authenticated/casestudy11.png',
      },
    })
    expect(response.statusCode).toBe(200)
    // toHaveBeenCalled() replaces the deprecated toBeCalled() alias, consistent
    // with the not.toHaveBeenCalled() assertions used throughout this suite.
    expect(S3Backend.prototype.copyObject).toHaveBeenCalled()
    expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy11.png"}`)
  })
  test('check if RLS policies are respected: anon user is not able to update authenticated resource', async () => {
    const response = await app().inject({
      method: 'POST',
      url: '/object/copy',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
      payload: {
        bucketId: 'bucket2',
        sourceKey: 'authenticated/casestudy.png',
        destinationKey: 'authenticated/casestudy11.png',
      },
    })
    // RLS denies the anon role; the copy never reaches the backend.
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
  })
  test('user is not able to copy a resource without Auth header', async () => {
    const response = await app().inject({
      method: 'POST',
      url: '/object/copy',
      payload: {
        bucketId: 'bucket2',
        sourceKey: 'authenticated/casestudy.png',
        destinationKey: 'authenticated/casestudy11.png',
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
  })
  test('return 400 when copy from a non existent bucket', async () => {
    const response = await app().inject({
      method: 'POST',
      url: '/object/copy',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
      payload: {
        bucketId: 'notfound',
        sourceKey: 'authenticated/casestudy.png',
        destinationKey: 'authenticated/casestudy11.png',
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
  })
  test('return 400 when copying a non existent key', async () => {
    const response = await app().inject({
      method: 'POST',
      url: '/object/copy',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
      payload: {
        bucketId: 'bucket2',
        sourceKey: 'authenticated/notfound.png',
        destinationKey: 'authenticated/casestudy11.png',
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
  })
})
/**
* DELETE /object
* */
describe('testing delete object', () => {
  test('check if RLS policies are respected: authenticated user is able to delete authenticated resource', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2/authenticated/delete.png',
      headers: {
        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      },
    })
    expect(response.statusCode).toBe(200)
    // toHaveBeenCalled() replaces the deprecated toBeCalled() alias, consistent
    // with the not.toHaveBeenCalled() assertions used throughout this suite.
    expect(S3Backend.prototype.deleteObject).toHaveBeenCalled()
  })
  test('check if RLS policies are respected: anon user is not able to delete authenticated resource', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2/authenticated/delete1.png',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
    })
    // RLS denies the anon role; the delete never reaches the backend.
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })
  test('user is not able to delete a resource without Auth header', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2/authenticated/delete1.png',
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })
  test('return 400 when delete from a non existent bucket', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/notfound/authenticated/delete1.png',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })
  test('return 400 when deleting a non existent key', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/notfound/authenticated/notfound.jpg',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })
})
/**
* DELETE /objects
* */
describe('testing deleting multiple objects', () => {
  test('check if RLS policies are respected: authenticated user is able to delete authenticated resource', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2',
      headers: {
        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      },
      payload: {
        prefixes: ['authenticated/delete-multiple1.png', 'authenticated/delete-multiple2.png'],
      },
    })
    expect(response.statusCode).toBe(200)
    // toHaveBeenCalled() replaces the deprecated toBeCalled() alias, consistent
    // with the not.toHaveBeenCalled() assertions used throughout this suite.
    expect(S3Backend.prototype.deleteObjects).toHaveBeenCalled()
    const result = JSON.parse(response.body)
    expect(result[0].name).toBe('authenticated/delete-multiple1.png')
    expect(result[1].name).toBe('authenticated/delete-multiple2.png')
  })
  test('check if RLS policies are respected: anon user is not able to delete authenticated resource', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
      payload: {
        prefixes: ['authenticated/delete-multiple3.png', 'authenticated/delete-multiple4.png'],
      },
    })
    // Bulk delete succeeds with an empty result set: nothing was deletable.
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
    const results = JSON.parse(response.body)
    expect(results.length).toBe(0)
  })
  test('user is not able to delete a resource without Auth header', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2',
      payload: {
        prefixes: ['authenticated/delete-multiple3.png', 'authenticated/delete-multiple4.png'],
      },
    })
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
  })
  test('deleting from a non existent bucket', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/notfound',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
      payload: {
        prefixes: ['authenticated/delete-multiple3.png', 'authenticated/delete-multiple4.png'],
      },
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
  })
  test('deleting a non existent key', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2',
      headers: {
        authorization: `Bearer ${anonKey}`,
      },
      payload: {
        prefixes: ['authenticated/delete-multiple5.png', 'authenticated/delete-multiple6.png'],
      },
    })
    expect(response.statusCode).toBe(200)
    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
    const results = JSON.parse(response.body)
    expect(results.length).toBe(0)
  })
  test('check if RLS policies are respected: user has permission to delete only one of the objects', async () => {
    const response = await app().inject({
      method: 'DELETE',
      url: '/object/bucket2',
      headers: {
        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
      },
      payload: {
        prefixes: ['authenticated/delete-multiple7.png', 'private/sadcat-upload3.png'],
      },
    })
    expect(response.statusCode).toBe(200)
    // toHaveBeenCalled() replaces the deprecated toBeCalled() alias.
    expect(S3Backend.prototype.deleteObjects).toHaveBeenCalled()
    // Only the object the user may delete is reported back.
    const results = JSON.parse(response.body)
    expect(results.length).toBe(1)
    expect(results[0].name).toBe('authenticated/delete-multiple7.png')
  })
})
/**
* POST /sign/
*/
describe('testing generating signed URL', () => {
  /**
   * POSTs a sign request for the given object path with a fixed expiry.
   * When `token` is omitted, no Authorization header is sent.
   */
  const requestSignedUrl = (objectPath: string, token?: string) =>
    app().inject({
      method: 'POST',
      url: `/object/sign/${objectPath}`,
      headers: token ? { authorization: `Bearer ${token}` } : undefined,
      payload: { expiresIn: 1000 },
    })

  test('check if RLS policies are respected: authenticated user is able to sign URL for an authenticated resource', async () => {
    const response = await requestSignedUrl(
      'bucket2/authenticated/cat.jpg',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(200)
    const result = JSON.parse(response.body)
    expect(result.signedURL).toBeTruthy()
  })

  test('check if RLS policies are respected: anon user is not able to generate signedURL for authenticated resource', async () => {
    const response = await requestSignedUrl('bucket2/authenticated/cat.jpg', anonKey)
    expect(response.statusCode).toBe(400)
  })

  test('user is not able to generate signedURLs without Auth header', async () => {
    const response = await requestSignedUrl('bucket2/authenticated/cat.jpg')
    expect(response.statusCode).toBe(400)
  })

  test('return 400 when generate signed urls from a non existent bucket', async () => {
    const response = await requestSignedUrl(
      'notfound/authenticated/cat.jpg',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(400)
  })

  test('signing url of a non existent key', async () => {
    const response = await requestSignedUrl(
      'bucket2/authenticated/notfound.jpg',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(400)
  })
})
/**
* GET /public/
*/
// these tests are written in bucket.test.ts since its easier
/**
* GET /sign/
*/
describe('testing retrieving signed URL', () => {
  test('get object with a token', async () => {
    // Sign a short-lived JWT for the object path and fetch it through /sign.
    const urlToSign = 'bucket2/public/sadcat-upload.png'
    const jwtToken = await signJWT({ url: urlToSign }, jwtSecret, 100)
    const response = await app().inject({
      method: 'GET',
      url: `/object/sign/${urlToSign}?token=${jwtToken}`,
    })
    expect(response.statusCode).toBe(200)
  })
  test('forward 304 and If-Modified-Since/If-None-Match headers', async () => {
    // Simulate the backend answering "not modified" via a rejected promise
    // carrying an S3-style 304 metadata object.
    // NOTE(review): this spy is never restored within the test — presumably a
    // global beforeEach/afterEach resets mocks; verify against the suite setup.
    const mockGetObject = jest.spyOn(S3Backend.prototype, 'getObject')
    mockGetObject.mockRejectedValue({
      $metadata: {
        httpStatusCode: 304,
      },
    })
    const urlToSign = 'bucket2/public/sadcat-upload.png'
    const jwtToken = await signJWT({ url: urlToSign }, jwtSecret, 100)
    const response = await app().inject({
      method: 'GET',
      url: `/object/sign/${urlToSign}?token=${jwtToken}`,
      headers: {
        'if-modified-since': 'Fri Aug 13 2021 00:00:00 GMT+0800 (Singapore Standard Time)',
        'if-none-match': 'abc',
      },
    })
    expect(response.statusCode).toBe(304)
    // Conditional request headers are forwarded in getObject's third argument.
    expect(mockGetObject.mock.calls[0][2]).toMatchObject({
      ifModifiedSince: 'Fri Aug 13 2021 00:00:00 GMT+0800 (Singapore Standard Time)',
      ifNoneMatch: 'abc',
    })
  })
  test('get object without a token', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/sign/bucket2/public/sadcat-upload.png',
    })
    expect(response.statusCode).toBe(400)
  })
  test('get object with a malformed JWT', async () => {
    const response = await app().inject({
      method: 'GET',
      url: '/object/sign/bucket2/public/sadcat-upload.png?token=xxx',
    })
    expect(response.statusCode).toBe(400)
  })
  test('get object with an expired JWT', async () => {
    // Negative expiry produces a token that is already expired.
    const urlToSign = 'bucket2/public/sadcat-upload.png'
    const expiredJWT = await signJWT({ url: urlToSign }, jwtSecret, -1)
    const response = await app().inject({
      method: 'GET',
      url: `/object/sign/${urlToSign}?token=${expiredJWT}`,
    })
    expect(response.statusCode).toBe(400)
  })
})
describe('testing move object', () => {
  /**
   * Issues a move request. When `token` is omitted, no Authorization header
   * is sent (simulating an unauthenticated caller).
   */
  const requestMove = (
    sourceKey: string,
    destinationKey: string,
    bucketId: string,
    token?: string
  ) =>
    app().inject({
      method: 'POST',
      url: `/object/move`,
      payload: { sourceKey, destinationKey, bucketId },
      headers: token ? { authorization: `Bearer ${token}` } : undefined,
    })

  test('check if RLS policies are respected: authenticated user is able to move an authenticated object', async () => {
    const response = await requestMove(
      'authenticated/move-orig.png',
      'authenticated/move-new.png',
      'bucket2',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(200)
    // A move is implemented as copy + delete on the backend.
    expect(S3Backend.prototype.copyObject).toHaveBeenCalled()
    expect(S3Backend.prototype.deleteObject).toHaveBeenCalled()
  })

  test('check if RLS policies are respected: anon user is not able to move an authenticated object', async () => {
    const response = await requestMove(
      'authenticated/move-orig-2.png',
      'authenticated/move-new-2.png',
      'bucket2',
      anonKey
    )
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })

  test('user is not able to move an object without auth header', async () => {
    const response = await requestMove(
      'authenticated/move-orig-3.png',
      'authenticated/move-orig-new-3.png',
      'bucket2'
    )
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })

  test('user is not able to move an object in a non existent bucket', async () => {
    const response = await requestMove(
      'authenticated/move-orig-3.png',
      'authenticated/move-orig-new-3.png',
      'notfound',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })

  test('user is not able to move an non existent object', async () => {
    const response = await requestMove(
      'authenticated/notfound',
      'authenticated/move-orig-new-3.png',
      'bucket2',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })

  test('user is not able to move to an existing key', async () => {
    const response = await requestMove(
      'authenticated/move-orig-2.png',
      'authenticated/move-orig-3.png',
      'bucket2',
      process.env.AUTHENTICATED_KEY
    )
    expect(response.statusCode).toBe(400)
    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
  })
})
describe('testing list objects', () => {
test('searching the bucket root folder', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
headers: {
authorization: `Bearer ${serviceKey}`,
},
payload: {
prefix: '',
limit: 10,
offset: 0,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(5)
const names = responseJSON.map((ele: any) => ele.name)
expect(names).toContain('curlimage.jpg')
expect(names).toContain('private')
expect(names).toContain('folder')
expect(names).toContain('authenticated')
expect(names).toContain('public')
})
test('searching a subfolder', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
headers: {
authorization: `Bearer ${serviceKey}`,
},
payload: {
prefix: 'folder',
limit: 10,
offset: 0,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(2)
const names = responseJSON.map((ele: any) => ele.name)
expect(names).toContain('only_uid.jpg')
expect(names).toContain('subfolder')
})
test('searching a non existent prefix', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
headers: {
authorization: `Bearer ${serviceKey}`,
},
payload: {
prefix: 'notfound',
limit: 10,
offset: 0,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(0)
})
test('checking if limit works', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
headers: {
authorization: `Bearer ${serviceKey}`,
},
payload: {
prefix: '',
limit: 2,
offset: 0,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(2)
})
test('listobjects: checking if RLS policies are respected', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
headers: {
authorization: `Bearer ${anonKey}`,
},
payload: {
prefix: '',
limit: 10,
offset: 0,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(2)
})
test('return 400 without Auth Header', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
payload: {
prefix: '',
limit: 10,
offset: 0,
},
})
expect(response.statusCode).toBe(400)
})
test('case insensitive search should work', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
payload: {
prefix: 'PUBLIC/',
limit: 10,
offset: 0,
},
headers: {
authorization: `Bearer ${serviceKey}`,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(2)
})
test('test ascending search sorting', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
payload: {
prefix: 'public/',
sortBy: {
column: 'name',
order: 'asc',
},
},
headers: {
authorization: `Bearer ${serviceKey}`,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(2)
expect(responseJSON[0].name).toBe('sadcat-upload23.png')
expect(responseJSON[1].name).toBe('sadcat-upload.png')
})
test('test descending search sorting', async () => {
const response = await app().inject({
method: 'POST',
url: '/object/list/bucket2',
payload: {
prefix: 'public/',
sortBy: {
column: 'name',
order: 'desc',
},
},
headers: {
authorization: `Bearer ${serviceKey}`,
},
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON).toHaveLength(2)
expect(responseJSON[0].name).toBe('sadcat-upload.png')
expect(responseJSON[1].name).toBe('sadcat-upload23.png')
})
}) | the_stack |
import { Infinite } from "../../src/Infinite";
import { cleanup } from "./utils/utils";
import * as sinon from "sinon";
describe("test Infinite", () => {
let infinite: Infinite | null;
afterEach(() => {
if (infinite) {
infinite.destroy();
}
infinite = null;
cleanup();
});
it("should check whether visible items change according to cursor", () => {
infinite = new Infinite({});
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [300],
},
{
key: 2,
startOutline: [300],
endOutline: [600],
},
{
key: 3,
startOutline: [600],
endOutline: [900],
},
]);
// When
infinite.setCursors(0, 0);
const items1 = infinite.getVisibleItems();
infinite.setCursors(0, 1);
const items2 = infinite.getVisibleItems();
infinite.setCursors(1, 2);
const items3 = infinite.getVisibleItems();
// Then
expect(items1.map((item) => item.key)).to.be.deep.equals([1]);
expect(items2.map((item) => item.key)).to.be.deep.equals([1, 2]);
expect(items3.map((item) => item.key)).to.be.deep.equals([2, 3]);
});
it("should check if the cursor changes when you sync items", () => {
infinite = new Infinite({});
infinite.setItems([
{ key: 1, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
]);
infinite.setCursors(0, 0);
// When
// 0 => 1
infinite.syncItems([
{ key: 2, startOutline: [], endOutline: [] },
{ key: 1, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
]);
const cursors1 = [infinite.getStartCursor(), infinite.getEndCursor()];
// When
infinite.setCursors(0, 1);
// [0, 1] => [0, 2]
infinite.syncItems([
{ key: 1, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
]);
const cursors2 = [infinite.getStartCursor(), infinite.getEndCursor()];
// Then
expect(cursors1).to.be.deep.equals([1, 1]);
expect(cursors2).to.be.deep.equals([0, 2]);
});
it("should check if change is true when visible items change", () => {
infinite = new Infinite({});
infinite.setItems([
{ key: 1, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
{ key: 4, startOutline: [], endOutline: [] },
]);
infinite.setCursors(0, 2);
// When
// (0, 2) 1 2 3 => (0, 3) 1 2 4 3
const isChange1 = infinite.syncItems([
{ key: 1, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
{ key: 4, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
]);
// (0, 3) 1 2 4 3 => (0, 3) 1 4 2 3
const isChange2 = infinite.syncItems([
{ key: 1, startOutline: [], endOutline: [] },
{ key: 4, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
]);
// (0, 3)1 4 2 3 => (0, 3)1 4 2 3 5
const isChange3 = infinite.syncItems([
{ key: 1, startOutline: [], endOutline: [] },
{ key: 4, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
{ key: 5, startOutline: [], endOutline: [] },
]);
// (0, 3) 1 4 2 3 5 => (1, 4) 5 1 4 2 3
const isChange4 = infinite.syncItems([
{ key: 5, startOutline: [], endOutline: [] },
{ key: 1, startOutline: [], endOutline: [] },
{ key: 4, startOutline: [], endOutline: [] },
{ key: 2, startOutline: [], endOutline: [] },
{ key: 3, startOutline: [], endOutline: [] },
]);
// Then
expect(isChange1).to.be.true;
expect(isChange2).to.be.true;
expect(isChange3).to.be.false;
expect(isChange4).to.be.false;
});
it("should check if the next cursor is [0, 1] when scroll 250, [1, 2] when scroll 550 ", () => {
// Given
const changeSpy = sinon.spy();
infinite = new Infinite({});
infinite.on("change", changeSpy);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [300],
},
{
key: 2,
startOutline: [300],
endOutline: [600],
},
{
key: 3,
startOutline: [600],
endOutline: [900],
},
]);
infinite.setCursors(0, 0);
infinite.setSize(100);
// When
infinite.scroll(250);
infinite.scroll(550);
// Then
const ev1 = changeSpy.args[0][0];
const ev2 = changeSpy.args[1][0];
expect(ev1.prevStartCursor).to.be.equals(0);
expect(ev1.prevEndCursor).to.be.equals(0);
expect(ev1.nextStartCursor).to.be.equals(0);
expect(ev1.nextEndCursor).to.be.equals(1);
expect(ev2.prevStartCursor).to.be.equals(0);
expect(ev2.prevEndCursor).to.be.equals(0);
expect(ev2.nextStartCursor).to.be.equals(1);
expect(ev2.nextEndCursor).to.be.equals(2);
});
it("should check whether newly added items are included", () => {
// Given
const changeSpy = sinon.spy();
infinite = new Infinite({});
infinite.on("change", changeSpy);
infinite.setItems([
{
key: 1,
startOutline: [],
endOutline: [],
},
{
key: 2,
startOutline: [0],
endOutline: [400],
},
{
key: 3,
startOutline: [400],
endOutline: [800],
},
{
key: 4,
startOutline: [],
endOutline: [],
},
]);
infinite.setCursors(1, 2);
infinite.setSize(300);
// When
infinite.scroll(0);
infinite.scroll(600);
// Then
const ev1 = changeSpy.args[0][0];
const ev2 = changeSpy.args[1][0];
expect(ev1.prevStartCursor).to.be.equals(1);
expect(ev1.prevEndCursor).to.be.equals(2);
expect(ev1.nextStartCursor).to.be.equals(0);
expect(ev1.nextEndCursor).to.be.equals(1);
expect(ev2.prevStartCursor).to.be.equals(1);
expect(ev2.prevEndCursor).to.be.equals(2);
expect(ev2.nextStartCursor).to.be.equals(2);
expect(ev2.nextEndCursor).to.be.equals(3);
});
it("should checks whether requestAppend event is called when the end of the item is reached", () => {
// Given
const requestAppendEvent = sinon.spy();
infinite = new Infinite({});
infinite.on("requestAppend", requestAppendEvent);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [400],
},
{
key: 2,
startOutline: [400],
endOutline: [800],
},
]);
infinite.setCursors(0, 1);
infinite.setSize(400);
// When
// not triggered
infinite.scroll(200);
// triggered
infinite.scroll(400);
// Then
const ev1 = requestAppendEvent.args[0][0];
expect(requestAppendEvent.callCount).to.be.equals(1);
expect(ev1.key).to.be.equals(2);
});
it("should checks whether requestPrepend event is called when the start of the item is reached", () => {
// Given
const requestPrependEvent = sinon.spy();
infinite = new Infinite({});
infinite.on("requestPrepend", requestPrependEvent);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [400],
},
{
key: 2,
startOutline: [400],
endOutline: [800],
},
]);
infinite.setCursors(0, 1);
infinite.setSize(400);
// When
// not triggered
infinite.scroll(300);
// triggered
infinite.scroll(0);
// Then
const ev1 = requestPrependEvent.args[0][0];
expect(requestPrependEvent.callCount).to.be.equals(1);
expect(ev1.key).to.be.equals(1);
});
it(`should check if requestAppend event is called if defaultDirection is end when both start and end are reached`, () => {
// Given
const requestPrependEvent = sinon.spy();
const requestAppendEvent = sinon.spy();
infinite = new Infinite({
defaultDirection: "end",
});
infinite.on("requestPrepend", requestPrependEvent);
infinite.on("requestAppend", requestAppendEvent);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [200],
},
{
key: 2,
startOutline: [200],
endOutline: [400],
},
]);
infinite.setCursors(0, 1);
infinite.setSize(400);
// When
infinite.scroll(0);
// Then
const ev1 = requestAppendEvent.args[0][0];
expect(requestAppendEvent.callCount).to.be.equals(1);
expect(requestPrependEvent.callCount).to.be.equals(0);
expect(ev1.key).to.be.equals(2);
});
it(`should check if requestPrepend event is called if defaultDirection is start when both start and end are reached`, () => {
// Given
const requestPrependEvent = sinon.spy();
const requestAppendEvent = sinon.spy();
infinite = new Infinite({
defaultDirection: "start",
});
infinite.on("requestPrepend", requestPrependEvent);
infinite.on("requestAppend", requestAppendEvent);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [200],
},
{
key: 2,
startOutline: [200],
endOutline: [400],
},
]);
infinite.setCursors(0, 1);
infinite.setSize(400);
// When
infinite.scroll(0);
// Then
const ev1 = requestPrependEvent.args[0][0];
expect(requestAppendEvent.callCount).to.be.equals(0);
expect(requestPrependEvent.callCount).to.be.equals(1);
expect(ev1.key).to.be.equals(1);
});
it(`should check if a virtual group is requested when it exists`, () => {
// Given
const requestAppendEvent = sinon.spy();
infinite = new Infinite({
defaultDirection: "end",
});
infinite.on("requestAppend", requestAppendEvent);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [200],
},
{
key: 2,
startOutline: [200],
endOutline: [400],
isVirtual: true,
},
]);
infinite.setCursors(0, 1);
infinite.setSize(400);
// When
infinite.scroll(200);
// Then
const ev1 = requestAppendEvent.args[0][0];
expect(requestAppendEvent.callCount).to.be.equals(1);
expect(ev1.nextKey).to.be.equals(2);
expect(ev1.isVirtual).to.be.equals(true);
});
it(`should check whether the minimum and maximum cursors are maintained when useRecycle is false.`, () => {
// Given
const changeEventSpy = sinon.spy();
infinite = new Infinite({
defaultDirection: "end",
useRecycle: false,
});
infinite.on("change", changeEventSpy);
infinite.setItems([
{
key: 1,
startOutline: [0],
endOutline: [200],
},
{
key: 2,
startOutline: [200],
endOutline: [400],
},
{
key: 3,
startOutline: [400],
endOutline: [600],
},
{
key: 4,
startOutline: [600],
endOutline: [800],
},
]);
infinite.setCursors(0, 1);
infinite.setSize(400);
// When
infinite.scroll(400);
// Then
const ev = changeEventSpy.args[0][0];
expect(ev.prevStartCursor).to.be.equals(0);
expect(ev.prevEndCursor).to.be.equals(1);
expect(ev.nextStartCursor).to.be.equals(0);
expect(ev.nextEndCursor).to.be.equals(3);
});
}); | the_stack |
import joplin from 'api';
import { MenuItem, MenuItemLocation } from 'api/types';
import { ChangeEvent } from 'api/JoplinSettings';
import { NoteTabType, NoteTabs } from './noteTabs';
import { LastActiveNote } from './lastActiveNote';
import { Settings, UnpinBehavior, AddBehavior } from './settings';
import { Panel } from './panel';
joplin.plugins.register({
  onStart: async function () {
    // Shorthand aliases for the Joplin API namespaces used below.
    const COMMANDS = joplin.commands;
    const DATA = joplin.data;
    const DIALOGS = joplin.views.dialogs;
    const SETTINGS = joplin.settings;
    const WORKSPACE = joplin.workspace;

    // settings: register the plugin's options with Joplin
    const settings: Settings = new Settings();
    await settings.register();

    // note tabs: in-memory model of the tab list
    const tabs = new NoteTabs(settings);

    // last active note: remembers the previously selected note (used when unpinning)
    const lastActiveNote = new LastActiveNote();

    // panel: view that renders the tabs
    const panel = new Panel(tabs, settings);
    await panel.register();

    //#region HELPERS
/**
* Add note as tab, if not already has one.
*/
async function addTab(noteId: string) {
if (tabs.hasTab(noteId)) return;
// depending on settings - either add directly as pinned tab
const addAsPinned: boolean = settings.hasAddBehavior(AddBehavior.Pinned);
if (addAsPinned) {
await pinTab(noteId, true);
} else {
// or as temporay tab
if (tabs.indexOfTemp >= 0) {
// replace existing temporary tab...
tabs.replaceTemp(noteId);
} else {
// or add as new temporary tab at the end
await tabs.add(noteId, NoteTabType.Temporary);
}
}
}
/**
* Add new or pin tab for handled note. Optionally at the specified index of targetId.
*/
async function pinTab(noteId: string, addAsNew: boolean, targetId?: string) {
const note: any = await DATA.get(['notes', noteId], { fields: ['id', 'is_todo', 'todo_completed'] });
if (note) {
// do not pin completed todos if auto unpin is enabled
if (settings.unpinCompletedTodos && note.is_todo && note.todo_completed) return;
if (tabs.hasTab(note.id)) {
// if note has already a tab, change type to pinned
await tabs.changeType(note.id, NoteTabType.Pinned);
} else {
// otherwise add as new one
if (addAsNew) {
await tabs.add(note.id, NoteTabType.Pinned, targetId);
}
}
}
}
/**
* Open last active note (tab) (if still exists).
*/
async function openLastActiveNote(): Promise<boolean> {
if (lastActiveNote.length < 2) return false;
const lastActiveNoteId = lastActiveNote.id;
// return if an already removed tab is about to be restored
if (tabs.indexOf(lastActiveNoteId) < 0) return false;
await COMMANDS.execute('openNote', lastActiveNoteId);
return true;
}
/**
* Switch to left tab.
*/
async function switchTabLeft(noteId: string): Promise<boolean> {
const index: number = tabs.indexOf(noteId);
if (index <= 0) return false;
await COMMANDS.execute('openNote', tabs.get(index - 1).id);
return true;
}
/**
* Switch to right tab.
*/
async function switchTabRight(noteId: string): Promise<boolean> {
const index: number = tabs.indexOf(noteId);
if (index < 0) return false;
if (index == tabs.length - 1) return false;
await COMMANDS.execute('openNote', tabs.get(index + 1).id);
return true;
}
/**
* Remove or unpin note with handled id.
*/
async function removeTab(noteId: string) {
const selectedNote: any = await WORKSPACE.selectedNote();
// if noteId is the selected note - try to select another note depending on the settings
if (selectedNote && selectedNote.id == noteId) {
let selected: boolean = false;
// try to select the appropriate tab
switch (settings.unpinBehavior) {
case UnpinBehavior.LastActive:
selected = await openLastActiveNote();
if (selected) break;
// fallthrough if no last active found
case UnpinBehavior.LeftTab:
selected = await switchTabLeft(noteId);
if (selected) break;
// fallthrough if no right tab found
case UnpinBehavior.RightTab:
selected = await switchTabRight(noteId);
if (selected) break;
// try to select left tab
selected = await switchTabLeft(noteId);
default:
break;
}
// then remove note from tabs
await tabs.delete(noteId);
// if no one was selected before
if (!selected) {
// re-add removed note as tab at the end
await addTab(noteId);
}
} else {
// else simply remove note from tabs
await tabs.delete(noteId);
}
}
//#endregion
//#region COMMANDS
// Command: tabsPinNote
// Desc: Pin the selected note(s) to the tabs
await COMMANDS.register({
name: 'tabsPinNote',
label: 'Pin note to Tabs',
iconName: 'fas fa-thumbtack',
enabledCondition: 'someNotesSelected',
execute: async (noteIds: string[], targetId?: string) => {
// get selected note ids and return if empty
let selectedNoteIds = noteIds;
if (!selectedNoteIds) selectedNoteIds = await WORKSPACE.selectedNoteIds();
if (!selectedNoteIds) return;
// add all handled notes as pinned tabs. Optionally at the specified index of targetId.
for (const noteId of selectedNoteIds) {
await pinTab(noteId, true, targetId);
}
await panel.updateWebview();
}
});
// Command: tabsUnpinNote
// Desc: Unpin the selected note(s) from the tabs
await COMMANDS.register({
name: 'tabsUnpinNote',
label: 'Unpin note from Tabs',
iconName: 'fas fa-times',
enabledCondition: 'someNotesSelected',
execute: async (noteIds: string[]) => {
// get selected note ids and return if empty
let selectedNoteIds = noteIds;
if (!selectedNoteIds) selectedNoteIds = await WORKSPACE.selectedNoteIds();
if (!selectedNoteIds) return;
// unpin selected notes and update panel
for (const noteId of selectedNoteIds) {
await removeTab(noteId);
}
await panel.updateWebview();
}
});
// Command: tabsMoveLeft
// Desc: Move active tab to left
await COMMANDS.register({
name: 'tabsMoveLeft',
label: 'Move active Tab left',
iconName: 'fas fa-chevron-left',
enabledCondition: 'oneNoteSelected',
execute: async () => {
const selectedNote: any = await WORKSPACE.selectedNote();
if (!selectedNote) return;
// change index of tab and update panel
const index: number = tabs.indexOf(selectedNote.id);
await tabs.moveWithIndex(index, index - 1);
await panel.updateWebview();
}
});
// Command: tabsMoveRight
// Desc: Move active tab to right
await COMMANDS.register({
name: 'tabsMoveRight',
label: 'Move active Tab right',
iconName: 'fas fa-chevron-right',
enabledCondition: 'oneNoteSelected',
execute: async () => {
const selectedNote: any = await WORKSPACE.selectedNote();
if (!selectedNote) return;
// change index of tab and update panel
const index: number = tabs.indexOf(selectedNote.id);
await tabs.moveWithIndex(index, index + 1);
await panel.updateWebview();
}
});
// Command: tabsSwitchLastActive
// Desc: Switch to last active tab
await COMMANDS.register({
name: 'tabsSwitchLastActive',
label: 'Switch to last active Tab',
iconName: 'fas fa-step-backward',
enabledCondition: 'oneNoteSelected',
execute: async () => {
await openLastActiveNote();
// updateWebview() is called from onNoteSelectionChange event
}
});
// Command: tabsSwitchLeft
// Desc: Switch to left tab, i.e. select left note
await COMMANDS.register({
name: 'tabsSwitchLeft',
label: 'Switch to left Tab',
iconName: 'fas fa-step-backward',
enabledCondition: 'oneNoteSelected',
execute: async () => {
const selectedNote: any = await WORKSPACE.selectedNote();
if (!selectedNote) return;
await switchTabLeft(selectedNote.id);
// updateWebview() is called from onNoteSelectionChange event
}
});
// Command: tabsSwitchRight
// Desc: Switch to right tab, i.e. select right note
await COMMANDS.register({
name: 'tabsSwitchRight',
label: 'Switch to right Tab',
iconName: 'fas fa-step-forward',
enabledCondition: 'oneNoteSelected',
execute: async () => {
const selectedNote: any = await WORKSPACE.selectedNote();
if (!selectedNote) return;
await switchTabRight(selectedNote.id);
// updateWebview() is called from onNoteSelectionChange event
}
});
// Command: tabsClear
// Desc: Remove all pinned tabs
await COMMANDS.register({
name: 'tabsClear',
label: 'Remove all pinned Tabs',
iconName: 'fas fa-times',
execute: async () => {
// ask user before removing tabs
const result: number = await DIALOGS.showMessageBox('Do you really want to remove all pinned tabs?');
if (result) return;
await settings.clearTabs();
// open selected note to update the panel or just update it
const selectedNoteIds: string[] = await WORKSPACE.selectedNoteIds();
if (selectedNoteIds.length > 0) {
await COMMANDS.execute('openNote', selectedNoteIds[0]);
// updateWebview() is called from onNoteSelectionChange event
} else {
await panel.updateWebview();
}
}
});
// Command: tabsToggleVisibility
// Desc: Toggle panel visibility
await COMMANDS.register({
name: 'tabsToggleVisibility',
label: 'Toggle Tabs visibility',
iconName: 'fas fa-eye-slash',
execute: async () => {
await panel.toggleVisibility();
}
});
// prepare commands menu
const commandsSubMenu: MenuItem[] = [
{
commandName: 'tabsPinNote',
label: 'Pin note to Tabs'
},
{
commandName: 'tabsUnpinNote',
label: 'Unpin note from Tabs'
},
{
commandName: 'tabsSwitchLastActive',
label: 'Switch to last active Tab'
},
{
commandName: 'tabsSwitchLeft',
label: 'Switch to left Tab'
},
{
commandName: 'tabsSwitchRight',
label: 'Switch to right Tab'
},
{
commandName: 'tabsMoveLeft',
label: 'Move active Tab left'
},
{
commandName: 'tabsMoveRight',
label: 'Move active Tab right'
},
{
commandName: 'tabsClear',
label: 'Remove all pinned Tabs'
},
{
commandName: 'tabsToggleVisibility',
label: 'Toggle panel visibility'
}
];
await joplin.views.menus.create('toolsTabs', 'Tabs', commandsSubMenu, MenuItemLocation.Tools);
// add commands to notes context menu
await joplin.views.menuItems.create('notesContextMenuPinToTabs', 'tabsPinNote', MenuItemLocation.NoteListContextMenu);
// add commands to editor context menu
await joplin.views.menuItems.create('editorContextMenuPinNote', 'tabsPinNote', MenuItemLocation.EditorContextMenu);
//#endregion
//#region EVENTS
// let onChangeCnt = 0;
SETTINGS.onChange(async (event: ChangeEvent) => {
// console.debug(`onChange() hits: ${onChangeCnt++}`);
await settings.read(event);
await panel.updateWebview();
});
WORKSPACE.onNoteSelectionChange(async () => {
try {
const selectedNote: any = await WORKSPACE.selectedNote();
if (selectedNote) {
// add tab for selected note
await addTab(selectedNote.id);
// add selected note id to last active queue
lastActiveNote.id = selectedNote.id;
}
await panel.updateWebview();
} catch (error) {
console.error(`onNoteSelectionChange: ${error}`);
}
});
// ItemChangeEventType { Create = 1, Update = 2, Delete = 3 }
WORKSPACE.onNoteChange(async (ev: any) => {
try {
if (ev) {
// note was changed (ItemChangeEventType.Update)
if (ev.event == 2) {
// get changed note and return if null
const note: any = await DATA.get(['notes', ev.id], { fields: ['id', 'is_todo', 'todo_completed'] });
if (note == null) return;
// if auto pin is enabled, pin changed note to tabs
if (settings.pinEditedNotes) {
await pinTab(note.id, false);
}
// if auto unpin is enabled and changed note is a completed todo...
if (settings.unpinCompletedTodos && note.is_todo && note.todo_completed) {
const index: number = tabs.indexOf(note.id);
// and the note is currently pinned...
if (tabs.indexOfTemp != index) {
// then remove its tab
await removeTab(note.id);
}
}
}
// note was deleted (ItemChangeEventType.Delete) - remove tab
if (ev.event == 3) {
await tabs.delete(ev.id);
}
}
await panel.updateWebview();
} catch (error) {
console.error(`onNoteChange: ${error}`);
}
});
WORKSPACE.onSyncComplete(async () => {
await panel.updateWebview();
});
//#endregion
await panel.updateWebview();
}
}); | the_stack |
import {
RuntimeError,
NetworkError
} from "../Errors";
import * as async from "async";
import * as fs from "fs";
import { EventEmitter } from "events";
import { DownloadItem } from "../types/download";
import { RequesterCdn } from "../types/Requester";
import * as util from "util";
import * as stream from "stream";
import { Response } from "got/dist/source";
const pipeline = util.promisify(stream.pipeline);
/**
 * Snapshot of the aggregated download progress, emitted with every
 * "update" event of ListDownloader.
 */
export interface DownloadUpdateOptions {
  // Number of files currently being downloaded.
  filesInProgress: number;
  // Number of files that finished downloading.
  finishedFiles: number;
  // Bytes downloaded so far, summed over all files.
  downloadedSize: number;
  // Estimated total size in bytes (files of unknown size are estimated
  // from the average size of files whose size is already known).
  estimatedSize: number;
  // Current download speed in bytes per second.
  speed: number;
}

/**
 * Declaration merging with the ListDownloader class below: types the
 * listener signature of the "update" event.
 */
export interface ListDownloader {
  on(name: "update", listener: (options: DownloadUpdateOptions) => void): this;
}
export class ListDownloader extends EventEmitter {
_list: (DownloadItem & { downloadedSize?: number; totalSize?: number })[];
_attempts: number;
_connections: number;
_abort: boolean;
_requester: RequesterCdn;
_filesInProgress: number;
_finishedFiles: number;
_downloadedSize: number;
_estimatedSize: number;
_speed: number;
_lastSpeedCheck: number;
_downloadedSinceCheck: number;
_lastUpdateEmit: number;
constructor(list: DownloadItem[], attempts: number, connections: number, requester: RequesterCdn) {
super();
this._list = list;
this._attempts = attempts;
this._connections = connections;
this._requester = requester;
this._abort = false;
this._filesInProgress = 0;
this._finishedFiles = 0;
this._downloadedSize = 0;
this._estimatedSize = 1;
this._speed = 0;
this._lastSpeedCheck = Date.now();
this._lastUpdateEmit = Date.now();
this._downloadedSinceCheck = 0;
}
_recalculateDownloaded(): void {
this._downloadedSize = 0;
for (const item of this._list) {
if (item.downloadedSize) {
this._downloadedSize += item.downloadedSize;
}
}
}
_estimateSize(): void {
this._estimatedSize = 0;
let numberKnownSize = 0;
for (const item of this._list) {
if (item.totalSize) {
this._estimatedSize += item.totalSize;
// console.log(item.totalSize);
numberKnownSize++;
}
}
if (numberKnownSize > 0) {
const averageSize = this._estimatedSize / numberKnownSize;
this._estimatedSize += averageSize * (this._list.length - numberKnownSize);
} else {
this._estimatedSize = 1;
}
}
_emitUpdate(): void {
if (this._abort) return;
this.emit("update", {
filesInProgress: this._filesInProgress,
finishedFiles: this._finishedFiles,
downloadedSize: this._downloadedSize,
estimatedSize: this._estimatedSize,
speed: this._speed,
});
}
_emitUpdateLimit(): void {
if (this._abort) return;
const timeDiff = Date.now() - this._lastUpdateEmit;
if (timeDiff < 100) return; // update every 0.1 seconds
this._lastUpdateEmit -= 100;
this.emit("update", {
filesInProgress: this._filesInProgress,
finishedFiles: this._finishedFiles,
downloadedSize: this._downloadedSize,
estimatedSize: this._estimatedSize,
speed: this._speed,
});
}
_updateSpeed(amount: number): void {
this._downloadedSinceCheck += amount;
const timeDiff = Date.now() - this._lastSpeedCheck;
if (timeDiff >= 1000) {
this._speed = this._downloadedSinceCheck / (timeDiff / 1000);
this._downloadedSinceCheck = 0;
this._lastSpeedCheck = Date.now();
}
}
async _downloadFile(index: number): Promise<void> {
this._filesInProgress++;
const file = this._list[index];
for (let attempt = 0; attempt < this._attempts; attempt++) {
try {
file.downloadedSize = 0;
let status = -1;
let contentEncoding: string | undefined = "unset";
const stream = this._requester.stream(file.url);
stream.on("response", (response: Response) => {
status = response.statusCode;
file.totalSize = parseInt(response.headers["content-length"] ?? "-2");
contentEncoding = response.headers["content-encoding"];
this._estimateSize();
this._emitUpdate();
});
stream.on("data", (chunk) => {
file.downloadedSize += chunk.length;
this._downloadedSize += chunk.length;
this._updateSpeed(chunk.length);
this._emitUpdateLimit();
});
await pipeline(
stream,
fs.createWriteStream(file.destination)
);
if (status !== 200) {
throw new NetworkError("Status code: " + status);
}
// check if file size it equal to transmitted size
const s = (await fs.promises.stat(file.destination)).size;
if (s !== file.downloadedSize) {
console.log(s + " !== " + file.downloadedSize);
throw new NetworkError("Transmission incomplete. (Downloaded Size).");
}
// as long as the data is uncompressed content_length must be equal the file size
if ((contentEncoding == "identity" || contentEncoding == undefined) && s !== file.totalSize) {
console.log(s + " !== " + file.totalSize);
throw new NetworkError("Transmission incomplete. (content_length).");
}
return;
} catch (e) {
console.log("Error on " + file.url + ".");
file.downloadedSize = 0;
this._recalculateDownloaded();
this._emitUpdate();
if (attempt >= this._attempts - 1) {
throw e;
}
console.log("Error: " + e.message + ". Retrying...");
}
/*try {
await new Promise((resolve, reject) => {
file.downloadedSize = 0;
request(file.url, {
forever: true,
timeout: 20000,
proxy: this.options.httpProxyCdn
}).on("error", (e) => {
reject(new NetworkError(e.message));
}).on("response", (response) => {
if (response.statusCode != 200) {
reject(new NetworkError("HTTP status code: " + (response.statusCode)));
return;
}
file.totalSize = Number.parseInt(response.headers['content-length'] ?? "1");
this.estimateSize();
this.emitUpdate();
response.on("data", (chunk) => {
file.downloadedSize += chunk.length;
this.downloadedSize += chunk.length;
this.updateSpeed(chunk.length)
this.emitUpdate();
});
response.pipe(fs.createWriteStream(file.destination)).on("finish", (resolve));
})
});
this.filesInProgress--;
return;
} catch (e) {
file.downloadedSize = 0;
this.recalculateDownloaded();
this.emitUpdate();
if (!(e instanceof NetworkError) || attempt >= this.options.maxAttempts - 1 || this.abort) {
this.filesInProgress--;
throw e;
}
console.log("Network error: " + e.message + ". Retrying...");
}*/
}
throw new RuntimeError("Too many attempts. (This code should't be reachable)");
}
startDownload(): Promise<void> {
this._lastSpeedCheck = Date.now();
this._lastUpdateEmit = Date.now();
this._downloadedSinceCheck = 0;
return new Promise((resolve, reject) => {
async.forEachOfLimit(this._list, 5, (value, key: number | string, callback) => {
this._downloadFile(key as number).then(() => { callback(); }, callback);
}, err => {
if (err) {
this._abort = true;
this.emit("error", err);
reject(err);
return;
}
this.emit("finish");
resolve();
});
});
}
static async safeDownload(url: string, destination: string, maxAttempts: number, requester: RequesterCdn): Promise<void> {
for (let attempt = 0; attempt < maxAttempts; attempt++) {
try {
let status = -1;
let contentLength = -1;
let contentEncoding: string | undefined = "unset";
let downloadedSize = 0;
const stream = requester.stream(url);
stream.on("response", (response: Response) => {
status = response.statusCode;
contentLength = parseInt(response.headers["content-length"] ?? "-2");
contentEncoding = response.headers["content-encoding"];
});
stream.on("data", (chunk) => {
downloadedSize += chunk.length;
});
await pipeline(
stream,
fs.createWriteStream(destination)
);
if (status !== 200) {
throw new NetworkError("Status code: " + status);
}
// check if file size it equal to transmitted size
const s = (await fs.promises.stat(destination)).size;
if (s !== downloadedSize) {
console.log(s + " !== " + downloadedSize);
throw new NetworkError("Transmission incomplete. (Downloaded Size).");
}
// as long as the data is uncompressed content_length must be equal the file size
if ((contentEncoding == "identity" || contentEncoding == undefined) && s !== contentLength) {
console.log(s + " !== " + contentLength);
throw new NetworkError("Transmission incomplete. (content_length).");
}
return;
} catch (e) {
console.log("Error on " + url + ".");
if (attempt >= maxAttempts - 1) {
throw e;
}
console.log("Network error: " + e.message + ". Retrying...");
}
}
throw new RuntimeError("Too many attempts. (This code should't be reachable)");
}
} | the_stack |
// A '.tsx' file enables JSX support in the TypeScript compiler,
// for more information see the following page on the TypeScript wiki:
// https://github.com/Microsoft/TypeScript/wiki/JSX
import * as React from 'react';
import { SearchTopicSingle } from './SearchTopicSingle';
import { SearchState } from '../../States/SearchState';
import * as Utility from '../../Utility';
import { FocusTopic } from '../../Props/FocusTopic';
import DocumentTitle from '../DocumentTitle';
import {
BrowserRouter as Router,
Route,
Link,
withRouter
} from 'react-router-dom';
/**
 * Topic list showing the results of a forum search.
 */
export class Search extends React.Component<{}, SearchState> {
  constructor(props) {
    super(props);
    this.state = {
      boardId: 0,
      boardName: '全站',
      words: [],
      data: [],
      from: 0,
      loading: true,
      buttonClassName: ''
    }
    this.getMore = this.getMore.bind(this);
    this.handleScroll = this.handleScroll.bind(this);
  }

  /**
   * Fetches the next page of search results and APPENDS it to the
   * currently displayed list.
   * @param searchInfo board id/name and the keywords to search for
   * @param from       offset of the first result to fetch
   */
  async getData(searchInfo: any, from: number) {
    let newTopic = await Utility.getSearchTopic(searchInfo.boardId, searchInfo.words, from, this.context.router);
    // no search results at all
    if (!newTopic || newTopic.length === 0) {
      this.showNoResult();
      this.setState({ loading: false });
    }
    else if (newTopic == -1) {
      // -1 signals a fetch error from the utility layer
      if (from === 0) {
        this.showError();
        this.setState({ loading: false });
      }
      else {
        this.setState({ loading: true });
        $('#focus-topic-getMore').css('display', 'flex');
        $('#focus-topic-loading').addClass('displaynone');
        return;
      }
    }
    else {
      // fewer than 20 results: nothing more to fetch. Append the data and set
      // this.state.loading to false so no further fetch requests are issued.
      if (newTopic.length < 20) {
        $('#focus-topic-getMore').css('display', 'none');
        $('#focus-topic-loading').addClass('displaynone');
        $('#focus-topic-loaddone').removeClass('displaynone');
        let data = this.state.data.concat(newTopic);
        this.setState({ boardName: searchInfo.boardName, data: data, from: data.length, loading: false });
      }
      // 20 or more results: more pages can be fetched by scrolling, so keep
      // this.state.loading at true to allow further fetch requests.
      else {
        let data = this.state.data.concat(newTopic);
        this.setState({ boardName: searchInfo.boardName, data: data, from: data.length, loading: true });
        $('#focus-topic-getMore').css('display', 'flex');
        $('#focus-topic-loading').addClass('displaynone');
      }
    }
  }

  /**
   * Fetches search results and REPLACES the currently displayed list
   * (used when a new search is started).
   * @param searchInfo board id/name and the keywords to search for
   * @param from       offset of the first result to fetch
   */
  async getNewData(searchInfo: any, from: number) {
    let newTopic = await Utility.getSearchTopic(searchInfo.boardId, searchInfo.words, from, this.context.router);
    // no search results at all
    if (!newTopic || newTopic.length === 0) {
      this.showNoResult();
      this.setState({ loading: false });
    }
    else if (newTopic == -1) {
      // -1 signals a fetch error from the utility layer
      if (from === 0) {
        this.showError();
        this.setState({ loading: false });
      }
      else {
        this.setState({ loading: true });
        $('#focus-topic-getMore').css('display', 'flex');
        $('#focus-topic-loading').addClass('displaynone');
        return;
      }
    }
    else {
      // fewer than 20 results: nothing more to fetch. Show the data and set
      // this.state.loading to false so no further fetch requests are issued.
      if (newTopic.length < 20) {
        $('#focus-topic-getMore').css('display', 'none');
        $('#focus-topic-loading').addClass('displaynone');
        $('#focus-topic-loaddone').removeClass('displaynone');
        this.setState({ boardName: searchInfo.boardName, data: newTopic, from: newTopic.length, loading: false });
      }
      // 20 or more results: more pages can be fetched by scrolling, so keep
      // this.state.loading at true to allow further fetch requests.
      else {
        this.setState({ boardName: searchInfo.boardName, data: newTopic, from: newTopic.length, loading: true });
        $('#focus-topic-getMore').css('display', 'flex');
        $('#focus-topic-loading').addClass('displaynone');
      }
    }
  }

  /**
   * Parses the board id and keywords from the URL, stores the search info
   * and starts a fresh search.
   */
  async keyWordSearch() {
    let keyword = location.href.match(/\/search\?boardId=(\d+)&keyword=(.*)/);
    //console.log("匹配结果", keyword);
    let searchInfo = { boardId: 0, boardName: '全站', words: null };
    if (!keyword) {
      // URL contains no search condition
      console.log("没有搜索关键词?")
      this.showNoResult();
      this.setState({ loading: false });
    }
    else {
      searchInfo.boardId = parseInt(keyword[1]);
      searchInfo.boardName = await Utility.getBoardName(parseInt(keyword[1]));
      // double decode: the keyword arrives percent-encoded twice
      let keyword2 = decodeURI(decodeURI(keyword[2]));
      console.log("有搜索关键词", keyword2);
      let words = keyword2.split(' ');
      // only use the first 5 keywords.
      // BUGFIX: "words.splice(5)" returned everything AFTER the first five
      // keywords (the removed tail), which is the opposite of the intent;
      // slice(0, 5) keeps the first five.
      if (words.length > 5) {
        words = words.slice(0, 5);
      }
      searchInfo.words = words;
    }
    Utility.setStorage("searchInfo", searchInfo);
    this.setState({ boardId: searchInfo.boardId, boardName: searchInfo.boardName, words: searchInfo.words});
    // show the "loading" indicator
    $('#focus-topic-getMore').css('display', 'none');
    $('#focus-topic-loading').removeClass('displaynone');
    this.getNewData(searchInfo, 0);
    // listen for scrolling (back-to-top button visibility)
    document.addEventListener('scroll', this.handleScroll);
  }

  async componentDidMount() {
    this.keyWordSearch();
  }

  // NOTE(review): componentWillReceiveProps is a deprecated React lifecycle
  // method - consider migrating to componentDidUpdate/getDerivedStateFromProps.
  async componentWillReceiveProps(nextProps) {
    console.log("进入componentWillReceiveProps");
    // reset the visibility of the result/error areas before re-searching
    $('#focus-topic-area').removeClass('displaynone');
    $('#noResult').addClass('displaynone');
    $('#showError').addClass('displaynone');
    this.keyWordSearch();
  }

  /**
   * Loads the next page of results when more results are available.
   */
  async getMore() {
    if (this.state.loading) {
      // set this.state.loading to false before issuing the fetch request,
      // preventing duplicate fetches while one is still in flight
      $('#focus-topic-getMore').css('display','none');
      $('#focus-topic-loading').removeClass('displaynone');
      this.setState({ loading: false });
      let searchInfo = Utility.getStorage("searchInfo");
      this.getData(searchInfo, this.state.from);
    }
  }

  /*async componentDidUpdate() {
  let searchInfo = Utility.getStorage("searchInfo");
  if (searchInfo && JSON.stringify(searchInfo.words) != JSON.stringify(this.state.words)) {
  let keyword = searchInfo.words.join(' ');
  window.location.href = `/search?boardId=${searchInfo.boardId}&keword=${keyword}`;
  }
  }*/

  /** Hides the result area and shows the "no results" placeholder. */
  showNoResult() {
    $('#focus-topic-area').addClass('displaynone');
    $('#noResult').removeClass('displaynone');
  }

  /** Hides the result area and shows the error placeholder. */
  showError() {
    $('#focus-topic-area').addClass('displaynone');
    $('#showError').removeClass('displaynone');
  }

  /**
   * Scroll listener that toggles the style of the back-to-top button.
   */
  handleScroll(e) {
    if (window.pageYOffset > 234) {
      this.setState({
        buttonClassName: 'btn-show'
      });
    }
    if (window.pageYOffset < 234) {
      this.setState(prevState => ({
        buttonClassName: prevState.buttonClassName === '' ? '' : 'btn-disappare'
      })
      );
    }
  }

  /** Animates the page back to the top. */
  scrollToTop() {
    $('body,html').animate({ scrollTop: 0 }, 500);
  }

  render() {
    return (<div className="focus-root">
      <DocumentTitle title={`搜索结果 - CC98论坛`} />
      <div className="focus" >
        <Category />
        <div className="focus-topic-area" id="focus-topic-area">
          <div className="focus-topic-topicArea">{this.state.data.map(coverFocusPost)}</div>
          <div className="focus-topic-getMore" onClick={this.getMore} id="focus-topic-getMore">
            <div>点击获取更多搜索结果~</div>
            <div>······</div>
          </div>
          <div className="focus-topic-loading displaynone" id="focus-topic-loading"><img src="http://file.cc98.org/uploadfile/2017/12/20/6514723843.gif"></img></div>
          <div className="focus-topic-loaddone displaynone" id="focus-topic-loaddone"> 没有更多帖子啦~</div>
          <button type="button" id="scrollToTop" className={this.state.buttonClassName} onClick={this.scrollToTop}>回到顶部</button>
        </div>
        <div id="noResult" className="noResult displaynone">
          <img src="/static/images/searchNone.png" className="noResultPic"></img>
          <div className="noResultText">-----------------------抱歉呢前辈,没有找到你想要的帖子哦~----------------------</div>
        </div>
        <div id="showError" className="resultErr displaynone">查询出错了,请刷新重试</div>
      </div>
    </div>)
  }
}
/**
 * Maps one FocusTopic search result to a <SearchTopicSingle> row.
 */
function coverFocusPost(item: FocusTopic) {
  // Build the prop set explicitly so only the intended fields are forwarded.
  const singleProps = {
    title: item.title,
    hitCount: item.hitCount,
    id: item.id,
    boardId: item.boardId,
    boardName: item.boardName,
    replyCount: item.replyCount,
    userId: item.userId,
    userName: item.userName,
    portraitUrl: item.portraitUrl,
    time: item.time,
    likeCount: item.likeCount,
    dislikeCount: item.dislikeCount,
    lastPostUser: item.lastPostUser,
    lastPostTime: item.lastPostTime,
    tag1: item.tag1,
    tag2: item.tag2,
    floorCount: item.floorCount,
  };
  return <SearchTopicSingle {...singleProps} />;
}
export class Category extends React.Component {
render() {
return <div className="row" style={{ alignItems: "baseline", justifyContent: "flex-start", color: "grey", fontSize: "0.75rem", marginBottom: "1rem" }}>
<Link style={{ color: "grey", fontSize: "1rem", marginRight: "0.5rem" }} to={"/"}>首页</Link>
<i className="fa fa-chevron-right"></i>
<div style={{ color: "grey", fontSize: "1rem", marginLeft: "0.5rem", marginRight: "0.5rem" }}>搜索主题</div>
</div>;
}
} | the_stack |
import { createSetupForContentReview } from "../utils/helpers";
import { useContentGqlHandler } from "../utils/useContentGqlHandler";
import { mocks as changeRequestMock, richTextMock } from "./mocks/changeRequest";
describe(`Delete "content review" and associated "change requests" and "comments"`, () => {
  // All GraphQL handlers operate on the "manage" endpoint for the en-US locale.
  const options = {
    path: "manage/en-US"
  };
  const gqlHandler = useContentGqlHandler({
    ...options
  });
  const {
    createContentReviewMutation,
    getContentReviewQuery,
    createChangeRequestMutation,
    createCommentMutation,
    listChangeRequestsQuery,
    listCommentsQuery,
    deleteContentReviewMutation,
    getPageQuery
  } = gqlHandler;

  /**
   * Let's do the setup: creates a page ready for the content-review flow.
   */
  const setup = async () => {
    const { page } = await createSetupForContentReview(gqlHandler);
    return {
      page
    };
  };

  // Expected shape of the "content" field on a content review entry.
  const expectedContent = {
    id: expect.any(String),
    type: expect.any(String),
    version: expect.any(Number),
    settings: null,
    publishedBy: null,
    publishedOn: null,
    scheduledBy: null,
    scheduledOn: null
  };

  test(`Should able to "delete" entire content review process`, async () => {
    const { page } = await setup();

    /**
     * Initial a review.
     */
    const [createContentReviewResponse] = await createContentReviewMutation({
      data: {
        content: {
          id: page.id,
          type: "page"
        }
      }
    });
    const createdContentReview = createContentReviewResponse.data.apw.createContentReview.data;

    /**
     * Fetch the content review and check contentReview status.
     */
    let [getContentReviewResponse] = await getContentReviewQuery({
      id: createdContentReview.id
    });
    const contentReview = getContentReviewResponse.data.apw.getContentReview.data;
    expect(contentReview.status).toEqual("underReview");

    /**
     * Let's create a "change request" for every step of the publishing workflow.
     */
    const changeRequests = [];
    for (let i = 0; i < contentReview.steps.length; i++) {
      const currentStep = contentReview.steps[i];
      // a change request's step is identified as "<contentReviewId>#<stepId>"
      const changeRequestStep = `${contentReview.id}#${currentStep.id}`;
      const [createChangeRequestResponse] = await createChangeRequestMutation({
        data: changeRequestMock.createChangeRequestInput({ step: changeRequestStep })
      });
      const changeRequest = createChangeRequestResponse.data.apw.createChangeRequest.data;
      /**
       * Save it for later.
       */
      changeRequests.push(changeRequest);

      /**
       * Let's add two comments for each of these "change requests".
       */
      for (let j = 0; j < 2; j++) {
        const [createCommentResponse] = await createCommentMutation({
          data: {
            body: richTextMock,
            changeRequest: changeRequest.id,
            media: {
              src: "cloudfront.net/my-file"
            }
          }
        });
        expect(createCommentResponse).toEqual({
          data: {
            apw: {
              createComment: {
                error: null,
                data: expect.any(Object)
              }
            }
          }
        });
      }
    }

    /**
     * Fetch the content review and check if the updates were successful.
     * Each step should report exactly one pending change request.
     */
    [getContentReviewResponse] = await getContentReviewQuery({
      id: createdContentReview.id
    });
    expect(getContentReviewResponse.data.apw.getContentReview.data).toEqual({
      content: expect.objectContaining(expectedContent),
      createdBy: {
        displayName: expect.any(String),
        id: expect.any(String),
        type: expect.any(String)
      },
      createdOn: expect.stringMatching(/^20/),
      id: expect.any(String),
      savedOn: expect.stringMatching(/^20/),
      status: "underReview",
      steps: [
        {
          id: expect.any(String),
          pendingChangeRequests: 1,
          signOffProvidedBy: null,
          signOffProvidedOn: null,
          status: "active"
        },
        {
          id: expect.any(String),
          pendingChangeRequests: 1,
          signOffProvidedBy: null,
          signOffProvidedOn: null,
          status: "inactive"
        },
        {
          id: expect.any(String),
          pendingChangeRequests: 1,
          signOffProvidedBy: null,
          signOffProvidedOn: null,
          status: "inactive"
        }
      ],
      title: expect.any(String)
    });

    const [changeRequest1, changeRequest2, changeRequest3] = changeRequests;

    /**
     * Let's list comments for each change request.
     * Each one should have exactly the two comments created above.
     */
    let [listCommentsResponse] = await listCommentsQuery({
      where: { changeRequest: { id: changeRequest1.id } }
    });
    expect(listCommentsResponse).toEqual({
      data: {
        apw: {
          listComments: {
            data: expect.any(Object),
            error: null,
            meta: {
              hasMoreItems: false,
              totalCount: 2,
              cursor: null
            }
          }
        }
      }
    });
    [listCommentsResponse] = await listCommentsQuery({
      where: { changeRequest: { id: changeRequest2.id } }
    });
    expect(listCommentsResponse).toEqual({
      data: {
        apw: {
          listComments: {
            data: expect.any(Object),
            error: null,
            meta: {
              hasMoreItems: false,
              totalCount: 2,
              cursor: null
            }
          }
        }
      }
    });
    [listCommentsResponse] = await listCommentsQuery({
      where: { changeRequest: { id: changeRequest3.id } }
    });
    expect(listCommentsResponse).toEqual({
      data: {
        apw: {
          listComments: {
            data: expect.any(Object),
            error: null,
            meta: {
              hasMoreItems: false,
              totalCount: 2,
              cursor: null
            }
          }
        }
      }
    });

    /**
     * Let's delete the content review itself.
     */
    const [deleteContentReviewResponse] = await deleteContentReviewMutation({
      id: contentReview.id
    });
    expect(deleteContentReviewResponse).toEqual({
      data: {
        apw: {
          deleteContentReview: {
            data: true,
            error: null
          }
        }
      }
    });

    /**
     * Should return "NOT_FOUND" error when trying to fetch "content review" after deletion.
     */
    [getContentReviewResponse] = await getContentReviewQuery({
      id: createdContentReview.id
    });
    expect(getContentReviewResponse).toEqual({
      data: {
        apw: {
          getContentReview: {
            data: null,
            error: {
              message: expect.any(String),
              code: "NOT_FOUND",
              data: expect.any(Object)
            }
          }
        }
      }
    });

    /**
     * Should also delete all linked "changeRequests" after "content review" deletion.
     */
    const [listChangeRequestsResponse] = await listChangeRequestsQuery({});
    expect(listChangeRequestsResponse).toEqual({
      data: {
        apw: {
          listChangeRequests: {
            data: [],
            error: null,
            meta: {
              hasMoreItems: false,
              totalCount: 0,
              cursor: null
            }
          }
        }
      }
    });

    /**
     * Should also delete all linked "comments" after "content review" deletion.
     */
    [listCommentsResponse] = await listCommentsQuery({});
    expect(listCommentsResponse).toEqual({
      data: {
        apw: {
          listComments: {
            data: [],
            error: null,
            meta: {
              hasMoreItems: false,
              totalCount: 0,
              cursor: null
            }
          }
        }
      }
    });

    /**
     * Should unlink attached "contentReview" from page settings after contentReview deletion.
     */
    const [getPageResponse] = await getPageQuery({ id: page.id });
    const pageData = getPageResponse.data.pageBuilder.getPage.data;
    expect(pageData.settings.apw.contentReviewId).toEqual(null);
  });
});
import { Component, Watch } from 'vue-property-decorator';
import _ from 'lodash';
import $ from 'jquery';
import { DEFAULT_ANIMATION_DURATION_S, ENLARGE_ZINDEX, NODE_CONTENT_PADDING_PX } from '@/common/constants';
import { showSystemMessage, elementContains, mouseOffset } from '@/common/util';
import { SubsetNode } from '@/components/subset-node';
import { SubsetOutputPort, SubsetInputPort } from '@/components/port';
import { SubsetSelection } from '@/data/package';
import { TRANSITION_ELEMENT_LIMIT } from './types';
import { TweenLite } from 'gsap';
import ns from '@/store/namespaces';
import WindowResize from '@/directives/window-resize';
import * as history from './history';
import { HistoryNodeEvent } from '@/store/history/types';
// Thresholds used to detect a "failed" drag - a short drag on the node body
// that suggests the user intended to move the node without holding alt.
// NOTE(review): units are presumably milliseconds / pixels; confirm at the
// usage sites (they are outside this section).
const FAILED_DRAG_TIME_THRESHOLD = 500;
const FAILED_DRAG_DISTANCE_THRESHOLD = 100;
// Number of failed drags tolerated before showing a hint, and the increment
// applied to that tolerance after each hint.
const INITIAL_FAILED_DRAGS_BEFORE_HINT = 3;
const FAILED_DRAGS_BEFORE_HINT_INCREMENT = 2;

/**
 * Serialized state persisted for a visualization node.
 */
interface VisualizationSave {
  // Serialized subset selection - presumably item indices; see SubsetSelection.
  selection: number[];
  // User toggle that disables rendering transitions (see the class comment
  // on isTransitionDisabled).
  isTransitionDisabled: boolean;
}
@Component({
directives: {
WindowResize,
},
})
export default class Visualization extends SubsetNode {
public isVisualization = true;
protected NODE_TYPE = 'visualization';
protected DEFAULT_WIDTH = 300;
protected DEFAULT_HEIGHT = 300;
protected RESIZABLE = true;
protected ENLARGEABLE = true;
protected isInVisMode = true;
protected selection: SubsetSelection = new SubsetSelection();
// Tracks the selected items before a selection.
protected prevSelection: SubsetSelection = new SubsetSelection();
// Allows the user to disable transition.
// This is useful to display advancing time series where the primary key is not the table row index,
// and using row index as rendering elements' keys may result in incorrect transitions.
protected isTransitionDisabled = false;
// Specifies an element that responds to dragging when alt-ed.
protected ALT_DRAG_ELEMENT = '.content';
// Specifies an element that responds to mouse brush selection.
protected BRUSH_ELEMENT = '.content > svg';
protected get svgWidth(): number {
return this.width - NODE_CONTENT_PADDING_PX * 2;
}
protected get svgHeight(): number {
return this.height - NODE_CONTENT_PADDING_PX * 2;
}
@ns.interaction.Getter('isAltPressed') protected isAltPressed!: boolean;
// Tracks failed mouse drag so as to hint user about dragging a visualization with alt.
private failedDragCount = 0;
private failedDragsBeforeHint = INITIAL_FAILED_DRAGS_BEFORE_HINT;
// Tracks node size during enlargement.
private beforeEnlargeWidth = 0;
private beforeEnlargeHeight = 0;
private brushTime = 0;
private brushDistance = 0;
/**
* Adds onBrushStart, onBrushMove, onBrushStop to the plot area in inheritting class to keep track of brush points.
*/
private isBrushing = false;
private brushPoints: Point[] = [];
/**
* Allows the system to temporarily disable transition to correctly calculate sizes of screen elements.
*/
private isTransitionAllowed = true;
@ns.modals.State('nodeModalVisible') private nodeModalVisible!: boolean;
@ns.modals.Mutation('openNodeModal') private openNodeModal!: () => void;
@ns.modals.Mutation('closeNodeModal') private closeNodeModal!: () => void;
@ns.modals.Mutation('mountNodeModal') private mountNodeModal!: (modal: Element) => void;
@ns.interaction.State('osCtrlKey') private osCtrlKey!: string;
@ns.interaction.State('osCtrlKeyChar') private osCtrlKeyChar!: string;
public undo(evt: HistoryNodeEvent) {
if (!history.undo(evt)) {
this.undoBase(evt);
}
}
public redo(evt: HistoryNodeEvent) {
if (!history.redo(evt)) {
this.redoBase(evt);
}
}
public onKeys(keys: string): boolean {
return this.onKeysVisualization(keys);
}
public setSelection(selectedItems: number[]) {
this.selection.setItems(selectedItems);
this.onSelectionUpdate();
}
public getSelectionPort(): SubsetOutputPort {
return this.outputPortMap.selection;
}
/**
* Sets the columns to be visualized. This is the default call when the column setting is attempted from a
* service like FlowSense that is outside the option panel.
* Each visualization class should implement this in order to support FlowSense column setting.
* @abstract
*/
public applyColumns(columns: number[]) {
console.error(`applyColumns() is not implemented for node type ${this.NODE_TYPE}`);
}
/**
* The default behavior of dataset change handler is to reset the columns to be visualized.
*/
protected onDatasetChange() {
this.findDefaultColumns();
}
/**
* Searches for default columns to use.
*/
protected findDefaultColumns() {
console.error(`findDefaultColumns() is not implemented for node type ${this.NODE_TYPE}`);
}
protected update() {
if (!this.checkDataset()) {
return;
}
this.draw();
this.output();
}
/**
* Base draw method for visualization. Note that "render()" is a reserved Vue life cycle method.
* @abstract
*/
protected draw() {
console.error(`draw() is not implemented for node type ${this.NODE_TYPE}`);
}
/**
* Computes the outputs.
*/
protected output() {
this.computeForwarding();
this.computeSelection();
}
/**
* Responds to brush movement.
*/
protected brushed(brushPoints: Point[], isBrushStop?: boolean) {}
/**
* Responds to selection update.
*/
protected onSelectionUpdate() {
this.draw();
this.computeSelection();
this.propagateSelection();
}
/**
* Updates the output ports when there is no input dataset.
*/
protected updateNoDatasetOutput() {
this.outputPortMap.out.clear();
this.outputPortMap.selection.clear();
}
/**
* Computes the package for the selection port. This default implementation assumes the visualization
* node has a single input port 'in' and a single selection port 'selection'.
*/
protected computeSelection() {
const pkg = this.inputPortMap.in.getSubsetPackage();
const selectionPkg = pkg.subset(this.selection.getItems());
this.outputPortMap.selection.updatePackage(selectionPkg);
}
/**
* Computes the package for the data forwarding port. This default implementation assumes the visualization
* node ha s asingle input port 'in'.
*/
protected computeForwarding() {
this.forwardSubset(this.inputPortMap.in, this.outputPortMap.out);
}
/**
* Propagates the selection changes by calling dataflow mutation.
*/
protected propagateSelection() {
this.portUpdated(this.outputPortMap.selection);
}
protected createOutputPorts() {
this.outputPorts = [
new SubsetOutputPort({
data: {
id: 'selection',
node: this,
isSelection: true,
},
store: this.$store,
}),
new SubsetOutputPort({
data: {
id: 'out',
node: this,
},
store: this.$store,
}),
];
}
protected created() {
this.containerClasses.push('visualization');
this.serializationChain.push((): VisualizationSave => {
return {
selection: this.selection.serialize(),
isTransitionDisabled: this.isTransitionDisabled,
};
});
this.deserializationChain.push(nodeSave => {
const save = nodeSave as VisualizationSave;
this.selection = new SubsetSelection(save.selection);
});
}
protected isTransitionFeasible(numItems: number) {
if (numItems >= TRANSITION_ELEMENT_LIMIT) {
// Disable transition automatically when the number of elements is too large.
this.isTransitionDisabled = true;
}
return this.isTransitionAllowed && !this.isTransitionDisabled;
}
protected onResizeStart() {
this.isTransitionAllowed = false;
}
protected onResize() {
if (!this.hasNoDataset() && !this.isAnimating && this.isExpanded) {
console.log('render', this.NODE_TYPE);
this.draw();
}
}
protected onResizeStop() {
this.isTransitionAllowed = true;
}
protected onEnlarge() {
this.draw();
}
protected isSelectionEmpty(): boolean {
return !this.selection.numItems();
}
/**
* Allows dragging a visualization only when alt is pressed, a.k.a. drag mode is on.
*/
protected isDraggable(evt: MouseEvent, ui?: JQueryUI.DraggableEventUIParams) {
return this.isDraggableBase(evt);
}
protected isDraggableBase(evt: MouseEvent) {
const $element = $(this.$el).find(this.ALT_DRAG_ELEMENT);
if (!$element.length) {
// Element is draggable when the drag element is not visible. It may be when the node has no data.
return true;
}
if (!elementContains($element[0], evt.pageX, evt.pageY)) {
// The click falls out of the alt drag element. Perform normal drag.
// This allows dragging outside the plot area.
return true;
}
return this.isAltPressed;
}
protected enlarge() {
this.openNodeModal(); // Notify store that modal has been opened.
$(this.$refs.node).css('z-index', ENLARGE_ZINDEX);
this.isEnlarged = true;
this.beforeEnlargeWidth = this.width;
this.beforeEnlargeHeight = this.height;
const view = this.getEnlargedView();
this.onResizeStart();
TweenLite.to(this.$refs.node, DEFAULT_ANIMATION_DURATION_S, {
left: view.x,
top: view.y,
width: view.width,
height: view.height,
onUpdate: () => {
this.width = $(this.$refs.node).width() as number;
this.height = $(this.$refs.node).height() as number;
},
onComplete: () => {
this.width = view.width;
this.height = view.height;
this.onEnlarge();
this.disableDraggable();
this.disableResizable();
this.onResizeStop();
},
});
// Must come after setting isEnlarged
this.$nextTick(() => this.mountNodeModal(this.$refs.enlargeModal as Element));
}
protected closeEnlargeModal() {
this.closeNodeModal(); // Notify store that modal has been closed.
this.onResizeStart();
TweenLite.to(this.$refs.node, DEFAULT_ANIMATION_DURATION_S, {
left: this.x,
top: this.y,
width: this.beforeEnlargeWidth,
height: this.beforeEnlargeHeight,
onUpdate: () => {
this.width = $(this.$refs.node).width() as number;
this.height = $(this.$refs.node).height() as number;
},
onComplete: () => {
$(this.$refs.node).css('z-index', this.layer);
this.isEnlarged = false;
this.width = this.beforeEnlargeWidth;
this.height = this.beforeEnlargeHeight;
this.enableDraggable();
this.enableResizable();
this.onResizeStop();
this.onEnlargeClose();
},
});
}
/**
* Responds to enlarge close. Typically nothing needs to be done. But nodes like network (which applies force based
* on view size) may want to update specially.
*/
protected onEnlargeClose() {}
/**
* Most visualizations require margin computation by first rendering the labels / ticks and then check the sizes of
* those texts. The texts must be visible for the check. This is a helper function that momentarily shows the node
* content, calls the margin update "callback", and then resets the node content's visibility to its original value.
*/
protected updateMargins(callback: () => void) {
const $content = $(this.$refs.content);
const isVisible = $content.is(':visible');
if (!isVisible) { // getBBox() requires the SVG to be visible to return valid sizes
$content.show();
}
callback();
if (!isVisible) {
$content.hide();
}
}
/**
* Allows the visualization to disable brushing conditionally (e.g. network is not brushable in navigation mode).
*/
protected isBrushable(): boolean {
return true;
}
/**
* Records the selection before a brush in order to commit selection history.
*/
protected recordPrevSelection() {
this.prevSelection = this.selection.clone();
}
/**
* Commits the seleciton history post a brush.
*/
protected commitSelectionHistory(message?: string) {
if (!this.selection.isEqual(this.prevSelection)) {
this.commitHistory(history.interactiveSelectionEvent(this, this.selection, this.prevSelection, message));
}
}
/**
* Keys actions for all visualizations.
*/
protected onKeysVisualization(keys: string): boolean {
if (keys === this.osCtrlKey + '+a') {
this.selectAll();
return true;
}
if (keys === this.osCtrlKey + '+shift+a') {
this.deselectAll();
return true;
}
return this.onKeysNode(keys);
}
/**
* Places every data item into the selection.
* If a node has additional visual entities to be selected, such as a bar in the history,
* override this method.
*/
protected executeSelectAll() {
const items = this.inputPortMap.in.getSubsetPackage().getItemIndices();
this.selection.addItems(items);
}
/**
* Removes every data item from the selection, similar to executeSelectAll.
*/
protected executeDeselectAll() {
this.selection.clear();
}
private selectAll() {
if (this.hasNoDataset()) {
return;
}
this.recordPrevSelection();
this.executeSelectAll();
this.commitSelectionHistory('select all');
this.onSelectionUpdate();
}
private deselectAll() {
if (this.hasNoDataset()) {
return;
}
this.recordPrevSelection();
this.executeDeselectAll();
this.commitSelectionHistory('deselect all');
this.onSelectionUpdate();
}
private onWindowResize(evt: Event) {
if (this.isEnlarged) {
const view = this.getEnlargedView();
$(this.$refs.node).css({
left: view.x,
top: view.y,
width: view.width,
height: view.height,
});
}
}
private getEnlargedView(): Box {
const screenWidth = window.innerWidth;
const screenHeight = window.innerHeight;
return {
x: .1 * screenWidth,
y: .1 * screenHeight,
width: .8 * screenWidth,
height: .8 * screenHeight,
};
}
private recordBrushPoint(evt: MouseEvent) {
const $brushElement = $(this.$refs.node).find(this.BRUSH_ELEMENT);
if (!$brushElement.length) {
return;
}
const offset = mouseOffset(evt, $brushElement as JQuery<HTMLElement>);
this.brushPoints.push({ x: offset.left, y: offset.top });
}
private onBrushStart(evt: MouseEvent) {
if (this.isAltPressed || // dragging
!this.isBrushable() || // other blocking interaction, e.g. navigation
evt.which !== 1) { // not left click
return;
}
this.isBrushing = true;
this.brushTime = new Date().getTime();
this.recordBrushPoint(evt);
}
private onBrushMove(evt: MouseEvent) {
if (!this.isBrushing) {
return;
}
this.recordBrushPoint(evt);
this.brushed(this.brushPoints);
}
private onBrushLeave(evt: MouseEvent) {
if (!this.isBrushing) {
return;
}
this.onBrushStop(evt);
}
private onBrushStop(evt: MouseEvent) {
if (!this.isBrushing) {
return;
}
this.isBrushing = false;
this.recordPrevSelection();
this.brushed(this.brushPoints, true);
this.commitSelectionHistory();
if (this.brushPoints.length) {
const [p, q] = [_.first(this.brushPoints), _.last(this.brushPoints)] as [Point, Point];
this.brushDistance = Math.abs(p.x - q.x) + Math.abs(p.y - q.y);
} else {
this.brushDistance = 0;
}
this.brushTime = new Date().getTime() - this.brushTime;
this.brushPoints = [];
if (this.isProbablyFailedDrag()) {
this.onFailedDrag();
}
}
private isProbablyFailedDrag(): boolean {
if (!this.isSelectionEmpty()) {
this.failedDragCount = 0; // clear failed count on successful brush
return false;
}
return this.brushDistance > FAILED_DRAG_DISTANCE_THRESHOLD &&
this.brushTime < FAILED_DRAG_TIME_THRESHOLD;
}
private onFailedDrag() {
this.failedDragCount++;
if (this.failedDragCount === this.failedDragsBeforeHint) {
showSystemMessage(this.$store,
'Hold [Alt] key to drag a visualization node inside the plot area.', 'info');
this.failedDragCount = 0;
// TODO: check this?
// Increases the number of failed drags required before showing the hint again.
this.failedDragsBeforeHint += FAILED_DRAGS_BEFORE_HINT_INCREMENT;
}
}
/**
* When Alt is pressed, disables mouse interaction on the plot area.
*/
@Watch('isAltPressed')
private onAltPressedChange(value: boolean) {
if (this.isShiftPressed) {
// Ignore other key combination.
return;
}
if (value) {
$(this.$refs.node).find(this.ALT_DRAG_ELEMENT).css('pointer-events', 'none');
} else {
$(this.$refs.node).find(this.ALT_DRAG_ELEMENT).css('pointer-events', '');
}
}
@Watch('nodeModalVisible')
private onNodeModalVisibleChange(value: boolean) {
if (!value && this.isEnlarged) {
// Close by global keystroke (escape).
this.closeEnlargeModal();
}
}
} | the_stack |
import * as GObject from "@gi-types/gobject";
import * as HarfBuzz from "@gi-types/harfbuzz";
import * as cairo from "@gi-types/cairo";
import * as GLib from "@gi-types/glib";
// Module-level constant declarations; concrete values are supplied by the
// native library binding at runtime (this is an ambient declaration file).
export const ANALYSIS_FLAG_CENTERED_BASELINE: number;
export const ANALYSIS_FLAG_IS_ELLIPSIS: number;
export const ANALYSIS_FLAG_NEED_HYPHEN: number;
export const ATTR_INDEX_FROM_TEXT_BEGINNING: number;
export const ENGINE_TYPE_LANG: string;
export const ENGINE_TYPE_SHAPE: string;
export const GLYPH_EMPTY: Glyph;
export const GLYPH_INVALID_INPUT: Glyph;
export const GLYPH_UNKNOWN_FLAG: Glyph;
export const RENDER_TYPE_NONE: string;
export const SCALE: number;
export const UNKNOWN_GLYPH_HEIGHT: number;
export const UNKNOWN_GLYPH_WIDTH: number;
export const VERSION_MIN_REQUIRED: number;
// Free-function declarations. Functions returning tuple types (e.g.
// [boolean, Stretch]) model C out-parameters flattened into the return value.
export function attr_allow_breaks_new(allow_breaks: boolean): Attribute;
export function attr_background_alpha_new(alpha: number): Attribute;
export function attr_background_new(red: number, green: number, blue: number): Attribute;
export function attr_fallback_new(enable_fallback: boolean): Attribute;
export function attr_family_new(family: string): Attribute;
export function attr_font_desc_new(desc: FontDescription): Attribute;
export function attr_font_features_new(features: string): Attribute;
export function attr_foreground_alpha_new(alpha: number): Attribute;
export function attr_foreground_new(red: number, green: number, blue: number): Attribute;
export function attr_gravity_hint_new(hint: GravityHint): Attribute;
export function attr_gravity_new(gravity: Gravity): Attribute;
export function attr_insert_hyphens_new(insert_hyphens: boolean): Attribute;
export function attr_language_new(language: Language): Attribute;
export function attr_letter_spacing_new(letter_spacing: number): Attribute;
export function attr_overline_color_new(red: number, green: number, blue: number): Attribute;
export function attr_overline_new(overline: Overline): Attribute;
export function attr_rise_new(rise: number): Attribute;
export function attr_scale_new(scale_factor: number): Attribute;
export function attr_shape_new(ink_rect: Rectangle, logical_rect: Rectangle): Attribute;
export function attr_shape_new_with_data(
	ink_rect: Rectangle,
	logical_rect: Rectangle,
	data?: any | null,
	copy_func?: AttrDataCopyFunc | null,
	destroy_func?: GLib.DestroyNotify | null
): Attribute;
export function attr_show_new(flags: ShowFlags): Attribute;
export function attr_size_new(size: number): Attribute;
export function attr_size_new_absolute(size: number): Attribute;
export function attr_stretch_new(stretch: Stretch): Attribute;
export function attr_strikethrough_color_new(red: number, green: number, blue: number): Attribute;
export function attr_strikethrough_new(strikethrough: boolean): Attribute;
export function attr_style_new(style: Style): Attribute;
export function attr_type_get_name(type: AttrType): string | null;
export function attr_type_register(name: string): AttrType;
export function attr_underline_color_new(red: number, green: number, blue: number): Attribute;
export function attr_underline_new(underline: Underline): Attribute;
export function attr_variant_new(variant: Variant): Attribute;
export function attr_weight_new(weight: Weight): Attribute;
export function bidi_type_for_unichar(ch: number): BidiType;
// "break" is a reserved word in TypeScript, hence the "__break" alias.
export function __break(text: string, length: number, analysis: Analysis, attrs: LogAttr[]): void;
export function default_break(
	text: string,
	length: number,
	analysis: Analysis | null,
	attrs: LogAttr,
	attrs_len: number
): void;
export function extents_to_pixels(inclusive?: Rectangle | null, nearest?: Rectangle | null): void;
export function find_base_dir(text: string, length: number): Direction;
export function find_paragraph_boundary(text: string, length: number): [number, number];
export function font_description_from_string(str: string): FontDescription;
export function get_log_attrs(
	text: string,
	length: number,
	level: number,
	language: Language,
	log_attrs: LogAttr[]
): void;
export function get_mirror_char(ch: number, mirrored_ch: number): boolean;
export function gravity_get_for_matrix(matrix?: Matrix | null): Gravity;
export function gravity_get_for_script(script: Script, base_gravity: Gravity, hint: GravityHint): Gravity;
export function gravity_get_for_script_and_width(
	script: Script,
	wide: boolean,
	base_gravity: Gravity,
	hint: GravityHint
): Gravity;
export function gravity_to_rotation(gravity: Gravity): number;
export function is_zero_width(ch: number): boolean;
export function itemize(
	context: Context,
	text: string,
	start_index: number,
	length: number,
	attrs: AttrList,
	cached_iter?: AttrIterator | null
): Item[];
export function itemize_with_base_dir(
	context: Context,
	base_dir: Direction,
	text: string,
	start_index: number,
	length: number,
	attrs: AttrList,
	cached_iter?: AttrIterator | null
): Item[];
export function language_from_string(language?: string | null): Language | null;
export function language_get_default(): Language;
export function language_get_preferred(): Language | null;
export function log2vis_get_embedding_levels(text: string, length: number, pbase_dir: Direction): number;
export function markup_parser_finish(
	context: GLib.MarkupParseContext
): [boolean, AttrList | null, string | null, number | null];
export function markup_parser_new(accel_marker: number): GLib.MarkupParseContext;
export function parse_enum(
	type: GObject.GType,
	str: string | null,
	warn: boolean
): [boolean, number | null, string | null];
export function parse_markup(
	markup_text: string,
	length: number,
	accel_marker: number
): [boolean, AttrList | null, string | null, number | null];
export function parse_stretch(str: string, warn: boolean): [boolean, Stretch];
export function parse_style(str: string, warn: boolean): [boolean, Style];
export function parse_variant(str: string, warn: boolean): [boolean, Variant];
export function parse_weight(str: string, warn: boolean): [boolean, Weight];
export function quantize_line_geometry(thickness: number, position: number): [number, number];
export function read_line(stream: any | null, str: GLib.String): number;
export function reorder_items(logical_items: Item[]): Item[];
export function scan_int(pos: string): [boolean, string, number];
export function scan_string(pos: string, out: GLib.String): [boolean, string];
export function scan_word(pos: string, out: GLib.String): [boolean, string];
export function script_for_unichar(ch: number): Script;
export function script_get_sample_language(script: Script): Language | null;
export function shape(text: string, length: number, analysis: Analysis, glyphs: GlyphString): void;
export function shape_full(
	item_text: string,
	item_length: number,
	paragraph_text: string | null,
	paragraph_length: number,
	analysis: Analysis,
	glyphs: GlyphString
): void;
export function shape_with_flags(
	item_text: string,
	item_length: number,
	paragraph_text: string | null,
	paragraph_length: number,
	analysis: Analysis,
	glyphs: GlyphString,
	flags: ShapeFlags
): void;
export function skip_space(pos: string): [boolean, string];
export function split_file_list(str: string): string[];
export function tailor_break(
	text: string,
	length: number,
	analysis: Analysis,
	offset: number,
	log_attrs: LogAttr[]
): void;
export function trim_string(str: string): string;
export function unichar_direction(ch: number): Direction;
export function units_from_double(d: number): number;
export function units_to_double(i: number): number;
export function version(): number;
export function version_check(required_major: number, required_minor: number, required_micro: number): string | null;
export function version_string(): string;
// Callback type aliases used by the attribute and fontset APIs above.
export type AttrDataCopyFunc = () => any | null;
export type AttrFilterFunc = (attribute: Attribute) => boolean;
export type FontsetForeachFunc = (fontset: Fontset, font: Font) => boolean;
// Enum declarations. Each enum has a companion namespace of the same name
// that exposes its GObject $gtype; the pair merges into one exported symbol.
export namespace Alignment {
	export const $gtype: GObject.GType<Alignment>;
}
export enum Alignment {
	LEFT = 0,
	CENTER = 1,
	RIGHT = 2,
}
export namespace AttrType {
	export const $gtype: GObject.GType<AttrType>;
}
export enum AttrType {
	INVALID = 0,
	LANGUAGE = 1,
	FAMILY = 2,
	STYLE = 3,
	WEIGHT = 4,
	VARIANT = 5,
	STRETCH = 6,
	SIZE = 7,
	FONT_DESC = 8,
	FOREGROUND = 9,
	BACKGROUND = 10,
	UNDERLINE = 11,
	STRIKETHROUGH = 12,
	RISE = 13,
	SHAPE = 14,
	SCALE = 15,
	FALLBACK = 16,
	LETTER_SPACING = 17,
	UNDERLINE_COLOR = 18,
	STRIKETHROUGH_COLOR = 19,
	ABSOLUTE_SIZE = 20,
	GRAVITY = 21,
	GRAVITY_HINT = 22,
	FONT_FEATURES = 23,
	FOREGROUND_ALPHA = 24,
	BACKGROUND_ALPHA = 25,
	ALLOW_BREAKS = 26,
	SHOW = 27,
	INSERT_HYPHENS = 28,
	OVERLINE = 29,
	OVERLINE_COLOR = 30,
}
export namespace BidiType {
	export const $gtype: GObject.GType<BidiType>;
}
export enum BidiType {
	L = 0,
	LRE = 1,
	LRO = 2,
	R = 3,
	AL = 4,
	RLE = 5,
	RLO = 6,
	PDF = 7,
	EN = 8,
	ES = 9,
	ET = 10,
	AN = 11,
	CS = 12,
	NSM = 13,
	BN = 14,
	B = 15,
	S = 16,
	WS = 17,
	ON = 18,
}
export namespace CoverageLevel {
	export const $gtype: GObject.GType<CoverageLevel>;
}
export enum CoverageLevel {
	NONE = 0,
	FALLBACK = 1,
	APPROXIMATE = 2,
	EXACT = 3,
}
export namespace Direction {
	export const $gtype: GObject.GType<Direction>;
}
export enum Direction {
	LTR = 0,
	RTL = 1,
	TTB_LTR = 2,
	TTB_RTL = 3,
	WEAK_LTR = 4,
	WEAK_RTL = 5,
	NEUTRAL = 6,
}
export namespace EllipsizeMode {
	export const $gtype: GObject.GType<EllipsizeMode>;
}
export enum EllipsizeMode {
	NONE = 0,
	START = 1,
	MIDDLE = 2,
	END = 3,
}
export namespace Gravity {
	export const $gtype: GObject.GType<Gravity>;
}
export enum Gravity {
	SOUTH = 0,
	EAST = 1,
	NORTH = 2,
	WEST = 3,
	AUTO = 4,
}
export namespace GravityHint {
	export const $gtype: GObject.GType<GravityHint>;
}
export enum GravityHint {
	NATURAL = 0,
	STRONG = 1,
	LINE = 2,
}
export namespace Overline {
	export const $gtype: GObject.GType<Overline>;
}
export enum Overline {
	NONE = 0,
	SINGLE = 1,
}
export namespace RenderPart {
	export const $gtype: GObject.GType<RenderPart>;
}
export enum RenderPart {
	FOREGROUND = 0,
	BACKGROUND = 1,
	UNDERLINE = 2,
	STRIKETHROUGH = 3,
	OVERLINE = 4,
}
export namespace Script {
	export const $gtype: GObject.GType<Script>;
}
export enum Script {
	INVALID_CODE = -1,
	COMMON = 0,
	INHERITED = 1,
	ARABIC = 2,
	ARMENIAN = 3,
	BENGALI = 4,
	BOPOMOFO = 5,
	CHEROKEE = 6,
	COPTIC = 7,
	CYRILLIC = 8,
	DESERET = 9,
	DEVANAGARI = 10,
	ETHIOPIC = 11,
	GEORGIAN = 12,
	GOTHIC = 13,
	GREEK = 14,
	GUJARATI = 15,
	GURMUKHI = 16,
	HAN = 17,
	HANGUL = 18,
	HEBREW = 19,
	HIRAGANA = 20,
	KANNADA = 21,
	KATAKANA = 22,
	KHMER = 23,
	LAO = 24,
	LATIN = 25,
	MALAYALAM = 26,
	MONGOLIAN = 27,
	MYANMAR = 28,
	OGHAM = 29,
	OLD_ITALIC = 30,
	ORIYA = 31,
	RUNIC = 32,
	SINHALA = 33,
	SYRIAC = 34,
	TAMIL = 35,
	TELUGU = 36,
	THAANA = 37,
	THAI = 38,
	TIBETAN = 39,
	CANADIAN_ABORIGINAL = 40,
	YI = 41,
	TAGALOG = 42,
	HANUNOO = 43,
	BUHID = 44,
	TAGBANWA = 45,
	BRAILLE = 46,
	CYPRIOT = 47,
	LIMBU = 48,
	OSMANYA = 49,
	SHAVIAN = 50,
	LINEAR_B = 51,
	TAI_LE = 52,
	UGARITIC = 53,
	NEW_TAI_LUE = 54,
	BUGINESE = 55,
	GLAGOLITIC = 56,
	TIFINAGH = 57,
	SYLOTI_NAGRI = 58,
	OLD_PERSIAN = 59,
	KHAROSHTHI = 60,
	UNKNOWN = 61,
	BALINESE = 62,
	CUNEIFORM = 63,
	PHOENICIAN = 64,
	PHAGS_PA = 65,
	NKO = 66,
	KAYAH_LI = 67,
	LEPCHA = 68,
	REJANG = 69,
	SUNDANESE = 70,
	SAURASHTRA = 71,
	CHAM = 72,
	OL_CHIKI = 73,
	VAI = 74,
	CARIAN = 75,
	LYCIAN = 76,
	LYDIAN = 77,
	BATAK = 78,
	BRAHMI = 79,
	MANDAIC = 80,
	CHAKMA = 81,
	MEROITIC_CURSIVE = 82,
	MEROITIC_HIEROGLYPHS = 83,
	MIAO = 84,
	SHARADA = 85,
	SORA_SOMPENG = 86,
	TAKRI = 87,
	BASSA_VAH = 88,
	CAUCASIAN_ALBANIAN = 89,
	DUPLOYAN = 90,
	ELBASAN = 91,
	GRANTHA = 92,
	KHOJKI = 93,
	KHUDAWADI = 94,
	LINEAR_A = 95,
	MAHAJANI = 96,
	MANICHAEAN = 97,
	MENDE_KIKAKUI = 98,
	MODI = 99,
	MRO = 100,
	NABATAEAN = 101,
	OLD_NORTH_ARABIAN = 102,
	OLD_PERMIC = 103,
	PAHAWH_HMONG = 104,
	PALMYRENE = 105,
	PAU_CIN_HAU = 106,
	PSALTER_PAHLAVI = 107,
	SIDDHAM = 108,
	TIRHUTA = 109,
	WARANG_CITI = 110,
	AHOM = 111,
	ANATOLIAN_HIEROGLYPHS = 112,
	HATRAN = 113,
	MULTANI = 114,
	OLD_HUNGARIAN = 115,
	SIGNWRITING = 116,
}
export namespace Stretch {
	export const $gtype: GObject.GType<Stretch>;
}
export enum Stretch {
	ULTRA_CONDENSED = 0,
	EXTRA_CONDENSED = 1,
	CONDENSED = 2,
	SEMI_CONDENSED = 3,
	NORMAL = 4,
	SEMI_EXPANDED = 5,
	EXPANDED = 6,
	EXTRA_EXPANDED = 7,
	ULTRA_EXPANDED = 8,
}
export namespace Style {
	export const $gtype: GObject.GType<Style>;
}
export enum Style {
	NORMAL = 0,
	OBLIQUE = 1,
	ITALIC = 2,
}
export namespace TabAlign {
	export const $gtype: GObject.GType<TabAlign>;
}
export enum TabAlign {
	LEFT = 0,
}
export namespace Underline {
	export const $gtype: GObject.GType<Underline>;
}
export enum Underline {
	NONE = 0,
	SINGLE = 1,
	DOUBLE = 2,
	LOW = 3,
	ERROR = 4,
	SINGLE_LINE = 5,
	DOUBLE_LINE = 6,
	ERROR_LINE = 7,
}
export namespace Variant {
	export const $gtype: GObject.GType<Variant>;
}
export enum Variant {
	NORMAL = 0,
	SMALL_CAPS = 1,
}
export namespace Weight {
	export const $gtype: GObject.GType<Weight>;
}
export enum Weight {
	THIN = 100,
	ULTRALIGHT = 200,
	LIGHT = 300,
	SEMILIGHT = 350,
	BOOK = 380,
	NORMAL = 400,
	MEDIUM = 500,
	SEMIBOLD = 600,
	BOLD = 700,
	ULTRABOLD = 800,
	HEAVY = 900,
	ULTRAHEAVY = 1000,
}
export namespace WrapMode {
	export const $gtype: GObject.GType<WrapMode>;
}
export enum WrapMode {
	WORD = 0,
	CHAR = 1,
	WORD_CHAR = 2,
}
// The following enums are used as bit flags (members are powers of two).
export namespace FontMask {
	export const $gtype: GObject.GType<FontMask>;
}
export enum FontMask {
	FAMILY = 1,
	STYLE = 2,
	VARIANT = 4,
	WEIGHT = 8,
	STRETCH = 16,
	SIZE = 32,
	GRAVITY = 64,
	VARIATIONS = 128,
}
export namespace ShapeFlags {
	export const $gtype: GObject.GType<ShapeFlags>;
}
export enum ShapeFlags {
	NONE = 0,
	ROUND_POSITIONS = 1,
}
export namespace ShowFlags {
	export const $gtype: GObject.GType<ShowFlags>;
}
export enum ShowFlags {
	NONE = 0,
	SPACES = 1,
	LINE_BREAKS = 2,
	IGNORABLES = 4,
}
// GObject class declaration for Context. The module/class pair merges so
// Context carries both the instance API and its ConstructorProperties type.
export module Context {
	export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
		[key: string]: any;
	}
}
export class Context extends GObject.Object {
	static $gtype: GObject.GType<Context>;
	constructor(properties?: Partial<Context.ConstructorProperties>, ...args: any[]);
	_init(properties?: Partial<Context.ConstructorProperties>, ...args: any[]): void;
	// Constructors
	static ["new"](): Context;
	// Members
	changed(): void;
	get_base_dir(): Direction;
	get_base_gravity(): Gravity;
	get_font_description(): FontDescription;
	get_font_map(): FontMap;
	get_gravity(): Gravity;
	get_gravity_hint(): GravityHint;
	get_language(): Language;
	get_matrix(): Matrix | null;
	get_metrics(desc?: FontDescription | null, language?: Language | null): FontMetrics;
	get_round_glyph_positions(): boolean;
	get_serial(): number;
	list_families(): FontFamily[];
	load_font(desc: FontDescription): Font | null;
	load_fontset(desc: FontDescription, language: Language): Fontset | null;
	set_base_dir(direction: Direction): void;
	set_base_gravity(gravity: Gravity): void;
	set_font_description(desc: FontDescription): void;
	set_font_map(font_map: FontMap): void;
	set_gravity_hint(hint: GravityHint): void;
	set_language(language: Language): void;
	set_matrix(matrix?: Matrix | null): void;
	set_round_glyph_positions(round_positions: boolean): void;
}
// Declaration for Coverage (glyph coverage map, indexed by character position).
export module Coverage {
	export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
		[key: string]: any;
	}
}
export class Coverage extends GObject.Object {
	static $gtype: GObject.GType<Coverage>;
	constructor(properties?: Partial<Coverage.ConstructorProperties>, ...args: any[]);
	_init(properties?: Partial<Coverage.ConstructorProperties>, ...args: any[]): void;
	// Constructors
	static ["new"](): Coverage;
	// Members
	copy(): Coverage;
	get(index_: number): CoverageLevel;
	max(other: Coverage): void;
	ref(): Coverage;
	set(index_: number, level: CoverageLevel): void;
	// The never-typed overload blocks accidental use of GObject.Object's generic set().
	set(...args: never[]): never;
	to_bytes(): Uint8Array;
	unref(): void;
	static from_bytes(bytes: Uint8Array | string): Coverage | null;
}
// Abstract base declaration for engines; concrete engine APIs are declared
// on the EngineLang / EngineShape subclasses below.
export module Engine {
	export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
		[key: string]: any;
	}
}
export abstract class Engine extends GObject.Object {
	static $gtype: GObject.GType<Engine>;
	constructor(properties?: Partial<Engine.ConstructorProperties>, ...args: any[]);
	_init(properties?: Partial<Engine.ConstructorProperties>, ...args: any[]): void;
}
// Language engine declaration; subclasses implement script-specific line breaking
// via the vfunc_script_break virtual method.
export module EngineLang {
	export interface ConstructorProperties extends Engine.ConstructorProperties {
		[key: string]: any;
	}
}
export abstract class EngineLang extends Engine {
	static $gtype: GObject.GType<EngineLang>;
	constructor(properties?: Partial<EngineLang.ConstructorProperties>, ...args: any[]);
	_init(properties?: Partial<EngineLang.ConstructorProperties>, ...args: any[]): void;
	// Members
	vfunc_script_break(text: string, len: number, analysis: Analysis, attrs: LogAttr, attrs_len: number): void;
}
// Shaping engine declaration; subclasses implement coverage queries and
// script-specific shaping via the vfunc_* virtual methods.
export module EngineShape {
	export interface ConstructorProperties extends Engine.ConstructorProperties {
		[key: string]: any;
	}
}
export abstract class EngineShape extends Engine {
	static $gtype: GObject.GType<EngineShape>;
	constructor(properties?: Partial<EngineShape.ConstructorProperties>, ...args: any[]);
	_init(properties?: Partial<EngineShape.ConstructorProperties>, ...args: any[]): void;
	// Members
	vfunc_covers(font: Font, language: Language, wc: number): CoverageLevel;
	vfunc_script_shape(
		font: Font,
		item_text: string,
		item_length: number,
		analysis: Analysis,
		glyphs: GlyphString,
		paragraph_text: string,
		paragraph_length: number
	): void;
}
// Abstract Font declaration. Methods returning tuples (e.g. get_glyph_extents)
// model C out-parameters flattened into the return value.
export module Font {
	export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
		[key: string]: any;
	}
}
export abstract class Font extends GObject.Object {
	static $gtype: GObject.GType<Font>;
	constructor(properties?: Partial<Font.ConstructorProperties>, ...args: any[]);
	_init(properties?: Partial<Font.ConstructorProperties>, ...args: any[]): void;
	// Members
	describe(): FontDescription;
	describe_with_absolute_size(): FontDescription;
	find_shaper(language: Language, ch: number): EngineShape;
	get_coverage(language: Language): Coverage;
	get_face(): FontFace;
	get_features(num_features: number): [HarfBuzz.feature_t[], number];
	get_font_map(): FontMap | null;
	get_glyph_extents(glyph: Glyph): [Rectangle | null, Rectangle | null];
	get_metrics(language?: Language | null): FontMetrics;
	has_char(wc: number): boolean;
	vfunc_create_hb_font(): HarfBuzz.font_t;
	vfunc_describe(): FontDescription;
	vfunc_describe_absolute(): FontDescription;
	vfunc_get_coverage(language: Language): Coverage;
	vfunc_get_features(num_features: number): [HarfBuzz.feature_t[], number];
	vfunc_get_font_map(): FontMap | null;
	vfunc_get_glyph_extents(glyph: Glyph): [Rectangle | null, Rectangle | null];
	vfunc_get_metrics(language?: Language | null): FontMetrics;
	static descriptions_free(descs?: FontDescription[] | null): void;
}
export module FontFace {
export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
[key: string]: any;
}
}
export abstract class FontFace extends GObject.Object {
static $gtype: GObject.GType<FontFace>;
constructor(properties?: Partial<FontFace.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<FontFace.ConstructorProperties>, ...args: any[]): void;
// Members
describe(): FontDescription;
get_face_name(): string;
get_family(): FontFamily;
is_synthesized(): boolean;
list_sizes(): number[] | null;
vfunc_describe(): FontDescription;
vfunc_get_face_name(): string;
vfunc_get_family(): FontFamily;
vfunc_is_synthesized(): boolean;
vfunc_list_sizes(): number[] | null;
}
export module FontFamily {
export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
[key: string]: any;
}
}
export abstract class FontFamily extends GObject.Object {
static $gtype: GObject.GType<FontFamily>;
constructor(properties?: Partial<FontFamily.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<FontFamily.ConstructorProperties>, ...args: any[]): void;
// Members
get_face(name?: string | null): FontFace | null;
get_name(): string;
is_monospace(): boolean;
is_variable(): boolean;
list_faces(): FontFace[] | null;
vfunc_get_face(name?: string | null): FontFace | null;
vfunc_get_name(): string;
vfunc_is_monospace(): boolean;
vfunc_is_variable(): boolean;
vfunc_list_faces(): FontFace[] | null;
}
export module FontMap {
export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
[key: string]: any;
}
}
export abstract class FontMap extends GObject.Object {
static $gtype: GObject.GType<FontMap>;
constructor(properties?: Partial<FontMap.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<FontMap.ConstructorProperties>, ...args: any[]): void;
// Members
changed(): void;
create_context(): Context;
get_family(name: string): FontFamily;
get_serial(): number;
list_families(): FontFamily[];
load_font(context: Context, desc: FontDescription): Font | null;
load_fontset(context: Context, desc: FontDescription, language: Language): Fontset | null;
vfunc_changed(): void;
vfunc_get_family(name: string): FontFamily;
vfunc_get_serial(): number;
vfunc_list_families(): FontFamily[];
vfunc_load_font(context: Context, desc: FontDescription): Font | null;
vfunc_load_fontset(context: Context, desc: FontDescription, language: Language): Fontset | null;
}
export module Fontset {
export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
[key: string]: any;
}
}
export abstract class Fontset extends GObject.Object {
static $gtype: GObject.GType<Fontset>;
constructor(properties?: Partial<Fontset.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<Fontset.ConstructorProperties>, ...args: any[]): void;
// Members
foreach(func: FontsetForeachFunc): void;
get_font(wc: number): Font;
get_metrics(): FontMetrics;
vfunc_foreach(func: FontsetForeachFunc): void;
vfunc_get_font(wc: number): Font;
vfunc_get_language(): Language;
vfunc_get_metrics(): FontMetrics;
}
export module FontsetSimple {
export interface ConstructorProperties extends Fontset.ConstructorProperties {
[key: string]: any;
}
}
export class FontsetSimple extends Fontset {
static $gtype: GObject.GType<FontsetSimple>;
constructor(properties?: Partial<FontsetSimple.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<FontsetSimple.ConstructorProperties>, ...args: any[]): void;
// Constructors
static ["new"](language: Language): FontsetSimple;
// Members
append(font: Font): void;
size(): number;
}
export module Layout {
export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
[key: string]: any;
}
}
export class Layout extends GObject.Object {
static $gtype: GObject.GType<Layout>;
constructor(properties?: Partial<Layout.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<Layout.ConstructorProperties>, ...args: any[]): void;
// Constructors
static ["new"](context: Context): Layout;
// Members
context_changed(): void;
copy(): Layout;
get_alignment(): Alignment;
get_attributes(): AttrList | null;
get_auto_dir(): boolean;
get_baseline(): number;
get_character_count(): number;
get_context(): Context;
get_cursor_pos(index_: number): [Rectangle | null, Rectangle | null];
get_direction(index: number): Direction;
get_ellipsize(): EllipsizeMode;
get_extents(): [Rectangle | null, Rectangle | null];
get_font_description(): FontDescription | null;
get_height(): number;
get_indent(): number;
get_iter(): LayoutIter;
get_justify(): boolean;
get_line(line: number): LayoutLine | null;
get_line_count(): number;
get_line_readonly(line: number): LayoutLine | null;
get_line_spacing(): number;
get_lines(): LayoutLine[];
get_lines_readonly(): LayoutLine[];
get_log_attrs(): LogAttr[];
get_log_attrs_readonly(): LogAttr[];
get_pixel_extents(): [Rectangle | null, Rectangle | null];
get_pixel_size(): [number | null, number | null];
get_serial(): number;
get_single_paragraph_mode(): boolean;
get_size(): [number | null, number | null];
get_spacing(): number;
get_tabs(): TabArray | null;
get_text(): string;
get_unknown_glyphs_count(): number;
get_width(): number;
get_wrap(): WrapMode;
index_to_line_x(index_: number, trailing: boolean): [number | null, number | null];
index_to_pos(index_: number): Rectangle;
is_ellipsized(): boolean;
is_wrapped(): boolean;
move_cursor_visually(strong: boolean, old_index: number, old_trailing: number, direction: number): [number, number];
set_alignment(alignment: Alignment): void;
set_attributes(attrs?: AttrList | null): void;
set_auto_dir(auto_dir: boolean): void;
set_ellipsize(ellipsize: EllipsizeMode): void;
set_font_description(desc?: FontDescription | null): void;
set_height(height: number): void;
set_indent(indent: number): void;
set_justify(justify: boolean): void;
set_line_spacing(factor: number): void;
set_markup(markup: string, length: number): void;
set_markup_with_accel(markup: string, length: number, accel_marker: number): number | null;
set_single_paragraph_mode(setting: boolean): void;
set_spacing(spacing: number): void;
set_tabs(tabs?: TabArray | null): void;
set_text(text: string, length: number): void;
set_width(width: number): void;
set_wrap(wrap: WrapMode): void;
xy_to_index(x: number, y: number): [boolean, number, number];
}
export module Renderer {
export interface ConstructorProperties extends GObject.Object.ConstructorProperties {
[key: string]: any;
}
}
export abstract class Renderer extends GObject.Object {
static $gtype: GObject.GType<Renderer>;
constructor(properties?: Partial<Renderer.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<Renderer.ConstructorProperties>, ...args: any[]): void;
// Fields
matrix: Matrix;
// Members
activate(): void;
deactivate(): void;
draw_error_underline(x: number, y: number, width: number, height: number): void;
draw_glyph(font: Font, glyph: Glyph, x: number, y: number): void;
draw_glyph_item(text: string | null, glyph_item: GlyphItem, x: number, y: number): void;
draw_glyphs(font: Font, glyphs: GlyphString, x: number, y: number): void;
draw_layout(layout: Layout, x: number, y: number): void;
draw_layout_line(line: LayoutLine, x: number, y: number): void;
draw_rectangle(part: RenderPart, x: number, y: number, width: number, height: number): void;
draw_trapezoid(part: RenderPart, y1_: number, x11: number, x21: number, y2: number, x12: number, x22: number): void;
get_alpha(part: RenderPart): number;
get_color(part: RenderPart): Color | null;
get_layout(): Layout | null;
get_layout_line(): LayoutLine | null;
get_matrix(): Matrix | null;
part_changed(part: RenderPart): void;
set_alpha(part: RenderPart, alpha: number): void;
set_color(part: RenderPart, color?: Color | null): void;
set_matrix(matrix?: Matrix | null): void;
vfunc_begin(): void;
vfunc_draw_error_underline(x: number, y: number, width: number, height: number): void;
vfunc_draw_glyph(font: Font, glyph: Glyph, x: number, y: number): void;
vfunc_draw_glyph_item(text: string | null, glyph_item: GlyphItem, x: number, y: number): void;
vfunc_draw_glyphs(font: Font, glyphs: GlyphString, x: number, y: number): void;
vfunc_draw_rectangle(part: RenderPart, x: number, y: number, width: number, height: number): void;
vfunc_draw_shape(attr: AttrShape, x: number, y: number): void;
vfunc_draw_trapezoid(
part: RenderPart,
y1_: number,
x11: number,
x21: number,
y2: number,
x12: number,
x22: number
): void;
vfunc_end(): void;
vfunc_part_changed(part: RenderPart): void;
vfunc_prepare_run(run: LayoutRun): void;
}
// Segment analysis record attached to an Item: engines, font and
// script/language info used for shaping one run of text.
export class Analysis {
    static $gtype: GObject.GType<Analysis>;
    constructor(copy: Analysis);
    // Fields
    shape_engine: EngineShape;
    lang_engine: EngineLang;
    font: Font;
    level: number;
    gravity: number;
    flags: number;
    script: number;
    language: Language;
    extra_attrs: any[];
}
export class AttrClass {
static $gtype: GObject.GType<AttrClass>;
constructor(copy: AttrClass);
// Fields
type: AttrType;
}
export class AttrColor {
static $gtype: GObject.GType<AttrColor>;
constructor(copy: AttrColor);
// Fields
attr: Attribute;
color: Color;
}
export class AttrFloat {
static $gtype: GObject.GType<AttrFloat>;
constructor(copy: AttrFloat);
// Fields
attr: Attribute;
value: number;
}
export class AttrFontDesc {
static $gtype: GObject.GType<AttrFontDesc>;
constructor(copy: AttrFontDesc);
// Fields
attr: Attribute;
desc: FontDescription;
// Members
static new(desc: FontDescription): Attribute;
}
export class AttrFontFeatures {
static $gtype: GObject.GType<AttrFontFeatures>;
constructor(copy: AttrFontFeatures);
// Fields
attr: Attribute;
features: string;
// Members
static new(features: string): Attribute;
}
export class AttrInt {
static $gtype: GObject.GType<AttrInt>;
constructor(copy: AttrInt);
// Fields
attr: Attribute;
value: number;
}
export class AttrIterator {
static $gtype: GObject.GType<AttrIterator>;
constructor(copy: AttrIterator);
// Members
copy(): AttrIterator;
destroy(): void;
get(type: AttrType): Attribute | null;
get_attrs(): Attribute[];
get_font(desc: FontDescription, language?: Language | null, extra_attrs?: Attribute[] | null): void;
next(): boolean;
range(): [number, number];
}
export class AttrLanguage {
static $gtype: GObject.GType<AttrLanguage>;
constructor(copy: AttrLanguage);
// Fields
attr: Attribute;
value: Language;
// Members
static new(language: Language): Attribute;
}
export class AttrList {
static $gtype: GObject.GType<AttrList>;
constructor();
constructor(copy: AttrList);
// Constructors
static ["new"](): AttrList;
// Members
change(attr: Attribute): void;
copy(): AttrList | null;
equal(other_list: AttrList): boolean;
filter(func: AttrFilterFunc): AttrList | null;
get_attributes(): Attribute[];
get_iterator(): AttrIterator;
insert(attr: Attribute): void;
insert_before(attr: Attribute): void;
ref(): AttrList;
splice(other: AttrList, pos: number, len: number): void;
unref(): void;
update(pos: number, remove: number, add: number): void;
}
export class AttrShape {
static $gtype: GObject.GType<AttrShape>;
constructor(copy: AttrShape);
// Fields
attr: Attribute;
ink_rect: Rectangle;
logical_rect: Rectangle;
data: any;
copy_func: AttrDataCopyFunc;
destroy_func: GLib.DestroyNotify;
// Members
static new(ink_rect: Rectangle, logical_rect: Rectangle): Attribute;
static new_with_data(
ink_rect: Rectangle,
logical_rect: Rectangle,
data?: any | null,
copy_func?: AttrDataCopyFunc | null,
destroy_func?: GLib.DestroyNotify | null
): Attribute;
}
export class AttrSize {
static $gtype: GObject.GType<AttrSize>;
constructor(copy: AttrSize);
// Fields
attr: Attribute;
size: number;
absolute: number;
// Members
static new(size: number): Attribute;
static new_absolute(size: number): Attribute;
}
export class AttrString {
static $gtype: GObject.GType<AttrString>;
constructor(copy: AttrString);
// Fields
attr: Attribute;
value: string;
}
export class Attribute {
static $gtype: GObject.GType<Attribute>;
constructor(copy: Attribute);
// Fields
klass: AttrClass;
start_index: number;
end_index: number;
// Members
copy(): Attribute;
destroy(): void;
equal(attr2: Attribute): boolean;
init(klass: AttrClass): void;
}
export class Color {
static $gtype: GObject.GType<Color>;
constructor(
properties?: Partial<{
red?: number;
green?: number;
blue?: number;
}>
);
constructor(copy: Color);
// Fields
red: number;
green: number;
blue: number;
// Members
copy(): Color | null;
free(): void;
parse(spec: string): boolean;
parse_with_alpha(spec: string): [boolean, number | null];
to_string(): string;
}
export class EngineInfo {
static $gtype: GObject.GType<EngineInfo>;
constructor(copy: EngineInfo);
// Fields
id: string;
engine_type: string;
render_type: string;
scripts: EngineScriptInfo;
n_scripts: number;
}
export class EngineScriptInfo {
static $gtype: GObject.GType<EngineScriptInfo>;
constructor(copy: EngineScriptInfo);
// Fields
script: Script;
langs: string;
}
export class FontDescription {
static $gtype: GObject.GType<FontDescription>;
constructor();
constructor(copy: FontDescription);
// Constructors
static ["new"](): FontDescription;
// Members
better_match(old_match: FontDescription | null, new_match: FontDescription): boolean;
copy(): FontDescription | null;
copy_static(): FontDescription | null;
equal(desc2: FontDescription): boolean;
free(): void;
get_family(): string | null;
get_gravity(): Gravity;
get_set_fields(): FontMask;
get_size(): number;
get_size_is_absolute(): boolean;
get_stretch(): Stretch;
get_style(): Style;
get_variant(): Variant;
get_variations(): string | null;
get_weight(): Weight;
hash(): number;
merge(desc_to_merge: FontDescription | null, replace_existing: boolean): void;
merge_static(desc_to_merge: FontDescription, replace_existing: boolean): void;
set_absolute_size(size: number): void;
set_family(family: string): void;
set_family_static(family: string): void;
set_gravity(gravity: Gravity): void;
set_size(size: number): void;
set_stretch(stretch: Stretch): void;
set_style(style: Style): void;
set_variant(variant: Variant): void;
set_variations(variations: string): void;
set_variations_static(variations: string): void;
set_weight(weight: Weight): void;
to_filename(): string;
to_string(): string;
unset_fields(to_unset: FontMask): void;
static from_string(str: string): FontDescription;
}
export class FontMetrics {
static $gtype: GObject.GType<FontMetrics>;
constructor(
properties?: Partial<{
ref_count?: number;
ascent?: number;
descent?: number;
height?: number;
approximate_char_width?: number;
approximate_digit_width?: number;
underline_position?: number;
underline_thickness?: number;
strikethrough_position?: number;
strikethrough_thickness?: number;
}>
);
constructor(copy: FontMetrics);
// Fields
ref_count: number;
ascent: number;
descent: number;
height: number;
approximate_char_width: number;
approximate_digit_width: number;
underline_position: number;
underline_thickness: number;
strikethrough_position: number;
strikethrough_thickness: number;
// Members
get_approximate_char_width(): number;
get_approximate_digit_width(): number;
get_ascent(): number;
get_descent(): number;
get_height(): number;
get_strikethrough_position(): number;
get_strikethrough_thickness(): number;
get_underline_position(): number;
get_underline_thickness(): number;
ref(): FontMetrics | null;
unref(): void;
}
export class GlyphGeometry {
static $gtype: GObject.GType<GlyphGeometry>;
constructor(copy: GlyphGeometry);
// Fields
width: GlyphUnit;
x_offset: GlyphUnit;
y_offset: GlyphUnit;
}
export class GlyphInfo {
static $gtype: GObject.GType<GlyphInfo>;
constructor(copy: GlyphInfo);
// Fields
glyph: Glyph;
geometry: GlyphGeometry;
attr: GlyphVisAttr;
}
export class GlyphItem {
static $gtype: GObject.GType<GlyphItem>;
constructor(copy: GlyphItem);
// Fields
item: Item;
glyphs: GlyphString;
// Members
apply_attrs(text: string, list: AttrList): GlyphItem[];
copy(): GlyphItem | null;
free(): void;
get_logical_widths(text: string, logical_widths: number[]): void;
letter_space(text: string, log_attrs: LogAttr[], letter_spacing: number): void;
split(text: string, split_index: number): GlyphItem;
}
export class GlyphItemIter {
static $gtype: GObject.GType<GlyphItemIter>;
constructor(copy: GlyphItemIter);
// Fields
glyph_item: GlyphItem;
text: string;
start_glyph: number;
start_index: number;
start_char: number;
end_glyph: number;
end_index: number;
end_char: number;
// Members
copy(): GlyphItemIter | null;
free(): void;
init_end(glyph_item: GlyphItem, text: string): boolean;
init_start(glyph_item: GlyphItem, text: string): boolean;
next_cluster(): boolean;
prev_cluster(): boolean;
}
export class GlyphString {
static $gtype: GObject.GType<GlyphString>;
constructor();
constructor(copy: GlyphString);
// Fields
num_glyphs: number;
glyphs: GlyphInfo[];
log_clusters: number;
space: number;
// Constructors
static ["new"](): GlyphString;
// Members
copy(): GlyphString | null;
extents(font: Font): [Rectangle | null, Rectangle | null];
extents_range(start: number, end: number, font: Font): [Rectangle | null, Rectangle | null];
free(): void;
get_logical_widths(text: string, length: number, embedding_level: number, logical_widths: number[]): void;
get_width(): number;
index_to_x(text: string, length: number, analysis: Analysis, index_: number, trailing: boolean): number;
set_size(new_len: number): void;
x_to_index(text: string, length: number, analysis: Analysis, x_pos: number): [number, number];
}
export class GlyphVisAttr {
static $gtype: GObject.GType<GlyphVisAttr>;
constructor(
properties?: Partial<{
is_cluster_start?: number;
}>
);
constructor(copy: GlyphVisAttr);
// Fields
is_cluster_start: number;
}
export class IncludedModule {
static $gtype: GObject.GType<IncludedModule>;
constructor(copy: IncludedModule);
}
export class Item {
static $gtype: GObject.GType<Item>;
constructor();
constructor(copy: Item);
// Fields
offset: number;
length: number;
num_chars: number;
analysis: Analysis;
// Constructors
static ["new"](): Item;
// Members
apply_attrs(iter: AttrIterator): void;
copy(): Item | null;
free(): void;
split(split_index: number, split_offset: number): Item;
}
export class Language {
static $gtype: GObject.GType<Language>;
constructor(copy: Language);
// Members
get_sample_string(): string;
get_scripts(): Script[] | null;
includes_script(script: Script): boolean;
matches(range_list: string): boolean;
to_string(): string;
static from_string(language?: string | null): Language | null;
static get_default(): Language;
static get_preferred(): Language | null;
}
export class LayoutIter {
static $gtype: GObject.GType<LayoutIter>;
constructor(copy: LayoutIter);
// Members
at_last_line(): boolean;
copy(): LayoutIter | null;
free(): void;
get_baseline(): number;
get_char_extents(): Rectangle;
get_cluster_extents(): [Rectangle | null, Rectangle | null];
get_index(): number;
get_layout(): Layout;
get_layout_extents(): [Rectangle | null, Rectangle | null];
get_line(): LayoutLine;
get_line_extents(): [Rectangle | null, Rectangle | null];
get_line_readonly(): LayoutLine;
get_line_yrange(): [number | null, number | null];
get_run(): LayoutRun | null;
get_run_extents(): [Rectangle | null, Rectangle | null];
get_run_readonly(): LayoutRun | null;
next_char(): boolean;
next_cluster(): boolean;
next_line(): boolean;
next_run(): boolean;
}
export class LayoutLine {
static $gtype: GObject.GType<LayoutLine>;
constructor(copy: LayoutLine);
// Fields
layout: Layout;
start_index: number;
length: number;
runs: LayoutRun[];
is_paragraph_start: number;
resolved_dir: number;
// Members
get_extents(): [Rectangle | null, Rectangle | null];
get_height(): number | null;
get_pixel_extents(): [Rectangle | null, Rectangle | null];
get_x_ranges(start_index: number, end_index: number): number[];
index_to_x(index_: number, trailing: boolean): number;
ref(): LayoutLine;
unref(): void;
x_to_index(x_pos: number): [boolean, number, number];
}
export class LogAttr {
static $gtype: GObject.GType<LogAttr>;
constructor(
properties?: Partial<{
is_line_break?: number;
is_mandatory_break?: number;
is_char_break?: number;
is_white?: number;
is_cursor_position?: number;
is_word_start?: number;
is_word_end?: number;
is_sentence_boundary?: number;
is_sentence_start?: number;
is_sentence_end?: number;
backspace_deletes_character?: number;
is_expandable_space?: number;
is_word_boundary?: number;
}>
);
constructor(copy: LogAttr);
// Fields
is_line_break: number;
is_mandatory_break: number;
is_char_break: number;
is_white: number;
is_cursor_position: number;
is_word_start: number;
is_word_end: number;
is_sentence_boundary: number;
is_sentence_start: number;
is_sentence_end: number;
backspace_deletes_character: number;
is_expandable_space: number;
is_word_boundary: number;
}
export class Map {
static $gtype: GObject.GType<Map>;
constructor(copy: Map);
}
export class MapEntry {
static $gtype: GObject.GType<MapEntry>;
constructor(copy: MapEntry);
}
export class Matrix {
static $gtype: GObject.GType<Matrix>;
constructor(
properties?: Partial<{
xx?: number;
xy?: number;
yx?: number;
yy?: number;
x0?: number;
y0?: number;
}>
);
constructor(copy: Matrix);
// Fields
xx: number;
xy: number;
yx: number;
yy: number;
x0: number;
y0: number;
// Members
concat(new_matrix: Matrix): void;
copy(): Matrix | null;
free(): void;
get_font_scale_factor(): number;
get_font_scale_factors(): [number | null, number | null];
rotate(degrees: number): void;
scale(scale_x: number, scale_y: number): void;
transform_distance(dx: number, dy: number): [number, number];
transform_pixel_rectangle(rect?: Rectangle | null): Rectangle | null;
transform_point(x: number, y: number): [number, number];
transform_rectangle(rect?: Rectangle | null): Rectangle | null;
translate(tx: number, ty: number): void;
}
// Rectangle with an origin point and a size.
export class Rectangle {
    static $gtype: GObject.GType<Rectangle>;
    constructor(
        properties?: Partial<{
            x?: number;
            y?: number;
            width?: number;
            height?: number;
        }>
    );
    constructor(copy: Rectangle);
    // Fields
    x: number;
    y: number;
    width: number;
    height: number;
}
export class RendererPrivate {
static $gtype: GObject.GType<RendererPrivate>;
constructor(copy: RendererPrivate);
}
export class ScriptIter {
static $gtype: GObject.GType<ScriptIter>;
constructor(text: string, length: number);
constructor(copy: ScriptIter);
// Constructors
static ["new"](text: string, length: number): ScriptIter;
// Members
free(): void;
get_range(): [string | null, string | null, Script | null];
next(): boolean;
}
export class TabArray {
static $gtype: GObject.GType<TabArray>;
constructor(initial_size: number, positions_in_pixels: boolean);
constructor(copy: TabArray);
// Constructors
static ["new"](initial_size: number, positions_in_pixels: boolean): TabArray;
// Members
copy(): TabArray;
free(): void;
get_positions_in_pixels(): boolean;
get_size(): number;
get_tab(tab_index: number): [TabAlign | null, number | null];
get_tabs(): [TabAlign | null, number[] | null];
resize(new_size: number): void;
set_tab(tab_index: number, alignment: TabAlign, location: number): void;
}
// Index of a single glyph within a font.
export type Glyph = number;
// Distance unit used in glyph geometry (see Pango docs for scaling).
export type GlyphUnit = number;
// A run of glyphs sharing a single item/analysis.
export type LayoutRun = GlyphItem;
import { SplitterOptions, defaultSplitterOptions } from './options';
const SEMICOLON = ';';
// Stream-wide splitter state, shared across all chunks of a streamed input.
export interface SplitStreamContext {
  // Effective splitter options (defaults merged with caller overrides).
  options: SplitterOptions;
  // Statement delimiter currently in effect (';' or a custom DELIMITER value).
  currentDelimiter: string;
  // Sink invoked once per completed statement.
  pushOutput: (item: SplitResultItem) => void;
  // Unfinished statement text carried over between chunks.
  commandPart: string;
  // Current 0-based line and column within the whole stream.
  line: number;
  column: number;
  // Absolute character offset within the whole stream.
  streamPosition: number;
  // Stream position / line / column where the current statement began.
  commandStartPosition: number;
  commandStartLine: number;
  commandStartColumn: number;
}
// Per-chunk scanning state layered on top of the stream-wide context.
export interface SplitLineContext extends SplitStreamContext {
  // Text of the chunk currently being scanned.
  source: string;
  // Scan offset within `source`.
  position: number;
  // output: string[];
  // Offset at which scanning stops (normally source.length).
  end: number;
  // True once non-whitespace data was seen on the current line; gates
  // DELIMITER / GO recognition, which must start on a fresh line.
  wasDataOnLine: boolean;
  // Offset in `source` where the current statement started.
  currentCommandStart: number;
  // unread: string;
  // currentStatement: string;
  // semicolonKeyTokenRegex: RegExp;
}
// A single point in the input, as absolute offset plus 0-based line/column.
export interface SplitPositionDefinition {
  position: number;
  line: number;
  column: number;
}
// Statement with positional metadata (returned when returnRichInfo is set).
export interface SplitResultItemRich {
  // Trimmed statement text.
  text: string;
  // Raw span, including surrounding whitespace.
  start: SplitPositionDefinition;
  end: SplitPositionDefinition;
  // Span of the trimmed statement text, when computed.
  trimStart?: SplitPositionDefinition;
  trimEnd?: SplitPositionDefinition;
}
// Plain statement text or a rich record, depending on options.returnRichInfo.
export type SplitResultItem = string | SplitResultItemRich;
/**
 * Advances the scan position by `count` characters.
 * When rich position info is requested, line/column and the absolute stream
 * position are tracked as well; otherwise only the cheap local offset moves.
 */
function movePosition(context: SplitLineContext, count: number) {
  if (!context.options.returnRichInfo) {
    context.position += count;
    return;
  }
  let { source, position, line, column, streamPosition } = context;
  for (let remaining = count; remaining > 0; remaining -= 1) {
    if (source[position] == '\n') {
      line += 1;
      column = 0;
    } else {
      column += 1;
    }
    position += 1;
    streamPosition += 1;
  }
  context.position = position;
  context.streamPosition = streamPosition;
  context.line = line;
  context.column = column;
}
/**
 * Reports whether the character at `pos` terminates a string literal.
 * - No escape char configured: any occurrence of `endch` ends the string.
 * - Escape char equals the terminator (e.g. SQL ''): a terminator ends the
 *   string only when it is not doubled.
 * - Distinct escape char (e.g. backslash): a terminator ends the string only
 *   when the previous character is not the escape.
 */
function isStringEnd(s: string, pos: number, endch: string, escapech: string) {
  const current = s[pos];
  if (!escapech) return current == endch;
  return endch == escapech
    ? current == endch && s[pos + 1] != endch
    : current == endch && s[pos - 1] != escapech;
}
// One lexical unit recognized by scanToken.
interface Token {
  type: 'string' | 'delimiter' | 'whitespace' | 'eoln' | 'data' | 'set_delimiter' | 'comment' | 'go_delimiter';
  // Number of source characters the token spans.
  length: number;
  // Extra payload; used by 'set_delimiter' to carry the new delimiter text.
  value?: string;
}
// Shared singleton tokens for the common single-character cases.
const WHITESPACE_TOKEN: Token = {
  type: 'whitespace',
  length: 1,
};
const EOLN_TOKEN: Token = {
  type: 'eoln',
  length: 1,
};
const DATA_TOKEN: Token = {
  type: 'data',
  length: 1,
};
/**
 * Scans a PostgreSQL dollar-quoted string ($tag$ ... $tag$) starting at the
 * current position.
 *
 * Returns a 'string' token spanning the whole literal, or null when
 * dollar-quoting is disabled, no opening tag is present at the position,
 * or the literal is unterminated.
 */
function scanDollarQuotedString(context: SplitLineContext): Token | null {
  if (!context.options.allowDollarDollarString) return null;

  const s = context.source;
  const start = context.position;
  const match = /^(\$[a-zA-Z0-9_]*\$)/.exec(s.slice(start));
  if (!match) return null;

  const label = match[1];
  // Search for the closing tag. startsWith with an offset avoids allocating
  // a new substring on every iteration (the original sliced per character,
  // making the scan quadratic on long literals).
  let pos = start + label.length;
  while (pos < context.end) {
    if (s.startsWith(label, pos)) {
      return {
        type: 'string',
        length: pos + label.length - start,
      };
    }
    pos++;
  }
  return null;
}
/**
 * Reads the token starting at the current scan position. Recognized, in
 * priority order: quoted strings, the active statement delimiter, whitespace,
 * end-of-line, double-dash comments, block comments, DELIMITER redefinitions,
 * GO batch separators and dollar-quoted strings; any other character is plain
 * data. The context is never advanced here - callers consume `length`.
 */
function scanToken(context: SplitLineContext): Token {
  let pos = context.position;
  const s = context.source;
  const ch = s[pos];

  // Quoted string - consume until the (possibly escaped) terminator.
  if (context.options.stringsBegins.includes(ch)) {
    pos++;
    const endch = context.options.stringsEnds[ch];
    const escapech = context.options.stringEscapes[ch];
    while (pos < context.end && !isStringEnd(s, pos, endch, escapech)) {
      // A doubled terminator (e.g. '') escapes itself - skip both characters.
      if (endch == escapech && s[pos] == endch && s[pos + 1] == endch) {
        pos += 2;
      } else {
        pos++;
      }
    }
    return {
      type: 'string',
      length: pos - context.position + 1,
    };
  }

  // startsWith with an offset avoids allocating a fresh substring per call
  // (the original used s.slice(pos).startsWith(...)).
  if (context.currentDelimiter && s.startsWith(context.currentDelimiter, pos)) {
    return {
      type: 'delimiter',
      length: context.currentDelimiter.length,
    };
  }

  if (ch == ' ' || ch == '\t' || ch == '\r') {
    return WHITESPACE_TOKEN;
  }
  if (ch == '\n') {
    return EOLN_TOKEN;
  }

  // Single-line comment: runs to (but does not include) the newline.
  if (context.options.doubleDashComments && ch == '-' && s[pos + 1] == '-') {
    while (pos < context.end && s[pos] != '\n') pos++;
    return {
      type: 'comment',
      length: pos - context.position,
    };
  }

  // Block comment: length includes both the opening and closing markers.
  if (context.options.multilineComments && ch == '/' && s[pos + 1] == '*') {
    pos += 2;
    while (pos < context.end) {
      if (s[pos] == '*' && s[pos + 1] == '/') break;
      pos++;
    }
    return {
      type: 'comment',
      length: pos - context.position + 2,
    };
  }

  // "DELIMITER xxx" - only recognized at the start of a line.
  if (context.options.allowCustomDelimiter && !context.wasDataOnLine) {
    const m = s.slice(pos).match(/^DELIMITER[ \t]+([^\n]+)/i);
    if (m) {
      return {
        type: 'set_delimiter',
        value: m[1].trim(),
        length: m[0].length,
      };
    }
  }

  // "GO" batch separator (MSSQL-style) - only recognized at the start of a line.
  if (context.options.allowGoDelimiter && !context.wasDataOnLine) {
    const m = s.slice(pos).match(/^GO[\t\r ]*(\n|$)/i);
    if (m) {
      return {
        type: 'go_delimiter',
        // Exclude the trailing newline (m[1]) so it is tokenized separately.
        // When GO sits at end of input m[1] is empty; the original always
        // subtracted 1, leaving the final 'O' to be emitted as a spurious
        // one-character statement.
        length: m[0].length - m[1].length,
      };
    }
  }

  const dollarString = scanDollarQuotedString(context);
  if (dollarString) return dollarString;

  return DATA_TOKEN;
}
function pushQuery(context: SplitLineContext) {
const sql = (context.commandPart || '') + context.source.slice(context.currentCommandStart, context.position);
const trimmed = sql.trim();
if (trimmed) {
if (context.options.returnRichInfo) {
context.pushOutput(
countTrimmedPositions(sql, {
text: trimmed,
start: {
position: context.commandStartPosition,
line: context.commandStartLine,
column: context.commandStartColumn,
},
end: {
position: context.streamPosition,
line: context.line,
column: context.column,
},
})
);
} else {
context.pushOutput(trimmed);
}
}
}
/**
 * Given the raw statement text and its rich position record, computes where
 * the trimmed text actually begins inside the raw text and records it as
 * `trimStart`. `trimEnd` currently mirrors the untrimmed end position.
 */
function countTrimmedPositions(full: string, positions: SplitResultItemRich): SplitResultItemRich {
  // indexOf finds the first non-whitespace character, since `text` is
  // full.trim() - everything before it is the leading whitespace to skip.
  const offset = full.indexOf(positions.text);
  const trimStart = { ...positions.start };
  for (let i = 0; i < offset; i += 1) {
    trimStart.position += 1;
    if (full[i] == '\n') {
      trimStart.line += 1;
      trimStart.column = 0;
    } else {
      trimStart.column += 1;
    }
  }
  return { ...positions, trimStart, trimEnd: positions.end };
}
/**
 * Remembers the stream position / line / column where the next statement
 * begins; only needed when rich result info was requested.
 */
function markStartCommand(context: SplitLineContext) {
  if (!context.options.returnRichInfo) return;
  context.commandStartPosition = context.streamPosition;
  context.commandStartLine = context.line;
  context.commandStartColumn = context.column;
}
/**
 * Tokenizes one chunk of source, pushing completed statements to the output
 * whenever a delimiter (';', a custom DELIMITER, or GO) is crossed. Any
 * unfinished trailing statement is carried over in `commandPart` so that
 * streamed input can continue in a later chunk.
 */
export function splitQueryLine(context: SplitLineContext) {
  while (context.position < context.end) {
    const token = scanToken(context);
    if (!token) {
      // No token recognized - consume a single character and keep going.
      movePosition(context, 1);
      continue;
    }

    switch (token.type) {
      case 'string':
      case 'comment':
      case 'data':
        movePosition(context, token.length);
        context.wasDataOnLine = true;
        break;

      case 'eoln':
        movePosition(context, token.length);
        context.wasDataOnLine = false;
        break;

      case 'whitespace':
        movePosition(context, token.length);
        break;

      case 'set_delimiter':
      case 'go_delimiter':
      case 'delimiter':
        // Statement boundary: flush what we have, then start the next
        // command right after the delimiter text.
        pushQuery(context);
        context.commandPart = '';
        if (token.type == 'set_delimiter') {
          context.currentDelimiter = token.value;
        }
        movePosition(context, token.length);
        context.currentCommandStart = context.position;
        markStartCommand(context);
        break;
    }
  }

  // Keep any unterminated trailing statement for the next chunk / final flush.
  if (context.end > context.currentCommandStart) {
    context.commandPart += context.source.slice(context.currentCommandStart, context.position);
  }
}
/**
 * Returns the delimiter in effect before any DELIMITER statement is seen:
 * a semicolon, unless semicolons were explicitly disabled.
 */
export function getInitialDelimiter(options: SplitterOptions) {
  if (options?.allowSemicolon === false) return null;
  return SEMICOLON;
}
/**
 * Flushes whatever is left in `commandPart` at end of stream as the final
 * statement, unless it is pure whitespace.
 */
export function finishSplitStream(context: SplitStreamContext) {
  const trimmed = context.commandPart.trim();
  if (!trimmed) return;

  if (!context.options.returnRichInfo) {
    context.pushOutput(trimmed);
    return;
  }

  context.pushOutput(
    countTrimmedPositions(context.commandPart, {
      text: trimmed,
      start: {
        position: context.commandStartPosition,
        line: context.commandStartLine,
        column: context.commandStartColumn,
      },
      end: {
        position: context.streamPosition,
        line: context.line,
        column: context.column,
      },
    })
  );
}
export function splitQuery(sql: string, options: SplitterOptions = null): SplitResultItem[] {
const usedOptions = {
...defaultSplitterOptions,
...options,
};
if (usedOptions.noSplit) {
if (usedOptions.returnRichInfo) {
const lines = sql.split('\n');
return [
{
text: sql,
start: {
position: 0,
line: 0,
column: 0,
},
end: {
position: sql.length,
line: lines.length,
column: lines[lines.length - 1]?.length || 0,
},
},
];
}
return [sql];
}
const output = [];
const context: SplitLineContext = {
source: sql,
end: sql.length,
currentDelimiter: getInitialDelimiter(options),
position: 0,
column: 0,
line: 0,
currentCommandStart: 0,
commandStartLine: 0,
commandStartColumn: 0,
commandStartPosition: 0,
streamPosition: 0,
pushOutput: cmd => output.push(cmd),
wasDataOnLine: false,
options: usedOptions,
commandPart: '',
};
splitQueryLine(context);
finishSplitStream(context);
return output;
} | the_stack |
import SObject, {isSObject} from '../Core/SObject';
import Component, {isComponent} from '../Core/Component';
import SName from '../Core/SName';
import Game from '../Core/Game';
import World, {isWorld} from '../Core/World';
import Level from '../Core/Level';
import {SClass} from '../Core/Decorator';
import ChildActorComponent, {isChildActorComponent} from '../Core/ChildActorComponent';
import {TConstructor} from '../types/Common';
import BaseException from '../Exception/BaseException';
import throwException from '../Exception/throwException';
import BreakGuardException from '../Exception/BreakGuardException';
import MemberConflictException from '../Exception/MemberConflictException';
import SceneComponent, {isSceneComponent} from '../Renderer/ISceneComponent';
import Debug from '../Debug';
import SMap from '../DataStructure/SMap';
import StateActor from '../Info/StateActor';
import {IPhysicWorld} from '../types/Physic';
import SArray from '../DataStructure/SArray';
import {isSceneActor} from '../Renderer/ISceneActor';
/**
 * Type guard: checks whether an `SObject` instance is an `Actor`.
 *
 * Duck-typed on the `isActor` marker flag that the `Actor` class sets to
 * `true` on every instance.
 */
export function isActor(value: SObject): value is Actor {
  const candidate = value as Actor;
  return candidate.isActor;
}
/**
* 游戏世界的基石,作为Components的封装容器。
* 自身可以包含一定程度的业务逻辑,但不推荐,推荐在专用Actor中编写业务逻辑,比如`GameModeActor`和`LevelScriptActor`。
*
* @template IOptionTypes 初始化参数类型,一般交由由继承的类定义实现多态。
* @template TRootComponent 根级组件类型,一般交由由继承的类定义实现多态。
*
* @noInheritDoc
*/
@SClass({className: 'Actor', classType: 'Actor'})
export default class Actor<
IOptionTypes extends Object = {},
TRootComponent extends Component = Component<any>
> extends SObject {
public isActor: boolean = true;
/**
* @hidden
*/
public OPTIONS_TYPE: IOptionTypes;
/**
* Actor是否需要在每一帧进行进行`update`调用,如果为`false`,则将不会触发`onUpdate`生命周期(包括挂载在其下的所有Component)。
* 用于性能优化。
*/
public updateOnEverTick: boolean = true;
/**
* 在Actor自身销毁时,是否同时需要触发其下挂载的所有Component的销毁,也就是`onDestroy`生命周期的调用。
* 用于性能优化。
*/
public emitComponentsDestroy: boolean = true;
/**
* 用于给Actor归类的标签,可以用于后续的快速索引。
*/
public tag: SName = new SName('UnTagged');
protected _game: Game = null;
protected _root: TRootComponent = null;
protected _components: SMap<Component> = new SMap();
protected _componentsForUpdate: SArray<Component> = new SArray();
protected _componentsNeedUpdate: boolean = false;
protected _parent: Level | World | Game | ChildActorComponent = null;
protected _initOptions: IOptionTypes;
protected _inWorld: boolean = false;
protected _linked: boolean = false;
/**
* 构造Actor,**不可自行构造!!!**请参见`game.addActor`或`world.addActor`方法。
*/
constructor (
name: string,
game: Game,
initOptions?: IOptionTypes
) {
super(name);
this._game = game;
this._initOptions = initOptions;
}
/**
* 获取自身的父级实例,根据情况不同可能有不同的类型,一般不需要自己使用。
*/
get parent() {
return this._parent;
}
/**
* Actor是否被连接到了舞台上。
*/
get linked() {
return this._linked;
}
/**
* Actor自身范围内的事件系统管理器,将会直接代理到其的根组件`root`。
*/
get event(): TRootComponent['event'] {
return this._root.event;
}
/**
* Actor自身的根组件。一般来讲创建后就不会变更。
*/
get root(): TRootComponent {
return this._root;
}
/**
* 用于验证改Actor在当前状态是否可被添加,一般用于防止重复添加不可重复的系统Actor等。
* 你可以重写此方法来达成验证,如果验证不通过请抛出异常。
* 注意,此验证仅会在`development`环境下被执行!
*/
public verifyAdding(initOptions: IOptionTypes): void {}
/**
* 用于验证该Actor在当前状态是否可被移除。
* 你可以重写此方法来达成验证,如果验证不通过请抛出异常。
* 注意,此验证仅会在`development`环境下被执行!
*/
public verifyRemoving(): void {}
/**
* 生命周期,将在Actor被创建时最先调用,用于创建从属于该Actor的根组件。
* 在原生Actor中均有默认值,你可以用此周期来定义你自己的Actor。
*/
public onCreateRoot(initOptions: IOptionTypes): TRootComponent {
return this.addComponent('root', Component) as TRootComponent;
}
/**
* 生命周期,将在Actor创建了根组件后、在正式被添加到游戏中之前被调用。
*/
public onInit(initOptions: IOptionTypes) {
}
/**
* 生命周期,将在Actor被正式加入到游戏中之后被调用。
*/
public onAdd(initOptions: IOptionTypes) {
}
/**
* 生命周期,将在Actor被正式加入到游戏中之后,并且`updateOnEverTick`为`true`时在每一帧被调用。
*/
public onUpdate(delta: number) {
}
/**
* 生命周期,用于错误边界处理。将在游戏中大部分可预知错误发生时被调用(通常是生命周期中的非异步错误)。
* 错误将会根据一定的路径向上传递,一直到`Engine`的层次,你可以在确保完美处理了问题后返回`true`来通知引擎不再向上传递。
* 当然你也可以将自定义的一些错误加入错误边界机制中,详见[Exception](../../guide/exception)。
*/
public onError(error: BaseException, details?: any): boolean | void {
}
/**
* 生命周期,将在调用`actor.unLink`方法后触发。
*/
public onUnLink() {
}
/**
* 生命周期,将在调用`actor.reLink`方法后触发。
*
* @param parent 要恢复连接到的父级。
*/
public onReLink(parent: Actor | World | Game) {
}
/**
* 生命周期,将在Actor被销毁时触发。
*/
public onDestroy() {
}
/**
* **不要自己调用!!**
*
* @hidden
*/
public initialized() {
try {
this._root = this.onCreateRoot(this._initOptions);
this._componentsForUpdate.add(this._root);
} catch (error) {
throwException(error, this);
}
this._root.isRoot = true;
try {
this.onInit(this._initOptions);
} catch (error) {
throwException(error, this);
}
}
/**
* **不要自己调用!!**
*
* @hidden
*/
public added() {
if (this._inWorld) {
return;
}
this._inWorld = true;
this._linked = true;
this._components.forEach(component => {
component.added();
});
try {
this.onAdd(this._initOptions);
} catch (error) {
throwException(error, this);
}
}
/**
* **不要自己调用!!**
*
* @hidden
*/
public update(delta: number) {
this.syncComponentsNeedUpdate();
if (!this.updateOnEverTick || !this._inWorld || !this._parent) {
return;
}
try {
this.onUpdate(delta);
} catch (error) {
throwException(error, this);
}
this._componentsForUpdate.forEach(component => {
component.update(delta);
});
}
/**
* 将一个已经创建的`actor`从游戏世界中移除,但仍然保留其状态。之后可以用`reLink`方法让其重新和游戏世界建立连接。
* 注意如果有子级`actor`,并不会自动`unLink`!
* 这一般用于性能优化,比如对象池的创建。
*/
public unLink(): this {
if (!this._linked) {
return this;
}
try {
this.onUnLink();
} catch (error) {
throwException(error, this);
}
this.syncComponentsNeedUpdate();
this._componentsForUpdate.forEach(component => component.unLink());
const parent = this._parent;
let realParent: Game | Level | World;
if (isChildActorComponent(parent)) {
parent.autoDestroyActor = false;
parent.removeFromParent();
this._parent = parent.getOwner() as any;
realParent = !isSceneActor(parent.getOwner()) ? parent.getGame() : parent.getWorld();
} else {
realParent = parent;
}
/**
* @todo: fix types
*/
(realParent.actors as any).remove(this as any);
(realParent as any)._actorsNeedUpdate = true;
this._linked = false;
return this;
}
/**
* 将一个已经使用`unLink`方法和游戏世界断开连接的`actor`恢复连接,将其重新加入世界中。
* 这一般用于性能优化,比如对象池的创建。
*
* @param parent 指定要恢复连接到的父级,不指定则使用上一次的父级。
*/
public reLink(parent?: Actor | World | Game): this {
if (this._linked) {
return this;
}
parent = parent || (this._parent as any);
let realParent: Game | World;
if (isActor(parent)) {
realParent = !isSceneActor(parent) ? parent.getGame() : parent.getWorld();
} else {
realParent = parent;
}
if (isWorld(realParent) && this.getGame().world !== realParent) {
throw new Error(`ReLink error! Current world is different from pre world !`);
}
/**
* @todo: fix types
*/
(realParent.actors as any).add(this as any);
(realParent as any)._actorsNeedUpdate = true;
if (isActor(parent)) {
parent.addChild(this);
}
try {
this.onReLink(parent);
} catch (error) {
throwException(error, this);
}
this.syncComponentsNeedUpdate();
this._componentsForUpdate.forEach(component => component.reLink());
this._linked = true;
return this;
}
/**
* **不要自己调用!!**
*
* @hidden
*/
public destroy() {
if (this.emitComponentsDestroy) {
this.syncComponentsNeedUpdate();
this._componentsForUpdate.forEach(component => component.destroy());
} else {
this._root.destroy();
}
super.destroy();
this._parent = null;
}
private syncComponentsNeedUpdate() {
if (this._componentsNeedUpdate) {
this._componentsForUpdate.clear();
this._components.forEach(item => {
if (item.isRoot || item.needUpdateAndDestroy) {
this._componentsForUpdate.add(item);
}
});
this._componentsNeedUpdate = false;
}
}
/**
* 获取当前`Game`实例。
*
* @template IGameState 当前游戏状态管理器的类型。
*/
public getGame<IGameState extends StateActor = StateActor>(): Game<IGameState> {
return this._game as Game<IGameState>;
}
/**
* 获取当前`World`实例。
*
* @template IWorldState 当前世界状态管理器的类型。
*/
public getWorld<IWorldState extends StateActor = StateActor>(): World<IWorldState> {
return this._game.world as World<IWorldState>;
}
/**
* 获取当前`Level`实例。
*
* @template ILevelState 当前关卡状态管理器的类型。
*/
public getLevel<ILevelState extends StateActor = StateActor>(): Level<ILevelState> {
return this._game.level as Level<ILevelState>;
}
/**
* 仅在初始化了物理引擎之后,用于获取当前物理世界`PhysicWorld`实例。
* 如何使用物理引擎请见**Guide**和**Demo**。
*/
public getPhysicWorld(): IPhysicWorld {
return this._game.world.physicWorld;
}
/**
* 将自己从父级移除,基本等同于`destroy`方法,从游戏中销毁自身。
*/
public removeFromParent() {
if (!this._parent) {
throwException(
new Error(`Actor ${this.name} has no parent, is it an invalid reference to an actor is already removed from world ?`),
this
);
}
(this._parent.removeActor as any)(this);
}
/**
* 根据指定的`ComponentClass`和其初始化参数`initState`来添加一个Component。**注意这里要求每个Component的名字`name`是唯一的**。
* 如果是在`world`中添加一个`SceneComponent`,你可以指定一个`parent`作为要添加的Component的父级,让它们在渲染层连接起来。
*/
public addComponent<IComponent extends Component<any>>(
name: string,
ComponentClass: TConstructor<IComponent>,
initState?: IComponent['STATE_TYPE'],
parent?: SceneComponent
): IComponent {
if (this._components.has(name)) {
throw new MemberConflictException(this, 'Component', name, this);
}
if (parent && parent.getOwner() !== this) {
throw new BreakGuardException(
this, `Owner of parent component ${parent.name} must be same to owner of child ${this.name}`
);
}
const component = new ComponentClass(name, this, initState);
if (parent && (!isSceneComponent(parent) || !isSceneComponent(component))) {
throw new BreakGuardException(
this,
`Only SceneComponent could have child SceneComponent !
Current parent: ${parent.name}(${parent.className}), child: ${component.name}(${component.className})`
);
}
if (Debug.devMode) {
try {
component.verifyAdding(initState);
} catch (error) {
throwException(error, component);
return;
}
}
this._components.set(name, component);
component.initialized();
if (isSceneComponent(component)) {
if (parent) {
parent.addChild(component);
} else if (this._root && isSceneComponent(this._root)) {
this._root.addChild(component);
} else {
(component as any)._parent = this;
}
} else {
(component as any)._parent = this;
}
if (this._inWorld) {
component.added();
}
if (component.needUpdateAndDestroy) {
this._componentsNeedUpdate = true;
}
return component;
}
/**
* 通过名字移除一个Component。
*/
public removeComponent(name: string): void;
/**
* 移除一个指定的Component。
*/
public removeComponent(component: Component): void;
public removeComponent(value: string | Component) {
let component: Component;
if (isSObject(value) && isComponent(value)) {
component = value;
} else {
component = this._components.get(value);
}
if (!component) {
return this;
}
if (!component.canBeRemoved || component.isRoot) {
throw new BreakGuardException(
this,
`In actor ${this.name}, component ${component.name} can not be removed.
It's one of '!canBeRemoved', 'root'`);
}
if (Debug.devMode) {
try {
component.verifyRemoving();
} catch (error) {
throwException(error, component);
return;
}
}
if (component.parent && isSceneComponent(component.parent)) {
component.parent.removeChild(component as SceneComponent);
this.clearSceneComponent(component as SceneComponent);
}
component.destroy();
this._components.remove(component.name.value);
if (component.needUpdateAndDestroy) {
this._componentsNeedUpdate = true;
}
return this;
}
/**
* 将一个Actor作为自身的子级,注意子级Actor将仍然存在于`game`或者`world`中,并拥有自身独立的生命周期,这里只是建立了一个连接关系。
* 如果父子为`SceneActor`,那么这层链接关系还会反映到渲染层,同时可以指定一个`SceneComponent`作为其父级节点。
*/
public addChild(actor: Actor, parentComponent?: Component) {
if (actor.parent && isChildActorComponent(actor.parent)) {
actor.parent.getOwner().removeComponent(actor.parent);
}
this.addComponent(
actor.name.value,
ChildActorComponent,
{actor, parent: parentComponent}
);
}
/**
* 解除自身和一个子级Actor的链接。注意此方法也会直接将子级Actor从游戏中销毁!
* 如果只是想要改变一个SceneActor的归属,请使用`SceneActor`下的`changeParent`方法。
*/
public removeChild(actor: Actor) {
if (!actor.parent || !isChildActorComponent(actor.parent)) {
return;
}
this.removeComponent(actor.parent);
}
// todo: 级联将导致所有子组件递归销毁
// 性能会不会有问题?
// 这个情况还要考虑更完美的处理方式?
protected clearSceneComponent(component: SceneComponent) {
component.children.forEach(child => {
component.destroy();
this._components.remove(child.name.value);
this.clearSceneComponent(child);
});
}
/**
* 根据名字查找一个Component。
*/
public findComponentByName<TComponent extends Component = Component>(
name: string
): TComponent {
return this._components.get<TComponent>(name);
}
/**
* 根据某个类查找一个Component。
*/
public findComponentByClass<TComponent extends Component = Component>(
ComponentClass: TConstructor<TComponent>
): TComponent {
return this._components.findByClass<TComponent>(ComponentClass);
}
/**
* 查找某个类的所有实例Component。
*/
public findComponentsByClass<TComponent extends Component = Component>(
ComponentClass: TConstructor<TComponent>
): TComponent[] {
return this._components.findAllByClass<TComponent>(ComponentClass);
}
/**
* 通过一个Filter来查找组件。
*/
public findComponentByFilter<TComponent extends Component = Component>(
filter: (item: Component<any>) => boolean
): TComponent {
return this._components.findByFilter<TComponent>(filter);
}
/**
* 通过一个Filter来查找所有。
*/
public findComponentsByFilter<TComponent extends Component = Component>(
filter: (item: Component<any>) => boolean
): TComponent[] {
return this._components.findAllByFilter<TComponent>(filter);
}
} | the_stack |
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
// NOTE(review): protobuf-ts generated declaration — edit the .proto and
// regenerate rather than modifying by hand.
/**
 * Sample is a stack trace with optional labels.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.Sample
 */
export interface Sample {
    /**
     * location_ids are locations that define the stack trace.
     *
     * @generated from protobuf field: repeated bytes location_ids = 1;
     */
    locationIds: Uint8Array[];
    /**
     * labels are extra labels for a stack trace.
     *
     * @generated from protobuf field: map<string, parca.metastore.v1alpha1.SampleLabel> labels = 2;
     */
    labels: {
        [key: string]: SampleLabel;
    };
    /**
     * num_labels are the num of labels.
     *
     * @generated from protobuf field: map<string, parca.metastore.v1alpha1.SampleNumLabel> num_labels = 3;
     */
    numLabels: {
        [key: string]: SampleNumLabel;
    };
    /**
     * num_units are the units for the labels.
     *
     * @generated from protobuf field: map<string, parca.metastore.v1alpha1.SampleNumUnit> num_units = 4;
     */
    numUnits: {
        [key: string]: SampleNumUnit;
    };
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * SampleLabel are the labels added to a Sample.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.SampleLabel
 */
export interface SampleLabel {
    /**
     * labels for a label in a Sample.
     *
     * @generated from protobuf field: repeated string labels = 1;
     */
    labels: string[];
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * SampleNumLabel are the num of labels of a Sample.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.SampleNumLabel
 */
export interface SampleNumLabel {
    /**
     * num_labels are the num_label of a Sample.
     *
     * int64 values are carried as decimal strings (see
     * `reader.int64().toString()` in SampleNumLabel$Type) to avoid JS
     * number-precision loss.
     *
     * @generated from protobuf field: repeated int64 num_labels = 1;
     */
    numLabels: string[];
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * SampleNumUnit are the num units of a Sample.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.SampleNumUnit
 */
export interface SampleNumUnit {
    /**
     * units of a labels of a Sample.
     *
     * @generated from protobuf field: repeated string units = 1;
     */
    units: string[];
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing. Field number 3 is absent from the schema
// (1, 2, 4, 5), presumably reserved — confirm against the .proto.
/**
 * Location describes a single location of a stack traces.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.Location
 */
export interface Location {
    /**
     * id is the unique identifier for the location.
     *
     * @generated from protobuf field: bytes id = 1;
     */
    id: Uint8Array;
    /**
     * address is the memory address of the location if present.
     *
     * uint64 carried as a decimal string (see `reader.uint64().toString()`
     * in Location$Type) to avoid JS number-precision loss.
     *
     * @generated from protobuf field: uint64 address = 2;
     */
    address: string;
    /**
     * mapping_id is the unique identifier for the mapping associated with the location.
     *
     * @generated from protobuf field: bytes mapping_id = 4;
     */
    mappingId: Uint8Array;
    /**
     * is_folded indicates whether the location is folded into the previous location.
     *
     * @generated from protobuf field: bool is_folded = 5;
     */
    isFolded: boolean;
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * LocationLines describes a set of lines of a location.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.LocationLines
 */
export interface LocationLines {
    /**
     * id is the unique identifier for the location.
     *
     * @generated from protobuf field: bytes id = 1;
     */
    id: Uint8Array;
    /**
     * Lines is the list of lines associated with the location.
     *
     * @generated from protobuf field: repeated parca.metastore.v1alpha1.Line lines = 2;
     */
    lines: Line[];
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * Line describes a source code function and its line number.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.Line
 */
export interface Line {
    /**
     * function_id is the ID of the function.
     *
     * @generated from protobuf field: bytes function_id = 1;
     */
    functionId: Uint8Array;
    /**
     * line is the line number in the source file of the referenced function.
     *
     * int64 carried as a decimal string to avoid JS number-precision loss.
     *
     * @generated from protobuf field: int64 line = 2;
     */
    line: string;
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * Function describes metadata of a source code function.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.Function
 */
export interface Function {
    /**
     * id is the unique identifier for the function.
     *
     * @generated from protobuf field: bytes id = 1;
     */
    id: Uint8Array;
    /**
     * start_line is the line number in the source file of the first line of the function.
     *
     * int64 carried as a decimal string to avoid JS number-precision loss.
     *
     * @generated from protobuf field: int64 start_line = 2;
     */
    startLine: string;
    /**
     * name is the name of the function.
     *
     * @generated from protobuf field: string name = 3;
     */
    name: string;
    /**
     * system_name describes the name of the function, as identified by the
     * system. For instance, it can be a C++ mangled name.
     *
     * @generated from protobuf field: string system_name = 4;
     */
    systemName: string;
    /**
     * filename is the name of the source file of the function.
     *
     * @generated from protobuf field: string filename = 5;
     */
    filename: string;
}
// NOTE(review): protobuf-ts generated declaration — regenerate from the
// .proto instead of hand-editing.
/**
 * Mapping describes a memory mapping.
 *
 * @generated from protobuf message parca.metastore.v1alpha1.Mapping
 */
export interface Mapping {
    /**
     * id is the unique identifier for the mapping.
     *
     * @generated from protobuf field: bytes id = 1;
     */
    id: Uint8Array;
    /**
     * start is the start address of the mapping.
     *
     * uint64 carried as a decimal string to avoid JS number-precision loss.
     *
     * @generated from protobuf field: uint64 start = 2;
     */
    start: string;
    /**
     * limit is the length of the address space of the mapping.
     *
     * @generated from protobuf field: uint64 limit = 3;
     */
    limit: string;
    /**
     * offset is the offset of the mapping.
     *
     * @generated from protobuf field: uint64 offset = 4;
     */
    offset: string;
    /**
     * file is the name of the file associated with the mapping.
     *
     * @generated from protobuf field: string file = 5;
     */
    file: string;
    /**
     * build_id is the build ID of the mapping.
     *
     * @generated from protobuf field: string build_id = 6;
     */
    buildId: string;
    /**
     * has_functions indicates whether the mapping has associated functions.
     *
     * @generated from protobuf field: bool has_functions = 7;
     */
    hasFunctions: boolean;
    /**
     * has_filenames indicates whether the mapping has associated filenames.
     *
     * @generated from protobuf field: bool has_filenames = 8;
     */
    hasFilenames: boolean;
    /**
     * has_line_numbers indicates whether the mapping has associated line numbers.
     *
     * @generated from protobuf field: bool has_line_numbers = 9;
     */
    hasLineNumbers: boolean;
    /**
     * has_inline_frames indicates whether the mapping has associated inline frames.
     *
     * @generated from protobuf field: bool has_inline_frames = 10;
     */
    hasInlineFrames: boolean;
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format serializer — do not hand-edit; the exact
// read/write statement order IS the protobuf contract. Regenerate from the
// .proto if the schema changes.
class Sample$Type extends MessageType<Sample> {
    constructor() {
        super("parca.metastore.v1alpha1.Sample", [
            { no: 1, name: "location_ids", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 12 /*ScalarType.BYTES*/ },
            { no: 2, name: "labels", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => SampleLabel } },
            { no: 3, name: "num_labels", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => SampleNumLabel } },
            { no: 4, name: "num_units", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => SampleNumUnit } }
        ]);
    }
    create(value?: PartialMessage<Sample>): Sample {
        const message = { locationIds: [], labels: {}, numLabels: {}, numUnits: {} };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Sample>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Sample): Sample {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated bytes location_ids */ 1:
                    message.locationIds.push(reader.bytes());
                    break;
                case /* map<string, parca.metastore.v1alpha1.SampleLabel> labels */ 2:
                    this.binaryReadMap2(message.labels, reader, options);
                    break;
                case /* map<string, parca.metastore.v1alpha1.SampleNumLabel> num_labels */ 3:
                    this.binaryReadMap3(message.numLabels, reader, options);
                    break;
                case /* map<string, parca.metastore.v1alpha1.SampleNumUnit> num_units */ 4:
                    this.binaryReadMap4(message.numUnits, reader, options);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Reads one map entry for field 2: sub-field 1 is the string key,
    // sub-field 2 the SampleLabel value; missing parts fall back to defaults.
    private binaryReadMap2(map: Sample["labels"], reader: IBinaryReader, options: BinaryReadOptions): void {
        let len = reader.uint32(), end = reader.pos + len, key: keyof Sample["labels"] | undefined, val: Sample["labels"][any] | undefined;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case 1:
                    key = reader.string();
                    break;
                case 2:
                    val = SampleLabel.internalBinaryRead(reader, reader.uint32(), options);
                    break;
                default: throw new globalThis.Error("unknown map entry field for field parca.metastore.v1alpha1.Sample.labels");
            }
        }
        map[key ?? ""] = val ?? SampleLabel.create();
    }
    // Same as binaryReadMap2 but for field 3 (num_labels).
    private binaryReadMap3(map: Sample["numLabels"], reader: IBinaryReader, options: BinaryReadOptions): void {
        let len = reader.uint32(), end = reader.pos + len, key: keyof Sample["numLabels"] | undefined, val: Sample["numLabels"][any] | undefined;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case 1:
                    key = reader.string();
                    break;
                case 2:
                    val = SampleNumLabel.internalBinaryRead(reader, reader.uint32(), options);
                    break;
                default: throw new globalThis.Error("unknown map entry field for field parca.metastore.v1alpha1.Sample.num_labels");
            }
        }
        map[key ?? ""] = val ?? SampleNumLabel.create();
    }
    // Same as binaryReadMap2 but for field 4 (num_units).
    private binaryReadMap4(map: Sample["numUnits"], reader: IBinaryReader, options: BinaryReadOptions): void {
        let len = reader.uint32(), end = reader.pos + len, key: keyof Sample["numUnits"] | undefined, val: Sample["numUnits"][any] | undefined;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case 1:
                    key = reader.string();
                    break;
                case 2:
                    val = SampleNumUnit.internalBinaryRead(reader, reader.uint32(), options);
                    break;
                default: throw new globalThis.Error("unknown map entry field for field parca.metastore.v1alpha1.Sample.num_units");
            }
        }
        map[key ?? ""] = val ?? SampleNumUnit.create();
    }
    internalBinaryWrite(message: Sample, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated bytes location_ids = 1; */
        for (let i = 0; i < message.locationIds.length; i++)
            writer.tag(1, WireType.LengthDelimited).bytes(message.locationIds[i]);
        /* map<string, parca.metastore.v1alpha1.SampleLabel> labels = 2; */
        // Each map entry is an implicit message: outer fork = the entry,
        // inner fork = the length-delimited value (sub-field 2); hence the
        // double join().
        for (let k of Object.keys(message.labels)) {
            writer.tag(2, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k);
            writer.tag(2, WireType.LengthDelimited).fork();
            SampleLabel.internalBinaryWrite(message.labels[k], writer, options);
            writer.join().join();
        }
        /* map<string, parca.metastore.v1alpha1.SampleNumLabel> num_labels = 3; */
        for (let k of Object.keys(message.numLabels)) {
            writer.tag(3, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k);
            writer.tag(2, WireType.LengthDelimited).fork();
            SampleNumLabel.internalBinaryWrite(message.numLabels[k], writer, options);
            writer.join().join();
        }
        /* map<string, parca.metastore.v1alpha1.SampleNumUnit> num_units = 4; */
        for (let k of Object.keys(message.numUnits)) {
            writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k);
            writer.tag(2, WireType.LengthDelimited).fork();
            SampleNumUnit.internalBinaryWrite(message.numUnits[k], writer, options);
            writer.join().join();
        }
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.Sample
 */
export const Sample = new Sample$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format serializer — regenerate from the .proto
// instead of hand-editing.
class SampleLabel$Type extends MessageType<SampleLabel> {
    constructor() {
        super("parca.metastore.v1alpha1.SampleLabel", [
            { no: 1, name: "labels", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<SampleLabel>): SampleLabel {
        const message = { labels: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SampleLabel>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SampleLabel): SampleLabel {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated string labels */ 1:
                    message.labels.push(reader.string());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: SampleLabel, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated string labels = 1; */
        for (let i = 0; i < message.labels.length; i++)
            writer.tag(1, WireType.LengthDelimited).string(message.labels[i]);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.SampleLabel
 */
export const SampleLabel = new SampleLabel$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format serializer — regenerate from the .proto
// instead of hand-editing.
class SampleNumLabel$Type extends MessageType<SampleNumLabel> {
    constructor() {
        super("parca.metastore.v1alpha1.SampleNumLabel", [
            { no: 1, name: "num_labels", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value?: PartialMessage<SampleNumLabel>): SampleNumLabel {
        const message = { numLabels: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SampleNumLabel>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SampleNumLabel): SampleNumLabel {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated int64 num_labels */ 1:
                    // Accepts both packed (length-delimited run of varints)
                    // and unpacked encodings, per proto3 rules; values are
                    // kept as decimal strings.
                    if (wireType === WireType.LengthDelimited)
                        for (let e = reader.int32() + reader.pos; reader.pos < e;)
                            message.numLabels.push(reader.int64().toString());
                    else
                        message.numLabels.push(reader.int64().toString());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: SampleNumLabel, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated int64 num_labels = 1; */
        if (message.numLabels.length) {
            writer.tag(1, WireType.LengthDelimited).fork();
            for (let i = 0; i < message.numLabels.length; i++)
                writer.int64(message.numLabels[i]);
            writer.join();
        }
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.SampleNumLabel
 */
export const SampleNumLabel = new SampleNumLabel$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format serializer — regenerate from the .proto
// instead of hand-editing.
class SampleNumUnit$Type extends MessageType<SampleNumUnit> {
    constructor() {
        super("parca.metastore.v1alpha1.SampleNumUnit", [
            { no: 1, name: "units", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<SampleNumUnit>): SampleNumUnit {
        const message = { units: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SampleNumUnit>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SampleNumUnit): SampleNumUnit {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated string units */ 1:
                    message.units.push(reader.string());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: SampleNumUnit, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated string units = 1; */
        for (let i = 0; i < message.units.length; i++)
            writer.tag(1, WireType.LengthDelimited).string(message.units[i]);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.SampleNumUnit
 */
export const SampleNumUnit = new SampleNumUnit$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format serializer — regenerate from the .proto
// instead of hand-editing. Writes are elided for default values (empty bytes,
// "0", false) per proto3 semantics.
class Location$Type extends MessageType<Location> {
    constructor() {
        super("parca.metastore.v1alpha1.Location", [
            { no: 1, name: "id", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 2, name: "address", kind: "scalar", T: 4 /*ScalarType.UINT64*/ },
            { no: 4, name: "mapping_id", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 5, name: "is_folded", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    create(value?: PartialMessage<Location>): Location {
        const message = { id: new Uint8Array(0), address: "0", mappingId: new Uint8Array(0), isFolded: false };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Location>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Location): Location {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes id */ 1:
                    message.id = reader.bytes();
                    break;
                case /* uint64 address */ 2:
                    // uint64 kept as a decimal string to avoid JS precision loss.
                    message.address = reader.uint64().toString();
                    break;
                case /* bytes mapping_id */ 4:
                    message.mappingId = reader.bytes();
                    break;
                case /* bool is_folded */ 5:
                    message.isFolded = reader.bool();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: Location, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes id = 1; */
        if (message.id.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.id);
        /* uint64 address = 2; */
        if (message.address !== "0")
            writer.tag(2, WireType.Varint).uint64(message.address);
        /* bytes mapping_id = 4; */
        if (message.mappingId.length)
            writer.tag(4, WireType.LengthDelimited).bytes(message.mappingId);
        /* bool is_folded = 5; */
        if (message.isFolded !== false)
            writer.tag(5, WireType.Varint).bool(message.isFolded);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.Location
 */
export const Location = new Location$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated serializer — do not hand-edit; regenerate
// from the .proto instead.
class LocationLines$Type extends MessageType<LocationLines> {
    constructor() {
        super("parca.metastore.v1alpha1.LocationLines", [
            { no: 1, name: "id", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 2, name: "lines", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Line }
        ]);
    }
    // Builds a LocationLines with proto3 defaults, optionally merged with a
    // partial value.
    create(value?: PartialMessage<LocationLines>): LocationLines {
        const message = { id: new Uint8Array(0), lines: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<LocationLines>(this, message, value);
        return message;
    }
    // Decodes `length` bytes; repeated `lines` entries are appended one
    // sub-message at a time. Unknown fields follow options.readUnknownField.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LocationLines): LocationLines {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes id */ 1:
                    message.id = reader.bytes();
                    break;
                case /* repeated parca.metastore.v1alpha1.Line lines */ 2:
                    message.lines.push(Line.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the message; each Line is written as a length-delimited
    // sub-message (fork/join), defaults are omitted.
    internalBinaryWrite(message: LocationLines, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes id = 1; */
        if (message.id.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.id);
        /* repeated parca.metastore.v1alpha1.Line lines = 2; */
        for (let i = 0; i < message.lines.length; i++)
            Line.internalBinaryWrite(message.lines[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.LocationLines
 */
export const LocationLines = new LocationLines$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated serializer — do not hand-edit; regenerate
// from the .proto instead.
class Line$Type extends MessageType<Line> {
    constructor() {
        super("parca.metastore.v1alpha1.Line", [
            { no: 1, name: "function_id", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 2, name: "line", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    // Builds a Line with proto3 defaults (int64 `line` represented as a
    // decimal string), optionally merged with a partial value.
    create(value?: PartialMessage<Line>): Line {
        const message = { functionId: new Uint8Array(0), line: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Line>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader; unknown fields follow
    // options.readUnknownField.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Line): Line {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes function_id */ 1:
                    message.functionId = reader.bytes();
                    break;
                case /* int64 line */ 2:
                    message.line = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the message; fields holding their proto3 default are omitted.
    internalBinaryWrite(message: Line, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes function_id = 1; */
        if (message.functionId.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.functionId);
        /* int64 line = 2; */
        if (message.line !== "0")
            writer.tag(2, WireType.Varint).int64(message.line);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.Line
 */
export const Line = new Line$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated serializer — do not hand-edit; regenerate
// from the .proto instead.
class Function$Type extends MessageType<Function> {
    constructor() {
        super("parca.metastore.v1alpha1.Function", [
            { no: 1, name: "id", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 2, name: "start_line", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "system_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "filename", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a Function with proto3 defaults (int64 `startLine` represented as
    // a decimal string), optionally merged with a partial value.
    create(value?: PartialMessage<Function>): Function {
        const message = { id: new Uint8Array(0), startLine: "0", name: "", systemName: "", filename: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Function>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader; unknown fields follow
    // options.readUnknownField.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Function): Function {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes id */ 1:
                    message.id = reader.bytes();
                    break;
                case /* int64 start_line */ 2:
                    message.startLine = reader.int64().toString();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                case /* string system_name */ 4:
                    message.systemName = reader.string();
                    break;
                case /* string filename */ 5:
                    message.filename = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the message; fields holding their proto3 default are omitted.
    internalBinaryWrite(message: Function, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes id = 1; */
        if (message.id.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.id);
        /* int64 start_line = 2; */
        if (message.startLine !== "0")
            writer.tag(2, WireType.Varint).int64(message.startLine);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        /* string system_name = 4; */
        if (message.systemName !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.systemName);
        /* string filename = 5; */
        if (message.filename !== "")
            writer.tag(5, WireType.LengthDelimited).string(message.filename);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.Function
 */
export const Function = new Function$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated serializer — do not hand-edit; regenerate
// from the .proto instead.
class Mapping$Type extends MessageType<Mapping> {
    constructor() {
        super("parca.metastore.v1alpha1.Mapping", [
            { no: 1, name: "id", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 2, name: "start", kind: "scalar", T: 4 /*ScalarType.UINT64*/ },
            { no: 3, name: "limit", kind: "scalar", T: 4 /*ScalarType.UINT64*/ },
            { no: 4, name: "offset", kind: "scalar", T: 4 /*ScalarType.UINT64*/ },
            { no: 5, name: "file", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 6, name: "build_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 7, name: "has_functions", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 8, name: "has_filenames", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 9, name: "has_line_numbers", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 10, name: "has_inline_frames", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    // Builds a Mapping with proto3 defaults (uint64 values represented as
    // decimal strings), optionally merged with a partial value.
    create(value?: PartialMessage<Mapping>): Mapping {
        const message = { id: new Uint8Array(0), start: "0", limit: "0", offset: "0", file: "", buildId: "", hasFunctions: false, hasFilenames: false, hasLineNumbers: false, hasInlineFrames: false };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Mapping>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader; unknown fields follow
    // options.readUnknownField.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Mapping): Mapping {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes id */ 1:
                    message.id = reader.bytes();
                    break;
                case /* uint64 start */ 2:
                    message.start = reader.uint64().toString();
                    break;
                case /* uint64 limit */ 3:
                    message.limit = reader.uint64().toString();
                    break;
                case /* uint64 offset */ 4:
                    message.offset = reader.uint64().toString();
                    break;
                case /* string file */ 5:
                    message.file = reader.string();
                    break;
                case /* string build_id */ 6:
                    message.buildId = reader.string();
                    break;
                case /* bool has_functions */ 7:
                    message.hasFunctions = reader.bool();
                    break;
                case /* bool has_filenames */ 8:
                    message.hasFilenames = reader.bool();
                    break;
                case /* bool has_line_numbers */ 9:
                    message.hasLineNumbers = reader.bool();
                    break;
                case /* bool has_inline_frames */ 10:
                    message.hasInlineFrames = reader.bool();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the message; fields holding their proto3 default are omitted.
    internalBinaryWrite(message: Mapping, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes id = 1; */
        if (message.id.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.id);
        /* uint64 start = 2; */
        if (message.start !== "0")
            writer.tag(2, WireType.Varint).uint64(message.start);
        /* uint64 limit = 3; */
        if (message.limit !== "0")
            writer.tag(3, WireType.Varint).uint64(message.limit);
        /* uint64 offset = 4; */
        if (message.offset !== "0")
            writer.tag(4, WireType.Varint).uint64(message.offset);
        /* string file = 5; */
        if (message.file !== "")
            writer.tag(5, WireType.LengthDelimited).string(message.file);
        /* string build_id = 6; */
        if (message.buildId !== "")
            writer.tag(6, WireType.LengthDelimited).string(message.buildId);
        /* bool has_functions = 7; */
        if (message.hasFunctions !== false)
            writer.tag(7, WireType.Varint).bool(message.hasFunctions);
        /* bool has_filenames = 8; */
        if (message.hasFilenames !== false)
            writer.tag(8, WireType.Varint).bool(message.hasFilenames);
        /* bool has_line_numbers = 9; */
        if (message.hasLineNumbers !== false)
            writer.tag(9, WireType.Varint).bool(message.hasLineNumbers);
        /* bool has_inline_frames = 10; */
        if (message.hasInlineFrames !== false)
            writer.tag(10, WireType.Varint).bool(message.hasInlineFrames);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message parca.metastore.v1alpha1.Mapping
 */
export const Mapping = new Mapping$Type();
import { ts } from 'ts-morph';
import {
CompletionItemKind,
TextDocument,
MarkupKind,
InsertTextFormat,
CompletionParams,
CompletionTriggerKind,
} from 'vscode-languageserver';
import {
AureliaLSP,
interpolationRegex,
TemplateAttributeTriggers,
} from '../../common/constants';
import { OffsetUtils } from '../../common/documens/OffsetUtils';
import { StringUtils } from '../../common/string/StringUtils';
import {
AbstractRegion,
RepeatForRegion,
} from '../../core/regions/ViewRegions';
import { AureliaProgram } from '../../core/viewModel/AureliaProgram';
import { AureliaCompletionItem } from './virtualCompletion';
/** Per-completion details resolved from the TS language service. */
interface EntryDetailsMapData {
  displayParts: string | undefined;
  documentation: string | undefined;
  kind: CompletionItemKind;
  // Argument names of the method (no types).
  methodArguments: string[];
}
/** Resolved completion details keyed by completion (member) name. */
interface EntryDetailsMap {
  [key: string]: EntryDetailsMapData;
}
interface CustomizeEnhanceDocumentation {
  /** Array of the arguments of the method (without types) */
  customEnhanceMethodArguments: (methodArguments: string[]) => string;
  omitMethodNameAndBrackets?: boolean;
}
const DEFAULT_CUSTOMIZE_ENHANCE_DOCUMENTATION: CustomizeEnhanceDocumentation = {
  customEnhanceMethodArguments: enhanceMethodArguments,
  omitMethodNameAndBrackets: false,
};
// Name of the throwaway method temporarily added to the view model class.
const VIRTUAL_METHOD_NAME = '__vir';
// ts.SymbolDisplayPart.kind value identifying a parameter name.
const PARAMETER_NAME = 'parameterName';
/**
 * Computes completions for an Aurelia view expression by splicing the
 * expression into a temporary ("virtual") method on the backing view-model
 * class and asking the TypeScript language service for completions at that
 * position. The virtual method is removed again before returning.
 *
 * Returns [] whenever completion is not possible (no region/scopes, plain
 * Space trigger, missing component or source file, or a ts-morph failure).
 */
export function aureliaVirtualComplete_vNext(
  aureliaProgram: AureliaProgram,
  document: TextDocument,
  region: AbstractRegion | undefined,
  triggerCharacter?: string,
  offset?: number,
  insertTriggerCharacter?: boolean,
  completionParams?: CompletionParams
) {
  if (!region) return [];
  if (!region.accessScopes) return [];
  // Dont allow ` ` (Space) to trigger completions for view model,
  // otherwise it will trigger 800 JS completions too often which takes +1.5secs
  const shouldReturnOnSpace = getShouldReturnOnSpace(
    completionParams,
    triggerCharacter
  );
  if (shouldReturnOnSpace) return [];
  // Marker appended after the expression; its index inside the generated
  // statement yields the exact position to query the language service at.
  const COMPLETIONS_ID = '//AUVSCCOMPL95';
  // 1. Component
  const project = aureliaProgram.tsMorphProject.get();
  const targetComponent =
    aureliaProgram.aureliaComponents.getOneByFromDocument(document);
  if (!targetComponent) return [];
  const sourceFile = project.getSourceFile(targetComponent.viewModelFilePath);
  if (sourceFile == null) return [];
  const sourceFilePath = sourceFile.getFilePath();
  const myClass = sourceFile.getClass(targetComponent?.className);
  // 2.1 Transform view content to virtual view model
  // 2.1.1 Add `this.`
  let virtualContent = getVirtualContentFromRegion(
    region,
    offset,
    triggerCharacter,
    insertTriggerCharacter
  );
  // 2.2 Perform completions
  // 2.2.1 Differentiate Interpolation: interpolations are wrapped in
  // backticks so they parse as a template literal inside the virtual method.
  let interpolationModifier = 0;
  let targetStatementText = `${virtualContent}${COMPLETIONS_ID}`;
  if (virtualContent.match(interpolationRegex)?.length != null) {
    targetStatementText = `\`${virtualContent}\`${COMPLETIONS_ID}`;
    interpolationModifier = 2; // - 2 we added "\`" because regionValue is ${}, thus in virtualContent we need to do `${}`
  }
  let targetStatement;
  try {
    // Mutates the in-memory view-model class: add a throwaway method whose
    // body is the view expression plus the marker.
    const virMethod = myClass?.addMethod({
      name: VIRTUAL_METHOD_NAME,
      statements: [targetStatementText],
    });
    targetStatement = virMethod?.getStatements()[0];
  } catch (error) {
    // Dont pass on ts-morph error
    return [];
  }
  if (!targetStatement) return [];
  const finalTargetStatementText = `${targetStatement.getFullText()}${COMPLETIONS_ID}`;
  const targetPos = finalTargetStatementText?.indexOf(COMPLETIONS_ID);
  const finalPos = targetStatement.getPos() + targetPos - interpolationModifier;
  const languageService = project.getLanguageService().compilerObject;
  // Completions
  // NOTE(review): the 'file:///' -> 'file:/' rewrite presumably matches how
  // the language service registers file names internally — confirm before changing.
  const virtualCompletions = languageService
    .getCompletionsAtPosition(
      sourceFilePath.replace('file:///', 'file:/'),
      finalPos,
      {}
    )
    ?.entries.filter((result) => {
      // Never suggest the virtual method itself.
      return !result?.name.includes(VIRTUAL_METHOD_NAME);
    });
  if (!virtualCompletions) return [];
  const virtualCompletionEntryDetails = virtualCompletions
    .map((completion) => {
      return languageService.getCompletionEntryDetails(
        sourceFilePath.replace('file:///', 'file:/'),
        finalPos,
        completion.name,
        undefined,
        undefined,
        undefined,
        undefined
      );
    })
    .filter((result) => {
      if (result === undefined) return false;
      return !result.name.includes(VIRTUAL_METHOD_NAME);
    });
  const entryDetailsMap: EntryDetailsMap = {};
  const result = enhanceCompletionItemDocumentation(
    virtualCompletionEntryDetails,
    entryDetailsMap,
    virtualCompletions
  );
  try {
    // Always undo the class mutation, even on success.
    targetStatement?.remove();
  } catch (error) {
    // Dont pass on ts-morph error
    return [];
  }
  return result;
}
/**
 * Builds the "virtual" view-model expression for a view region: extracts the
 * relevant view input, inserts the trigger character at the cursor, cuts off
 * everything past the cursor, and prefixes known access scopes with `this.`.
 *
 * Returns '' when completion is impossible (no offset, or the final content
 * does not reference the view model via `this.` at all).
 */
function getVirtualContentFromRegion(
  region: AbstractRegion,
  offset: number | undefined,
  triggerCharacter?: string,
  insertTriggerCharacter?: boolean
) {
  if (offset == null) return '';
  let viewInput: string | undefined = '';
  const isInterpolationRegion = AbstractRegion.isInterpolationRegion(region);
  if (isInterpolationRegion) {
    viewInput = region.regionValue;
  } else if (RepeatForRegion.is(region)) {
    // For `repeat.for`, only complete inside the iterable part.
    const { iterableStartOffset, iterableEndOffset } = region.data;
    const isIterableRegion = OffsetUtils.isIncluded(
      iterableStartOffset,
      iterableEndOffset,
      offset
    );
    if (isIterableRegion) {
      viewInput = region.data.iterableName;
    }
  } else {
    viewInput = region.attributeValue;
  }
  const normalizedOffset = offset - region.sourceCodeLocation.startOffset;
  // Add triggerCharacter at offset
  if (insertTriggerCharacter) {
    const insertLocation = normalizedOffset - 1; // - 1: insert one before
    viewInput = StringUtils.insert(viewInput, insertLocation, triggerCharacter);
  }
  // Cut off content after offset
  const cutOff = viewInput?.substring(0, normalizedOffset);
  // Re-add the `}` the cut removed, so an interpolation stays parseable.
  const ending = isInterpolationRegion ? '}' : '';
  let virtualContent: string | undefined = `${cutOff}${ending}`;
  // Prefix each known access scope with `this.` (skip ones already prefixed).
  region.accessScopes?.forEach((scope) => {
    const accessScopeName = scope.name;
    if (accessScopeName === '') return;
    const replaceRegexp = new RegExp(`\\b${accessScopeName}\\b`, 'g');
    const alreadyHasThis = checkAlreadyHasThis(virtualContent, accessScopeName);
    if (alreadyHasThis) return;
    virtualContent = virtualContent?.replace(replaceRegexp, (match) => {
      return `this.${match}`;
    });
  });
  // 2.1.2 Default to any class member for empty/blank input.
  const isEmptyInterpolation = getIsEmptyInterpolation(virtualContent);
  const shouldDefault =
    virtualContent === undefined ||
    virtualContent.trim() === '' ||
    isEmptyInterpolation;
  if (shouldDefault) {
    virtualContent = 'this.';
  }
  // 2.1.3 Return if no `this.` included, because we don't want (do we?) support any Javascript completion
  if (!virtualContent.includes('this.')) return '';
  return virtualContent;
}
/**
 * Converts raw TS language-service completion entries into Aurelia LSP
 * completion items: indexes each entry's resolved details, maps TS element
 * kinds to LSP kinds, and turns method completions into snippet insert-texts
 * with one tab stop per argument.
 */
function enhanceCompletionItemDocumentation(
  virtualCompletionEntryDetails: (ts.CompletionEntryDetails | undefined)[],
  entryDetailsMap: EntryDetailsMap,
  virtualCompletions: ts.CompletionEntry[],
  customizeEnhanceDocumentation: CustomizeEnhanceDocumentation = DEFAULT_CUSTOMIZE_ENHANCE_DOCUMENTATION
) {
  // Only fields and methods get a dedicated LSP kind; other TS element kinds
  // map to undefined.
  const kindMap = {
    [ts.ScriptElementKind['memberVariableElement'] as ts.ScriptElementKind]:
      CompletionItemKind.Field,
    [ts.ScriptElementKind['memberFunctionElement'] as ts.ScriptElementKind]:
      CompletionItemKind.Method,
  };
  // Index the resolved details by completion name for the mapping pass below
  // (the reduce mutates entryDetailsMap in place).
  virtualCompletionEntryDetails.reduce((acc, entryDetail) => {
    if (!entryDetail) return acc;
    acc[entryDetail.name] = {
      displayParts: entryDetail.displayParts?.map((part) => part.text).join(''),
      documentation: entryDetail.documentation?.map((doc) => doc.text).join(''),
      kind: kindMap[entryDetail.kind],
      // NOTE(review): unlike the lines above, displayParts is accessed here
      // without optional chaining — verify it can never be undefined.
      methodArguments: entryDetail.displayParts
        .filter((part) => part?.kind === PARAMETER_NAME)
        .map((part) => part?.text),
    };
    return acc;
  }, entryDetailsMap);
  /** ${1: argName1}, ${2: argName2} */
  function createArgCompletion(entryDetail: EntryDetailsMapData) {
    const result = customizeEnhanceDocumentation.customEnhanceMethodArguments(
      entryDetail.methodArguments
    );
    return result;
  }
  const result = virtualCompletions.map((tsCompletion) => {
    const entryDetail = entryDetailsMap[tsCompletion.name] ?? {};
    const isMethod =
      entryDetail.kind === CompletionItemKind.Method ||
      entryDetail.displayParts?.includes('() => '); // If variable has function type, treat as method
    /** Default value is just the method name */
    let insertMethodTextWithArguments = tsCompletion.name;
    if (isMethod === true) {
      if (customizeEnhanceDocumentation?.omitMethodNameAndBrackets === true) {
        insertMethodTextWithArguments = createArgCompletion(entryDetail);
      } else {
        insertMethodTextWithArguments = `${
          tsCompletion.name
        }(${createArgCompletion(entryDetail)})`;
      }
    }
    let insertText: string;
    // NOTE(review): `isMethod !== undefined` is also true when isMethod is
    // `false`, so the `$`-escaping branch below only runs when displayParts
    // was undefined — looks like a truthiness check was intended; confirm
    // against callers before changing.
    if (isMethod !== undefined) {
      insertText = insertMethodTextWithArguments;
    } else {
      // Escape a leading `$` so it is not treated as a snippet tab stop.
      insertText = tsCompletion.name.replace(/^\$/g, '\\$');
    }
    const completionItem: AureliaCompletionItem = {
      documentation: {
        kind: MarkupKind.Markdown,
        value: entryDetail.documentation ?? '',
      },
      detail: entryDetail.displayParts ?? '',
      insertText: insertText,
      insertTextFormat: InsertTextFormat.Snippet,
      kind: entryDetail.kind,
      label: tsCompletion.name,
      data: AureliaLSP.AureliaCompletionItemDataType,
    };
    return completionItem;
  });
  return result;
}
/**
 * Renders method argument names as tab-stop snippet placeholders,
 * eg `['a', 'b']` -> `"${1:a}, ${2:b}"` (1-based stops, comma separated).
 */
function enhanceMethodArguments(methodArguments: string[]): string {
  const placeholders: string[] = [];
  methodArguments.forEach((argumentName, position) => {
    placeholders.push(`\${${position + 1}:${argumentName}}`);
  });
  return placeholders.join(', ');
}
/**
 * Whether `virtualContent` already contains `this.<accessScopeName>` as a
 * whole word (so the caller must not prefix `this.` a second time).
 *
 * Fixes: the original pattern left `.` unescaped (so `thisXfoo` matched
 * `this.foo`) and interpolated the scope name verbatim (so names containing
 * regex metacharacters, eg `$item`, corrupted the pattern).
 */
function checkAlreadyHasThis(
  virtualContent: string | undefined,
  accessScopeName: string
) {
  if (virtualContent == null) return false;
  // Escape regex metacharacters in the member name before interpolation.
  const escapedName = accessScopeName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const checkHasThisRegex = new RegExp(`\\b(this\\.${escapedName})\\b`);
  return checkHasThisRegex.test(virtualContent);
}
/**
 * True when the content is an interpolation containing nothing but
 * whitespace, ie it collapses to exactly `${}` once all whitespace
 * characters are removed.
 */
function getIsEmptyInterpolation(virtualContent: string) {
  const condensed = virtualContent.replace(/\s/g, '');
  return condensed === '${}';
}
function getShouldReturnOnSpace(
completionParams: CompletionParams | undefined,
triggerCharacter: string | undefined
) {
const isSpace = triggerCharacter === TemplateAttributeTriggers.SPACE;
const shouldReturn =
isSpace &&
completionParams?.context?.triggerKind !== CompletionTriggerKind.Invoked;
return shouldReturn;
} | the_stack |
import { BSONDecoder, BSONSerializer, getBSONDecoder, getBSONSerializer } from '@deepkit/bson';
import { arrayRemoveItem, asyncOperation, ClassType } from '@deepkit/core';
import { AsyncSubscription } from '@deepkit/core-rxjs';
import { createRpcMessage, RpcBaseClient, RpcDirectClientAdapter, RpcMessage, RpcMessageRouteType } from '@deepkit/rpc';
import { ClassSchema, FieldDecoratorResult, getClassSchema, isFieldDecorator, PropertySchema, t } from '@deepkit/type';
import { BrokerKernel } from './kernel';
import { brokerDelete, brokerEntityFields, brokerGet, brokerIncrement, brokerLock, brokerLockId, brokerPublish, brokerResponseIncrement, brokerResponseIsLock, brokerResponseSubscribeMessage, brokerSet, brokerSubscribe, BrokerType } from './model';
/**
 * Refcounted pub/sub channel on the broker. Values are BSON-encoded using the
 * given schema; plain field decorators are transparently wrapped into a
 * `{ v: ... }` envelope (see `wrapped`).
 */
export class BrokerChannel<T> {
    // Number of active local subscriptions; the broker-side subscription is
    // created by the first subscriber and torn down when the last leaves.
    protected listener: number = 0;
    protected callbacks: ((next: Uint8Array) => void)[] = [];
    // True when T is a plain field type that gets wrapped as { v: T }.
    protected wrapped: boolean = false;
    protected schema: ClassSchema;
    protected decoder: (bson: Uint8Array) => any;
    constructor(
        public channel: string,
        protected decoratorOrSchema: FieldDecoratorResult<T> | ClassSchema<T> | ClassType<T>,
        protected client: BrokerClient,
    ) {
        const extracted = this.getPubSubMessageSchema(decoratorOrSchema);
        this.wrapped = extracted.wrapped;
        this.schema = extracted.schema;
        this.decoder = getBSONDecoder(this.schema);
    }
    // Normalizes the three accepted schema inputs into a ClassSchema plus a
    // flag telling whether values must be wrapped into { v: ... }.
    protected getPubSubMessageSchema<T>(decoratorOrSchema: FieldDecoratorResult<T> | ClassSchema<T> | ClassType<T>): { schema: ClassSchema, wrapped: boolean } {
        if (isFieldDecorator(decoratorOrSchema)) {
            // Cache the built PropertySchema on the decorator itself.
            const propertySchema: PropertySchema = (decoratorOrSchema as any)._lastPropertySchema ||= decoratorOrSchema.buildPropertySchema('v');
            const schema = propertySchema.type === 'class' ? propertySchema.getResolvedClassSchema() : t.schema({ v: decoratorOrSchema });
            const wrapped = propertySchema.type !== 'class';
            return { schema, wrapped }
        }
        return { schema: getClassSchema(decoratorOrSchema), wrapped: false };
    }
    /** Publishes one BSON-encoded value to all subscribers of this channel. */
    public async publish(data: T) {
        const serializer = getBSONSerializer(this.schema);
        const v = this.wrapped ? serializer({ v: data }) : serializer(data);
        await this.client.sendMessage(BrokerType.Publish, brokerPublish, { c: this.channel, v: v })
            .ackThenClose();
        return undefined;
    }
    /** Dispatches a raw incoming payload to every local subscriber. */
    next(data: Uint8Array) {
        for (const callback of this.callbacks) {
            callback(data);
        }
    }
    /**
     * Subscribes the callback; the returned AsyncSubscription removes it
     * again (and releases the broker-side subscription once no local
     * listeners remain).
     */
    async subscribe(callback: (next: T) => void): Promise<AsyncSubscription> {
        const parsedCallback = (next: Uint8Array) => {
            const parsed = this.decoder(next);
            callback(this.wrapped ? parsed.v : parsed);
        };
        this.listener++;
        this.callbacks.push(parsedCallback);
        if (this.listener === 1) {
            // First local subscriber: establish the broker-side subscription.
            await this.client.sendMessage(BrokerType.Subscribe, brokerSubscribe, { c: this.channel })
                .ackThenClose();
        }
        return new AsyncSubscription(async () => {
            this.listener--;
            arrayRemoveItem(this.callbacks, parsedCallback);
            if (this.listener === 0) {
                await this.client.sendMessage(BrokerType.Unsubscribe, brokerSubscribe, { c: this.channel })
                    .ackThenClose();
            }
        });
    }
}
/**
 * Typed key/value access on the broker: one fixed key, values BSON-encoded
 * with the given schema.
 */
export class BrokerKeyValue<T> {
    protected serializer: BSONSerializer;
    protected decoder: BSONDecoder<T>;
    constructor(
        protected key: string,
        protected schema: ClassSchema<T>,
        protected client: BrokerClient,
    ) {
        this.serializer = getBSONSerializer(schema);
        this.decoder = getBSONDecoder(schema);
    }
    /** Stores `data` under this key, replacing any previous value. */
    public async set(data: T): Promise<undefined> {
        const encoded = this.serializer(data);
        await this.client.sendMessage(BrokerType.Set, brokerSet, { n: this.key, v: encoded }).ackThenClose();
        return undefined;
    }
    /** Returns the stored value, or undefined when the key is unset. */
    public async getOrUndefined(): Promise<T | undefined> {
        const reply: RpcMessage = await this.client.sendMessage(BrokerType.Get, brokerGet, { n: this.key }).firstThenClose(BrokerType.ResponseGet);
        const buffer = reply.buffer;
        // An empty body (nothing past bodyOffset) means the key is unset.
        if (buffer && buffer.byteLength > reply.bodyOffset) {
            return this.decoder(buffer, reply.bodyOffset);
        }
        return undefined;
    }
    /** Returns the stored value; throws when the key is unset. */
    public async get(): Promise<T> {
        const value = await this.getOrUndefined();
        if (value === undefined) throw new Error(`No value for key ${this.key} found`);
        return value;
    }
    /** Removes the key. Always resolves true. */
    public async delete(): Promise<boolean> {
        await this.client.sendMessage(BrokerType.Delete, brokerGet, { n: this.key }).ackThenClose();
        return true;
    }
}
/**
 * RPC client for the broker: pub/sub channels, key/value storage,
 * distributed locks, numeric increments, and entity-field bookkeeping.
 */
export class BrokerClient extends RpcBaseClient {
    protected activeChannels = new Map<string, BrokerChannel<any>>();
    // Entity fields as known by the server: entity name -> field names.
    protected knownEntityFields = new Map<string, string[]>();
    // Refcount per entity per field of what this client itself published.
    protected publishedEntityFields = new Map<string, Map<string, number>>();
    /**
     * On first getEntityFields() call we check if entityFieldsReceived is true. If not
     * we connect and load all available entity-fields from the server and start
     * streaming all changes to the entity-fields directly to our entityFields map.
     */
    protected entityFieldsReceived = false;
    protected entityFieldsPromise?: Promise<void>;
    /**
     * Returns the currently known fields for the given entity. The full field
     * list is fetched from the server once (lazily, on the first call);
     * concurrent callers await the same in-flight promise.
     */
    public async getEntityFields(classSchema: ClassSchema | string): Promise<string[]> {
        const entityName = 'string' === typeof classSchema ? classSchema : classSchema.getName();
        if (!this.entityFieldsReceived) {
            // Flip the flag before awaiting so only the first caller fetches.
            this.entityFieldsReceived = true;
            this.entityFieldsPromise = asyncOperation(async (resolve) => {
                const subject = this.sendMessage(BrokerType.AllEntityFields)
                const answer = await subject.waitNextMessage();
                subject.release();
                if (answer.type === BrokerType.AllEntityFields) {
                    // The answer may carry multiple bodies, one per entity.
                    for (const body of answer.getBodies()) {
                        const fields = body.parseBody(brokerEntityFields);
                        this.knownEntityFields.set(fields.name, fields.fields);
                    }
                }
                this.entityFieldsPromise = undefined;
                resolve();
            });
        }
        if (this.entityFieldsPromise) {
            await this.entityFieldsPromise;
        }
        return this.knownEntityFields.get(entityName) || [];
    }
    /**
     * Intercepts server-initiated messages (entity-field updates and channel
     * publications); everything else is delegated to RpcBaseClient.
     */
    protected onMessage(message: RpcMessage) {
        if (message.routeType === RpcMessageRouteType.server) {
            if (message.type === BrokerType.EntityFields) {
                const fields = message.parseBody(brokerEntityFields);
                this.knownEntityFields.set(fields.name, fields.fields);
                // Acknowledge so the server knows the update was applied.
                this.transporter.send(createRpcMessage(message.id, BrokerType.Ack, undefined, undefined, RpcMessageRouteType.server));
            } else if (message.type === BrokerType.ResponseSubscribeMessage) {
                const body = message.parseBody(brokerResponseSubscribeMessage);
                const channel = this.activeChannels.get(body.c);
                // Silently drop publications for channels we no longer track.
                if (!channel) return;
                channel.next(body.v);
            }
        } else {
            super.onMessage(message);
        }
    }
    /**
     * Registers `fields` of the given entity as used by this client
     * (refcounted per field). The returned subscription decrements the
     * refcounts and informs the server about fields no longer used.
     */
    public async publishEntityFields<T>(classSchema: ClassSchema | string, fields: string[]): Promise<AsyncSubscription> {
        const entityName = 'string' === typeof classSchema ? classSchema : classSchema.getName();
        let store = this.publishedEntityFields.get(entityName);
        if (!store) {
            store = new Map;
            this.publishedEntityFields.set(entityName, store);
        }
        let changed = false;
        const newFields: string[] = [];
        for (const field of fields) {
            const v = store.get(field);
            if (v === undefined) {
                // First local use of this field: the server must be told.
                changed = true;
                newFields.push(field);
            };
            store.set(field, v === undefined ? 1 : v + 1);
        }
        if (changed) {
            const response = await this.sendMessage(
                BrokerType.PublishEntityFields, brokerEntityFields,
                { name: entityName, fields: newFields }
            ).firstThenClose(BrokerType.EntityFields, brokerEntityFields);
            this.knownEntityFields.set(response.name, response.fields);
        }
        return new AsyncSubscription(async () => {
            if (!store) return;
            const unsubscribed: string[] = [];
            for (const field of fields) {
                let v = store.get(field);
                if (v === undefined) throw new Error(`Someone deleted our field ${field}`);
                v--;
                if (v === 0) {
                    store.delete(field);
                    unsubscribed.push(field);
                    // We can't remove it from knownEntityFields, because we
                    // don't know whether it's still used by another client.
                } else {
                    store.set(field, v);
                }
            }
            if (unsubscribed.length) {
                const response = await this.sendMessage(
                    BrokerType.UnsubscribeEntityFields, brokerEntityFields,
                    { name: entityName, fields: unsubscribed }
                ).firstThenClose(BrokerType.EntityFields, brokerEntityFields);
                this.knownEntityFields.set(response.name, response.fields);
            }
        });
    }
    /**
     * Tries to lock an id on the broker. If the id is already locked, it returns immediately undefined without locking anything.
     * Unlocking happens by unsubscribing the returned AsyncSubscription.
     *
     * ttl (time to live) defines how long the given lock is allowed to stay active. Per default each lock is automatically unlocked
     * after 30 seconds. If you haven't released the lock until then, another lock acquisition is allowed to receive it anyways.
     * ttl of 0 disables ttl and keeps the lock alive until you manually unlock it (or the process dies).
     */
    public async tryLock(id: string, ttl: number = 30): Promise<AsyncSubscription | undefined> {
        const subject = this.sendMessage(BrokerType.TryLock, brokerLock, { id, ttl });
        const message = await subject.waitNextMessage();
        if (message.type === BrokerType.ResponseLockFailed) {
            subject.release();
            return undefined;
        }
        if (message.type === BrokerType.ResponseLock) {
            // NOTE(review): unlike the failure path, the subject is not
            // released here — presumably its message route must stay open
            // until Unlock is sent; confirm before changing.
            return new AsyncSubscription(async () => {
                await subject.send(BrokerType.Unlock).ackThenClose();
            });
        }
        throw new Error(`Invalid message returned. Expected Lock, but got ${message.type}`);
    }
    /**
     * Locks an id on the broker. If the id is already locked, it waits until it is released. If timeout is specified,
     * the lock acquisition should take maximum `timeout` seconds. 0 means it waits without limit.
     * Unlocking happens by unsubscribing the returned AsyncSubscription.
     *
     * ttl (time to live) defines how long the given lock is allowed to stay active. Per default each lock is automatically unlocked
     * after 30 seconds. If you haven't released the lock until then, another lock acquisition is allowed to receive it anyways.
     * ttl of 0 disables ttl and keeps the lock alive until you manually unlock it (or the process dies).
     */
    public async lock(id: string, ttl: number = 30, timeout: number = 0): Promise<AsyncSubscription> {
        const subject = this.sendMessage(BrokerType.Lock, brokerLock, { id, ttl, timeout });
        await subject.waitNext(BrokerType.ResponseLock); //or throw error
        return new AsyncSubscription(async () => {
            // Send Unlock first, then free the message route.
            await subject.send(BrokerType.Unlock).ackThenClose();
            subject.release();
        });
    }
public async isLocked(id: string): Promise<boolean> {
const subject = this.sendMessage(BrokerType.IsLocked, brokerLockId, { id });
const lock = await subject.firstThenClose(BrokerType.ResponseIsLock, brokerResponseIsLock);
return lock.v;
}
public channel<T>(channel: string, decoratorOrSchema: FieldDecoratorResult<T> | ClassSchema<T> | ClassType<T>): BrokerChannel<T> {
let brokerChannel = this.activeChannels.get(channel);
if (!brokerChannel) {
brokerChannel = new BrokerChannel(channel, decoratorOrSchema, this);
this.activeChannels.set(channel, brokerChannel);
}
return brokerChannel;
}
public async getRawOrUndefined<T>(id: string): Promise<Uint8Array | undefined> {
const first: RpcMessage = await this.sendMessage(BrokerType.Get, brokerGet, { n: id }).firstThenClose(BrokerType.ResponseGet);
if (first.buffer && first.buffer.byteLength > first.bodyOffset) {
return first.buffer.slice(first.bodyOffset);
}
return undefined;
}
public async getRaw<T>(id: string): Promise<Uint8Array> {
const v = await this.getRawOrUndefined(id);
if (v === undefined) throw new Error(`Key ${id} is undefined`);
return v;
}
public async setRaw<T>(id: string, data: Uint8Array): Promise<undefined> {
await this.sendMessage(BrokerType.Set, brokerSet, { n: id, v: data })
.ackThenClose();
return undefined;
}
public key<T>(key: string, schema: ClassSchema<T> | ClassType<T>) {
return new BrokerKeyValue(key, getClassSchema(schema), this);
}
public async getIncrement<T>(id: string): Promise<number> {
const v = await this.getRaw(id);
const view = new DataView(v.buffer, v.byteOffset, v.byteLength);
return view.getFloat64(0, true);
}
public async increment<T>(id: string, value?: number): Promise<number> {
const response = await this.sendMessage(BrokerType.Increment, brokerIncrement, { n: id, v: value })
.waitNext(BrokerType.ResponseIncrement, brokerResponseIncrement);
return response.v;
}
public async delete<T>(id: string): Promise<undefined> {
await this.sendMessage(BrokerType.Delete, brokerDelete, { n: id })
.ackThenClose();
return undefined;
}
}
/**
 * BrokerClient that talks to an in-process BrokerKernel directly via
 * RpcDirectClientAdapter, i.e. without a network transport.
 */
export class BrokerDirectClient extends BrokerClient {
    constructor(rpcKernel: BrokerKernel) {
        super(new RpcDirectClientAdapter(rpcKernel));
    }
}
import type {ComponentType, ReactNode} from 'react'
import type {
ToolkitListNestMode,
ToolkitPortableTextList,
ToolkitPortableTextListItem,
} from '@portabletext/toolkit'
import type {
ArbitraryTypedObject,
PortableTextBlock,
PortableTextBlockStyle,
PortableTextListItemBlock,
PortableTextListItemType,
TypedObject,
} from '@portabletext/types'
/**
 * Properties for the Portable Text react component
 *
 * @template B Types that can appear in the array of blocks
 */
export interface PortableTextProps<
  B extends TypedObject = PortableTextBlock | ArbitraryTypedObject
> {
  /**
   * One or more blocks to render
   */
  value: B | B[]
  /**
   * React components to use for rendering
   */
  components?: Partial<PortableTextReactComponents>
  /**
   * Function to call when encountering unknown types, eg blocks, marks,
   * block styles or list styles without an associated React component.
   *
   * Will print a warning message to the console by default.
   * Pass `false` to disable.
   */
  onMissingComponent?: MissingComponentHandler | false
  /**
   * Determines whether or not lists are nested inside of list items (`html`)
   * or as a direct child of another list (`direct` - for React Native)
   *
   * You rarely (if ever) need/want to customize this
   */
  listNestingMode?: ToolkitListNestMode
}
/**
 * Generic type for portable text rendering components that takes blocks/inline blocks
 *
 * @template N Node types we expect to be rendering (`PortableTextBlock` should usually be part of this)
 */
export type PortableTextComponent<N> = ComponentType<PortableTextComponentProps<N>>
/**
 * React component type for rendering portable text blocks (paragraphs, headings, blockquotes etc)
 */
export type PortableTextBlockComponent = PortableTextComponent<PortableTextBlock>
/**
 * React component type for rendering (virtual, not part of the spec) portable text lists
 */
export type PortableTextListComponent = PortableTextComponent<ReactPortableTextList>
/**
 * React component type for rendering portable text list items
 */
export type PortableTextListItemComponent = PortableTextComponent<PortableTextListItemBlock>
/**
 * React component type for rendering portable text marks and/or decorators
 *
 * @template M The mark type we expect
 */
export type PortableTextMarkComponent<M extends TypedObject = any> = ComponentType<
  PortableTextMarkComponentProps<M>
>
/**
 * React component type for rendering user-defined object types, either as part of
 * the block array or inline within a text block
 *
 * @template V The value type we expect
 */
export type PortableTextTypeComponent<V extends TypedObject = any> = ComponentType<
  PortableTextTypeComponentProps<V>
>
/**
 * Object defining the different React components to use for rendering various aspects
 * of Portable Text and user-provided types, where only the overrides needs to be provided.
 */
export type PortableTextComponents = Partial<PortableTextReactComponents>
/**
 * Object defining the different React components to use for rendering various aspects
 * of Portable Text and user-provided types.
 */
export interface PortableTextReactComponents {
  /**
   * Object of React components that renders different types of objects that might appear
   * both as part of the blocks array, or as inline objects _inside_ of a block,
   * alongside text spans.
   *
   * Use the `isInline` property to check whether or not this is an inline object or a block
   *
   * The object has the shape `{typeName: ReactComponent}`, where `typeName` is the value set
   * in individual `_type` attributes.
   */
  types: Record<string, PortableTextTypeComponent | undefined>
  /**
   * Object of React components that renders different types of marks that might appear in spans.
   *
   * The object has the shape `{markName: ReactComponent}`, where `markName` is the value set
   * in individual `_type` attributes, values being stored in the parent blocks `markDefs`.
   */
  marks: Record<string, PortableTextMarkComponent | undefined>
  /**
   * Object of React components that renders blocks with different `style` properties.
   *
   * The object has the shape `{styleName: ReactComponent}`, where `styleName` is the value set
   * in individual `style` attributes on blocks.
   *
   * Can also be set to a single React component, which would handle block styles of _any_ type.
   */
  block:
    | Record<PortableTextBlockStyle, PortableTextBlockComponent | undefined>
    | PortableTextBlockComponent
  /**
   * Object of React components used to render lists of different types (bulleted vs numbered,
   * for instance, which by default is `<ul>` and `<ol>`, respectively)
   *
   * There is no actual "list" node type in the Portable Text specification, but a series of
   * list item blocks with the same `level` and `listItem` properties will be grouped into a
   * virtual one inside of this library.
   *
   * Can also be set to a single React component, which would handle lists of _any_ type.
   */
  list:
    | Record<PortableTextListItemType, PortableTextListComponent | undefined>
    | PortableTextListComponent
  /**
   * Object of React components used to render different list item styles.
   *
   * The object has the shape `{listItemType: ReactComponent}`, where `listItemType` is the value
   * set in individual `listItem` attributes on blocks.
   *
   * Can also be set to a single React component, which would handle list items of _any_ type.
   */
  listItem:
    | Record<PortableTextListItemType, PortableTextListItemComponent | undefined>
    | PortableTextListItemComponent
  /**
   * Component to use for rendering "hard breaks", eg `\n` inside of text spans
   * Will by default render a `<br />`. Pass `false` to render as-is (`\n`)
   */
  hardBreak: ComponentType<{}> | false
  /**
   * React component used when encountering a mark type there is no registered component for
   * in the `components.marks` prop.
   */
  unknownMark: PortableTextMarkComponent
  /**
   * React component used when encountering an object type there is no registered component for
   * in the `components.types` prop.
   */
  unknownType: PortableTextComponent<UnknownNodeType>
  /**
   * React component used when encountering a block style there is no registered component for
   * in the `components.block` prop. Only used if `components.block` is an object.
   */
  unknownBlockStyle: PortableTextComponent<PortableTextBlock>
  /**
   * React component used when encountering a list style there is no registered component for
   * in the `components.list` prop. Only used if `components.list` is an object.
   */
  unknownList: PortableTextComponent<ReactPortableTextList>
  /**
   * React component used when encountering a list item style there is no registered component for
   * in the `components.listItem` prop. Only used if `components.listItem` is an object.
   */
  unknownListItem: PortableTextComponent<PortableTextListItemBlock>
}
/**
 * Props received by most Portable Text components
 *
 * @template T Type of data this component will receive in its `value` property
 */
export interface PortableTextComponentProps<T> {
  /**
   * Data associated with this portable text node, eg the raw JSON value of a block/type
   */
  value: T
  /**
   * Index within its parent
   */
  index: number
  /**
   * Whether or not this node is "inline" - ie as a child of a text block,
   * alongside text spans, or a block in and of itself.
   */
  isInline: boolean
  /**
   * React child nodes of this block/component
   */
  children?: ReactNode
  /**
   * Function used to render any node that might appear in a portable text array or block,
   * including virtual "toolkit"-nodes like lists and nested spans. You will rarely need
   * to use this.
   */
  renderNode: NodeRenderer
}
/**
 * Props received by any user-defined type in the input array that is not a text block.
 * Identical to {@link PortableTextComponentProps} minus `children`.
 *
 * @template T Type of data this component will receive in its `value` property
 */
export type PortableTextTypeComponentProps<T> = Omit<PortableTextComponentProps<T>, 'children'>
/**
 * Props received by Portable Text mark rendering components
 *
 * @template M Shape describing the data associated with this mark, if it is an annotation
 */
export interface PortableTextMarkComponentProps<M extends TypedObject = ArbitraryTypedObject> {
  /**
   * Mark definition, eg the actual data of the annotation. If the mark is a simple decorator, this will be `undefined`
   */
  value?: M
  /**
   * Text content of this mark
   */
  text: string
  /**
   * Key for this mark. The same key can be used amongst multiple text spans within the same block, so don't rely on this for React keys.
   */
  markKey?: string
  /**
   * Type of mark - ie value of `_type` in the case of annotations, or the name of the decorator otherwise - eg `em`, `italic`.
   */
  markType: string
  /**
   * React child nodes of this mark
   */
  children: ReactNode
  /**
   * Function used to render any node that might appear in a portable text array or block,
   * including virtual "toolkit"-nodes like lists and nested spans. You will rarely need
   * to use this.
   */
  renderNode: NodeRenderer
}
/**
 * Any node type that we can't identify - eg it has an `_type`,
 * but we don't know anything about its other properties
 */
export type UnknownNodeType = {[key: string]: unknown; _type: string} | TypedObject
/**
 * Function that renders any node that might appear in a portable text array or block,
 * including virtual "toolkit"-nodes like lists and nested spans
 */
export type NodeRenderer = <T extends TypedObject>(options: Serializable<T>) => ReactNode
/**
 * Categories of nodes that may lack a registered component, reported to
 * {@link MissingComponentHandler}.
 */
export type NodeType = 'block' | 'mark' | 'blockStyle' | 'listStyle' | 'listItemStyle'
/**
 * Handler invoked when a node has no registered React component.
 *
 * @param message Human-readable warning describing the missing component
 * @param options The `_type` value and the category of node it belongs to
 */
export type MissingComponentHandler = (
  message: string,
  options: {type: string; nodeType: NodeType}
) => void
/**
 * Options passed to a {@link NodeRenderer} for a single node.
 */
export interface Serializable<T> {
  node: T
  index: number
  isInline: boolean
  renderNode: NodeRenderer
}
/**
 * A block (or list item block) that has been serialized to React child nodes.
 */
export interface SerializedBlock {
  _key: string
  children: ReactNode
  index: number
  isInline: boolean
  node: PortableTextBlock | PortableTextListItemBlock
}
// Re-exporting these as we don't want to refer to "toolkit" outside of this module
/**
 * A virtual "list" node for Portable Text - not strictly part of Portable Text,
 * but generated by this library to ease the rendering of lists in HTML etc
 */
export type ReactPortableTextList = ToolkitPortableTextList
/**
 * A virtual "list item" node for Portable Text - not strictly any different from a
 * regular Portable Text Block, but we can guarantee that it has a `listItem` property.
 */
export type ReactPortableTextListItem = ToolkitPortableTextListItem
import * as React from 'react'
import { css } from 'styled-components'
import {
Box,
PrimaryBtn,
Btn,
Flex,
Text,
FONT_SIZE_BODY_2,
DIRECTION_ROW,
SPACING_3,
SPACING_5,
BORDER_SOLID_LIGHT,
FONT_SIZE_HEADER,
FONT_WEIGHT_SEMIBOLD,
JUSTIFY_CENTER,
JUSTIFY_SPACE_BETWEEN,
ALIGN_STRETCH,
FONT_BODY_2_DARK,
ALIGN_CENTER,
TEXT_DECORATION_UNDERLINE,
TEXT_ALIGN_CENTER,
TEXT_TRANSFORM_UPPERCASE,
} from '@opentrons/components'
import * as Sessions from '../../redux/sessions'
import {
JogControls,
HORIZONTAL_PLANE,
VERTICAL_PLANE,
} from '../../molecules/JogControls'
import { formatJogVector } from './utils'
import { useConfirmCrashRecovery } from './useConfirmCrashRecovery'
import { NeedHelpLink } from './NeedHelpLink'
import slot1LeftMultiDemoAsset from '../../assets/videos/cal-movement/SLOT_1_LEFT_MULTI_X-Y.webm'
import slot1LeftSingleDemoAsset from '../../assets/videos/cal-movement/SLOT_1_LEFT_SINGLE_X-Y.webm'
import slot1RightMultiDemoAsset from '../../assets/videos/cal-movement/SLOT_1_RIGHT_MULTI_X-Y.webm'
import slot1RightSingleDemoAsset from '../../assets/videos/cal-movement/SLOT_1_RIGHT_SINGLE_X-Y.webm'
import slot3LeftMultiDemoAsset from '../../assets/videos/cal-movement/SLOT_3_LEFT_MULTI_X-Y.webm'
import slot3LeftSingleDemoAsset from '../../assets/videos/cal-movement/SLOT_3_LEFT_SINGLE_X-Y.webm'
import slot3RightMultiDemoAsset from '../../assets/videos/cal-movement/SLOT_3_RIGHT_MULTI_X-Y.webm'
import slot3RightSingleDemoAsset from '../../assets/videos/cal-movement/SLOT_3_RIGHT_SINGLE_X-Y.webm'
import slot7LeftMultiDemoAsset from '../../assets/videos/cal-movement/SLOT_7_LEFT_MULTI_X-Y.webm'
import slot7LeftSingleDemoAsset from '../../assets/videos/cal-movement/SLOT_7_LEFT_SINGLE_X-Y.webm'
import slot7RightMultiDemoAsset from '../../assets/videos/cal-movement/SLOT_7_RIGHT_MULTI_X-Y.webm'
import slot7RightSingleDemoAsset from '../../assets/videos/cal-movement/SLOT_7_RIGHT_SINGLE_X-Y.webm'
import type { Axis, Sign, StepSize } from '../../molecules/JogControls/types'
import type { CalibrationPanelProps } from './types'
import type {
SessionType,
CalibrationSessionStep,
SessionCommandString,
CalibrationLabware,
} from '../../redux/sessions/types'
import type { Mount } from '@opentrons/components'
// Demo video asset for each calibration point, keyed by deck slot ('1' | '3' | '7'),
// then mount (left/right), then pipette channel count (multi/single).
const assetMap: Record<
  CalibrationLabware['slot'],
  Record<Mount, Record<'multi' | 'single', string>>
> = {
  '1': {
    left: {
      multi: slot1LeftMultiDemoAsset,
      single: slot1LeftSingleDemoAsset,
    },
    right: {
      multi: slot1RightMultiDemoAsset,
      single: slot1RightSingleDemoAsset,
    },
  },
  '3': {
    left: {
      multi: slot3LeftMultiDemoAsset,
      single: slot3LeftSingleDemoAsset,
    },
    right: {
      multi: slot3RightMultiDemoAsset,
      single: slot3RightSingleDemoAsset,
    },
  },
  '7': {
    left: {
      multi: slot7LeftMultiDemoAsset,
      single: slot7LeftSingleDemoAsset,
    },
    right: {
      multi: slot7RightMultiDemoAsset,
      single: slot7RightSingleDemoAsset,
    },
  },
}
// User-facing copy for the save/check X-Y point panel. Kept as module constants
// so the header, body text, and button labels stay consistent with each other.
const SAVE_XY_POINT_HEADER = 'Calibrate the X and Y-axis in'
const CHECK_POINT_XY_HEADER = 'Check the X and Y-axis in'
const SLOT = 'slot'
const JOG_UNTIL = 'Jog the robot until the tip is'
const PRECISELY_CENTERED = 'precisely centered'
const ABOVE_THE_CROSS = 'above the cross in'
const THEN = 'Then press the'
const TO_SAVE = 'button to calibrate the x and y-axis in'
const TO_CHECK =
  'button to determine how this position compares to the previously-saved x and y-axis calibration coordinates'
const BASE_BUTTON_TEXT = 'save calibration'
const HEALTH_BUTTON_TEXT = 'check x and y-axis'
const MOVE_TO_POINT_TWO_BUTTON_TEXT = `${BASE_BUTTON_TEXT} and move to slot 3`
const MOVE_TO_POINT_THREE_BUTTON_TEXT = `${BASE_BUTTON_TEXT} and move to slot 7`
const HEALTH_POINT_TWO_BUTTON_TEXT = `${HEALTH_BUTTON_TEXT} and move to slot 3`
const HEALTH_POINT_THREE_BUTTON_TEXT = `${HEALTH_BUTTON_TEXT} and move to slot 7`
const ALLOW_VERTICAL_TEXT = 'Reveal Z jog controls to move up and down'
// Per-session-type, per-step panel contents: which slot is being calibrated,
// the primary button label, and the session command to issue after saving.
// `finalCommand` (health check only) replaces `moveCommand` when the first of
// two pipettes finishes its points — see savePoint() in SaveXYPoint.
const contentsBySessionTypeByCurrentStep: {
  [sessionType in SessionType]?: {
    [step in CalibrationSessionStep]?: {
      slotNumber: string
      buttonText: string
      moveCommand: SessionCommandString | null
      finalCommand?: SessionCommandString | null
    }
  }
} = {
  [Sessions.SESSION_TYPE_DECK_CALIBRATION]: {
    [Sessions.DECK_STEP_SAVING_POINT_ONE]: {
      slotNumber: '1',
      buttonText: MOVE_TO_POINT_TWO_BUTTON_TEXT,
      moveCommand: Sessions.deckCalCommands.MOVE_TO_POINT_TWO,
    },
    [Sessions.DECK_STEP_SAVING_POINT_TWO]: {
      slotNumber: '3',
      buttonText: MOVE_TO_POINT_THREE_BUTTON_TEXT,
      moveCommand: Sessions.deckCalCommands.MOVE_TO_POINT_THREE,
    },
    [Sessions.DECK_STEP_SAVING_POINT_THREE]: {
      slotNumber: '7',
      buttonText: BASE_BUTTON_TEXT,
      moveCommand: Sessions.sharedCalCommands.MOVE_TO_TIP_RACK,
    },
  },
  [Sessions.SESSION_TYPE_PIPETTE_OFFSET_CALIBRATION]: {
    [Sessions.PIP_OFFSET_STEP_SAVING_POINT_ONE]: {
      slotNumber: '1',
      buttonText: BASE_BUTTON_TEXT,
      moveCommand: null,
    },
  },
  [Sessions.SESSION_TYPE_CALIBRATION_HEALTH_CHECK]: {
    [Sessions.CHECK_STEP_COMPARING_POINT_ONE]: {
      slotNumber: '1',
      buttonText: HEALTH_POINT_TWO_BUTTON_TEXT,
      moveCommand: Sessions.deckCalCommands.MOVE_TO_POINT_TWO,
      finalCommand: Sessions.sharedCalCommands.MOVE_TO_TIP_RACK,
    },
    [Sessions.CHECK_STEP_COMPARING_POINT_TWO]: {
      slotNumber: '3',
      buttonText: HEALTH_POINT_THREE_BUTTON_TEXT,
      moveCommand: Sessions.deckCalCommands.MOVE_TO_POINT_THREE,
    },
    [Sessions.CHECK_STEP_COMPARING_POINT_THREE]: {
      slotNumber: '7',
      buttonText: HEALTH_BUTTON_TEXT,
      moveCommand: Sessions.sharedCalCommands.MOVE_TO_TIP_RACK,
    },
  },
}
/**
 * Calibration panel that walks the user through jogging the pipette tip over the
 * cross in a deck slot and saving (or, for health checks, comparing) the X/Y point.
 *
 * Renders a header, instructional copy with a demo video for the current
 * slot/mount/channel combination, jog controls, and a primary button that sends
 * the appropriate session command(s) for the current step.
 */
export function SaveXYPoint(props: CalibrationPanelProps): JSX.Element {
  const {
    isMulti,
    mount,
    sendCommands,
    currentStep,
    sessionType,
    activePipette,
    instruments,
    checkBothPipettes,
  } = props
  // Look up the step-specific contents. Assumes sessionType/currentStep is a
  // combination present in the map — TODO confirm callers only render this
  // panel for the steps listed in contentsBySessionTypeByCurrentStep.
  const {
    // @ts-expect-error(sa, 2021-05-27): avoiding src code change, need to type narrow
    slotNumber,
    // @ts-expect-error(sa, 2021-05-27): avoiding src code change, need to type narrow
    buttonText,
    // @ts-expect-error(sa, 2021-05-27): avoiding src code change, need to type narrow
    moveCommand,
    // @ts-expect-error(sa, 2021-05-27): avoiding src code change, need to type narrow
    finalCommand,
    // @ts-expect-error(sa, 2021-05-27): avoiding src code change, need to type narrow
  } = contentsBySessionTypeByCurrentStep[sessionType][currentStep]
  // Demo video for this slot/mount/channel-count combination.
  const demoAsset = React.useMemo(
    () =>
      slotNumber && assetMap[slotNumber][mount][isMulti ? 'multi' : 'single'],
    [slotNumber, mount, isMulti]
  )
  const isHealthCheck =
    sessionType === Sessions.SESSION_TYPE_CALIBRATION_HEALTH_CHECK
  // Forward a jog request to the calibration session as a movement vector.
  const jog = (axis: Axis, dir: Sign, step: StepSize): void => {
    sendCommands({
      command: Sessions.sharedCalCommands.JOG,
      data: {
        vector: formatJogVector(axis, dir, step),
      },
    })
  }
  // Save (or compare, for health checks) the current point, then queue the
  // follow-up move command for the next step when one applies. During a
  // two-pipette health check, the first pipette's final point uses
  // finalCommand instead of moveCommand.
  const savePoint = (): void => {
    let commands = null
    if (isHealthCheck) {
      commands = [{ command: Sessions.checkCommands.COMPARE_POINT }]
    } else {
      commands = [{ command: Sessions.sharedCalCommands.SAVE_OFFSET }]
    }
    if (
      finalCommand &&
      checkBothPipettes &&
      activePipette?.rank === Sessions.CHECK_PIPETTE_RANK_FIRST
    ) {
      commands = [...commands, { command: finalCommand }]
    } else if (moveCommand) {
      commands = [...commands, { command: moveCommand }]
    }
    sendCommands(...commands)
  }
  const [confirmLink, confirmModal] = useConfirmCrashRecovery({
    requiresNewTip: true,
    ...props,
  })
  // While the first pipette of a two-pipette health check is active, show the
  // generic "check" label instead of the step-specific button text.
  const continueButtonText =
    isHealthCheck &&
    instruments?.length &&
    activePipette?.rank === Sessions.CHECK_PIPETTE_RANK_FIRST
      ? HEALTH_BUTTON_TEXT
      : buttonText
  const [allowVertical, setAllowVertical] = React.useState(false)
  // NOTE(review): defining a component inside render recreates it on every
  // render, which remounts its subtree — consider hoisting it or inlining the
  // JSX; confirm no state/focus is lost in practice.
  const AllowVerticalPrompt = (): JSX.Element => (
    <Flex
      justifyContent={JUSTIFY_CENTER}
      alignItems={ALIGN_CENTER}
      flex={1}
      alignSelf={ALIGN_STRETCH}
    >
      <Btn
        onClick={() => setAllowVertical(true)}
        css={FONT_BODY_2_DARK}
        textDecoration={TEXT_DECORATION_UNDERLINE}
        textAlign={TEXT_ALIGN_CENTER}
      >
        {ALLOW_VERTICAL_TEXT}
      </Btn>
    </Flex>
  )
  return (
    <>
      <Flex width="100%" justifyContent={JUSTIFY_SPACE_BETWEEN}>
        <Text
          fontSize={FONT_SIZE_HEADER}
          fontWeight={FONT_WEIGHT_SEMIBOLD}
          textTransform={TEXT_TRANSFORM_UPPERCASE}
        >
          {isHealthCheck ? CHECK_POINT_XY_HEADER : SAVE_XY_POINT_HEADER}
          {` ${SLOT} ${slotNumber || ''}`}
        </Text>
        <NeedHelpLink />
      </Flex>
      <Flex
        flexDirection={DIRECTION_ROW}
        padding={SPACING_3}
        border={BORDER_SOLID_LIGHT}
        marginTop={SPACING_3}
      >
        <Text fontSize={FONT_SIZE_BODY_2} alignSelf={JUSTIFY_CENTER}>
          {JOG_UNTIL}
          <b>{` ${PRECISELY_CENTERED} `}</b>
          {ABOVE_THE_CROSS}
          <b>{` ${SLOT} ${slotNumber || ''}`}.</b>
          <br />
          <br />
          {THEN}
          <b>{` '${continueButtonText}' `}</b>
          {isHealthCheck ? TO_CHECK : `${TO_SAVE} ${SLOT} ${slotNumber}`}.
        </Text>
        {/* Key on the asset URL so the <video> reloads when the step changes. */}
        <video
          key={String(demoAsset)}
          css={css`
            max-width: 100%;
            max-height: 15rem;
          `}
          autoPlay={true}
          loop={true}
          controls={false}
        >
          <source src={demoAsset} />
        </video>
      </Flex>
      <JogControls
        jog={jog}
        stepSizes={[0.1, 1]}
        planes={
          allowVertical
            ? [HORIZONTAL_PLANE, VERTICAL_PLANE]
            : [HORIZONTAL_PLANE]
        }
        auxiliaryControl={allowVertical ? null : <AllowVerticalPrompt />}
      />
      <Flex
        width="100%"
        justifyContent={JUSTIFY_CENTER}
        marginBottom={SPACING_3}
      >
        <PrimaryBtn
          title="save"
          onClick={savePoint}
          flex="1"
          marginX={SPACING_5}
        >
          {continueButtonText}
        </PrimaryBtn>
      </Flex>
      <Box width="100%">{confirmLink}</Box>
      {confirmModal}
    </>
  )
}
import BigNumber from 'bignumber.js';
import { DODOContext, getDODOContext } from '../utils-v1/ProxyContextV1';
import { decimalStr, MAX_UINT256, fromWei, mweiStr } from '../utils-v1/Converter';
import { logGas } from '../utils-v1/Log';
import * as contracts from '../utils-v1/Contracts';
import { assert } from 'chai';
let lp: string;
let trader: string;
/**
 * Seeds the DODO/USDT pool: sets the oracle price to 0.1 USDT per DODO,
 * approves both accounts, funds them, and deposits lp's liquidity.
 */
async function initDODO_USDT(ctx: DODOContext): Promise<void> {
    await ctx.setOraclePrice(ctx.DODO_USDT_ORACLE, mweiStr("0.1"));
    [lp, trader] = [ctx.spareAccounts[0], ctx.spareAccounts[1]];
    const { DODO, USDT, DODO_USDT } = ctx;
    const pairAddress = DODO_USDT.options.address;
    await ctx.approvePair(DODO, USDT, pairAddress, lp);
    await ctx.approvePair(DODO, USDT, pairAddress, trader);
    await ctx.mintToken(DODO, USDT, lp, decimalStr("10000000"), mweiStr("1000000"));
    await ctx.mintToken(DODO, USDT, trader, decimalStr("1000"), mweiStr("0"));
    // Provide base (DODO) and quote (USDT) liquidity on behalf of lp.
    await DODO_USDT.methods
        .depositBaseTo(lp, decimalStr("10000000"))
        .send(ctx.sendParam(lp));
    await DODO_USDT.methods
        .depositQuoteTo(lp, mweiStr("1000000"))
        .send(ctx.sendParam(lp));
}
/**
 * Seeds the USDT/USDC pool at a 1:1 oracle price and deposits lp's liquidity
 * on both sides.
 */
async function initUSDT_USDC(ctx: DODOContext): Promise<void> {
    await ctx.setOraclePrice(ctx.USDT_USDC_ORACLE, decimalStr("1"));
    [lp, trader] = [ctx.spareAccounts[0], ctx.spareAccounts[1]];
    const { USDT, USDC, USDT_USDC } = ctx;
    await ctx.approvePair(USDT, USDC, USDT_USDC.options.address, lp);
    await ctx.mintToken(USDT, USDC, lp, mweiStr("1000000"), mweiStr("1000000"));
    // Provide base (USDT) and quote (USDC) liquidity on behalf of lp.
    await USDT_USDC.methods
        .depositBaseTo(lp, mweiStr("1000000"))
        .send(ctx.sendParam(lp));
    await USDT_USDC.methods
        .depositQuoteTo(lp, mweiStr("1000000"))
        .send(ctx.sendParam(lp));
}
/**
 * Seeds the WETH/USDC pool: oracle at 450 USDC per WETH, wraps 8 ETH for lp,
 * and deposits lp's liquidity on both sides.
 */
async function initWETH_USDC(ctx: DODOContext): Promise<void> {
    await ctx.setOraclePrice(ctx.WETH_USDC_ORACLE, mweiStr("450"));
    [lp, trader] = [ctx.spareAccounts[0], ctx.spareAccounts[1]];
    const { WETH, USDC, WETH_USDC } = ctx;
    await ctx.approvePair(WETH, USDC, WETH_USDC.options.address, lp);
    await ctx.mintToken(null, USDC, lp, decimalStr("0"), mweiStr("3600"));
    // Wrap 8 ETH into WETH so lp can deposit it as base liquidity.
    await WETH.methods.deposit().send(ctx.sendParam(lp, '8'));
    await WETH_USDC.methods
        .depositBaseTo(lp, decimalStr("8"))
        .send(ctx.sendParam(lp));
    await WETH_USDC.methods
        .depositQuoteTo(lp, mweiStr("3600"))
        .send(ctx.sendParam(lp));
}
//mock sdk logic
/**
 * Mocks the SDK routing logic: walks the pair list hop by hop, determines the
 * sell direction for each hop (0 = sell base, 1 = sell quote), quotes the
 * expected output amount locally, cross-checks it against the on-chain
 * DODOSwapCalcHelper, and finally builds the dodoSwapV1 proxy call with a
 * slippage-adjusted minimum output and a 10-minute deadline.
 *
 * @param fromTokenAmount input amount (string, token base units)
 * @param slippage        fraction (e.g. 0.1) subtracted from the quoted output
 * @param routes          token hops; first entry is the input, last the output
 * @param pairs           DODO pair descriptors, one per hop
 * @returns the unsent dodoSwapV1 transaction object
 */
async function calcRoute(ctx: DODOContext, fromTokenAmount: string, slippage: number, routes: any[], pairs: any[]) {
    // Sentinel address the proxy uses to mean "native ETH" (wrapped to WETH, the base).
    const ETH_SENTINEL = '0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE';
    let swapAmount = fromTokenAmount;
    const directions: number[] = [];
    const dodoPairs: string[] = [];
    for (let i = 0; i < pairs.length; i++) {
        const curPair = pairs[i];
        dodoPairs.push(curPair.pair);
        const pairContract = curPair.pairContract;
        // ETH input and base-token input both sell the pair's base token; the two
        // original branches were identical, so they are merged here.
        if (routes[i].address === ETH_SENTINEL || curPair.base === routes[i].address) {
            directions.push(0);
            swapAmount = await pairContract.methods.querySellBaseToken(swapAmount).call();
        } else {
            directions.push(1);
            swapAmount = await ctx.DODOSellHelper.methods.querySellQuoteToken(curPair.pair, swapAmount).call();
        }
    }
    // Cross-check the locally accumulated quote against the helper contract.
    const [returnAmount, midPrices] = await ctx.DODOSwapCalcHelper.methods.calcReturnAmountV1(
        fromTokenAmount,
        dodoPairs,
        directions,
    ).call();
    console.log("returnAmount:", returnAmount)
    console.log("localAmount:", swapAmount)
    console.log("midPrices:", midPrices)
    // Minimum acceptable output after slippage, rounded down to an integer.
    const toAmount = new BigNumber(swapAmount).multipliedBy(1 - slippage).toFixed(0, BigNumber.ROUND_DOWN);
    const deadline = Math.floor(new Date().getTime() / 1000 + 60 * 10);
    return ctx.DODOV1Proxy01.methods.dodoSwapV1(
        routes[0].address,
        routes[routes.length - 1].address,
        fromTokenAmount,
        toAmount,
        dodoPairs,
        directions,
        deadline
    )
}
describe("Trader", () => {
let snapshotId: string;
let ctx: DODOContext;
before(async () => {
console.log("Confirm DODOApprove's current Proxy version!");
let ETH = await contracts.newContract(
contracts.WETH_CONTRACT_NAME
);
ctx = await getDODOContext(ETH.options.address);
await initDODO_USDT(ctx);
await initUSDT_USDC(ctx);
await initWETH_USDC(ctx);
});
beforeEach(async () => {
snapshotId = await ctx.EVM.snapshot();
});
afterEach(async () => {
// await ctx.EVM.reset(snapshotId);
});
describe("route calc test", () => {
it("DODO to USDT directly swap", async () => {
var b_DODO = await ctx.DODO.methods.balanceOf(trader).call()
var b_USDT = await ctx.USDT.methods.balanceOf(trader).call()
var c_b_CHI = await ctx.CHI.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
console.log("Before DODO:" + fromWei(b_DODO, 'ether') + "; USDT:" + fromWei(b_USDT, 'mwei'));
//approve DODO entry
await ctx.DODO.methods.approve(ctx.DODOApprove.options.address, MAX_UINT256).send(ctx.sendParam(trader))
//set route path
var routes = [{
address: ctx.DODO.options.address,
decimals: 18
},
{
address: ctx.USDT.options.address,
decimals: 6
}];
var pairs = [{
pair: ctx.DODO_USDT.options.address,
base: ctx.DODO.options.address,
pairContract: ctx.DODO_USDT
}];
await logGas(await calcRoute(ctx, decimalStr('10'), 0.1, routes, pairs), ctx.sendParam(trader), "directly swap")
var tx = await logGas(await calcRoute(ctx, decimalStr('10'), 0.1, routes, pairs), ctx.sendParam(trader), "directly swap")
console.log(tx.transactionHash);
var a_DODO = await ctx.DODO.methods.balanceOf(trader).call()
var a_USDT = await ctx.USDT.methods.balanceOf(trader).call()
console.log("After DODO:" + fromWei(a_DODO, 'ether') + "; USDT:" + fromWei(a_USDT, 'mwei'));
console.log("===============================================")
var c_DODO = await ctx.DODO.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_USDT = await ctx.USDT.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_a_CHI = await ctx.CHI.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
console.log("Contract DODO:" + fromWei(c_DODO, 'ether') + "; USDT:" + fromWei(c_USDT, 'mwei'));
console.log("Contract gas Token Before:" + c_b_CHI + " ;After:" + c_a_CHI);
// console.log("USDT:" + a_USDT);
assert(a_USDT, "1994000");
});
it("DODO to USDC two hops swap", async () => {
var b_DODO = await ctx.DODO.methods.balanceOf(trader).call()
var b_USDC = await ctx.USDC.methods.balanceOf(trader).call()
console.log("Before DODO:" + fromWei(b_DODO, 'ether') + "; USDC:" + fromWei(b_USDC, 'mwei'));
//approve DODO entry
await ctx.DODO.methods.approve(ctx.DODOApprove.options.address, MAX_UINT256).send(ctx.sendParam(trader))
//set route path
var routes = [{
address: ctx.DODO.options.address,
decimals: 18
}, {
address: ctx.USDT.options.address,
decimals: 6
}, {
address: ctx.USDC.options.address,
decimals: 6
}];
var pairs = [{
pair: ctx.DODO_USDT.options.address,
base: ctx.DODO.options.address,
pairContract: ctx.DODO_USDT
}, {
pair: ctx.USDT_USDC.options.address,
base: ctx.USDT.options.address,
pairContract: ctx.USDT_USDC
}];
var tx = await logGas(await calcRoute(ctx, decimalStr('10'), 0.1, routes, pairs), ctx.sendParam(trader), "two hops swap")
var tx = await logGas(await calcRoute(ctx, decimalStr('10'), 0.1, routes, pairs), ctx.sendParam(trader), "two hops swap")
// console.log(tx.events['Swapped']);
var a_DODO = await ctx.DODO.methods.balanceOf(trader).call()
var a_USDC = await ctx.USDC.methods.balanceOf(trader).call()
console.log("After DODO:" + fromWei(a_DODO, 'ether') + "; USDC:" + fromWei(a_USDC, 'mwei'));
console.log("===============================================")
var c_DODO = await ctx.DODO.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_USDT = await ctx.USDT.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_USDC = await ctx.USDC.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
console.log("Contract DODO:" + fromWei(c_DODO, 'ether') + "; USDT:" + fromWei(c_USDT, 'mwei') + "; USDC:" + fromWei(c_USDC, 'mwei'));
// console.log("USDC:" + a_USDC);
assert(a_USDC, "1988019");
});
it("DODO to WETH three hops swap", async () => {
var b_DODO = await ctx.DODO.methods.balanceOf(trader).call()
var b_WETH = await ctx.WETH.methods.balanceOf(trader).call()
console.log("Before DODO:" + fromWei(b_DODO, 'ether') + "; WETH:" + fromWei(b_WETH, 'ether'));
//approve DODO entry
await ctx.DODO.methods.approve(ctx.DODOApprove.options.address, MAX_UINT256).send(ctx.sendParam(trader))
//set route path
var routes = [{
address: ctx.DODO.options.address,
decimals: 18
}, {
address: ctx.USDT.options.address,
decimals: 6
}, {
address: ctx.USDC.options.address,
decimals: 6
}, {
address: ctx.WETH.options.address,
decimals: 18
}];
var pairs = [{
pair: ctx.DODO_USDT.options.address,
base: ctx.DODO.options.address,
pairContract: ctx.DODO_USDT
}, {
pair: ctx.USDT_USDC.options.address,
base: ctx.USDT.options.address,
pairContract: ctx.USDT_USDC
}, {
pair: ctx.WETH_USDC.options.address,
base: ctx.WETH.options.address,
pairContract: ctx.WETH_USDC
}];
var tx = await logGas(await calcRoute(ctx, decimalStr('10'), 0.1, routes, pairs), ctx.sendParam(trader), "three hops swap")
var tx = await logGas(await calcRoute(ctx, decimalStr('10'), 0.1, routes, pairs), ctx.sendParam(trader), "three hops swap")
console.log(tx.events['TestAmount']);
var a_DODO = await ctx.DODO.methods.balanceOf(trader).call()
var a_WETH = await ctx.WETH.methods.balanceOf(trader).call()
console.log("After DODO:" + fromWei(a_DODO, 'ether') + "; WETH:" + fromWei(a_WETH, 'ether'));
console.log("===============================================")
var c_DODO = await ctx.DODO.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_USDT = await ctx.USDT.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_USDC = await ctx.USDC.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
var c_WETH = await ctx.WETH.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
console.log("Contract DODO:" + fromWei(c_DODO, 'ether') + "; USDT:" + fromWei(c_USDT, 'mwei') + "; USDC:" + fromWei(c_USDC, 'mwei') + "; WETH:" + fromWei(c_WETH, 'ether'));
// console.log("WETH:" + a_WETH);
assert(a_WETH, "4404365055045800");
});
it("ETH to USDC wrap eth and directly swap", async () => {
    // Snapshot the trader's ETH/WETH/USDC balances before the swap so the log diff is meaningful.
    var b_ETH = await ctx.Web3.eth.getBalance(trader)
    var b_WETH = await ctx.WETH.methods.balanceOf(trader).call()
    var b_USDC = await ctx.USDC.methods.balanceOf(trader).call()
    console.log("Before ETH:" + fromWei(b_ETH, 'ether') + "; WETH:" + fromWei(b_WETH, 'ether') + "; USDC:" + fromWei(b_USDC, 'mwei'));
    var b_w_eth = await ctx.Web3.eth.getBalance(ctx.WETH.options.address)
    console.log("weth contract Before:" + fromWei(b_w_eth, 'ether'))
    //set route path: native ETH (pseudo-address) -> USDC through the single WETH/USDC pair
    var routes = [{
        address: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE",
        decimals: 18
    }, {
        address: ctx.USDC.options.address,
        decimals: 6
    }];
    var pairs = [{
        pair: ctx.WETH_USDC.options.address,
        base: ctx.WETH.options.address,
        pairContract: ctx.WETH_USDC
    }];
    // Execute the swap exactly once. (A copy-pasted duplicate of this statement used to
    // re-declare `tx` and run the identical swap a second time.)
    var tx = await logGas(await calcRoute(ctx, decimalStr('1'), 0.1, routes, pairs), ctx.sendParam(trader, '1'), "wrap eth and directly swap")
    // Balances after the swap, for the trader...
    var a_ETH = await ctx.Web3.eth.getBalance(trader)
    var a_WETH = await ctx.WETH.methods.balanceOf(trader).call()
    var a_USDC = await ctx.USDC.methods.balanceOf(trader).call()
    console.log("After ETH:" + fromWei(a_ETH, 'ether') + "; WETH:" + fromWei(a_WETH, 'ether') + "; USDC:" + fromWei(a_USDC, 'mwei'));
    console.log("===============================================")
    // ...and for the proxy contract (logged so leftover funds are visible in the output).
    var c_ETH = await ctx.Web3.eth.getBalance(ctx.DODOV1Proxy01.options.address)
    var c_WETH = await ctx.WETH.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_USDT = await ctx.USDT.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_USDC = await ctx.USDC.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    console.log("Contract ETH:" + fromWei(c_ETH, 'ether') + "; WETH:" + fromWei(c_WETH, 'ether') + "; USDT:" + fromWei(c_USDT, 'mwei') + "; USDC:" + fromWei(c_USDC, 'mwei'));
    var a_w_eth = await ctx.Web3.eth.getBalance(ctx.WETH.options.address)
    console.log("weth contract After:" + fromWei(a_w_eth, 'ether'))
    // NOTE(review): assert(value, message) only checks truthiness — the second argument is a
    // failure message, not an expected value. This probably wants assert.equal; confirm the
    // expected amount (single-execution value) before tightening.
    assert(a_USDC, "869508322");
});
it("ETH to USDT wrap eth and two hops swap", async () => {
    // Snapshot the trader's ETH/WETH/USDT balances before the swap so the log diff is meaningful.
    var b_ETH = await ctx.Web3.eth.getBalance(trader)
    var b_WETH = await ctx.WETH.methods.balanceOf(trader).call()
    var b_USDT = await ctx.USDT.methods.balanceOf(trader).call()
    console.log("Before ETH:" + fromWei(b_ETH, 'ether') + "; WETH:" + fromWei(b_WETH, 'ether') + "; USDT:" + fromWei(b_USDT, 'mwei'));
    var b_w_eth = await ctx.Web3.eth.getBalance(ctx.WETH.options.address)
    console.log("weth contract Before:" + fromWei(b_w_eth, 'ether'))
    //set route path: native ETH (pseudo-address) -> USDC -> USDT across two pairs
    var routes = [{
        address: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE",
        decimals: 18
    }, {
        address: ctx.USDC.options.address,
        decimals: 6
    }, {
        address: ctx.USDT.options.address,
        decimals: 6
    }];
    var pairs = [{
        pair: ctx.WETH_USDC.options.address,
        base: ctx.WETH.options.address,
        pairContract: ctx.WETH_USDC
    }, {
        pair: ctx.USDT_USDC.options.address,
        base: ctx.USDT.options.address,
        pairContract: ctx.USDT_USDC
    }];
    // Execute the swap exactly once. (A copy-pasted duplicate of this statement used to
    // re-declare `tx` and run the identical swap a second time.)
    var tx = await logGas(await calcRoute(ctx, decimalStr('1'), 0.1, routes, pairs), ctx.sendParam(trader, '1'), "wrap eth and two hops swap")
    // Balances after the swap, for the trader...
    var a_ETH = await ctx.Web3.eth.getBalance(trader)
    var a_WETH = await ctx.WETH.methods.balanceOf(trader).call()
    var a_USDT = await ctx.USDT.methods.balanceOf(trader).call()
    console.log("After ETH:" + fromWei(a_ETH, 'ether') + "; WETH:" + fromWei(a_WETH, 'ether') + "; USDT:" + fromWei(a_USDT, 'mwei'));
    console.log("===============================================")
    // ...and for the proxy contract (logged so leftover funds are visible in the output).
    var c_ETH = await ctx.Web3.eth.getBalance(ctx.DODOV1Proxy01.options.address)
    var c_WETH = await ctx.WETH.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_USDT = await ctx.USDT.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_USDC = await ctx.USDC.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    console.log("Contract ETH:" + fromWei(c_ETH, 'ether') + "; WETH:" + fromWei(c_WETH, 'ether') + "; USDT:" + fromWei(c_USDT, 'mwei') + "; USDC:" + fromWei(c_USDC, 'mwei'));
    var a_w_eth = await ctx.Web3.eth.getBalance(ctx.WETH.options.address)
    console.log("weth contract After:" + fromWei(a_w_eth, 'ether'))
    // console.log("USDT:" + a_USDT);
    // NOTE(review): assert(value, message) only checks truthiness — the second argument is a
    // failure message, not an expected value. This probably wants assert.equal; confirm the
    // expected amount (single-execution value) before tightening.
    assert(a_USDT, "866832169");
});
it("DODO to ETH unwrap eth and three hops swap", async () => {
    // Snapshot the trader's DODO/ETH/WETH balances before the swap so the log diff is meaningful.
    var b_DODO = await ctx.DODO.methods.balanceOf(trader).call()
    var b_ETH = await ctx.Web3.eth.getBalance(trader)
    var b_WETH = await ctx.WETH.methods.balanceOf(trader).call()
    console.log("User Before ETH:" + fromWei(b_ETH, 'ether') + "; WETH:" + fromWei(b_WETH, 'ether') + "; DODO:" + fromWei(b_DODO, 'ether'));
    var b_w_eth = await ctx.Web3.eth.getBalance(ctx.WETH.options.address)
    console.log("weth contract Before:" + fromWei(b_w_eth, 'ether'))
    //approve DODO entry
    await ctx.DODO.methods.approve(ctx.DODOApprove.options.address, MAX_UINT256).send(ctx.sendParam(trader))
    //set route path: DODO -> USDT -> USDC -> native ETH (pseudo-address) across three pairs
    var routes = [{
        address: ctx.DODO.options.address,
        decimals: 18
    }, {
        address: ctx.USDT.options.address,
        decimals: 6
    }, {
        address: ctx.USDC.options.address,
        decimals: 6
    }, {
        address: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE",
        decimals: 18
    }];
    var pairs = [{
        pair: ctx.DODO_USDT.options.address,
        base: ctx.DODO.options.address,
        pairContract: ctx.DODO_USDT
    }, {
        pair: ctx.USDT_USDC.options.address,
        base: ctx.USDT.options.address,
        pairContract: ctx.USDT_USDC
    }, {
        pair: ctx.WETH_USDC.options.address,
        base: ctx.WETH.options.address,
        pairContract: ctx.WETH_USDC
    }];
    // Execute the swap exactly once. (A copy-pasted duplicate of this statement used to
    // re-declare `tx` and run the identical swap a second time.)
    var tx = await logGas(await calcRoute(ctx, decimalStr('100'), 0.1, routes, pairs), ctx.sendParam(trader), "unwrap eth and three hops swap")
    // Balances after the swap, for the trader...
    var a_ETH = await ctx.Web3.eth.getBalance(trader)
    var a_WETH = await ctx.WETH.methods.balanceOf(trader).call()
    var a_DODO = await ctx.DODO.methods.balanceOf(trader).call()
    console.log("After ETH:" + fromWei(a_ETH, 'ether') + "; WETH:" + fromWei(a_WETH, 'ether') + "; DODO:" + fromWei(a_DODO, 'ether'));
    console.log("===============================================")
    // ...and for the proxy contract (logged so leftover funds are visible in the output).
    var c_ETH = await ctx.Web3.eth.getBalance(ctx.DODOV1Proxy01.options.address)
    var c_WETH = await ctx.WETH.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_USDT = await ctx.USDT.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_USDC = await ctx.USDC.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    var c_DODO = await ctx.DODO.methods.balanceOf(ctx.DODOV1Proxy01.options.address).call()
    console.log("Contract ETH:" + fromWei(c_ETH, 'ether') + "; WETH:" + fromWei(c_WETH, 'ether') + "; USDT:" + fromWei(c_USDT, 'mwei') + "; USDC:" + fromWei(c_USDC, 'mwei') + "; DODO:" + fromWei(c_DODO, "ether"));
    var w_eth = await ctx.Web3.eth.getBalance(ctx.WETH.options.address)
    console.log("weth contract After:" + fromWei(w_eth, 'ether'))
    // console.log("ETH returnAmount:" + tx.events['OrderHistory'].returnValues['returnAmount']);
    // NOTE(review): assert(value, message) only checks truthiness — the second argument is a
    // failure message, not an expected value. This probably wants assert.equal; confirm the
    // expected amount (single-execution value) before tightening.
    assert(tx.events['OrderHistory'].returnValues['returnAmount'], "22004556829826281");
});
});
}); | the_stack |
"use strict";
import Q = require("q");
import { IRequestHandler } from "vso-node-api/interfaces/common/VsoBaseInterfaces";
import { SoapClient } from "./soapclient";
import { UserAgentProvider } from "../helpers/useragentprovider";
import * as xmldoc from "xmldoc";
import * as url from "url";
// This class is the 'bridge' between the calling RepositoryInfoClient (which uses the
// async/await pattern) and the SoapClient which (has to) implement the callback pattern
export class TfsCatalogSoapClient {
// Underlying SOAP transport; all CatalogService requests go through this client.
private soapClient: SoapClient;
// Base TFS server url supplied by the caller.
private serverUrl: string;
// Fully-resolved url of CatalogService.asmx, derived from serverUrl in the constructor.
private endpointUrl: string;
/* tslint:disable:variable-name */
// "*" recursion marker for catalog queries — presumably a single-level recurse;
// usage is outside this chunk, so confirm against the query-building code.
private static readonly SingleRecurseStar: string = "*";
// QueryNodes query-option flags, string-encoded for the SOAP payload.
private static readonly QueryOptionsNone: string = "0";
private static readonly QueryOptionsExpandDependencies: string = "1";
// These guids brought over from our friends at vso-intellij...
// https://github.com/Microsoft/vso-intellij/blob/master/plugin/src/com/microsoft/alm/plugin/context/soap/CatalogServiceImpl.java#L56-L58
// Ensure that they remain lower-case: the parse methods compare them against
// attribute values run through toLowerCase().
private static readonly OrganizationalRoot: string = "69a51c5e-c093-447e-a177-a09e47a60974";
private static readonly TeamFoundationServerInstance: string = "b36f1bda-df2d-482b-993a-f194a31a1fa2";
private static readonly ProjectCollection: string = "26338d9e-d437-44aa-91f2-55880a328b54";
// Xml nodes in SOAP envelopes are case-sensitive (so don't change the values below)
private static readonly XmlSoapBody: string = "soap:Body";
private static readonly XmlQueryNodesResponse: string = "QueryNodesResponse";
private static readonly XmlQueryNodesResult: string = "QueryNodesResult";
private static readonly XmlCatalogResources: string = "CatalogResources";
private static readonly XmlNodeReferencesPaths: string = "NodeReferencePaths";
/* tslint:enable:variable-name */
constructor(serverUrl: string, handlers: IRequestHandler[]) {
    // Build the SOAP transport first, then remember the server url and the
    // CatalogService endpoint it hosts (used by every request and error message).
    const catalogServicePath: string = "TeamFoundation/Administration/v3.0/CatalogService.asmx";
    this.soapClient = new SoapClient(UserAgentProvider.UserAgent, handlers);
    this.serverUrl = serverUrl;
    this.endpointUrl = url.resolve(serverUrl, catalogServicePath);
}
/*
Sample value of the parameter sent to this function:
<?xml version="1.0" encoding="UTF-8"?>
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<soap:Body>
<QueryNodesResponse xmlns="http://microsoft.com/webservices/">
<QueryNodesResult>
<CatalogResourceTypes>
<CatalogResourceType Identifier="69a51c5e-c093-447e-a177-a09e47a60974" DisplayName="Organizational Root">
<Description>The root of the catalog tree that describes the organizational makeup of the TFS deployment.</Description>
</CatalogResourceType>
<CatalogResourceType Identifier="14f04669-6779-42d5-8975-184b93650c83" DisplayName="Infrastructure Root">
<Description>The root of the catalog tree that describes the physical makeup of the TFS deployment.</Description>
</CatalogResourceType>
</CatalogResourceTypes>
<CatalogResources>
<CatalogResource Identifier="0bb849d3-fd55-41eb-9b74-45d974a0fc03" DisplayName="Organizational Root" ResourceTypeIdentifier="69a51c5e-c093-447e-a177-a09e47a60974" TempCorrelationId="0bb849d3-fd55-41eb-9b74-45d974a0fc03" ctype="0" MatchedQuery="true">
<Description>The root of the catalog tree that describes the organizational makeup of the TFS deployment.</Description>
<CatalogServiceReferences />
<Properties />
<NodeReferencePaths>
<string>3eYRYkJOok6GHrKam0AcAA==</string>
</NodeReferencePaths>
</CatalogResource>
<CatalogResource Identifier="0d3bdb54-52cb-49b4-ac3d-6530f330cfac" DisplayName="Infrastructure Root" ResourceTypeIdentifier="14f04669-6779-42d5-8975-184b93650c83" TempCorrelationId="0d3bdb54-52cb-49b4-ac3d-6530f330cfac" ctype="0" MatchedQuery="true">
<Description>The root of the catalog tree that describes the physical makeup of the TFS deployment.</Description>
<CatalogServiceReferences />
<Properties />
<NodeReferencePaths>
<string>Vc1S6XwnTEe/isOiPfhmxw==</string>
</NodeReferencePaths>
</CatalogResource>
</CatalogResources>
<CatalogNodes>
<CatalogNode FullPath="3eYRYkJOok6GHrKam0AcAA==" default="false" ResourceIdentifier="0bb849d3-fd55-41eb-9b74-45d974a0fc03" ParentPath="" ChildItem="3eYRYkJOok6GHrKam0AcAA==" NodeDependenciesIncluded="false" ctype="0" MatchedQuery="true">
<NodeDependencies />
</CatalogNode>
<CatalogNode FullPath="Vc1S6XwnTEe/isOiPfhmxw==" default="false" ResourceIdentifier="0d3bdb54-52cb-49b4-ac3d-6530f330cfac" ParentPath="" ChildItem="Vc1S6XwnTEe/isOiPfhmxw==" NodeDependenciesIncluded="false" ctype="0" MatchedQuery="true">
<NodeDependencies />
</CatalogNode>
</CatalogNodes>
<DeletedResources />
<DeletedNodeResources />
<DeletedNodes />
<LocationServiceLastChangeId>4006</LocationServiceLastChangeId>
</QueryNodesResult>
</QueryNodesResponse>
</soap:Body>
</soap:Envelope>
*/
/**
 * Extracts the node reference path of the Organizational Root resource from a
 * QueryNodes SOAP response (see the sample envelope above).
 * Throws a descriptive Error when the envelope is missing or not shaped as expected.
 */
private parseOrganizationRootPath(envelopeXml: any): string {
    if (!envelopeXml) {
        throw new Error(`No SOAP envelope was received for OrganizationRoot from ${this.endpointUrl}`);
    }
    const organizationDocument: xmldoc.XmlDocument = new xmldoc.XmlDocument(envelopeXml);
    // Walk soap:Body -> QueryNodesResponse -> QueryNodesResult, failing fast with a
    // descriptive error at each step. (Previously a malformed envelope surfaced as an
    // opaque "childNamed of undefined" TypeError instead.)
    const soapBody: xmldoc.XmlElement = organizationDocument.childNamed(TfsCatalogSoapClient.XmlSoapBody);
    if (!soapBody) {
        throw new Error(`No soap:Body was found in the OrganizationRoot envelope from ${this.endpointUrl}`);
    }
    const nodesResponse: xmldoc.XmlElement = soapBody.childNamed(TfsCatalogSoapClient.XmlQueryNodesResponse);
    if (!nodesResponse) {
        throw new Error(`No QueryNodesResponse was found in the OrganizationRoot envelope from ${this.endpointUrl}`);
    }
    const nodesResult: xmldoc.XmlElement = nodesResponse.childNamed(TfsCatalogSoapClient.XmlQueryNodesResult);
    if (!nodesResult) {
        throw new Error(`No QueryNodesResult was found in the OrganizationRoot envelope from ${this.endpointUrl}`);
    }
    const catalogResources: any = nodesResult.childNamed(TfsCatalogSoapClient.XmlCatalogResources);
    if (!catalogResources) {
        throw new Error(`No CatalogResources were received for OrganizationRoot from ${this.endpointUrl}`);
    }
    // Spin through children doing a case-insensitive guid comparison. Children without a
    // ResourceTypeIdentifier attribute (e.g. interleaved text nodes) are skipped instead
    // of crashing the walk.
    let orgRoot: any;
    for (const child of catalogResources.children) {
        const resourceTypeId: string = child.attr && child.attr.ResourceTypeIdentifier;
        if (resourceTypeId && resourceTypeId.toLowerCase() === TfsCatalogSoapClient.OrganizationalRoot) {
            orgRoot = child;
            break;
        }
    }
    if (!orgRoot) {
        throw new Error(`No organizationRoot was found in SOAP envelope from ${this.endpointUrl}`);
    }
    const nodeRefPaths: any = orgRoot.childNamed(TfsCatalogSoapClient.XmlNodeReferencesPaths);
    if (!nodeRefPaths || !nodeRefPaths.children || !nodeRefPaths.children.length) {
        throw new Error(`No NodeReferencePaths were found for OrganizationRoot from ${this.endpointUrl}`);
    }
    // The first <string> child holds the opaque node reference path used by later queries.
    return nodeRefPaths.children[0].val;
}
/*
Sample value of the parameter sent to this function:
<?xml version="1.0" encoding="UTF-8"?>
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<soap:Body>
<QueryNodesResponse xmlns="http://microsoft.com/webservices/">
<QueryNodesResult>
<CatalogResourceTypes>
<CatalogResourceType Identifier="b36f1bda-df2d-482b-993a-f194a31a1fa2" DisplayName="Team Foundation Server Instance">
<Description>A deployed instance of Team Foundation Server.</Description>
</CatalogResourceType>
<CatalogResourceType Identifier="ffaf34bb-aded-4507-9e52-fca85e91ba63" DisplayName="Team Foundation Server Web Application">
<Description>The web application that hosts a Team Foundation Server</Description>
</CatalogResourceType>
</CatalogResourceTypes>
<CatalogResources>
<CatalogResource Identifier="0cc8419b-da0f-4b0a-a816-c00d11d83558" DisplayName="Team Foundation Server Instance" ResourceTypeIdentifier="b36f1bda-df2d-482b-993a-f194a31a1fa2" TempCorrelationId="0cc8419b-da0f-4b0a-a816-c00d11d83558" ctype="0" MatchedQuery="true">
<CatalogServiceReferences>
<CatalogServiceReference ResourceIdentifier="0cc8419b-da0f-4b0a-a816-c00d11d83558" AssociationKey="Location">
<ServiceDefinition serviceType="LocationService" identifier="bf9cf1d0-24ac-4d35-aeca-6cd18c69c1fe" displayName="Location Service" relativeToSetting="0" relativePath="/TeamFoundation/Administration/v3.0/LocationService.asmx" description="Location Service for Visual Studio Team Foundation Server." toolId="Framework">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
</CatalogServiceReferences>
<Properties />
<NodeReferencePaths>
<string>3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==</string>
</NodeReferencePaths>
</CatalogResource>
<CatalogResource Identifier="0640fed7-84f8-45c5-b9bf-062045cae5c8" DisplayName="Team Foundation Server Web Application" ResourceTypeIdentifier="ffaf34bb-aded-4507-9e52-fca85e91ba63" TempCorrelationId="0640fed7-84f8-45c5-b9bf-062045cae5c8" ctype="0" MatchedQuery="false">
<CatalogServiceReferences />
<Properties />
<NodeReferencePaths>
<string>Vc1S6XwnTEe/isOiPfhmxw==TKuP+6nWJkWp5U9GA3ovcA==Dw5+eHh8ykK/e8nLXC8QyA==</string>
</NodeReferencePaths>
</CatalogResource>
</CatalogResources>
<CatalogNodes>
<CatalogNode FullPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==" default="false" ResourceIdentifier="0cc8419b-da0f-4b0a-a816-c00d11d83558" ParentPath="3eYRYkJOok6GHrKam0AcAA==" ChildItem="GJQSi7i010yMVKSDvyLgHQ==" NodeDependenciesIncluded="true" ctype="0" MatchedQuery="true">
<NodeDependencies>
<CatalogNodeDependency FullPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==" AssociationKey="WebApplication" RequiredNodeFullPath="Vc1S6XwnTEe/isOiPfhmxw==TKuP+6nWJkWp5U9GA3ovcA==Dw5+eHh8ykK/e8nLXC8QyA==" IsSingleton="false" />
<CatalogNodeDependency FullPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==" AssociationKey="WebApplication" RequiredNodeFullPath="Vc1S6XwnTEe/isOiPfhmxw==TKuP+6nWJkWp5U9GA3ovcA==Dw5+eHh8ykK/e8nLXC8QyA==" IsSingleton="false" />
</NodeDependencies>
</CatalogNode>
<CatalogNode FullPath="Vc1S6XwnTEe/isOiPfhmxw==TKuP+6nWJkWp5U9GA3ovcA==Dw5+eHh8ykK/e8nLXC8QyA==" default="false" ResourceIdentifier="0640fed7-84f8-45c5-b9bf-062045cae5c8" ParentPath="Vc1S6XwnTEe/isOiPfhmxw==TKuP+6nWJkWp5U9GA3ovcA==" ChildItem="Dw5+eHh8ykK/e8nLXC8QyA==" NodeDependenciesIncluded="true" ctype="0" MatchedQuery="false">
<NodeDependencies />
</CatalogNode>
</CatalogNodes>
<DeletedResources />
<DeletedNodeResources />
<DeletedNodes />
<LocationServiceLastChangeId>4006</LocationServiceLastChangeId>
</QueryNodesResult>
</QueryNodesResponse>
</soap:Body>
</soap:Envelope>
*/
/**
 * Extracts the node reference path of the Team Foundation Server Instance resource
 * from a QueryNodes SOAP response (see the sample envelope above).
 * Throws a descriptive Error when the envelope is missing or not shaped as expected.
 */
private parseFoundationServerRootPath(envelopeXml: any): string {
    if (!envelopeXml) {
        throw new Error(`No SOAP envelope was received for FoundationServer from ${this.endpointUrl}`);
    }
    const foundationServerDocument: xmldoc.XmlDocument = new xmldoc.XmlDocument(envelopeXml);
    // Walk soap:Body -> QueryNodesResponse -> QueryNodesResult, failing fast with a
    // descriptive error at each step. (Previously a malformed envelope surfaced as an
    // opaque "childNamed of undefined" TypeError instead.)
    const soapBody: xmldoc.XmlElement = foundationServerDocument.childNamed(TfsCatalogSoapClient.XmlSoapBody);
    if (!soapBody) {
        throw new Error(`No soap:Body was found in the FoundationServer envelope from ${this.endpointUrl}`);
    }
    const nodesResponse: xmldoc.XmlElement = soapBody.childNamed(TfsCatalogSoapClient.XmlQueryNodesResponse);
    if (!nodesResponse) {
        throw new Error(`No QueryNodesResponse was found in the FoundationServer envelope from ${this.endpointUrl}`);
    }
    const nodesResult: xmldoc.XmlElement = nodesResponse.childNamed(TfsCatalogSoapClient.XmlQueryNodesResult);
    if (!nodesResult) {
        throw new Error(`No QueryNodesResult was found in the FoundationServer envelope from ${this.endpointUrl}`);
    }
    const catalogResources: any = nodesResult.childNamed(TfsCatalogSoapClient.XmlCatalogResources);
    if (!catalogResources) {
        throw new Error(`No CatalogResources were received for FoundationServer from ${this.endpointUrl}`);
    }
    // Spin through children doing a case-insensitive guid comparison. Children without a
    // ResourceTypeIdentifier attribute (e.g. interleaved text nodes) are skipped instead
    // of crashing the walk.
    let serverInstance: any;
    for (const child of catalogResources.children) {
        const resourceTypeId: string = child.attr && child.attr.ResourceTypeIdentifier;
        if (resourceTypeId && resourceTypeId.toLowerCase() === TfsCatalogSoapClient.TeamFoundationServerInstance) {
            serverInstance = child;
            break;
        }
    }
    if (!serverInstance) {
        throw new Error(`No serverInstance was found in SOAP envelope from ${this.endpointUrl}`);
    }
    const nodeRefPaths: any = serverInstance.childNamed(TfsCatalogSoapClient.XmlNodeReferencesPaths);
    if (!nodeRefPaths || !nodeRefPaths.children || !nodeRefPaths.children.length) {
        throw new Error(`No NodeReferencePaths were found for FoundationServer from ${this.endpointUrl}`);
    }
    // The first <string> child holds the opaque node reference path used by later queries.
    return nodeRefPaths.children[0].val;
}
/*
Sample value of the parameter sent to this function:
<?xml version="1.0" encoding="UTF-8"?>
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<soap:Body>
<QueryNodesResponse xmlns="http://microsoft.com/webservices/">
<QueryNodesResult>
<CatalogResourceTypes>
<CatalogResourceType Identifier="47fa57a4-8157-4fb5-9a64-a7a4954bd284" DisplayName="Team Web Access">
<Description>Team Web Access Location</Description>
</CatalogResourceType>
<CatalogResourceType Identifier="26338d9e-d437-44aa-91f2-55880a328b54" DisplayName="Team Project Collection">
<Description>A Team Project Collection that exists within the TFS deployment.</Description>
</CatalogResourceType>
</CatalogResourceTypes>
<CatalogResources>
<CatalogResource Identifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" DisplayName="Team Web Access" ResourceTypeIdentifier="47fa57a4-8157-4fb5-9a64-a7a4954bd284" TempCorrelationId="f1a834e9-15e6-4c2e-916e-1e536b8666b0" ctype="0" MatchedQuery="true">
<CatalogServiceReferences>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="Annotate">
<ServiceDefinition serviceType="Annotate" identifier="74b15e02-0ac2-414f-a9b9-30268659d3b5" displayName="Team Web Access (Annotate)" relativeToSetting="0" relativePath="/web/ann.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="AnnotateSourceControlItem">
<ServiceDefinition serviceType="AnnotateSourceControlItem" identifier="d271e722-c261-4bc2-b0f7-1c8a9e13f907" displayName="Team Web Access (AnnotateSourceControlItem)" relativeToSetting="0" relativePath="/web/ann.aspx?pcguid={projectCollectionGuid}&path={itemPath}&cs={itemChangeset}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ChangesetDetail">
<ServiceDefinition serviceType="ChangesetDetail" identifier="d40ef625-cca7-4e73-b9ec-86cbe1534ce0" displayName="Team Web Access (ChangesetDetail)" relativeToSetting="0" relativePath="/web/cs.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="CreateWorkItem">
<ServiceDefinition serviceType="CreateWorkItem" identifier="14cd69c6-88f9-4c8c-a259-d2441d77d1af" displayName="Team Web Access (CreateWorkItem)" relativeToSetting="0" relativePath="/web/wi.aspx?puri={projectUri}&wit={workItemType}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="Difference">
<ServiceDefinition serviceType="Difference" identifier="2b84d900-1f08-486c-9c47-0e6af371d03c" displayName="Team Web Access (Difference)" relativeToSetting="0" relativePath="/web/diff.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="DiffSourceControlItems">
<ServiceDefinition serviceType="DiffSourceControlItems" identifier="5e91c4da-0013-4ebb-943d-cc77f5adb82d" displayName="Team Web Access (DiffSourceControlItems)" relativeToSetting="0" relativePath="/web/diff.aspx?pcguid={projectCollectionGuid}&opath={originalItemPath}&ocs={originalItemChangeset}&mpath={modifiedItemPath}&mcs={modifiedItemChangeset}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="DiffSourceControlShelvedItem">
<ServiceDefinition serviceType="DiffSourceControlShelvedItem" identifier="57768903-455f-4001-a956-baff869fef83" displayName="Team Web Access (DiffSourceControlShelvedItem)" relativeToSetting="0" relativePath="/web/diff.aspx?pcguid={projectCollectionGuid}&opath={originalItemPath}&ocs={originalItemChangeset}&mpath={shelvedItemPath}&mss={shelvesetName};{shelvesetOwner}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ExploreSourceControlPath">
<ServiceDefinition serviceType="ExploreSourceControlPath" identifier="ac0770bc-1dd6-4b8e-a811-5a03690df44f" displayName="Team Web Access (ExploreSourceControlPath)" relativeToSetting="0" relativePath="/web/scc.aspx?pcguid={projectCollectionGuid}&path={sourceControlPath}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="Home">
<ServiceDefinition serviceType="TSWAHome" identifier="0f9ced5d-89f9-4743-bab8-fa511ff09a8c" displayName="Team Web Access (TSWAHome)" relativeToSetting="0" relativePath="/web" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="OpenWorkItem">
<ServiceDefinition serviceType="OpenWorkItem" identifier="85a61ff8-0af0-44f1-8d9a-2fabd351a26a" displayName="Team Web Access (OpenWorkItem)" relativeToSetting="0" relativePath="/web/wi.aspx?pcguid={projectCollectionGuid}&id={workItemId}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="QueryResults">
<ServiceDefinition serviceType="QueryResults" identifier="42acdf9b-f814-4e10-abaa-0f7b5d5df45f" displayName="Team Web Access (QueryResults)" relativeToSetting="0" relativePath="/web/qr.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ShelvesetDetail">
<ServiceDefinition serviceType="ShelvesetDetail" identifier="b5c6e965-ca8d-4dc6-a6fc-f25af0c71d19" displayName="Team Web Access (ShelvesetDetail)" relativeToSetting="0" relativePath="/web/ss.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="SourceExplorer">
<ServiceDefinition serviceType="SourceExplorer" identifier="56b61720-a7e1-4962-af6c-a1484bdfa92c" displayName="Team Web Access (SourceExplorer)" relativeToSetting="0" relativePath="/web/scc.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewBuildDetails">
<ServiceDefinition serviceType="ViewBuildDetails" identifier="3a90493b-068d-4f1e-ad35-6f43c967a0d8" displayName="Team Web Access (ViewBuildDetails)" relativeToSetting="0" relativePath="/web/build.aspx?pcguid={projectCollectionGuid}&builduri={buildUri}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewChangesetDetails">
<ServiceDefinition serviceType="ViewChangesetDetails" identifier="91f567e1-087b-4ded-ad2b-54099a60fdae" displayName="Team Web Access (ViewChangesetDetails)" relativeToSetting="0" relativePath="/web/cs.aspx?pcguid={projectCollectionGuid}&cs={changesetId}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewItem">
<ServiceDefinition serviceType="ViewItem" identifier="3b2cea6d-c926-46c5-8660-e0d265705be0" displayName="Team Web Access (ViewItem)" relativeToSetting="0" relativePath="/web/view.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewServerQueryResults">
<ServiceDefinition serviceType="ViewServerQueryResults" identifier="062ad1b2-b1e6-4f72-ba32-391b5f5474e4" displayName="Team Web Access (ViewServerQueryResults)" relativeToSetting="0" relativePath="/web/qr.aspx?puri={projectUri}&path={storedQueryPath}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewShelvesetDetails">
<ServiceDefinition serviceType="ViewShelvesetDetails" identifier="5f9a6d4f-766e-4a70-9ddf-bfde6c90741e" displayName="Team Web Access (ViewShelvesetDetails)" relativeToSetting="0" relativePath="/web/ss.aspx?pcguid={projectCollectionGuid}&ssname={shelvesetName}&ssowner={shelvesetOwner}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewSourceControlItem">
<ServiceDefinition serviceType="ViewSourceControlItem" identifier="0fdc7b8f-0294-43ec-a98f-ca65213914da" displayName="Team Web Access (ViewSourceControlItem)" relativeToSetting="0" relativePath="/web/view.aspx?pcguid={projectCollectionGuid}&path={itemPath}&cs={itemChangeset}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewSourceControlItemHistory">
<ServiceDefinition serviceType="ViewSourceControlItemHistory" identifier="ee15e514-d6c7-4aac-96f4-7c334c9459fc" displayName="Team Web Access (ViewSourceControlItemHistory)" relativeToSetting="0" relativePath="/web/history.aspx?pcguid={projectCollectionGuid}&path={itemPath}&cs={itemChangeset}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewSourceControlShelvedItem">
<ServiceDefinition serviceType="ViewSourceControlShelvedItem" identifier="4c81a44d-67ab-4d23-9cbe-339c9102993b" displayName="Team Web Access (ViewSourceControlShelvedItem)" relativeToSetting="0" relativePath="/web/view.aspx?pcguid={projectCollectionGuid}&path={itemPath}&ss={shelvesetName};{shelvesetOwner}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="ViewWiqlQueryResults">
<ServiceDefinition serviceType="ViewWiqlQueryResults" identifier="0f9ced5d-89f9-4743-bab8-fa511ff09a8c" displayName="Team Web Access (ViewWiqlQueryResults)" relativeToSetting="0" relativePath="/web/qr.aspx?puri={projectUri}&wiql={queryText}&name={queryDisplayName}" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
<CatalogServiceReference ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" AssociationKey="WorkItemEditor">
<ServiceDefinition serviceType="WorkItemEditor" identifier="7bbe4c9c-268b-4175-8979-a06878149aef" displayName="Team Web Access (WorkItemEditor)" relativeToSetting="0" relativePath="/web/wi.aspx" toolId="TSWebAccess">
<RelativeToSetting>Context</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
</CatalogServiceReferences>
<Properties />
<NodeReferencePaths>
<string>3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==5WM1lP72kkiwOcTd6ZWclw==</string>
</NodeReferencePaths>
</CatalogResource>
<CatalogResource Identifier="0910fe90-d0b2-4748-a535-3bbe65f908ec" DisplayName="DefaultCollection" ResourceTypeIdentifier="26338d9e-d437-44aa-91f2-55880a328b54" TempCorrelationId="0910fe90-d0b2-4748-a535-3bbe65f908ec" ctype="0" MatchedQuery="true">
<Description />
<CatalogServiceReferences>
<CatalogServiceReference ResourceIdentifier="0910fe90-d0b2-4748-a535-3bbe65f908ec" AssociationKey="Location">
<ServiceDefinition serviceType="LocationService" identifier="4a3d32f1-f8f4-42bc-9fea-57e547e7463d" displayName="Location Service" relativeToSetting="2" relativePath="/DefaultCollection/Services/v3.0/LocationService.asmx" description="Location Service for Visual Studio Team Foundation Server." toolId="Framework">
<RelativeToSetting>WebApplication</RelativeToSetting>
<ServiceOwner>00000000-0000-0000-0000-000000000000</ServiceOwner>
<LocationMappings />
<ParentIdentifier>00000000-0000-0000-0000-000000000000</ParentIdentifier>
<InheritLevel>None</InheritLevel>
</ServiceDefinition>
</CatalogServiceReference>
</CatalogServiceReferences>
<Properties>
<KeyValueOfStringString>
<Key>InstanceId</Key>
<Value>4a3d32f1-f8f4-42bc-9fea-57e547e7463d</Value>
</KeyValueOfStringString>
</Properties>
<NodeReferencePaths>
<string>3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==pH3F9yLMlUOsZc43M2a04A==</string>
</NodeReferencePaths>
</CatalogResource>
</CatalogResources>
<CatalogNodes>
<CatalogNode FullPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==5WM1lP72kkiwOcTd6ZWclw==" default="false" ResourceIdentifier="f1a834e9-15e6-4c2e-916e-1e536b8666b0" ParentPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==" ChildItem="5WM1lP72kkiwOcTd6ZWclw==" NodeDependenciesIncluded="true" ctype="0" MatchedQuery="true">
<NodeDependencies />
</CatalogNode>
<CatalogNode FullPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==pH3F9yLMlUOsZc43M2a04A==" default="false" ResourceIdentifier="0910fe90-d0b2-4748-a535-3bbe65f908ec" ParentPath="3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==" ChildItem="pH3F9yLMlUOsZc43M2a04A==" NodeDependenciesIncluded="true" ctype="0" MatchedQuery="true">
<NodeDependencies />
</CatalogNode>
</CatalogNodes>
<DeletedResources />
<DeletedNodeResources />
<DeletedNodes />
<LocationServiceLastChangeId>4006</LocationServiceLastChangeId>
</QueryNodesResult>
</QueryNodesResponse>
</soap:Body>
</soap:Envelope>
*/
// Walks the QueryNodes SOAP envelope (Body -> QueryNodesResponse ->
// QueryNodesResult -> CatalogResources) and returns every CatalogResource
// element whose ResourceTypeIdentifier marks it as a project collection.
// Throws when the envelope or the CatalogResources element is missing.
private parseProjectCollections(envelopeXml: any): any[] {
    if (!envelopeXml) {
        throw new Error(`No SOAP envelope was received for ProjectCollections from ${this.endpointUrl}`);
    }
    const parsedDocument: xmldoc.XmlDocument = new xmldoc.XmlDocument(envelopeXml);
    const body: xmldoc.XmlElement = parsedDocument.childNamed(TfsCatalogSoapClient.XmlSoapBody);
    const response: xmldoc.XmlElement = body.childNamed(TfsCatalogSoapClient.XmlQueryNodesResponse);
    const result: xmldoc.XmlElement = response.childNamed(TfsCatalogSoapClient.XmlQueryNodesResult);
    const resources: any = result.childNamed(TfsCatalogSoapClient.XmlCatalogResources);
    if (!resources) {
        throw new Error(`No CatalogResources were received for ProjectCollections from ${this.endpointUrl}`);
    }
    // Keep only the resources typed as project collections (guid comparison
    // is case-insensitive).
    const collections: any[] = [];
    resources.eachChild((resource) => {
        if (resource.attr.ResourceTypeIdentifier.toLowerCase() === TfsCatalogSoapClient.ProjectCollection) {
            collections.push(resource);
        }
    });
    return collections;
}
// Based on the passed in collectionName, it queries the TFS Catalog Service to find
// the collection's display name, id (guid), and API URL (_apis/projectCollections/)
// This method returns 'any' (of the _shape_ TeamProjectCollectionReference) which will
// match "good enough" to the type expected in repositoryinfoclient.
// Resolves with undefined when no collection matches; rejects on any
// transport/auth failure at ANY of the three catalog queries.
public GetProjectCollection(collectionName: string): Q.Promise<any> {
    const deferred: Q.Deferred<any> = Q.defer<any>();
    // The three catalog queries are chained (instead of nested) so that a
    // rejection at ANY step reaches the single .fail below. Previously only
    // the first query had a failure handler, so an error in the second or
    // third query left the returned promise pending forever.
    //Get the organizational root
    this.getCatalogDataFromServer(TfsCatalogSoapClient.SingleRecurseStar, TfsCatalogSoapClient.QueryOptionsNone).then((catalogDataXml: any) => {
        //Get the foundationServer, orgRootPath looks something like 3eYRYkJOok6GHrKam0AcAA==
        const orgRootPath: string = this.parseOrganizationRootPath(catalogDataXml);
        return this.getCatalogDataFromServer(orgRootPath + TfsCatalogSoapClient.SingleRecurseStar, TfsCatalogSoapClient.QueryOptionsExpandDependencies);
    }).then((catalogDataXml: any) => {
        //Get the project collections, foundationServerRootPath looks something like 3eYRYkJOok6GHrKam0AcAA==GJQSi7i010yMVKSDvyLgHQ==
        const foundationServerRootPath: string = this.parseFoundationServerRootPath(catalogDataXml);
        return this.getCatalogDataFromServer(foundationServerRootPath + TfsCatalogSoapClient.SingleRecurseStar, TfsCatalogSoapClient.QueryOptionsExpandDependencies);
    }).then((catalogDataXml: any) => {
        const collectionNodes: any[] = this.parseProjectCollections(catalogDataXml);
        //Now go and find the project collection we're looking for (name match is case-insensitive)
        let foundTeamProject: any;
        for (let idx: number = 0; idx < collectionNodes.length; idx++) {
            if (collectionNodes[idx].attr.DisplayName.toLowerCase() === collectionName.toLowerCase()) {
                foundTeamProject = collectionNodes[idx];
                break;
            }
        }
        if (foundTeamProject) {
            // The collection's guid lives under Properties/KeyValueOfStringString/Value
            const props: any = foundTeamProject.childNamed("Properties");
            const strstr: any = props.childNamed("KeyValueOfStringString");
            const id: any = strstr.childNamed("Value");
            //Resolve an object that _looks_ like a TeamProjectCollectionReference object
            deferred.resolve({ name: foundTeamProject.attr.DisplayName, id: id.val, url: url.resolve(this.serverUrl, "_apis/projectCollections/" + id.val)});
        } else {
            deferred.resolve(undefined);
        }
    }).fail((err) => {
        // Any failure in the chain (auth failure on the organizational root,
        // or any later catalog query) rejects the returned promise.
        deferred.reject(err);
    });
    return deferred.promise;
}
// Posts a QueryNodes SOAP request (for the given path specs and query
// options) to the catalog endpoint and resolves with the raw response
// envelope. On error, the HTTP status code is attached to the error before
// rejecting.
private getCatalogDataFromServer(pathSpecs: string, queryOptions: string): Q.Promise<any> {
    const deferred: Q.Deferred<any> = Q.defer<any>();
    const requestEnvelope: string = "<?xml version='1.0' encoding='UTF-8'?>"
        + "<soap:Envelope xmlns:soap=\"http://www.w3.org/2003/05/soap-envelope\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" "
        + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\">"
        + "<soap:Body xmlns=\"http://microsoft.com/webservices/\">"
        + "<QueryNodes>"
        + "<pathSpecs>"
        + "<string>" + pathSpecs + "</string>"
        + "</pathSpecs>"
        + "<queryOptions>" + queryOptions + "</queryOptions>"
        + "</QueryNodes>"
        + "</soap:Body>"
        + "</soap:Envelope>";
    this.soapClient.post(this.endpointUrl, requestEnvelope, (err: any, statusCode: number, responseEnvelope: any) => {
        if (err) {
            err.statusCode = statusCode;
            deferred.reject(err);
            return;
        }
        deferred.resolve(responseEnvelope);
    });
    return deferred.promise;
}
} | the_stack |
/// <reference path="../metricsPlugin.ts"/>
/// <reference path="../../includes.ts"/>
module HawkularMetrics {
// Kinds of alert conditions distinguished by the console UI.
export enum AlertType {
AVAILABILITY,
THRESHOLD,
RANGE
}
// Filter criteria for queryAlerts(); every field is optional and is copied
// into the REST query string only when present.
export interface IHawkularAlertCriteria {
startTime?: TimestampInMillis;
endTime?: TimestampInMillis;
alertIds?: string;
triggerIds?: string;
statuses?: string;
severities?: string;
tags?: string;
thin?: boolean;
currentPage?: number;
perPage?: number;
sort?: string;
order?: string;
}
// Filter criteria for queryActionsHistory(); same optional-field convention
// as IHawkularAlertCriteria.
export interface IHawkularActionCriteria {
startTime?: TimestampInMillis;
endTime?: TimestampInMillis;
actionPlugins?: string;
actionIds?: string;
alertIds?: string;
results?: string;
thin?: boolean;
currentPage?: number;
perPage?: number;
sort?: string;
order?: string;
}
// Filter criteria for queryTriggers(); same optional-field convention.
export interface IHawkularTriggerCriteria {
triggerIds?: string;
tags?: string;
thin?: boolean;
currentPage?: number;
perPage?: number;
sort?: string;
order?: string;
}
// Result of queryAlerts(): the alert list plus the raw response headers
// (used by callers for pagination metadata).
export interface IHawkularAlertQueryResult {
alertList: IAlert[];
headers: any;
}
// Result of queryTriggers(): the trigger list plus the raw response headers.
export interface IHawkularTriggerQueryResult {
triggerList: IAlertTrigger[];
headers: any;
}
// Contract for the alerts manager service: alert querying/resolution,
// trigger CRUD, and action management against the Hawkular Alerts REST API.
export interface IHawkularAlertsManager {
// Alerts
/**
* @name addAction
* @desc Check if a previous action exists, or it creates a new one
* @param action - action
*/
addAction(action: ITriggerAction): ng.IPromise<void>;
/**
* @name queryAlerts
* @desc Fetch Alerts with different criteria
* @param criteria - Filter for alerts query
* @returns {ng.IPromise} with a list of Alerts
*/
queryAlerts(criteria?: IHawkularAlertCriteria):
ng.IPromise<IHawkularAlertQueryResult>;
/**
* @name getAlert
* @desc Single alert fetch
* @param alertId - Alert to query
*/
getAlert(alertId: string): ng.IPromise<IAlert>;
/**
* @name queryActionsHistory
* @desc Fetch Actions from history via criteria
* @param criteria - Filter for actions query
*/
queryActionsHistory(criteria?: IHawkularActionCriteria): ng.IPromise<any>;
/**
* @name resolveAlerts
* @desc Mark as resolved a list of alerts
* @param resolvedAlerts - An object with the description of the resolution of the alerts, in the form
*
* resolvedAlerts = {
* alertIds: A string with a comma separated list of Alert ids,
* resolvedBy: The user responsible for the resolution of the alerts,
* resolvedNotes: Additional notes to add in the resolved state
* }
*
* @returns {ng.IPromise}
*/
resolveAlerts(resolvedAlerts: any): ng.IPromise<any>;
/**
* @name addNote
* @desc Add a note on an alert
* @param alertNote - An object with the user and the text of the note in the form
*
* alertNote = {
* alertId: A string with the alertId to place the note,
* user: The user author of the note,
* text: the content of the note
* }
*/
addNote(alertNote: any): ng.IPromise<any>;
/**
* @name ackAlerts
* @desc Mark as acknowledged a list of alerts
* @param ackAlerts - An object with the description of the acknowledge of the alerts, in the form
*
* ackAlerts = {
* alertIds: A string with a comma separated list of Alert ids,
* ackBy: The user responsible for the acknowledgement of the alerts,
* ackNotes: Additional notes to add in the acknowledged state
* }
*
* @returns {ng.IPromise}
*/
ackAlerts(ackAlerts: any): ng.IPromise<any>;
// Triggers
/**
* @name existTrigger
* @desc Check if a trigger exists
* @param {TriggerId} triggerId - The id of the trigger to check
* @returns {ng.IPromise}
*/
existTrigger(triggerId: TriggerId): any;
/**
* @name getTrigger
* @desc Fetch a full Trigger with Dampening and Conditions object attached
* @param {TriggerId} triggerId - The id of the trigger to fetch
* @returns {ng.IPromise} with value:
*
* promiseValue = {
* trigger: <The trigger object>,
* dampenings: <List of dampenings linked with the trigger>,
* conditions: <List of conditions linked with the trigger>
* }
*/
getTrigger(triggerId: TriggerId): any;
/**
* @name queryTriggers
* @desc Fetch Triggers with different criteria
* @param criteria - Filter for triggers query
* @returns {ng.IPromise} with a list of Triggers
*/
queryTriggers(criteria?: IHawkularTriggerCriteria):
ng.IPromise<IHawkularTriggerQueryResult>;
/**
* @name getTriggerConditions
* @desc Fetch only Conditions for a specified trigger
* @param {TriggerId} triggerId - The id of the trigger to fetch Conditions
* @returns {ng.IPromise} with a list of conditions as a value
*/
getTriggerConditions(triggerId: TriggerId): ng.IPromise<any>;
/**
* @name createTrigger
* @desc Create a Trigger with Dampenings and Conditions
* @param fullTrigger - A full trigger representation where
*
* fullTrigger = {
* trigger: <The trigger object>,
* dampenings: <List of dampenings linked with the trigger>,
* conditions: <List of conditions linked with the trigger>
* }
*
* @param errorCallback - Function to be called on error
*/
createTrigger(fullTrigger: any, errorCallback: any): ng.IPromise<void>;
/**
* @name deleteTrigger
* @desc Delete a Trigger with associated Dampenings and Conditions
* @param {TriggerId} triggerId - The id of the trigger to delete
*/
deleteTrigger(triggerId: TriggerId): ng.IPromise<any>;
/**
* @name updateTrigger
* @desc Update an existing Trigger with Dampenings and Conditions
* @param fullTrigger - An existing full trigger representation where
*
* fullTrigger = {
* trigger: <The trigger object>,
* dampenings: <List of dampenings linked with the trigger>,
* conditions: <List of conditions linked with the trigger>
* }
*
* @param errorCallback - Function to be called on error
* @param backup - A backup of the fullTrigger, it updates only the trigger, dampenings or conditions
* that have been changed
*
* backupTrigger = {
* trigger: <The trigger object>,
* dampenings: <List of dampenings linked with the trigger>,
* conditions: <List of conditions linked with the trigger>
* }
*/
updateTrigger(fullTrigger: any, errorCallback: any, backupTrigger?: any): ng.IPromise<any>;
}
// Implementation of IHawkularAlertsManager: a wrapper over the HawkularAlert
// $resource factory that builds REST query parameters from criteria objects
// and reshapes server responses into the forms the console UI expects.
export class HawkularAlertsManager implements IHawkularAlertsManager {
// All collaborators are Angular-injected; HawkularAlert is the $resource
// factory exposing the Alert/Trigger/Dampening/Conditions/Action endpoints.
constructor(private HawkularAlert: any,
private $q: ng.IQService,
private $log: ng.ILogService,
private $moment: any,
private ErrorsManager: IErrorsManager) {
}
// Fetch alerts matching the (optional) criteria and decorate each server
// alert in place with console-friendly fields (avg, start, end, type,
// threshold, isToday/isThisYear, durationTime, message).
public queryAlerts(criteria: IHawkularAlertCriteria): ng.IPromise<IHawkularAlertQueryResult> {
let alertList = [];
let headers;
/* Format of Alerts:
alert: {
type: 'THRESHOLD' or 'AVAILABILITY',
avg: Average value based on the evalSets 'values',
start: The time of the first data ('dataTimestamp') in evalSets,
threshold: The threshold taken from condition.threshold,
end: The time when the alert was sent ('ctime')
}
*/
// Copy only the criteria fields that were actually provided.
let queryParams = {};
if (criteria && criteria.startTime) {
queryParams['startTime'] = criteria.startTime;
}
if (criteria && criteria.endTime) {
queryParams['endTime'] = criteria.endTime;
}
if (criteria && criteria.alertIds) {
queryParams['alertIds'] = criteria.alertIds;
}
if (criteria && criteria.triggerIds) {
queryParams['triggerIds'] = criteria.triggerIds;
}
if (criteria && criteria.statuses) {
queryParams['statuses'] = criteria.statuses;
}
if (criteria && criteria.severities) {
queryParams['severities'] = criteria.severities;
}
if (criteria && criteria.tags) {
queryParams['tags'] = criteria.tags;
}
if (criteria && criteria.thin) {
queryParams['thin'] = criteria.thin;
}
// Page 0 is deliberately not sent (server default).
if (criteria && criteria.currentPage && criteria.currentPage !== 0) {
queryParams['page'] = criteria.currentPage;
}
if (criteria && criteria.perPage) {
queryParams['per_page'] = criteria.perPage;
}
if (criteria && criteria.sort) {
queryParams['sort'] = criteria.sort;
}
if (criteria && criteria.order) {
queryParams['order'] = criteria.order;
}
return this.HawkularAlert.Alert.query(queryParams, (serverAlerts: any, getHeaders: any) => {
headers = getHeaders();
let momentNow = this.$moment();
for (let i = 0; i < serverAlerts.length; i++) {
let serverAlert = serverAlerts[i];
// consoleAlert aliases serverAlert: decoration mutates it in place.
let consoleAlert: any = serverAlert;
consoleAlert.id = serverAlert.id;
consoleAlert.triggerId = serverAlert.triggerId;
if (serverAlert.evalSets && serverAlert.evalSets[0] && serverAlert.evalSets[0][0]) {
consoleAlert.dataId = serverAlert.evalSets[0][0].condition.dataId;
}
consoleAlert.end = serverAlert.ctime;
let sum: number = 0.0;
let count: number = 0.0;
if (serverAlert.evalSets) {
// NOTE(review): assumes serverAlert.context is always present when
// evalSets exist — confirm against the server response contract.
if (serverAlert.context.triggerType !== 'Event') {
for (let j = 0; j < serverAlert.evalSets.length; j++) {
let evalItem = serverAlert.evalSets[j][0];
// The first dataTimestamp/threshold/type seen wins.
if (!consoleAlert.start && evalItem.dataTimestamp) {
consoleAlert.start = evalItem.dataTimestamp;
}
if (!consoleAlert.threshold && evalItem.condition.threshold) {
consoleAlert.threshold = evalItem.condition.threshold;
}
if (!consoleAlert.type && evalItem.condition.type) {
consoleAlert.type = evalItem.condition.type;
}
let momentAlert = this.$moment(consoleAlert.end);
if (momentAlert.year() === momentNow.year()) {
consoleAlert.isThisYear = true;
if (momentAlert.dayOfYear() === momentNow.dayOfYear()) {
consoleAlert.isToday = true;
}
}
if (undefined !== evalItem.rate) {
// handle rate conditions
sum += evalItem.rate;
} else {
// handle 'value' conditions and also compare conditions ('value1')
sum += ((undefined !== evalItem.value) ? evalItem.value : evalItem.value1);
}
count++;
}
consoleAlert.avg = sum / count;
consoleAlert.durationTime = consoleAlert.end - consoleAlert.start;
} else {
// Event-type triggers: the message lives in the first eval item.
let evalItem = serverAlert.evalSets[0][0];
let event = evalItem.value;
consoleAlert.message = event.context.Message;
}
}
alertList.push(consoleAlert);
}
}, (error) => {
this.$log.debug('querying data error', error);
}).$promise.then((): IHawkularAlertQueryResult => {
return {
alertList: alertList,
headers: headers
};
});
}
// Fetch a single alert by id.
public getAlert(alertId: string): ng.IPromise<IAlert> {
return this.HawkularAlert.Alert.get({ alertId: alertId }).$promise;
}
// Fetch action history entries matching the (optional) criteria.
public queryActionsHistory(criteria?: IHawkularActionCriteria): ng.IPromise<any> {
let actionHistoryList = [];
let headers;
let queryParams = {};
if (criteria && criteria.alertIds) {
queryParams['alertIds'] = criteria.alertIds;
}
if (criteria && criteria.actionPlugins) {
queryParams['actionPlugins'] = criteria.actionPlugins;
}
if (criteria && criteria.actionIds) {
queryParams['actionIds'] = criteria.actionIds;
}
if (criteria && criteria.results) {
queryParams['results'] = criteria.results;
}
// Unlike the other params, 'thin' is always sent: the caller's value when
// criteria were supplied (possibly undefined), true otherwise.
if (criteria) {
queryParams['thin'] = criteria.thin;
} else {
queryParams['thin'] = true;
}
if (criteria && criteria.startTime) {
queryParams['startTime'] = criteria.startTime;
}
if (criteria && criteria.endTime) {
queryParams['endTime'] = criteria.endTime;
}
if (criteria && criteria.currentPage && criteria.currentPage !== 0) {
queryParams['page'] = criteria.currentPage;
}
if (criteria && criteria.perPage) {
queryParams['per_page'] = criteria.perPage;
}
if (criteria && criteria.sort) {
queryParams['sort'] = criteria.sort;
}
if (criteria && criteria.order) {
queryParams['order'] = criteria.order;
}
return this.HawkularAlert.Action.queryHistory(queryParams, (serverActionsHistory: any, getHeaders: any) => {
headers = getHeaders();
actionHistoryList = serverActionsHistory;
}, (error) => {
this.$log.debug('querying data error', error);
}).$promise.then(() => {
return {
actionsList: actionHistoryList,
headers: headers
};
});
}
// Bulk-resolve the alerts described by resolvedAlerts (see interface doc).
public resolveAlerts(resolvedAlerts: any): ng.IPromise<any> {
return this.HawkularAlert.Alert.resolvemany(resolvedAlerts, {}).$promise;
}
// Attach a user note to an alert.
public addNote(alertNote: any): ng.IPromise<any> {
return this.HawkularAlert.Alert.note(alertNote).$promise;
}
// Bulk-acknowledge the alerts described by ackAlerts (see interface doc).
public ackAlerts(ackAlerts: any): ng.IPromise<any> {
return this.HawkularAlert.Alert.ackmany(ackAlerts, {}).$promise;
}
// Resolves if the trigger exists; the promise rejects otherwise.
public existTrigger(triggerId: TriggerId): any {
return this.HawkularAlert.Trigger.get({ triggerId: triggerId }).$promise;
}
// Fetch the full trigger representation (trigger + dampenings + conditions).
public getTrigger(triggerId: TriggerId): any {
return this.HawkularAlert.Trigger.full({ triggerId: triggerId }).$promise;
}
// Fetch only the conditions of a trigger.
public getTriggerConditions(triggerId: TriggerId): ng.IPromise<any> {
return this.HawkularAlert.Conditions.query({ triggerId: triggerId }).$promise;
}
// Create a trigger, then its dampenings, then its conditions (split into
// FIRING vs AUTORESOLVE batches). Resolves when all follow-up saves settle.
public createTrigger(fullTrigger: any, errorCallback: any): ng.IPromise<void> {
// Defaults applied to any field the caller did not set on the trigger.
let triggerDefaults = {
description: 'Created on ' + Date(),
firingMatch: 'ALL',
autoResolveMatch: 'ALL',
enabled: true,
autoResolve: true,
actions: {}
};
let trigger: IAlertTrigger = angular.extend(triggerDefaults, fullTrigger.trigger);
return this.HawkularAlert.Trigger.save(trigger).$promise.then((savedTrigger) => {
let dampeningPromises = [];
for (let i = 0; fullTrigger.dampenings && i < fullTrigger.dampenings.length; i++) {
if (fullTrigger.dampenings[i]) {
let dampeningPromise = this.HawkularAlert.Dampening.save({ triggerId: savedTrigger.id },
fullTrigger.dampenings[i]).$promise.then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error creating dampening.', errorCallback);
});
dampeningPromises.push(dampeningPromise);
}
}
let firingConditions = [];
let autoResolveConditions = [];
for (let j = 0; fullTrigger.conditions && j < fullTrigger.conditions.length; j++) {
if (fullTrigger.conditions[j]) {
if (fullTrigger.conditions[j].triggerMode && fullTrigger.conditions[j].triggerMode === 'AUTORESOLVE') {
autoResolveConditions.push(fullTrigger.conditions[j]);
} else {
// A condition without triggerMode is treated as FIRING
firingConditions.push(fullTrigger.conditions[j]);
}
}
}
let conditionPromises = [];
if (firingConditions.length > 0) {
let conditionPromise = this.HawkularAlert.Conditions.save({
triggerId: savedTrigger.id,
triggerMode: 'FIRING'
},
firingConditions).$promise.then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error creating firing conditions.', errorCallback);
});
conditionPromises.push(conditionPromise);
}
if (autoResolveConditions.length > 0) {
let conditionPromise = this.HawkularAlert.Conditions.save({
triggerId: savedTrigger.id,
triggerMode: 'AUTORESOLVE'
},
autoResolveConditions).$promise.then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error creating autoresolve conditions.', errorCallback);
});
conditionPromises.push(conditionPromise);
}
return this.$q.all(Array.prototype.concat(dampeningPromises, conditionPromises));
});
}
// Delete a trigger (server cascades to its dampenings and conditions).
// NOTE(review): declared ng.IPromise<void> while the interface declares
// ng.IPromise<any> — harmless at runtime, but inconsistent; confirm intent.
public deleteTrigger(triggerId: TriggerId): ng.IPromise<void> {
return this.HawkularAlert.Trigger.delete({ triggerId: triggerId }).$promise;
}
// This is a temporary solution. For now, the UI allows only member-level editing. But we want to
// provide only group-level update. So, using the member, fetch the group trigger and then
// prepare the group update. Because at the moment we don't allow ad-hoc condition addition or removal
// (so basically the condition set remains the same, only values, like thresholds, change) we don't need
// to supply a dataMemberIdMap, instead we can rely on alerts to use the previously
// supplied mappings for each member (yay!).
public updateTrigger(fullTrigger: any, errorCallback: any, backupTrigger?: any): ng.IPromise<any> {
let groupPromise;
let actionPromises = [];
let dampeningPromises = [];
let conditionPromises = [];
// First, fetch the *full* group trigger, update it if necessary
let groupId = fullTrigger.trigger.memberOf;
this.getTrigger(groupId).then((groupTrigger) => {
// Only push attribute updates that actually changed vs the group trigger.
let changedAttrs = !angular.equals(groupTrigger.trigger.enabled, fullTrigger.trigger.enabled);
changedAttrs = changedAttrs || !angular.equals(groupTrigger.trigger.severity, fullTrigger.trigger.severity);
if (changedAttrs) {
groupTrigger.trigger.enabled = fullTrigger.trigger.enabled;
// don't allow update on name or description at the group level because those have instance-info in them
// groupTrigger.trigger.name = fullTrigger.trigger.name;
// groupTrigger.trigger.description = fullTrigger.trigger.description;
groupTrigger.trigger.severity = fullTrigger.trigger.severity;
}
let changedActions = !angular.equals(groupTrigger.trigger.actions, fullTrigger.trigger.actions);
if ( changedActions ) {
groupTrigger.trigger.actions = fullTrigger.trigger.actions;
// Ensure the actions exist
for (let i = 0; groupTrigger.trigger.actions && i < groupTrigger.trigger.actions.length; i++) {
if (groupTrigger.trigger.actions[i]) {
let actionPromise = this.addAction(groupTrigger.trigger.actions[i]).then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error adding action.', errorCallback);
});
actionPromises.push(actionPromise);
}
}
}
if (changedAttrs || changedActions) {
this.$q.all(actionPromises).then(() => {
groupPromise = this.HawkularAlert.Trigger.putGroup({ groupId: groupId }, groupTrigger.trigger);
}, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error saving group.', errorCallback);
});
}
// Save only the dampenings that differ from the backup.
for (let i = 0; fullTrigger.dampenings && i < fullTrigger.dampenings.length; i++) {
if (fullTrigger.dampenings[i] && !angular.equals(fullTrigger.dampenings[i], backupTrigger.dampenings[i])) {
fullTrigger.dampenings[i].triggerId = groupTrigger.dampenings[i].triggerId;
fullTrigger.dampenings[i].dampeningId = groupTrigger.dampenings[i].dampeningId;
let dampeningId = groupTrigger.dampenings[i].dampeningId;
let dampeningPromise = this.HawkularAlert.Dampening.putGroup({ groupId: groupId, dampeningId: dampeningId },
fullTrigger.dampenings[i]).$promise.then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error saving dampening.', errorCallback);
});
dampeningPromises.push(dampeningPromise);
}
}
let firingConditions = [];
let autoResolveConditions = [];
let updateConditions = false;
for (let j = 0; fullTrigger.conditions && j < fullTrigger.conditions.length; j++) {
if (fullTrigger.conditions[j]) {
updateConditions = updateConditions ||
!angular.equals(fullTrigger.conditions[j],backupTrigger.conditions[j]);
let groupCondition = fullTrigger.conditions[j];
// Re-point dataIds at the group-level ids before saving.
groupCondition.dataId = groupTrigger.conditions[j].dataId;
if ( groupCondition.data2Id ) {
groupCondition.data2Id = groupTrigger.conditions[j].data2Id;
}
if (groupCondition.triggerMode && groupCondition.triggerMode === 'AUTORESOLVE') {
autoResolveConditions.push(groupCondition);
} else {
// A condition without triggerMode is treated as FIRING
firingConditions.push(groupCondition);
}
}
}
if (updateConditions && firingConditions.length > 0) {
// don't need dataMemberIdMap because we're not introducing any new dataIds
let groupConditionsInfo = {
conditions: firingConditions };
let conditionPromise = this.HawkularAlert.Conditions.saveGroup({
groupId: groupId,
triggerMode: 'FIRING'
}, groupConditionsInfo).$promise.then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error creating firing conditions.', errorCallback);
});
conditionPromises.push(conditionPromise);
}
if (updateConditions && autoResolveConditions.length > 0) {
// don't need dataMemberIdMap because we're not introducing any new dataIds
let groupConditionsInfo = {
conditions: autoResolveConditions };
let conditionPromise = this.HawkularAlert.Conditions.saveGroup({
groupId: groupId,
triggerMode: 'AUTORESOLVE'
}, groupConditionsInfo).$promise.then(null, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error creating autoresolve conditions.', errorCallback);
});
conditionPromises.push(conditionPromise);
}
}, (error) => {
return this.ErrorsManager.errorHandler(error, 'Error fetching group trigger.', errorCallback);
});
// NOTE(review): this $q.all runs synchronously, BEFORE the async
// getTrigger(...) callback above has populated groupPromise /
// dampeningPromises / conditionPromises, so the returned promise may
// resolve before (or without) the underlying updates completing.
// Confirm, and consider returning the getTrigger(...).then(...) chain
// (with the $q.all inside the callback) instead.
return this.$q.all(Array.prototype.concat(groupPromise, dampeningPromises, conditionPromises));
}
// Fetch triggers matching the (optional) criteria.
public queryTriggers(criteria: IHawkularTriggerCriteria): ng.IPromise<IHawkularTriggerQueryResult> {
let triggerList = [];
let headers;
/* Format of Triggers:
trigger: {
}
*/
// Copy only the criteria fields that were actually provided.
let queryParams = {};
if (criteria && criteria.triggerIds) {
queryParams['triggerIds'] = criteria.triggerIds;
}
if (criteria && criteria.tags) {
queryParams['tags'] = criteria.tags;
}
if (criteria && criteria.thin) {
queryParams['thin'] = criteria.thin;
}
if (criteria && criteria.currentPage && criteria.currentPage !== 0) {
queryParams['page'] = criteria.currentPage;
}
if (criteria && criteria.perPage) {
queryParams['per_page'] = criteria.perPage;
}
if (criteria && criteria.sort) {
queryParams['sort'] = criteria.sort;
}
if (criteria && criteria.order) {
queryParams['order'] = criteria.order;
}
return this.HawkularAlert.Trigger.query(queryParams, (serverTriggers: any, getHeaders: any) => {
headers = getHeaders();
for (let i = 0; i < serverTriggers.length; i++) {
let serverTrigger = serverTriggers[i];
let consoleTrigger: any = serverTrigger;
triggerList.push(consoleTrigger);
}
}, (error) => {
this.$log.debug('querying data error', error);
}).$promise.then((): IHawkularTriggerQueryResult => {
return {
triggerList: triggerList,
headers: headers
};
});
}
// Fetch a single action definition; rejects with 404 when it doesn't exist.
private getAction(actionPlugin: ActionPlugin, actionId: ActionId): ng.IPromise<void> {
return this.HawkularAlert.Action.get({
pluginId: actionPlugin,
actionId: actionId
}).$promise;
}
// Persist a new action definition, stamping a creation description.
private createAction(action: IActionDefinition): ng.IPromise<void> {
action.properties.description = 'Created on ' + Date();
return this.HawkularAlert.Action.save(action).$promise;
}
// Return the existing action, or create it (404 path). For the 'email'
// plugin the actionId doubles as the destination address.
public addAction(action: ITriggerAction): ng.IPromise<void> {
return this.getAction(action.actionPlugin, action.actionId).then((promiseValue: any) => {
return promiseValue;
}, (reason: any) => {
// Create a default email action
if (reason.status === 404) {
this.$log.debug('Action does not exist, creating one');
let actionDefinition: IActionDefinition = {
actionPlugin: action.actionPlugin,
actionId: action.actionId,
properties: {
description: 'Created on ' + Date()
}
};
if (action.actionPlugin === 'email') {
actionDefinition.properties.to = action.actionId; // email address
}
return this.createAction(actionDefinition);
}
});
}
// Overwrite an existing action definition.
public updateAction(action: IActionDefinition): ng.IPromise<void> {
action.properties.description = 'Created on ' + Date();
return this.HawkularAlert.Action.put({
actionPlugin: action.actionPlugin,
actionId: action.actionId
},
action).$promise;
}
}
// Register the manager for Angular DI under the name used by the console.
_module.service('HawkularAlertsManager', HawkularAlertsManager);
} | the_stack |
import { mount, shallow } from 'enzyme';
import { ActionElement, ValueEditor, ValueSelector } from '../controls/index';
import { standardClassnames } from '../defaults';
import { Rule } from '../Rule';
import {
ActionProps,
Classnames,
Controls,
Field,
FieldSelectorProps,
NameLabelPair,
OperatorSelectorProps,
RuleProps,
RuleType,
Schema,
ValidationResult,
ValueEditorProps
} from '../types';
// Baseline fields shared by the test cases below.
const defaultFields: Field[] = [
  { name: 'field1', label: 'Field 1' },
  { name: 'field2', label: 'Field 2' }
];

// Index the default fields by name for quick lookup in schema.fieldMap.
const fieldMap: { [k: string]: Field } = {};
for (const field of defaultFields) {
  fieldMap[field.name] = field;
}
// Unit tests for the <Rule /> component: rendering, control substitution,
// change-handler wiring, clone/remove actions and validation class names.
describe('<Rule />', () => {
  let controls: Partial<Controls>,
    classNames: Partial<Classnames>,
    schema: Partial<Schema>,
    props: RuleProps;

  // Rebuild controls/classNames/schema/props from scratch before every test
  // so individual tests can mutate them freely.
  beforeEach(() => {
    //set defaults
    controls = {
      cloneRuleAction: (props: ActionProps) => (
        <button onClick={(e) => props.handleOnClick(e)}>⧉</button>
      ),
      fieldSelector: (props: FieldSelectorProps) => (
        <select onChange={(e) => props.handleOnChange(e.target.value)}>
          <option value="field">Field</option>
          <option value="any_field">Any Field</option>
        </select>
      ),
      operatorSelector: (props: OperatorSelectorProps) => (
        <select onChange={(e) => props.handleOnChange(e.target.value)}>
          <option value="operator">Operator</option>
          <option value="any_operator">Any Operator</option>
        </select>
      ),
      valueEditor: (props: ValueEditorProps) => (
        <input type="text" onChange={(e) => props.handleOnChange(e.target.value)} />
      ),
      removeRuleAction: (props: ActionProps) => (
        <button onClick={(e) => props.handleOnClick(e)}>x</button>
      )
    };
    classNames = {
      cloneRule: 'custom-cloneRule-class',
      fields: 'custom-fields-class',
      operators: 'custom-operators-class',
      removeRule: 'custom-removeRule-class'
    };
    schema = {
      fields: defaultFields,
      fieldMap,
      controls: controls as Controls,
      classNames: classNames as Classnames,
      getOperators: (_field) => [
        { name: '=', label: 'is' },
        { name: '!=', label: 'is not' }
      ],
      getValueEditorType: (_field, _operator) => 'text',
      getInputType: (_field, _operator) => 'text',
      getValues: (_field, _operator) => [
        { name: 'one', label: 'One' },
        { name: 'two', label: 'Two' }
      ],
      onPropChange: (_field, _value, _path) => {},
      onRuleRemove: (_path) => {},
      showCloneButtons: false,
      validationMap: {}
    };
    props = {
      id: 'id',
      field: 'field', // note that this is not a valid field name based on the defaultFields
      value: 'value',
      operator: 'operator',
      schema: schema as Schema,
      path: [0],
      translations: {
        fields: {
          title: 'Fields'
        },
        operators: {
          title: 'Operators'
        },
        value: {
          title: 'Value'
        },
        removeRule: {
          label: 'x',
          title: 'Remove rule'
        },
        removeGroup: {
          label: 'x',
          title: 'Remove group'
        },
        addRule: {
          label: '+Rule',
          title: 'Add rule'
        },
        addGroup: {
          label: '+Group',
          title: 'Add group'
        },
        combinators: {
          title: 'Combinators'
        },
        notToggle: {
          label: 'Not',
          title: 'Invert this group'
        },
        cloneRule: {
          label: '⧉',
          title: 'Clone rule'
        },
        cloneRuleGroup: {
          label: '⧉',
          title: 'Clone group'
        }
      }
    };
  });

  it('should exist', () => {
    expect(Rule).toBeDefined();
  });

  it('should have correct className', () => {
    const dom = shallow(<Rule {...props} />);
    expect(dom.find('div').hasClass(standardClassnames.rule)).toBe(true);
  });

  // Swap in the real ValueSelector for the field control and verify wiring.
  describe('field selector as <ValueSelector />', () => {
    beforeEach(() => {
      controls.fieldSelector = ValueSelector;
    });

    it('should have options set to expected fields', () => {
      const expected_fields: Field[] = [
        { name: 'firstName', label: 'First Label' },
        { name: 'secondName', label: 'Second Label' }
      ];
      schema.fields = expected_fields;
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().options).toEqual(expected_fields);
    });

    behavesLikeASelector('field', standardClassnames.fields, 'custom-fields-class');
  });

  // Same checks for the operator control.
  describe('operator selector as <ValueSelector />', () => {
    beforeEach(() => {
      controls.operatorSelector = ValueSelector;
    });

    it('should have options set to fields returned from "getOperators"', () => {
      const expected_operators: NameLabelPair[] = [
        { name: '=', label: '=' },
        { name: '!=', label: '!=' }
      ];
      schema.getOperators = () => expected_operators;
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().options).toEqual(expected_operators);
    });

    it('should have field set to selected field', () => {
      props.field = 'selected_field';
      const dom = shallow(<Rule {...props} />);
      expect((dom.find(ValueSelector).props() as OperatorSelectorProps).field).toBe(
        'selected_field'
      );
    });

    behavesLikeASelector('operator', standardClassnames.operators, 'custom-operators-class');
  });

  // Verify the value editor receives field/operator/value props and a handler.
  describe('value editor as <ValueEditor />', () => {
    beforeEach(() => {
      controls.valueEditor = ValueEditor;
    });

    it('should have field set to selected field', () => {
      props.field = 'selected_field';
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueEditor).props().field).toBe('selected_field');
    });

    it('should have fieldData set to selected field data', () => {
      props.field = 'field1';
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueEditor).props().fieldData.name).toBe('field1');
      expect(dom.find(ValueEditor).props().fieldData.label).toBe('Field 1');
    });

    it('should have operator set to selected operator', () => {
      props.operator = 'selected_operator';
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueEditor).props().operator).toBe('selected_operator');
    });

    it('should have value set to specified value', () => {
      props.value = 'specified_value';
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueEditor).props().value).toBe('specified_value');
    });

    it('should have the onChange method handler', () => {
      const dom = shallow(<Rule {...props} />);
      expect(typeof dom.find(ValueEditor).props().handleOnChange).toBe('function');
    });

    it('should trigger change handler', () => {
      const mockEvent = { target: { value: 'foo' } };
      const onChange = jest.fn();
      const dom = shallow(
        <ValueEditor
          level={0}
          handleOnChange={onChange}
          field="test"
          fieldData={{ name: 'test', label: 'Test' }}
          operator="and"
        />
      );
      dom.find('input').simulate('change', mockEvent);
      expect(onChange).toHaveBeenCalled();
    });
    //TODO spy on value change handler and verify it is triggered
  });

  describe('rule remove action as <ActionElement />', () => {
    beforeEach(() => {
      controls.removeRuleAction = ActionElement;
    });

    it('should have label set to "x"', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ActionElement).props().label).toBe('x');
    });

    it('should have the default className', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ActionElement).props().className).toContain(standardClassnames.removeRule);
    });

    it('should have the custom className', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ActionElement).props().className).toContain('custom-removeRule-class');
    });

    it('should have the onChange method handler', () => {
      const dom = shallow(<Rule {...props} />);
      expect(typeof dom.find(ActionElement).props().handleOnClick).toBe('function');
    });
    //TODO spy on value change handler and verify it is triggered
  });

  describe('clone rule action as <ActionElement />', () => {
    beforeEach(() => {
      schema.showCloneButtons = true;
      controls.cloneRuleAction = ActionElement;
    });

    it('should have label set to "⧉"', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ActionElement).props().label).toBe('⧉');
    });

    it('should have the default className', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ActionElement).props().className).toContain(standardClassnames.cloneRule);
    });

    it('should have the custom className', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ActionElement).props().className).toContain('custom-cloneRule-class');
    });

    it('should have the onChange method handler', () => {
      const dom = shallow(<Rule {...props} />);
      expect(typeof dom.find(ActionElement).props().handleOnClick).toBe('function');
    });
    //TODO spy on value change handler and verify it is triggered
  });

  // Capture onPropChange arguments to verify each control reports the rule path.
  describe('onElementChanged methods', () => {
    let actualProperty: string, actualValue: any, actualPath: number[];
    beforeEach(() => {
      schema.onPropChange = (property, value, path) => {
        actualProperty = property;
        actualValue = value;
        actualPath = path;
      };
    });

    describe('onFieldChanged', () => {
      it('should call onPropChange with the rule path', () => {
        const dom = mount(<Rule {...props} />);
        dom
          .find(`.${standardClassnames.fields}`)
          .simulate('change', { target: { value: 'any_field' } });
        expect(actualProperty).toBe('field');
        expect(actualValue).toBe('any_field');
        expect(actualPath).toEqual([0]);
      });
    });

    describe('onOperatorChanged', () => {
      it('should call onPropChange with the rule path', () => {
        const dom = mount(<Rule {...props} />);
        dom
          .find(`.${standardClassnames.operators}`)
          .simulate('change', { target: { value: 'any_operator' } });
        expect(actualProperty).toBe('operator');
        expect(actualValue).toBe('any_operator');
        expect(actualPath).toEqual([0]);
      });
    });

    describe('onValueChanged', () => {
      it('should call onPropChange with the rule path', () => {
        const dom = mount(<Rule {...props} />);
        dom
          .find(`.${standardClassnames.value}`)
          .simulate('change', { target: { value: 'any_value' } });
        expect(actualProperty).toBe('value');
        expect(actualValue).toBe('any_value');
        expect(actualPath).toEqual([0]);
      });
    });
  });

  describe('cloneRule', () => {
    beforeEach(() => {
      schema.showCloneButtons = true;
    });

    it('should call onRuleAdd with the rule and parent path', () => {
      let myRule: RuleType, myParentPath: number[];
      schema.onRuleAdd = (rule, parentPath) => {
        myRule = rule;
        myParentPath = parentPath;
      };
      const dom = mount(<Rule {...props} />);
      dom.find(`.${standardClassnames.cloneRule}`).simulate('click');
      expect(myRule).toBeDefined();
      expect(myParentPath).toEqual([]);
    });
  });

  describe('removeRule', () => {
    it('should call onRuleRemove with the rule and path', () => {
      let myPath: number[];
      schema.onRuleRemove = (path) => {
        myPath = path;
      };
      const dom = mount(<Rule {...props} />);
      dom.find(`.${standardClassnames.removeRule}`).simulate('click');
      expect(myPath).toEqual([0]);
    });
  });

  // Validation precedence: validationMap[id] wins over a field-level validator.
  describe('validation', () => {
    it('should not validate if no validationMap[id] value exists and no validator function is provided', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find('div').first().hasClass(standardClassnames.valid)).toBe(false);
      expect(dom.find('div').first().hasClass(standardClassnames.invalid)).toBe(false);
    });

    it('should validate to false if validationMap[id] = false even if a validator function is provided', () => {
      const validator = jest.fn(() => true);
      schema.fieldMap = { field1: { name: 'field1', label: 'Field 1', validator } };
      schema.validationMap = { id: false };
      const dom = shallow(<Rule {...props} />);
      expect(dom.find('div').first().hasClass(standardClassnames.valid)).toBe(false);
      expect(dom.find('div').first().hasClass(standardClassnames.invalid)).toBe(true);
      expect(validator).not.toHaveBeenCalled();
    });

    it('should validate to true if validationMap[id] = true', () => {
      schema.validationMap = { id: true };
      const dom = shallow(<Rule {...props} />);
      expect(dom.find('div').first().hasClass(standardClassnames.valid)).toBe(true);
      expect(dom.find('div').first().hasClass(standardClassnames.invalid)).toBe(false);
    });

    it('should validate if validationMap[id] does not exist and a validator function is provided', () => {
      const validator = jest.fn(() => true);
      props.field = 'field1';
      schema.fieldMap = { field1: { name: 'field1', label: 'Field 1', validator } };
      const dom = shallow(<Rule {...props} />);
      expect(dom.find('div').first().hasClass(standardClassnames.valid)).toBe(true);
      expect(dom.find('div').first().hasClass(standardClassnames.invalid)).toBe(false);
      expect(validator).toHaveBeenCalled();
    });

    it('should pass down validationResult as validation to children', () => {
      const valRes: ValidationResult = { valid: false, reasons: ['invalid'] };
      schema.controls.fieldSelector = ValueSelector;
      schema.validationMap = { id: valRes };
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().validation).toEqual(valRes);
    });
  });

  // Shared assertions for any control rendered through ValueSelector.
  function behavesLikeASelector(value: string, defaultClassName: string, customClassName: string) {
    it('should have the selected value set correctly', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().value).toBe(value);
    });

    it('should have the default className', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().className).toContain(defaultClassName);
    });

    it('should have the custom className', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().className).toContain(customClassName);
    });

    it('should have the onChange method handler', () => {
      const dom = shallow(<Rule {...props} />);
      expect(typeof dom.find(ValueSelector).props().handleOnChange).toBe('function');
    });

    it('should have the level of the Rule', () => {
      const dom = shallow(<Rule {...props} />);
      expect(dom.find(ValueSelector).props().level).toBe(1);
    });
  }
});
import * as fs from "fs"
import * as os from "os"
import * as subproc from "child_process"
import { Writable, Readable, PassThrough as PassThroughStream } from "stream"
import * as io from "./io"
import { createTimeout } from "./timeout"
import { repr, expandTildePath, isWindows } from "./util"
import log from "./log"
// Pipes holds the caller-side ends of a started command's stdio streams.
// Each member is only meaningful when the corresponding Cmd option was "pipe";
// otherwise it is null.
export interface Pipes<In=io.Writer|null, Out=io.Reader|null, Err=io.Reader|null> {
  readonly stdin  :In  // valid if Cmd.stdin=="pipe"
  readonly stdout :Out // valid if Cmd.stdout=="pipe"
  readonly stderr :Err // valid if Cmd.stderr=="pipe"
  readonly extraFiles :(io.Reader|io.Writer|null)[] // where extraFiles[N]=="pipe"
}
// CmdOptions describes the configurable inputs accepted by startCmd(),
// mirroring the public fields of the Cmd class.
interface CmdOptions {
  dir?   :string // working directory. If empty, uses current working directory
  env?   :{[name:string]:string|undefined} // process environment
  shell? :boolean | string // run command in the system-default shell
  stdin?  :Readable | "inherit" | "pipe" | Buffer | io.Reader | null // fd 0
  stdout? :Writable | "inherit" | "pipe" | null // fd 1
  stderr? :Writable | "inherit" | "pipe" | null // fd 2
  extraFiles? :(Readable | "pipe" | null)[] // fd 3...
  windowsHide? :boolean
}
// startCmd launches an external command process.
// It's a convenience function around c=new Cmd();c.start() with strengthened TypeScript types.
// All overload forms return a tuple whose first element is the Cmd; when any
// stdio option is configured the second element holds the caller-side Pipes.
//
// Form 1/2a: When no stdio options are provided, no pipes are returned
export function startCmd(command :string, args? :string[]) :[Cmd]
export function startCmd(command :string, args :string[], options :CmdOptions & {
  stdin?: never,
  stdout?: never,
  stderr?: never,
  extraFiles? :never,
}) :[Cmd]
//
// Form 1/2b: no args
export function startCmd(command :string, options :CmdOptions & {
  stdin?: never,
  stdout?: never,
  stderr?: never,
  extraFiles? :never,
}) :[Cmd]
//
// Form 2/2a: When stdio options are provided, the pipe ends and cmd are returned as a tuple:
export function startCmd<
  // at least one stdio input is defined
  Options extends CmdOptions & (
    { stdin: CmdOptions["stdin"] } |
    { stdout: CmdOptions["stdout"] } |
    { stderr: CmdOptions["stderr"] } |
    { extraFiles: CmdOptions["extraFiles"] }
  ),
  I = Options extends {stdin: "pipe"} ? io.Writer : null,
  O = Options extends {stdout:"pipe"} ? io.Reader : null,
  E = Options extends {stderr:"pipe"} ? io.Reader : null,
>(
  command :string,
  args :string[],
  options :Options,
) :[Cmd, Pipes<I,O,E>]
//
// Form 2/2b: no args
export function startCmd<
  // at least one stdio input is defined
  Options extends CmdOptions & (
    { stdin: CmdOptions["stdin"] } |
    { stdout: CmdOptions["stdout"] } |
    { stderr: CmdOptions["stderr"] } |
    { extraFiles: CmdOptions["extraFiles"] }
  ),
  I = Options extends {stdin: "pipe"} ? io.Writer : null,
  O = Options extends {stdout:"pipe"} ? io.Reader : null,
  E = Options extends {stderr:"pipe"} ? io.Reader : null,
>(
  command :string,
  options :Options,
) :[Cmd, Pipes<I,O,E>]
//
// Implementation:
export function startCmd(command :string, args? :string[]|CmdOptions, options? :CmdOptions) {
  // Normalize the (command, args?, options?) / (command, options) call forms.
  if (!args || !Array.isArray(args)) {
    if (args && typeof args == "object") {
      options = args as CmdOptions
    }
    args = []
  }
  if (!options) {
    options = {}
  }
  const cmd = new Cmd(command, ...args)
  // Copy options straight onto the Cmd fields (Cmd implements Required<CmdOptions>).
  for (let k in options) {
    ;(cmd as any)[k] = (options as any)[k]
  }
  const cmdio = cmd.start()
  if (options && (
    "stdin" in options ||
    "stdout" in options ||
    "stderr" in options ||
    "extraFiles" in options
  )) {
    return [ cmd, cmdio ]
  }
  // Form 1: no stdio was requested. Return a one-element tuple to match the
  // declared overload type [Cmd]. (Previously this returned the bare Cmd,
  // so destructuring per the declared type — `const [cmd] = startCmd(...)` —
  // would fail at runtime because Cmd is not iterable.)
  return [ cmd ]
}
// Error message used by operations that require start() to have been called.
const notStartedError = "process not started"

// SignalMode selects whether a signal targets just the process ("standard")
// or its entire process group ("group").
export type SignalMode = "standard" | "group"
// Cmd represents an external command being prepared or run.
// Configure the public fields (mirroring CmdOptions), then call start(), run()
// or output(). Lifecycle state (process, pid, running, exitCode, promise) is
// exposed as readonly and maintained internally by start()/_onexit.
export class Cmd implements Required<CmdOptions> {
  command :string
  args    :string[]
  dir     :string = "" // working directory. If empty, uses current working directory
  env     :{[name:string]:string|undefined} = {...process.env} // process environment
  shell   :boolean | string = false // run command in the system-default shell
  stdin   :Readable | "inherit" | "pipe" | Buffer | io.Reader | null = null // fd 0
  stdout  :Writable | "inherit" | "pipe" | null = null // fd 1
  stderr  :Writable | "inherit" | "pipe" | null = null // fd 2
  extraFiles :(Readable | "pipe" | null)[] = [] // fd 3...
  windowsHide :boolean = true

  readonly process :subproc.ChildProcess | null = null // underlying process
  readonly promise :Promise<number> // resolves with status code when process exits
  readonly running :boolean = false // true while the underlying process is running
  readonly pid     :number = 0 // pid, valid after start() has been called
  readonly exitCode :number = -1
  // exit code of the exited process, or -1 if the process hasn't exited or was
  // terminated by a signal.

  constructor(command :string, ...args :string[]) {
    this.command = command
    this.args = args
    // Start with a rejected promise so wait()/run() before start() fail loudly.
    this.promise = Promise.reject(new Error(notStartedError))
    this.promise.catch(_=>{}) // avoid uncaught promise
  }

  // start launches the command process.
  // If the process fails to launch, this function throws an error.
  // Returns caller's end of I/O pipes. Returns null if no stdio pipes were configured.
  // See startCmd() function as an alternative with stronger TypeScript typings.
  start() :Pipes|null { return null } // separate impl

  // run starts the specified command and waits for it to complete.
  // Returns process exit status code.
  run(timeout? :number) :Promise<number> {
    this.start()
    return this.wait(timeout)
  }

  // output runs the specified command and returns its standard output.
  // If the program does not exit with status 0, an error is thrown.
  output(encoding :null|undefined, timeout? :number|null) :Promise<Buffer>
  output(encoding :BufferEncoding, timeout? :number|null) :Promise<string>
  output(encoding? :BufferEncoding|null, timeout? :number|null) :Promise<Buffer|string> {
    // Force stdout to a pipe; capture stderr too (for the error message) unless
    // the caller configured it explicitly.
    this.stdout = "pipe"
    if (!this.stderr) {
      this.stderr = "pipe"
    }
    const { stdout, stderr } = this.start()!
    const stdoutBuf = io.createWriteBuffer()
    const stderrBuf = io.createWriteBuffer()
    stdout!.stream.on("data", chunk => {
      stdoutBuf.push(chunk)
    })
    if (stderr) {
      stderr.stream.on("data", chunk => {
        stderrBuf.push(chunk)
      })
    }
    return this.wait(timeout as number || 0).then(exitCode => {
      if (exitCode != 0) {
        // Non-zero exit: surface captured stderr in the thrown error.
        let errstr = ""
        const errbuf = stderrBuf.buffer()
        try {
          errstr = errbuf.toString("utf8")
        } catch (_) {
          errstr = errbuf.toString("ascii")
        }
        if (errstr.length > 0) {
          errstr = ". stderr output:\n" + errstr
        }
        throw new Error(`command exited with status ${exitCode}${errstr}`)
      }
      const buf = stdoutBuf.buffer()
      return encoding ? buf.toString(encoding) : buf
    })
  }

  // wait for process to exit, with an optional timeout expressed in milliseconds.
  // Returns the exit status. Throws TIMEOUT on timeout.
  wait(timeout? :number, timeoutSignal? :Signal) :Promise<number> {
    if (timeout === undefined || timeout <= 0) {
      return this.promise
    }
    // On timeout, kill the process and reject with the timeout error.
    return this._waitTimeout(timeout, (err, _resolve, reject) => {
      log.debug(()=>`${this} wait timeout reached; killing process`)
      err.message = "Cmd.wait timeout"
      return this.kill(timeoutSignal).then(() => reject(err))
    })
  }

  // signal sends sig to the underlying process and returns true if sending the signal worked.
  // mode defaults to "standard"
  //
  // If the signal is successfully sent (not neccessarily delivered) true is returned.
  // If the process is not running, false is returned (no effect.)
  // If the process has not been started, an exception is thrown.
  // If the signal is not supported by the platform, an exception is thrown.
  // If another error occur, like signalling permissions, false is returned.
  //
  signal(sig :Signal, mode? :SignalMode) :boolean {
    const p = this._checkproc()
    if (mode == "group") {
      // Signalling process groups via negative pid is supported on most POSIX systems.
      // This causes subprocesses that the command process may have started to also receive
      // the signal.
      try {
        process.kill(-p.pid, sig)
        return true
      } catch (_) {
        // will fail if the process is not in its own group or if its is already dead.
        // fall through to "proc" mode:
      }
    }
    return p.kill(sig)
  }

  // kill terminates the command by sending signal sig to the process and waiting for it to exit.
  // mode defaults to "group".
  //
  // If the process has not exited within timeout milliseconds, SIGKILL is sent.
  // The timeout should be reasonably large to allow well-behaved processed to run atexit code but
  // small enough so that an ill-behaved process is killed within a reasonable timeframe.
  // If timeout <= 0 then the returned promise will only resolve if and when the process exits,
  // which could be never if the process ignores sig.
  //
  async kill(sig :Signal="SIGTERM", timeout :number=500, mode? :SignalMode) :Promise<number> {
    const p = this._checkproc()
    if (!this.signal(sig, mode || "group")) {
      return p.exitCode || 0
    }
    if (timeout <= 0) {
      return this.promise
    }
    // Escalate to SIGKILL if the process outlives the grace period.
    return this._waitTimeout(timeout, (_, resolve) => {
      log.debug(()=>`${this} kill timeout reached; sending SIGKILL`)
      p.kill("SIGKILL")
      return this.promise.then(resolve)
    })
  }

  toString() :string {
    return this.process ? `Cmd[${this.pid}]` : "Cmd"
  }

  // -------- internal --------

  // Promise resolution hooks, replaced each time start() creates a new promise.
  _resolve :(exitStatus:number)=>void = ()=>{}
  _reject  :(reason?:any)=>void = ()=>{}

  // _checkproc returns the child process or throws if start() was never called.
  _checkproc() :subproc.ChildProcess {
    if (!this.process) {
      throw new Error(notStartedError)
    }
    return this.process
  }

  _rejectAndKill(reason? :any) {
    this._reject(reason)
  }

  // _onerror logs a spawn/runtime error and rejects the exit promise.
  _onerror = (err :Error) => {
    log.debug(()=>`${this} error:\n${err.stack||err}`)
    this._reject(err)
  }

  // _onexit translates child_process "exit" events into the exitCode field and
  // resolves the exit promise. Signal deaths map to negative signal numbers.
  _onexit = (code: number, signal: NodeJS.Signals) => {
    // run after process exits
    const cmd = this as Mutable<Cmd>
    log.debug(()=>`${cmd} exited status=${code} signal=${signal}`)
    cmd.running = false
    if (code === null || signal !== null) {
      assert(typeof signal == "string")
      cmd.exitCode = -(os.constants.signals[signal] || 1)
    } else {
      cmd.exitCode = code || 0
    }
    cmd._resolve(cmd.exitCode)
  }

  // _waitTimeout starts a timer which is cancelled when the process exits.
  // If the timer expires before the process exits, onTimeout is called with a mutable
  // TimeoutError that you can pass to reject and a set of promise resolution functions,
  // which control the promise returned by this function.
  _waitTimeout(
    timeout :number,
    onTimeout :(
      timeoutErr :Error,
      resolve: (code?:number)=>void,
      reject: (reason?:any)=>void,
    )=>Promise<any>,
  ) {
    return new Promise<number>((resolve, reject) => {
      let timeoutOccured = false
      this.promise.then(exitCode => {
        if (!timeoutOccured) {
          resolve(exitCode)
        }
      })
      return createTimeout(this.promise, timeout, timeoutErr => {
        timeoutOccured = true
        // now, even if the process exits and calls cmd._resolve, the timeout-enabled
        // promise returned will not resolve. Instead, we call the onTimeout handler
        // which can take its sweet time and eventually, when it's done, call either
        // resolve or reject.
        onTimeout(timeoutErr, resolve as (code?:number)=>void, reject)
      })
    })
  }
}
// Implementation of Cmd.start(): spawns the child process, wires up stdio and
// lifecycle events, and returns the caller-side ends of any requested pipes
// (or null when none were requested). Throws if spawning fails synchronously.
Cmd.prototype.start = function start(this :Cmd) :Pipes|null {
  const cmd = this as Mutable<Cmd>
  if (cmd.running) {
    throw new Error("start() called while command is running")
  }

  // reset exit code
  cmd.exitCode = -1

  // create a new promise, replacing the initial pre-rejected one
  cmd.promise = new Promise<number>((res,rej) => {
    cmd._resolve = res
    cmd._reject = rej
  })

  // configure stdin which may be a buffer
  let stdin :Readable | "inherit" | "pipe" | null = null
  let stdinStreamNeedsPiping :Readable | null = null
  if (cmd.stdin instanceof Buffer) {
    stdin = "pipe" // buffer contents are written into the pipe after spawn
  } else if (io.isReader(cmd.stdin)) {
    if (typeof (cmd.stdin.stream as any).fd == "string") {
      // Nodejs' child_process module can handle "Socket" type of streams directly.
      // "Socket" really is just the name for a stream around a file descriptor.
      // NOTE(review): stream fds are normally numbers; this condition looks like
      // it may have been intended as == "number" — verify before changing, since
      // today all io.Reader stdins take the piping path below.
      stdin = cmd.stdin.stream
    } else {
      stdin = "pipe"
      stdinStreamNeedsPiping = cmd.stdin.stream
    }
  } else {
    stdin = cmd.stdin
  }

  // spawn a process
  const spawnOptions :subproc.SpawnOptions = {
    stdio: [
      stdin || 'ignore',
      (
        cmd.stdout === process.stdout ? 1 :
        cmd.stdout || 'ignore'
      ),
      (
        cmd.stderr === process.stderr ? 2 :
        cmd.stderr ? cmd.stderr : 'ignore'
      ),
      ...cmd.extraFiles
    ],
    cwd: cmd.dir ? expandTildePath(cmd.dir) : undefined,
    env: cmd.env,
    shell: cmd.shell,
    windowsHide: cmd.windowsHide,
    // On non-windows platforms, set detached so that p gets its own process group, allowing us to
    // signal its process tree.
    // Note that this option has a different meaning on Windows and screws with stdio inheritance.
    detached: !isWindows,
  }
  // log.debug(()=> `exec spawn ${repr(cmd.command)}, ${repr(cmd.args)} ${repr(spawnOptions)}`)
  const p = subproc.spawn(cmd.command, cmd.args, spawnOptions)

  // This is a bit of a hack, working around an awkward design choice in nodejs' child_process
  // module where spawn errors are deliberately delayed until the next runloop iteration.
  // The effect of this choice means that we don't know if creating a new process, which is a
  // synchronous operation, succeeded until the next runloop frame.
  // We have one thing going for us here: p.pid is undefined when spawn failed, so we can
  // look at p.pid to know if there will be an error event in the next runoop frame or not, but
  // we don't know anything about the error yet; not until the next runloop frame.
  // See https://github.com/nodejs/node/blob/v14.12.0/lib/internal/child_process.js#L379-L390
  if (p.pid === undefined) {
    cmd.process = null
    cmd.pid = 0
    // guesstimate the actual error by checking status of command file
    const err = guessSpawnError(cmd)
    cmd._reject(err)
    throw err
  }

  // set process & running state
  cmd.running = true
  cmd.process = p
  cmd.pid = p.pid

  // attach event listeners.
  // Use _onerror (which logs before rejecting) rather than the bare _reject so
  // runtime process errors show up in the debug log like everything else.
  p.on("exit", cmd._onexit)
  p.on('error', cmd._onerror)
  log.debug(()=>`${cmd} started (${repr(cmd.command)})`)

  // stdin buffer or reader stream? Feed it into the child's stdin pipe.
  if (p.stdin) {
    if (cmd.stdin instanceof Buffer) {
      const r = new PassThroughStream()
      r.end(cmd.stdin)
      r.pipe(p.stdin)
      p.stdin = null // consumed internally; don't expose as a caller pipe
    } else if (stdinStreamNeedsPiping) {
      stdinStreamNeedsPiping.pipe(p.stdin)
      p.stdin = null
    }
  }

  // if there are no pipes, return no pipes
  if (!p.stdin && !p.stdout && !p.stderr && p.stdio.length < 4) {
    return null
  }

  // TODO figure out how to make this properly TypeScript typed.
  // Ideally the return type of start() should depend on the values of Cmd.std{in,out,err}
  // but I can't figure out how to do that with TypeScript, so here we are, casting null to
  // a non-null type, asking for trouble. All for the sake of not having to do "!" for every
  // call to stdio objects returned from start()...
  const cmdio :Pipes = {
    stdin: p.stdin ? io.createWriter(p.stdin) : null,
    stdout: p.stdout ? io.createReader(p.stdout) : null,
    stderr: p.stderr ? io.createReader(p.stderr) : null,
    extraFiles: p.stdio.slice(3).map(stream =>
      io.isReadableStream(stream) ? io.createReader(stream) :
      io.isWritableStream(stream) ? io.createWriter(stream) :
      null
    ),
  }
  return cmdio
}
// guessSpawnError synthesizes a descriptive Error for a spawn() call that
// failed synchronously. Nodejs delays the real spawn error until the next
// runloop frame, so at this point we only know spawning failed — probe the
// filesystem to make an educated guess at why.
function guessSpawnError(cmd :Cmd) :Error {
  let code = ""
  let msg = "unspecified error"
  if (cmd.shell == false) {
    // No shell involved: cmd.command must name a readable, executable regular file.
    // (Previously this probed cmd.dir here, duplicating the directory check
    // below and misattributing a missing command to the working directory.)
    try {
      fs.accessSync(cmd.command, fs.constants.R_OK | fs.constants.X_OK)
      const st = fs.statSync(cmd.command)
      if ((st.mode & fs.constants.S_IFREG) == 0) {
        // not a regular file
        code = "EACCES"
      } else {
        // very likely some sort of I/O error
        code = "EIO"
      }
    } catch (err) {
      code = err.code || "ENOENT"
    }
    msg = io.errorCodeMsg(code) || msg
  }
  if (!code) {
    // check dir (empty dir means "current working directory" per CmdOptions)
    try {
      fs.accessSync(cmd.dir || ".", fs.constants.R_OK | fs.constants.X_OK)
      code = "EIO"
    } catch (err) {
      code = err.code || "ENOENT"
    }
    msg = io.errorCodeMsg(code) || msg
    if (code) {
      msg = msg + "; cmd.dir=" + repr(cmd.dir)
    }
  }
  if (!code) {
    code = "UNKNOWN"
  }
  const e = new Error(`failed to spawn process ${repr(cmd.command)} (${code} ${msg})`)
  ;(e as any).code = code
  return e
}
// Signal identifies a signal either symbolically (e.g. "SIGTERM") or by number.
export type Signal = NodeJS.Signals | number
// this function is never used but here to test the complex typescript types of spawn()
// Each assignment below compiles only if startCmd's overload resolution yields
// exactly the annotated tuple type for that combination of stdio options.
function _TEST_typescript_startCmd() {
  {
    const _empty1 :[Cmd] =
      startCmd("a", [])
    const _empty2 :[Cmd] =
      startCmd("a", [], { dir: "" })
    const ____ :[Cmd,Pipes<null,null,null>] =
      startCmd("a", [], { stdin:null, stdout:null, stderr:"inherit" })
    const ____2 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", [], { stdin:null, stdout:null, stderr:null })
    const ____3 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", [], { stdin:null })
    const ____4 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", [], { stdout:null })
    const ____5 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", [], { stderr:null })
    const _extraFiles :[Cmd,Pipes<null,null,null>] =
      startCmd("a", [], { extraFiles:[] })
    const _p__ :[Cmd,Pipes<io.Writer,null,null>] =
      startCmd("a", [], { stdin:"pipe", stdout:null, stderr:null })
    const _p__2 :[Cmd,Pipes<io.Writer,null,null>] =
      startCmd("a", [], { stdin:"pipe" })
    const _pp_ :[Cmd,Pipes<io.Writer,io.Reader,null>] =
      startCmd("a", [], { stdin:"pipe", stdout:"pipe", stderr:null })
    const _pp_2 :[Cmd,Pipes<io.Writer,io.Reader,null>] =
      startCmd("a", [], { stdin:"pipe", stdout:"pipe" })
    const _ppp :[Cmd,Pipes<io.Writer,io.Reader,io.Reader>] =
      startCmd("a", [], { stdin:"pipe", stdout:"pipe", stderr:"pipe" })
    const __pp :[Cmd,Pipes<null,io.Reader,io.Reader>] =
      startCmd("a", [], { stdin:null, stdout:"pipe", stderr:"pipe" })
    const __pp2 :[Cmd,Pipes<null,io.Reader,io.Reader>] =
      startCmd("a", [], { stdout:"pipe", stderr:"pipe" })
    const _p_p :[Cmd,Pipes<io.Writer,null,io.Reader>] =
      startCmd("a", [], { stdin:"pipe", stdout:null, stderr:"pipe" })
    const _p_p2 :[Cmd,Pipes<io.Writer,null,io.Reader>] =
      startCmd("a", [], { stdin:"pipe", stderr:"pipe" })
    const ___p :[Cmd,Pipes<null,null,io.Reader>] =
      startCmd("a", [], { stdin:null, stdout:null, stderr:"pipe" })
    const ___p2 :[Cmd,Pipes<null,null,io.Reader>] =
      startCmd("a", [], { stderr:"pipe" })
  }
  // ---- copy of above, but args omitted ----
  {
    const _empty1 :[Cmd] =
      startCmd("a")
    const _empty2 :[Cmd] =
      startCmd("a", { dir: "" })
    const ____ :[Cmd,Pipes<null,null,null>] =
      startCmd("a", { stdin:null, stdout:null, stderr:"inherit" })
    const ____2 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", { stdin:null, stdout:null, stderr:null })
    const ____3 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", { stdin:null })
    const ____4 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", { stdout:null })
    const ____5 :[Cmd,Pipes<null,null,null>] =
      startCmd("a", { stderr:null })
    const _extraFiles :[Cmd,Pipes<null,null,null>] =
      startCmd("a", { extraFiles:[] })
    const _p__ :[Cmd,Pipes<io.Writer,null,null>] =
      startCmd("a", { stdin:"pipe", stdout:null, stderr:null })
    const _p__2 :[Cmd,Pipes<io.Writer,null,null>] =
      startCmd("a", { stdin:"pipe" })
    const _pp_ :[Cmd,Pipes<io.Writer,io.Reader,null>] =
      startCmd("a", { stdin:"pipe", stdout:"pipe", stderr:null })
    const _pp_2 :[Cmd,Pipes<io.Writer,io.Reader,null>] =
      startCmd("a", { stdin:"pipe", stdout:"pipe" })
    const _ppp :[Cmd,Pipes<io.Writer,io.Reader,io.Reader>] =
      startCmd("a", { stdin:"pipe", stdout:"pipe", stderr:"pipe" })
    const __pp :[Cmd,Pipes<null,io.Reader,io.Reader>] =
      startCmd("a", { stdin:null, stdout:"pipe", stderr:"pipe" })
    const __pp2 :[Cmd,Pipes<null,io.Reader,io.Reader>] =
      startCmd("a", { stdout:"pipe", stderr:"pipe" })
    const _p_p :[Cmd,Pipes<io.Writer,null,io.Reader>] =
      startCmd("a", { stdin:"pipe", stdout:null, stderr:"pipe" })
    const _p_p2 :[Cmd,Pipes<io.Writer,null,io.Reader>] =
      startCmd("a", { stdin:"pipe", stderr:"pipe" })
    const ___p :[Cmd,Pipes<null,null,io.Reader>] =
      startCmd("a", { stdin:null, stdout:null, stderr:"pipe" })
    const ___p2 :[Cmd,Pipes<null,null,io.Reader>] =
      startCmd("a", { stderr:"pipe" })
  }
}
import {
InanoSQLQuery,
InanoSQLInstance,
adapterReadFilter,
adapterReadMultiFilter,
TableQueryResult,
InanoSQLTable,
adapterWriteFilter,
adapterConnectFilter,
adapterDisconnectFilter,
adapterCreateTableFilter,
adapterDropTableFilter,
adapterDeleteFilter,
adapterGetTableIndexFilter,
adapterGetTableIndexLengthFilter,
adapterCreateIndexFilter,
adapterDeleteIndexFilter,
adapterAddIndexValueFilter,
adapterDeleteIndexValueFilter,
adapterReadIndexKeyFilter,
adapterReadIndexKeysFilter,
InanoSQLFunctionResult,
InanoSQLDataModel,
InanoSQLTableColumn,
InanoSQLAdapter
} from "./interfaces";
import { _nanoSQLQuery } from "./query";
import * as leven from "levenshtein-edit-distance";
import * as equal from "fast-deep-equal";
declare var global: any;
// Default/empty table schema: the starting point used when creating or
// normalizing a table configuration (string primary key, no columns/indexes).
export const blankTableDefinition: InanoSQLTable = {
    id: "",
    count: 0,
    name: "",
    rowLocks: {},
    model: {},
    columns: [],
    indexes: {},
    actions: [],
    queries: {},
    views: [],
    pkType: "string",
    pkCol: [],
    isPkNum: false,
    ai: false
}
/**
 * Searches a sorted array for a given value.
 *
 * When `indexOf` is true, returns the index of `value` or -1 when it is not
 * present; otherwise returns the insertion index that keeps `arr` sorted.
 *
 * @param {any[]} arr
 * @param {*} value
 * @param {boolean} indexOf
 * @param {number} [startVal]
 * @param {number} [endVal]
 * @returns {number}
 */
export const binarySearch = (arr: any[], value: any, indexOf: boolean, startVal?: number, endVal?: number): number => {
    const start = startVal || 0;
    const end = endVal || arr.length;

    // Value falls at or before the window start. Bug fix: in indexOf mode an
    // exact match sitting at `start` (e.g. the first element) used to be
    // reported as -1 (not found).
    if (arr[start] >= value) {
        if (indexOf) return arr[start] === value ? start : -1;
        return start;
    }
    // Value falls at or after the window end (only reachable when the caller
    // passes an explicit endVal inside the array). Same exact-match fix.
    if (arr[end] <= value) {
        if (indexOf) return arr[end] === value ? end : -1;
        return end + 1;
    }

    const m = Math.floor((start + end) / 2);
    if (value == arr[m]) return m;
    // window collapsed to a single gap: value is not present
    if (end - 1 == start) return indexOf ? -1 : end;
    if (value > arr[m]) return binarySearch(arr, value, indexOf, m, end);
    if (value < arr[m]) return binarySearch(arr, value, indexOf, start, m);
    return indexOf ? -1 : end;
};
/**
 * Converts a word to title case: first character upper-cased, the rest
 * lower-cased.
 *
 * @param {string} str
 * @returns
 */
export const titleCase = (str: string) => {
    const head = str.slice(0, 1).toUpperCase();
    const tail = str.slice(1).toLowerCase();
    return head + tail;
};
/**
 * Turn an arbitrary string into a URL-friendly slug: whitespace runs become
 * single dashes, everything outside [0-9a-z-] is stripped, result lower-cased.
 */
export const slugify = (str: string): string => {
    const dashed = String(str).replace(/\s+/g, "-");
    const cleaned = dashed.replace(/[^0-9a-z\-]/gi, "");
    return cleaned.toLowerCase();
}
/**
 * Construct a fresh query object in its initial "pending" state for the given
 * table and action.
 */
export const buildQuery = (selectedDB: string | undefined, nSQL: InanoSQLInstance, table: string | any[] | ((where?: any[] | ((row: { [key: string]: any }, i?: number) => boolean)) => Promise<TableQueryResult>), action: string): InanoSQLQuery => {
    const query: InanoSQLQuery = {
        databaseID: selectedDB,
        // fall back to the instance's currently selected table
        table: table || nSQL.selectedTable,
        parent: nSQL,
        action: action,
        state: "pending",
        result: [],
        time: Date.now(),
        queryID: fastID(),
        extend: [],
        comments: [],
        tags: []
    };
    return query;
};
/**
 * Normalize a primary key for storage: "date" keys are parsed into numeric
 * timestamps, every other type passes through untouched.
 */
export const keyToDate = (nSQL: InanoSQLInstance, type: string, pk: any): any => {
    // falsy keys (0, "", null, undefined) are returned unchanged
    if (!pk) return pk;
    return type === "date" ? Date.parse(pk) : pk;
}
/**
 * Wraps every storage-adapter operation with its corresponding `doFilter`
 * hook so plugins can observe, rewrite, or entirely take over the operation
 * (a filter that does not call back with a result "takes over" and the
 * default adapter call is skipped). Write/delete/index mutations are buffered
 * into `nSQL.txs` instead when the query carries a `transactionId`.
 *
 * NOTE(review): every method silently returns when `selectedDB` is undefined —
 * neither `complete` nor `error` fires in that case; confirm callers expect
 * this.
 */
export const adapterFilters = (selectedDB: string | undefined, nSQL: InanoSQLInstance, query?: InanoSQLQuery) => {
    return {
        // Write one row; "date" primary keys are normalized to timestamps first.
        write: (table: string, pk: any, row: { [key: string]: any }, complete: (pk: any) => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            pk = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].pkType, pk);
            nSQL.doFilter<adapterWriteFilter>(selectedDB, "adapterWrite", { res: { table, pk, row, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                if (query && query.transactionId) {
                    // inside a transaction: queue the write instead of applying it
                    nSQL.txs[query.transactionId].push({ table: table, type: "put", data: result.res.row });
                    result.res.complete(null);
                    return;
                }
                // per-table adapter override ("mode") wins over the database adapter
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.write(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.pk, result.res.row, (pk) => {
                    result.res.complete(pk);
                }, result.res.error);
            }, error as any);
        },
        // Read a single row by primary key; date PKs are surfaced back as ISO strings.
        read: (table: string, pk: any, complete: (row: { [key: string]: any } | undefined) => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            pk = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].pkType, pk);
            nSQL.doFilter<adapterReadFilter>(selectedDB, "adapterRead", { res: { table, pk, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.read(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.pk, (row) => {
                    if (!row) {
                        result.res.complete(undefined);
                        return;
                    }
                    if (nSQL.getDB(selectedDB)._tables[result.res.table].pkType === "date") {
                        // convert the stored numeric timestamp back to ISO-8601
                        const setRow = {
                            ...row
                        };
                        deepSet(nSQL.getDB(selectedDB)._tables[result.res.table].pkCol, setRow, new Date(result.res.pk).toISOString());
                        result.res.complete(setRow);
                    } else {
                        result.res.complete(row);
                    }
                }, result.res.error);
            }, error as any);
        },
        // Stream rows by range/offset/all; same date-PK round trip as read().
        readMulti: (table: string, type: "range" | "offset" | "all", offsetOrLow: any, limitOrHigh: any, reverse: boolean, onRow: (row: { [key: string]: any }, i: number) => void, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            offsetOrLow = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].pkType, offsetOrLow);
            limitOrHigh = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].pkType, limitOrHigh);
            nSQL.doFilter<adapterReadMultiFilter>(selectedDB, "adapterReadMulti", { res: { table, type, offsetOrLow, limitOrHigh, reverse, onRow, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.readMulti(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.type, result.res.offsetOrLow, result.res.limitOrHigh, result.res.reverse, (row, i) => {
                    if (nSQL.getDB(selectedDB)._tables[result.res.table].pkType === "date") {
                        const setRow = {
                            ...row
                        };
                        const pk = deepGet(nSQL.getDB(selectedDB)._tables[result.res.table].pkCol, setRow);
                        deepSet(nSQL.getDB(selectedDB)._tables[result.res.table].pkCol, setRow, new Date(pk).toISOString());
                        result.res.onRow(setRow, i);
                    } else {
                        result.res.onRow(row, i)
                    }
                }, () => {
                    result.res.complete();
                }, result.res.error);
            }, error);
        },
        // Open the database-level adapter connection.
        connect: (id: string, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterConnectFilter>(selectedDB, "adapterConnect", { res: { id, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                nSQL.getDB(selectedDB).adapter.connect(result.res.id, result.res.complete, result.res.error);
            }, error);
        },
        // Close the database-level adapter connection.
        disconnect: (complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterDisconnectFilter>(selectedDB, "adapterDisconnect", { res: { complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                nSQL.getDB(selectedDB).adapter.disconnect(result.res.complete, result.res.error);
            }, error);
        },
        // Create a table; uses the new table's own mode when supplied since the
        // table is not registered yet.
        createTable: (table: string, tableData: InanoSQLTable, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterCreateTableFilter>(selectedDB, "adapterCreateTable", { res: { table, tableData, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = tableData.mode || nSQL.getDB(selectedDB).adapter;
                adapter.createTable(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.tableData, result.res.complete, result.res.error);
            }, error);
        },
        // Drop a table and all of its rows.
        dropTable: (table: string, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterDropTableFilter>(selectedDB, "adapterDropTable", { res: { table: table, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.dropTable(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.complete, result.res.error);
            }, error);
        },
        // Delete a single row; buffered into the transaction log when applicable.
        delete: (table: string, pk: any, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            pk = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].pkType, pk);
            nSQL.doFilter<adapterDeleteFilter>(selectedDB, "adapterDelete", { res: { table: table, pk, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                if (query && query.transactionId) {
                    nSQL.txs[query.transactionId].push({ table: table, type: "del", data: result.res.pk });
                    result.res.complete();
                    return;
                }
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.delete(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.pk, result.res.complete, result.res.error);
            }, error);
        },
        // Fetch the full primary-key index of a table.
        getTableIndex: (table: string, complete: (index: any[]) => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterGetTableIndexFilter>(selectedDB, "adapterGetTableIndex", { res: { table: table, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.getTableIndex(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.complete, result.res.error);
            }, error);
        },
        // Fetch only the row count of a table.
        getTableIndexLength: (table: string, complete: (length: number) => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterGetTableIndexLengthFilter>(selectedDB, "adapterGetTableIndexLength", { res: { table: table, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.getTableIndexLength(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.complete, result.res.error);
            }, error);
        },
        // Create a secondary index on a table.
        createIndex: (table: string, indexName: string, type: string, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            nSQL.doFilter<adapterCreateIndexFilter>(selectedDB, "adapterCreateIndex", { res: { table: table, indexName, type, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.createIndex(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.indexName, result.res.type, result.res.complete, result.res.error);
            }, error);
        },
        // Remove a secondary index; errors when the index is not declared.
        deleteIndex: (table: string, indexName: string, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            if (!nSQL.getDB(selectedDB)._tables[table].indexes[indexName]) {
                error({ error: `Index ${indexName} not found!` });
                return;
            }
            nSQL.doFilter<adapterDeleteIndexFilter>(selectedDB, "adapterDeleteIndex", { res: { table: table, indexName, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.deleteIndex(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.indexName, result.res.complete, result.res.error);
            }, error);
        },
        // Insert a (key -> row pk) pair into a secondary index, applying the
        // index's offset / ignore_case / date normalizations to the value.
        addIndexValue: (table: string, indexName: string, key: any, value: any, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            if (!nSQL.getDB(selectedDB)._tables[table].indexes[indexName]) {
                error({ error: `Index ${indexName} not found!` });
                return;
            }
            // undefined values are stored under the sentinel "__NULL__"
            let value2 = value === undefined || value === "undefined" ? "__NULL__" : value;
            // shift primary key query by offset
            if (typeof value2 === "number" && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset) {
                value2 += nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset || 0;
            }
            if (nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.ignore_case) {
                value2 = String(value2 || "").toUpperCase();
            }
            value2 = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].indexes[indexName].isDate ? "date" : "", value2);
            nSQL.doFilter<adapterAddIndexValueFilter>(selectedDB, "adapterAddIndexValue", { res: { table: table, indexName, key, value: value2, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                if (query && query.transactionId) {
                    nSQL.txs[query.transactionId].push({ table: table, type: "idx-put", data: { indexName: result.res.indexName, tableId: nSQL.getDB(selectedDB)._tableIds[result.res.table], key: result.res.key, value: result.res.value } });
                    result.res.complete();
                    return;
                }
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.addIndexValue(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.indexName, result.res.key, result.res.value, result.res.complete, result.res.error);
            }, error);
        },
        // Remove a (key -> row pk) pair from a secondary index; mirrors the
        // value normalization done by addIndexValue.
        deleteIndexValue: (table: string, indexName: string, key: any, value: any, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            if (!nSQL.getDB(selectedDB)._tables[table].indexes[indexName]) {
                error({ error: `Index ${indexName} not found!` });
                return;
            }
            let value2 = value === undefined || value === "undefined" ? "__NULL__" : value;
            // shift primary key query by offset
            if (typeof value2 === "number" && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset) {
                value2 += nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset || 0;
            }
            if (nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.ignore_case) {
                value2 = String(value2 || "").toUpperCase();
            }
            value2 = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].indexes[indexName].isDate ? "date" : "", value2);
            nSQL.doFilter<adapterDeleteIndexValueFilter>(selectedDB, "adapterDeleteIndexValue", { res: { table: table, indexName, key, value: value2, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                if (query && query.transactionId) {
                    nSQL.txs[query.transactionId].push({ table: table, type: "idx-del", data: { indexName: result.res.indexName, tableId: nSQL.getDB(selectedDB)._tableIds[result.res.table], key: result.res.key, value: result.res.value } });
                    result.res.complete();
                    return;
                }
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.deleteIndexValue(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.indexName, result.res.key, result.res.value, result.res.complete, result.res.error);
            }, error);
        },
        // Stream the row primary keys stored under one index key.
        // NOTE(review): here keyToDate runs BEFORE the ignore_case upper-casing,
        // the reverse of addIndexValue/deleteIndexValue — confirm intentional.
        readIndexKey: (table: string, indexName: string, pk: any, onRowPK: (key: any) => void, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            if (!nSQL.getDB(selectedDB)._tables[table].indexes[indexName]) {
                error({ error: `Index ${indexName} not found!` });
                return;
            }
            let key = pk === "NULL" ? "__NULL__" : pk;
            // shift primary key query by offset
            if (typeof key === "number" && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset) {
                key += nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset || 0;
            }
            key = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].indexes[indexName].isDate ? "date" : "", key);
            if (nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.ignore_case) {
                key = String(key || "").toUpperCase();
            }
            nSQL.doFilter<adapterReadIndexKeyFilter>(selectedDB, "adapterReadIndexKey", { res: { table: table, indexName, pk: key, onRowPK, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.readIndexKey(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.indexName, result.res.pk, result.res.onRowPK, result.res.complete, result.res.error);
            }, error);
        },
        // Stream (index key, row pk) pairs over a range/offset/all scan,
        // skipping entries stored under the "__NULL__" sentinel.
        readIndexKeys: (table: string, indexName: string, type: "range" | "offset" | "all", offsetOrLow: any, limitOrHigh: any, reverse: boolean, onRowPK: (key: any, id: any) => void, complete: () => void, error: (err: any) => void) => {
            if (!selectedDB) return;
            let lower = offsetOrLow;
            let higher = limitOrHigh;
            if (!nSQL.getDB(selectedDB)._tables[table].indexes[indexName]) {
                error({ error: `Index ${indexName} not found!` });
                return;
            }
            // shift range query by offset
            // NOTE(review): unlike the guards above, this reads `.props.offset`
            // without first checking that `.props` exists — verify props is
            // always defined for declared indexes.
            if (typeof lower === "number" && typeof higher === "number" && type === "range") {
                if (nSQL.getDB(selectedDB)._tables[table].indexes[indexName] && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset) {
                    lower += nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset || 0;
                    higher += nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.offset || 0;
                }
            }
            lower = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].indexes[indexName].isDate ? "date" : "", lower);
            higher = keyToDate(nSQL, nSQL.getDB(selectedDB)._tables[table].indexes[indexName].isDate ? "date" : "", higher);
            if (type === "range" && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props && nSQL.getDB(selectedDB)._tables[table].indexes[indexName].props.ignore_case) {
                lower = String(lower || "").toUpperCase();
                higher = String(higher || "").toUpperCase();
            }
            nSQL.doFilter<adapterReadIndexKeysFilter>(selectedDB, "adapterReadIndexKeys", { res: { table: table, indexName, type, offsetOrLow: lower, limitOrHigh: higher, reverse, onRowPK, complete, error }, query }, (result) => {
                if (!result) return; // filter took over
                const adapter = nSQL.getDB(selectedDB)._tables[result.res.table].mode || nSQL.getDB(selectedDB).adapter;
                adapter.readIndexKeys(nSQL.getDB(selectedDB)._tableIds[result.res.table], result.res.indexName, result.res.type, result.res.offsetOrLow, result.res.limitOrHigh, result.res.reverse, (key, id) => {
                    if (key !== "__NULL__") result.res.onRowPK(key, id);
                }, result.res.complete, result.res.error);
            }, error);
        }
    };
};
/**
 * Best-effort date conversion: returns the parsed timestamp when `value` is a
 * valid date string, otherwise returns the original value untouched.
 */
export const maybeDate = (value: any): any => {
    const timestamp = Date.parse(value);
    return Number.isNaN(timestamp) ? value : timestamp;
}
/**
 * Convert a stored row's raw values into their declared column types for
 * SELECT output: custom types run their `onSelect` hook and "date" columns are
 * rendered as ISO-8601 strings. Mutates (and returns) the row object in place.
 */
export const mutateRowTypes = (selectedDB: string | undefined, replaceObj: any, table: string, nSQL: InanoSQLInstance): any => {
    // no selected database -> no schema to apply
    if (!selectedDB) return replaceObj;
    const dbObj = nSQL.getDB(selectedDB);
    const tableObj = nSQL.getDB(selectedDB)._tables[table];
    if (!tableObj) {
        throw new Error(`nSQL: Table "${table}" not found!`);
    }
    const customTypes = dbObj.config.types || {};
    // Walk the column model recursively. `nestedModel` carries a "type[]"
    // suffix string used to unwrap array levels one "[]" at a time.
    const resolveModel = (cols: InanoSQLTableColumn[], useObj: any, nestedModel?: string): any => {
        if (!useObj) return useObj;
        if (nestedModel && nestedModel.length) {
            if (nestedModel.indexOf("[]") !== -1) {
                if (Array.isArray(useObj)) {
                    // peel one array level off the declared type and recurse per element
                    return useObj.map(a => resolveModel(cols, a, nestedModel.slice(0, nestedModel.lastIndexOf("[]"))));
                } else {
                    // declared as an array but the stored value isn't one
                    return [];
                }
            }
        }
        cols.forEach((m) => {
            if (m.model) {
                // nested object column: recurse into its own column model
                useObj[m.key] = resolveModel(m.model, typeof useObj !== "undefined" ? useObj[m.key] : undefined);
            } else {
                const checkType = m.type.replace(/\[\]/gmi, "");
                const custType = customTypes[checkType];
                if (custType && custType.onSelect) { // converting custom types
                    useObj[m.key] = custType.onSelect(useObj[m.key]);
                } else {
                    // converting normal types
                    switch (m.type) {
                        case "date":
                            // stored as a timestamp, surfaced as an ISO string
                            useObj[m.key] = new Date(useObj[m.key]).toISOString();
                            break;
                        default:
                            // useObj[m.key] = useObj[m.key];
                    }
                }
            }
        });
        return useObj;
    };
    // an optional table-level `select` hook may reshape the row first
    const useRow = tableObj.select ? tableObj.select(replaceObj) : replaceObj;
    return resolveModel(nSQL.getDB(selectedDB)._tables[table].columns, useRow);
}
/** Shared do-nothing callback. */
export const noop = (): void => { /* intentionally empty */ };
/**
 * Immediately raise the given value wrapped in an Error.
 */
export const throwErr = (err: any) => { throw new Error(err); };
/**
 * Coerce a value to a number, mapping null and non-numeric input to 0.
 */
export const nan = (input: any): number => {
    if (input === null || isNaN(input)) return 0;
    return parseFloat(input);
}
/**
 * Object.assign, but faster.
 *
 * Deep-clones via JSON round-trip; falsy inputs are returned as-is.
 * (Note: drops functions/undefined values, as JSON serialization always does.)
 *
 * @param {*} obj
 * @returns
 */
export const assign = (obj: any) => {
    if (!obj) return obj;
    return JSON.parse(JSON.stringify(obj));
};
/**
 * Compare two javascript variables for equality.
 * Works with primitives, arrays and objects recursively.
 *
 * @param {*} obj1
 * @param {*} obj2
 * @returns {boolean}
 */
export const objectsEqual = (obj1: any, obj2: any): boolean => {
    // identical references / equal primitives
    if (obj1 === obj2) return true;
    // unequal primitives, or a null/undefined on either side, can never match
    if (typeof obj1 !== "object" || !obj1 || !obj2) return false;
    // structural deep comparison
    return equal(obj1, obj2);
};
// tslint:disable-next-line
export class _nanoSQLQueue {
    // queued items, each paired with an optional per-item processor
    private _items: [any, undefined | ((item: any, complete: () => void, err?: (err: any) => void) => void)][] = [];
    // true while the queue is actively draining
    private _going: boolean = false;
    // set once finished() is called; no further items are expected
    private _done: boolean = false;
    // number of items processed so far
    private _count: number = 0;
    // guards onComplete so it fires at most once
    private _triggeredComplete: boolean = false;

    /**
     * @param processItem default processor for items queued without their own
     * @param onError invoked when a processor reports an error
     * @param onComplete invoked once when the queue is finished and drained
     */
    constructor(
        public processItem?: (item: any, count: number, complete: () => void, error: (err: any) => void) => void,
        public onError?: (err: any) => void,
        public onComplete?: () => void
    ) {
        this._progressBuffer = this._progressBuffer.bind(this);
    }

    // Drain loop: processes one item then re-enters via the item's `next`.
    private _progressBuffer() {
        if (this._triggeredComplete) {
            return;
        }
        // queue has finished
        if (this._done && !this._items.length) {
            this._triggeredComplete = true;
            if (this.onComplete) this.onComplete();
            return;
        }
        // queue has paused
        if (!this._items.length) {
            this._going = false;
            return;
        }
        const next = () => {
            this._count++;
            // break the synchronous call stack every 100 items
            this._count % 100 === 0 ? setFast(this._progressBuffer) : this._progressBuffer();
        };
        // process queue
        const item = this._items.shift() || [];
        if (item[1]) {
            item[1](item[0], next, this.onError ? this.onError : noop);
        } else if (this.processItem) {
            this.processItem(item[0], this._count, next, this.onError ? this.onError : noop);
        }
    }

    // Signal that no more items will be queued; fires onComplete immediately
    // when the queue is already idle and empty.
    public finished() {
        this._done = true;
        if (this._triggeredComplete) {
            return;
        }
        if (!this._going && !this._items.length) {
            this._triggeredComplete = true;
            if (this.onComplete) this.onComplete();
        }
    }

    // Add an item (optionally with its own processor) and start draining if idle.
    public newItem(item: any, processFn?: (item: any, complete: () => void, err?: (error: any) => void) => void) {
        this._items.push([item, processFn]);
        if (!this._going) {
            this._going = true;
            this._progressBuffer();
        }
    }
}
/**
 * Quickly and efficiently fire asynchronous operations in sequence, returns once all operations complete.
 *
 * Falsy results are recorded as 0; a call to `err` rejects the whole chain.
 *
 * @param {any[]} items
 * @param {(item: any, i: number, next: (result?: any) => void) => void} callback
 * @returns {Promise<any[]>}
 */
export const chainAsync = <T>(items: T[], callback: (item: T, i: number, next: (value?: any) => void, err: (err?: any) => void) => void): Promise<any[]> => {
    return new Promise((res, rej) => {
        if (!items || !items.length) {
            res([]);
            return;
        }
        const results: any[] = [];
        let idx = 0;
        const advance = (result?: any) => {
            results.push(result || 0);
            idx++;
            // yield to the event loop every 250 items to avoid starving it
            if (idx % 250 === 0) {
                setFast(() => {
                    step();
                });
            } else {
                step();
            }
        };
        const step = () => {
            if (idx >= items.length) {
                res(results);
                return;
            }
            callback(items[idx], idx, advance, (err) => {
                rej(err);
            });
        };
        step();
    });
};
/**
 * Quickly and efficiently fire asynchronous operations in parallel, returns once all operations are complete.
 *
 * @param {any[]} items
 * @param {(item: any, i: number, done: (result?: any) => void) => void} callback
 * @returns {Promise<any[]>}
 */
export const allAsync = <T>(items: T[], callback: (item: T, i: number, next: (value?: any) => void, err: (err: any) => void) => void): Promise<any[]> => {
    if (!items || !items.length) {
        return Promise.resolve([]);
    }
    const pending = (items || []).map((item, i) => {
        return new Promise((res, rej) => {
            callback(item, i, res, rej);
        });
    });
    return Promise.all(pending);
};
// Browser user agent string; empty when running outside a browser (NodeJS etc).
const ua = typeof window !== "undefined" ? (navigator.userAgent || "") : "";
// Detects iOS device OR Safari running on desktop
export const isSafari: boolean = ua.length > 0 && ((/^((?!chrome|android).)*safari/i.test(ua)) || (/iPad|iPhone|iPod/.test(ua) && !window["MSStream"]));
// Detect Edge or Internet Explorer
export const isMSBrowser: boolean = ua.length > 0 && (ua.indexOf("MSIE ") > 0 || ua.indexOf("Trident/") > 0 || ua.indexOf("Edge/") > 0);
// Detect Android Device
export const isAndroid = /Android/.test(ua);
/**
 * Generate a random 16 bit number using strongest entropy/crypto available.
 *
 * Falls back to Math.random when no crypto source exists.
 *
 * @returns {number}
 */
export const random16Bits = (): number => {
    // weaker Math.random fallback used when no crypto source is available
    const weakRandom = () => Math.round(Math.random() * Math.pow(2, 16));
    if (typeof crypto === "undefined") {
        return weakRandom();
    }
    if (crypto.getRandomValues) { // Browser crypto
        const buf = new Uint16Array(1);
        crypto.getRandomValues(buf);
        return buf[0];
    }
    if (typeof global !== "undefined" && global._crypto.randomBytes) { // NodeJS crypto
        return global._crypto.randomBytes(2).reduce((prev: number, cur: number) => cur * prev);
    }
    return weakRandom();
};
/**
 * Build a throttled wrapper around `func`: while one delayed invocation is
 * pending, further calls are dropped. The call is executed `limit` ms after
 * the first trigger, with `scope` as `this`.
 */
export const throttle = (scope: any, func: any, limit: number) => {
    let pending = false;
    return (...args: any[]) => {
        if (pending) return;
        pending = true;
        setTimeout(() => {
            // run first, THEN clear the flag, so re-entrant calls made by
            // `func` itself are still suppressed
            func.apply(scope, args);
            pending = false;
        }, limit);
    };
};
/**
 * Generate a TimeID for use in the database.
 *
 * Format: zero-padded timestamp (seconds, or milliseconds when `ms` is true),
 * a dash, then a zero-padded random hex seed.
 *
 * @param {boolean} [ms]
 * @returns {string}
 */
export const timeid = (ms?: boolean): string => {
    const divisor = ms ? 1 : 1000;
    const width = ms ? 13 : 10;
    let time = Math.round(Date.now() / divisor).toString();
    while (time.length < width) {
        time = "0" + time;
    }
    let seed = (random16Bits() + random16Bits()).toString(16);
    while (seed.length < 5) {
        seed = "0" + seed;
    }
    return time + "-" + seed;
};
/**
 * See if two arrays intersect.
 *
 * @param {any[]} arr1
 * @param {any[]} arr2
 * @returns {boolean}
 */
export const intersect = (arr1: any[], arr2: any[]): boolean => {
    // missing or empty arrays can never intersect
    if (!arr1 || !arr2 || !arr1.length || !arr2.length) return false;
    // `some` short-circuits on the first shared element, unlike the previous
    // filter().length scan which always walked arr1 completely.
    return arr1.some(item => arr2.indexOf(item) !== -1);
};
/**
 * Cheap, non-cryptographic id: two random hex chunks (each from 0..1024)
 * concatenated. Used for per-query identifiers where collisions are tolerable.
 */
export const fastID = (): string => {
    // was `.map(s => ...)` with an unused parameter; the array only supplies length
    return [0, 0].map(() => Math.round(Math.random() * 1024).toString(16)).join("");
}
/**
 * Generates a valid V4 UUID using the strongest crypto available.
 *
 * @returns {string}
 */
export const uuid = (): string => {
    let result = "";
    for (let i = 0; i < 8; i++) {
        const rand = random16Bits();
        // segment prefix: slot 3 carries the version "4" nibble, slot 4
        // carries the RFC 4122 variant nibble (0x8..0xb); others have none
        let prefix = "";
        if (i === 3) {
            prefix = "4";
        } else if (i === 4) {
            prefix = (rand % 16 & 0x3 | 0x8).toString(16);
        }
        let hex = rand.toString(16);
        while (hex.length < 4) hex = "0" + hex;
        // dashes separate the middle segments (indices 2 through 5)
        const separator = i >= 2 && i <= 5 ? "-" : "";
        result += separator + (prefix + hex).slice(0, 4);
    }
    return result;
};
/**
 * A quick and dirty hashing function, turns a string into a md5 style hash.
 * Stolen from https://github.com/darkskyapp/string-hash
 *
 * @param {string} str
 * @returns {string}
 */
export const hash = (str: string): string => {
    let acc = 5381;
    // walk the string from the end, djb2-style (multiply by 33, xor char code)
    for (let i = str.length - 1; i >= 0; i--) {
        acc = (acc * 33) ^ str.charCodeAt(i);
    }
    // >>> 0 forces an unsigned 32-bit result before hex formatting
    return (acc >>> 0).toString(16);
};
/**
 * Generate a row ID given the primary key type.
 *
 * @param {string} primaryKeyType
 * @param {number} [incrimentValue]
 * @returns {*}
 */
export const generateID = (primaryKeyType: string, incrimentValue?: number): any => {
    const seed = incrimentValue || 1;
    switch (primaryKeyType) {
        // numeric/date key types simply echo the provided increment value
        case "int":
        case "date":
        case "float":
            return seed;
        case "uuid":
            return uuid();
        case "timeId":
            return timeid();
        case "timeIdms":
            return timeid(true);
        default:
            // unknown primary key types produce no id
            return undefined;
    }
};
/**
 * Conform an arbitrary `args` object to a data model — either a named custom
 * type (string form) or an inline "column:type" map — casting each declared
 * column via `cast`. A "*" column copies through any undeclared keys.
 *
 * NOTE(review): several oddities below look like latent bugs; they are flagged
 * inline but deliberately left untouched.
 */
export const cleanArgs2 = (selectedDB: string, args: any, dataModel: { [colAndType: string]: InanoSQLDataModel } | string, nSQL: InanoSQLInstance): any => {
    // NOTE(review): this outer `returnObj` is never read or returned.
    let returnObj = {};
    const conformType = (strType: string, obj: any, dModel: { [colAndType: string]: InanoSQLDataModel } | string): any => {
        if (strType.indexOf("[]") !== -1) {
            const arrayOf = strType.slice(0, strType.lastIndexOf("[]"));
            // value should be array but isn't, cast it to one
            if (!Array.isArray(obj)) return [];
            // we have an array, cast array of types
            return obj.map((v) => conformType(arrayOf, v, dModel));
        }
        if (typeof dModel === "string") {
            // named custom type: look it up in the database's configured types
            let findModel = dModel.replace(/\[\]/gmi, "");
            let typeModel = Object.keys(nSQL.getDB(selectedDB).config.types || {}).reduce((prev, cur) => {
                if (cur === findModel) return (nSQL.getDB(selectedDB).config.types || {})[cur];
                return prev;
            }, undefined);
            if (!typeModel) {
                throw new Error(`Can't find type ${findModel}!`);
            }
            const customType = (cType: string) => {
                if (cType.indexOf("[]") !== -1) {
                    const arrayOf = cType.slice(0, cType.lastIndexOf("[]"));
                    if (!Array.isArray(args)) return [];
                    // NOTE(review): `v` is ignored; every element maps to the
                    // same customType(arrayOf) result — verify intent.
                    return args.map(v => customType(arrayOf));
                }
                if (!typeModel) {
                    throw new Error(`Can't find type ${findModel}!`);
                }
                if (typeModel.model) {
                    return conformType(dModel, args, typeModel.model);
                } else {
                    return obj;
                }
            }
            // NOTE(review): the result of customType() is discarded, so this
            // branch always returns undefined — looks like a missing `return`.
            customType(dModel);
        } else {
            // inline "column:type" map
            let returnObj = {};
            let getOtherCols: boolean = false;
            let definedCols: string[] = [];
            Object.keys(dModel).forEach((colAndType) => {
                const split = colAndType.split(":");
                if (split[0] === "*") {
                    // "*" column: copy through keys that aren't explicitly declared
                    getOtherCols = true;
                } else {
                    definedCols.push(split[0]);
                    returnObj[split[0]] = cast(selectedDB, split[1], obj[split[0]], false, nSQL);
                }
            });
            if (getOtherCols && isObject(obj)) {
                Object.keys(obj).filter(k => definedCols.indexOf(k) === -1).forEach((key) => {
                    returnObj[key] = obj[key];
                });
            }
            return returnObj;
        }
    };
    return conformType(typeof dataModel === "string" ? dataModel : "", args, dataModel);
}
/**
 * Clean the arguments from an object given an array of arguments and their types.
 *
 * Each declaration is "name" or "name:type"; typed declarations are cast with
 * `cast`, untyped ones are copied through (falsy values become undefined).
 *
 * @param {string[]} argDeclarations
 * @param {StdObject<any>} args
 * @returns {StdObject<any>}
 */
export const cleanArgs = (selectedDB: string, argDeclarations: string[], args: { [key: string]: any }, nSQL: InanoSQLInstance): { [key: string]: any } => {
    let a: { [key: string]: any } = {};
    let i = argDeclarations.length;
    // (removed an unused `customTypes` local that needlessly called
    // nSQL.getDB() — its result was never read)
    while (i--) {
        const k2: string[] = argDeclarations[i].split(":");
        if (k2.length > 1) {
            a[k2[0]] = cast(selectedDB, k2[1], args[k2[0]] || undefined, true, nSQL);
        } else {
            a[k2[0]] = args[k2[0]] || undefined;
        }
    }
    return a;
};
/**
 * Determine if a given value is a javascript object or not. Exludes Arrays, Functions, Null, Undefined, etc.
 *
 * @param {*} val
 * @returns {boolean}
 */
export const isObject = (val: any): boolean => {
    const tag = Object.prototype.toString.call(val);
    return tag === "[object Object]";
};
/**
 * Build a comparator that sorts descending by the value at `path` (or by the
 * items themselves when no path is given); `rev` flips the order to ascending.
 */
export const objSort = (path?: string, rev?: boolean) => {
    return (a: any, b: any): number => {
        const left = path ? deepGet(path, a) : a;
        const right = path ? deepGet(path, b) : b;
        const result = left > right ? -1 : 1;
        return rev ? -result : result;
    };
};
/**
 * Recursively resolve function values provided a string and row
 *
 *
 * @param {string} fnString // TRIM(UPPER(column))
 * @param {*} row // {column: " value "}
 * @param {*} prev // aggregate previous value for aggregate functions
 * @returns {InanoSQLFunctionResult}
 * @memberof _nanoSQLQuery
 */
export const execFunction = (query: InanoSQLQuery, fnString: string, row: any, prev: any): InanoSQLFunctionResult => {
    const fnArgs = fnString.match(/\((.*)\)/gmi) as string[];
    if (!fnArgs[0]) return { result: undefined }
    // strip the outer parens, then split on top-level commas only
    const innerArgs = fnArgs[0].substr(1, fnArgs[0].length - 2);
    const args = innerArgs.split(/\,\s?(?![^\(]*\))/).map(s => s.trim());
    const fnName = fnString.split("(").shift() as string;
    // resolve nested function calls first (before the fnName lookup, so nested
    // evaluation still happens even for unknown outer functions)
    const calcArgs = args.map(s => {
        if (s.indexOf("(") === -1) return s;
        const nested = execFunction(query, s, row, prev).result;
        if (typeof nested === "number") return nested;
        if (typeof nested === "string") return '"' + nested + '"';
        return nested;
    });
    if (!query.parent.functions[fnName]) {
        return { result: undefined }
    }
    return query.parent.functions[fnName].call(query, row, prev, ...calcArgs);
}
/**
* Cast a javascript variable to a given type. Supports typescript primitives and more specific types.
*
* @param {string} type
* @param {*} [val]
* @returns {*}
*/
export const cast = (selectedDB: string | undefined, type: string, val: any, allowUknownTypes?: boolean, nSQL?: InanoSQLInstance): any => {
if (type === "any" || type === "blob" || type === "*") return val;
// recursively cast arrays
if (type.indexOf("[]") !== -1) {
const arrayOf = type.slice(0, type.lastIndexOf("[]"));
// value should be array but isn't, cast it to one
if (!Array.isArray(val)) return [];
// we have an array, cast array of types
return val.map((v) => cast(selectedDB, arrayOf, v, allowUknownTypes));
}
if (val === undefined || val === null) return undefined;
const t = typeof val;
const entityMap = {
"&": "&",
"<": "<",
">": ">",
"\"": """,
"'": "'",
"/": "/",
"`": "`",
"=": "="
};
const types = nSQL && selectedDB ? nSQL.getDB(selectedDB).config.types || {} : {};
// custom type found
if (Object.keys(types).indexOf(type) !== -1) {
const typeObj = types[type];
if (typeObj.model) {
if (isObject(val)) {
return Object.keys(typeObj.model).reduce((prev, cur) => {
const key = cur.split(":");
prev[key[0]] = cast(selectedDB, key[1], val[key[0]], allowUknownTypes, nSQL);
return prev;
}, {});
}
return {};
} else if (typeObj.onSelect) {
return typeObj.onSelect(val);
} else {
return undefined;
}
}
const doCast = (castType: string, castVal: any) => {
switch (castType) {
case "safestr": return doCast("string", castVal).replace(/[&<>"'`=\/]/gmi, (s) => entityMap[s]);
case "int": return (t !== "number" || castVal % 1 !== 0) ? Math.round(nan(castVal)) : castVal;
case "number":
case "float": return t !== "number" ? nan(castVal) : castVal;
case "array": return Array.isArray(castVal) ? castVal : [];
case "date":
case "uuid":
case "timeId":
case "timeIdms":
case "string": return t !== "string" ? String(castVal) : castVal;
case "object":
case "obj":
case "map": return isObject(castVal) ? castVal : undefined;
case "boolean":
case "bool": return castVal === true || castVal === 1 ? true : false;
}
// doesn't match known types, return null;
return allowUknownTypes ? val : null;
};
if (val === undefined || val === null) return undefined;
const newVal = doCast(String(type || "").toLowerCase(), val);
// force numerical values to be a number and not NaN.
if (newVal !== undefined && ["int", "float", "number"].indexOf(type) !== -1) {
return isNaN(newVal) ? 0 : newVal;
}
return newVal;
};
/** Convert an angle in radians to degrees. */
export const rad2deg = (rad: number): number => {
    const scaled = rad * 180;
    return scaled / Math.PI;
};
/** Convert an angle in degrees to radians. */
export const deg2rad = (deg: number): number => {
    const radiansPerDegree = Math.PI / 180;
    return deg * radiansPerDegree;
};
/**
 * "As the crow flies" or Haversine formula, used to calculate the distance between two points on a sphere.
 *
 * The unit used for the radius determines the unit of the answer; with the
 * default radius (Earth's mean radius in km) the distance is in km.
 *
 * @param {number} lat1 - Latitude of the first point, in degrees.
 * @param {number} lon1 - Longitude of the first point, in degrees.
 * @param {number} lat2 - Latitude of the second point, in degrees.
 * @param {number} lon2 - Longitude of the second point, in degrees.
 * @param {number} radius - Sphere radius, 6371 (km) by default.
 * @returns {number}
 */
export const crowDistance = (lat1: number, lon1: number, lat2: number, lon2: number, radius: number = 6371): number => {
    // Same conversion deg2rad performs, inlined so the formula is self-contained.
    const toRad = (deg: number): number => deg * (Math.PI / 180);
    const dLat = toRad(lat2 - lat1);
    const dLon = toRad(lon2 - lon1);
    const sinHalfLat = Math.sin(dLat / 2);
    const sinHalfLon = Math.sin(dLon / 2);
    // Haversine term: sin²(Δφ/2) + cosφ1·cosφ2·sin²(Δλ/2)
    const a = (sinHalfLat * sinHalfLat)
        + (Math.cos(toRad(lat1)) * Math.cos(toRad(lat2)) * sinHalfLon * sinHalfLon);
    const angularDistance = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
    return radius * angularDistance;
};
/** Levenshtein edit distance between two strings; delegates to the `leven` helper. */
export const levenshtein = (word1: string, word2: string): number => leven(word1, word2);
// Memoized resolvePath results, keyed by the original path string.
const objectPathCache: {
    [pathQuery: string]: string[];
} = {};
/**
 * Turn a path into an array of keys,
 * ie value[hey][there].length => [value, hey, there, length].
 * Results are cached; every caller receives a fresh copy of the array.
 */
export const resolvePath = (pathQuery: string): string[] => {
    if (!pathQuery) return [];
    const cached = objectPathCache[pathQuery];
    if (cached) {
        return cached.slice();
    }
    let path: string[];
    if (pathQuery.indexOf("[") === -1) {
        // handle simple dot paths like "users.meta.value.length"
        path = pathQuery.split(".");
    } else {
        // handle complex mix of dots and brackets like "users.value[meta][value].length"
        path = pathQuery.split(/\.|\[/gmi).map(v => v.replace(/\]/gmi, ""));
    }
    objectPathCache[pathQuery] = path;
    return path.slice();
};
// Matches a fully quoted literal like "value" or 'value'.
// NOTE: exported for backwards compatibility. The `g` flag makes this regex
// stateful (lastIndex persists between exec() calls), so getFnValue resets it
// before every use.
export const fnRegex = /^[\"|\'](.*)[\"|\']$/gmi;
/**
 * Resolve a function argument: numbers pass through as-is, quoted strings are
 * unwrapped, and anything else is treated as a path into the row object.
 *
 * @param {*} row - Row to resolve paths against.
 * @param {string} valueOrPath - Literal value or object path.
 * @returns {*}
 */
export const getFnValue = (row: any, valueOrPath: string): any => {
    if (typeof valueOrPath === "number") return valueOrPath;
    // Reset the shared global regex; without this, exec() resumed from the
    // previous match's end and failed on every second call.
    fnRegex.lastIndex = 0;
    const regexResult = fnRegex.exec(valueOrPath);
    return regexResult ? regexResult[1] : deepGet(valueOrPath, row);
};
/**
 * Recursively freeze a javascript object to prevent it from being modified.
 *
 * @param {*} obj
 * @returns The same object, deeply frozen.
 */
export const deepFreeze = (obj: any) => {
    const ownNames = Object.getOwnPropertyNames(obj || {});
    for (const name of ownNames) {
        const prop = obj[name];
        // Freeze nested objects first, then the parent below.
        if (prop !== null && typeof prop === "object") {
            obj[name] = deepFreeze(prop);
        }
    }
    // Freeze self (no-op if already frozen)
    return Object.freeze(obj);
};
/**
 * Write a value into an object at the location described by a path, creating
 * intermediate objects/arrays along the way as needed. Mutates `object`.
 *
 * @param {string | string[]} pathQuery - Path string (resolved via resolvePath) or pre-split key array.
 * @param {*} object - Target object, mutated in place.
 * @param {*} value - Value to write at the end of the path.
 * @returns {*} The same (mutated) object.
 */
export const deepSet = (pathQuery: string | string[], object: any, value: any): any => {
    const write = (keys: string[], idx: number, target: any) => {
        const key = keys[idx];
        if (!keys[idx + 1]) { // end of path: write the value
            target[key] = value;
            return;
        }
        const existing = target[key];
        if (!existing || (!Array.isArray(existing) && !isObject(existing))) {
            // nested container missing: numeric next-keys imply an array, otherwise an object
            target[key] = isNaN(keys[idx + 1] as any) ? {} : [];
        }
        write(keys, idx + 1, target[key as string]);
    };
    write(Array.isArray(pathQuery) ? pathQuery : resolvePath(pathQuery), 0, object);
    return object;
};
/**
 * Take an object and a string describing a path like "value.length" or "val[length]" and safely get that value in the object.
 *
 * objQuery("hello", {hello: 2}) => 2
 * objQuery("hello.length", {hello: [0]}) => 1
 * objQuery("hello[0]", {hello: ["there"]}) => "there"
 * objQuery("hello[0].length", {hello: ["there"]}) => 5
 * objQuery("hello.color.length", {"hello.color": "blue"}) => 4
 *
 * @param {string | string[]} pathQuery - Path string (resolved via resolvePath) or pre-split key array.
 * @param {*} object - Object to read from.
 * @returns {*}
 */
export const deepGet = (pathQuery: string | string[], object: any): any => {
    const keys = Array.isArray(pathQuery) ? pathQuery : resolvePath(pathQuery);
    // Walk down one key at a time; stop as soon as either the path is
    // exhausted or the current value is falsy (matching the recursive original).
    let current: any = object;
    let idx = 0;
    while (keys[idx] && current) {
        current = current[keys[idx] as string];
        idx++;
    }
    return current;
};
/** Return a mutable version of obj: frozen objects are copied via assign(), everything else passes through untouched. */
export const maybeAssign = (obj: any): any => {
    if (Object.isFrozen(obj)) {
        return assign(obj);
    }
    return obj;
};
const fastApply = (args) => {
return args[0].apply(null, Array.prototype.slice.call(args, 1));
};
export const setFast = typeof Promise !== "undefined" ? (...args: any[]) => {
Promise.resolve().then(() => {
fastApply(args);
})
} : (...args: any[]) => {
setTimeout(() => {
fastApply(args);
}, 0);
} | the_stack |
import {dispatchFakeEvent} from '@angular/cdk/testing';
import {Component, ViewChild} from '@angular/core';
import {async, ComponentFixture, fakeAsync, flush, TestBed, tick} from '@angular/core/testing';
import {FormsModule} from '@angular/forms';
import {By} from '@angular/platform-browser';
import {NoopAnimationsModule} from '@angular/platform-browser/animations';
import {CdkTextareaAutosize} from './autosize';
import {TextFieldModule} from './text-field-module';
// Unit tests for the CdkTextareaAutosize directive. Each spec creates a host
// component, mutates its content/row bindings, and asserts on the resulting
// textarea height styles (min-height / max-height / height vs. scrollHeight).
describe('CdkTextareaAutosize', () => {
  let fixture: ComponentFixture<AutosizeTextAreaWithContent>;
  let textarea: HTMLTextAreaElement;
  let autosize: CdkTextareaAutosize;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      imports: [
        FormsModule,
        TextFieldModule,
        NoopAnimationsModule,
      ],
      declarations: [
        AutosizeTextAreaWithContent,
        AutosizeTextAreaWithValue,
        AutosizeTextareaWithNgModel,
        AutosizeTextareaWithoutAutosize,
      ],
    });
    TestBed.compileComponents();
  }));

  beforeEach(() => {
    // Default fixture used by most specs; individual tests create their own
    // fixture when they need a different host or a different setup order.
    fixture = TestBed.createComponent(AutosizeTextAreaWithContent);
    fixture.detectChanges();
    textarea = fixture.nativeElement.querySelector('textarea');
    autosize = fixture.debugElement.query(
      By.directive(CdkTextareaAutosize)).injector.get<CdkTextareaAutosize>(CdkTextareaAutosize);
  });

  it('should resize the textarea based on its content', () => {
    let previousHeight = textarea.clientHeight;
    fixture.componentInstance.content = `
    Once upon a midnight dreary, while I pondered, weak and weary,
    Over many a quaint and curious volume of forgotten lore—
    While I nodded, nearly napping, suddenly there came a tapping,
    As of some one gently rapping, rapping at my chamber door.
    “’Tis some visitor,” I muttered, “tapping at my chamber door—
    Only this and nothing more.”`;
    // Manually call resizeToFitContent instead of faking an `input` event.
    fixture.detectChanges();
    autosize.resizeToFitContent();
    expect(textarea.clientHeight)
        .toBeGreaterThan(previousHeight, 'Expected textarea to have grown with added content.');
    expect(textarea.clientHeight)
        .toBe(textarea.scrollHeight, 'Expected textarea height to match its scrollHeight');
    previousHeight = textarea.clientHeight;
    // Grow the content a second time to verify resizing is not a one-shot.
    fixture.componentInstance.content += `
    Ah, distinctly I remember it was in the bleak December;
    And each separate dying ember wrought its ghost upon the floor.
    Eagerly I wished the morrow;—vainly I had sought to borrow
    From my books surcease of sorrow—sorrow for the lost Lenore—
    For the rare and radiant maiden whom the angels name Lenore—
    Nameless here for evermore.`;
    fixture.detectChanges();
    autosize.resizeToFitContent();
    expect(textarea.clientHeight)
        .toBeGreaterThan(previousHeight, 'Expected textarea to have grown with added content.');
    expect(textarea.clientHeight)
        .toBe(textarea.scrollHeight, 'Expected textarea height to match its scrollHeight');
  });

  it('should set a min-height based on minRows', () => {
    expect(textarea.style.minHeight).toBeFalsy();
    fixture.componentInstance.minRows = 4;
    fixture.detectChanges();
    expect(textarea.style.minHeight).toBeDefined('Expected a min-height to be set via minRows.');
    let previousMinHeight = parseInt(textarea.style.minHeight as string);
    fixture.componentInstance.minRows = 6;
    fixture.detectChanges();
    expect(parseInt(textarea.style.minHeight as string))
        .toBeGreaterThan(previousMinHeight, 'Expected increased min-height with minRows increase.');
  });

  it('should set a max-height based on maxRows', () => {
    expect(textarea.style.maxHeight).toBeFalsy();
    fixture.componentInstance.maxRows = 4;
    fixture.detectChanges();
    expect(textarea.style.maxHeight).toBeDefined('Expected a max-height to be set via maxRows.');
    let previousMaxHeight = parseInt(textarea.style.maxHeight as string);
    fixture.componentInstance.maxRows = 6;
    fixture.detectChanges();
    expect(parseInt(textarea.style.maxHeight as string))
        .toBeGreaterThan(previousMaxHeight, 'Expected increased max-height with maxRows increase.');
  });

  it('should reduce textarea height when minHeight decreases', () => {
    expect(textarea.style.minHeight).toBeFalsy();
    fixture.componentInstance.minRows = 6;
    fixture.detectChanges();
    expect(textarea.style.minHeight).toBeDefined('Expected a min-height to be set via minRows.');
    let previousHeight = parseInt(textarea.style.height!);
    fixture.componentInstance.minRows = 3;
    fixture.detectChanges();
    expect(parseInt(textarea.style.height!))
        .toBeLessThan(previousHeight, 'Expected decreased height with minRows decrease.');
  });

  it('should export the cdkAutosize reference', () => {
    // The template exports the directive as #autosize="cdkTextareaAutosize".
    expect(fixture.componentInstance.autosize).toBeTruthy();
    expect(fixture.componentInstance.autosize.resizeToFitContent).toBeTruthy();
  });

  it('should initially set the rows of a textarea to one', () => {
    expect(textarea.rows)
        .toBe(1, 'Expected the directive to initially set the rows property to one.');
    fixture.componentInstance.minRows = 1;
    fixture.detectChanges();
    expect(textarea.rows)
        .toBe(1, 'Expected the textarea to have the rows property set to one.');
    const previousMinHeight = parseInt(textarea.style.minHeight as string);
    fixture.componentInstance.minRows = 2;
    fixture.detectChanges();
    // The directive sizes via CSS min-height, never via the `rows` attribute.
    expect(textarea.rows).toBe(1, 'Expected the rows property to be set to one. ' +
        'The amount of rows will be specified using CSS.');
    expect(parseInt(textarea.style.minHeight as string))
        .toBeGreaterThan(previousMinHeight, 'Expected the textarea to grow to two rows.');
  });

  it('should calculate the proper height based on the specified amount of max rows', () => {
    fixture.componentInstance.content = [1, 2, 3, 4, 5, 6, 7, 8].join('\n');
    fixture.detectChanges();
    autosize.resizeToFitContent();
    expect(textarea.clientHeight)
        .toBe(textarea.scrollHeight, 'Expected textarea to not have a vertical scrollbar.');
    fixture.componentInstance.maxRows = 5;
    fixture.detectChanges();
    // Capped at 5 rows, the 8 lines of content must now overflow.
    expect(textarea.clientHeight)
        .toBeLessThan(textarea.scrollHeight, 'Expected textarea to have a vertical scrollbar.');
  });

  it('should properly resize to content on init', () => {
    // Manually create the test component in this test, because in this test the first change
    // detection should be triggered after a multiline content is set.
    fixture = TestBed.createComponent(AutosizeTextAreaWithContent);
    textarea = fixture.nativeElement.querySelector('textarea');
    autosize = fixture.debugElement.query(By.css('textarea'))
        .injector.get<CdkTextareaAutosize>(CdkTextareaAutosize);
    fixture.componentInstance.content = `
    Line
    Line
    Line
    Line
    Line`;
    fixture.detectChanges();
    expect(textarea.clientHeight)
        .toBe(textarea.scrollHeight, 'Expected textarea height to match its scrollHeight');
  });

  it('should resize when an associated form control value changes', fakeAsync(() => {
    const fixtureWithForms = TestBed.createComponent(AutosizeTextareaWithNgModel);
    textarea = fixtureWithForms.nativeElement.querySelector('textarea');
    fixtureWithForms.detectChanges();
    const previousHeight = textarea.clientHeight;
    fixtureWithForms.componentInstance.model = `
        And the silken, sad, uncertain rustling of each purple curtain
    Thrilled me—filled me with fantastic terrors never felt before;
        So that now, to still the beating of my heart, I stood repeating
    “’Tis some visitor entreating entrance at my chamber door—
    Some late visitor entreating entrance at my chamber door;—
                This it is and nothing more.” `;
    fixtureWithForms.detectChanges();
    // flush() lets the ngModel write and the directive's async resize settle.
    flush();
    fixtureWithForms.detectChanges();
    expect(textarea.clientHeight)
        .toBeGreaterThan(previousHeight, 'Expected increased height when ngModel is updated.');
  }));

  it('should resize when the textarea value is changed programmatically', fakeAsync(() => {
    const previousHeight = textarea.clientHeight;
    textarea.value = `
    How much wood would a woodchuck chuck
    if a woodchuck could chuck wood?
    `;
    fixture.detectChanges();
    flush();
    fixture.detectChanges();
    expect(textarea.clientHeight)
        .toBeGreaterThan(previousHeight, 'Expected the textarea height to have increased.');
  }));

  it('should trigger a resize when the window is resized', fakeAsync(() => {
    spyOn(autosize, 'resizeToFitContent');
    dispatchFakeEvent(window, 'resize');
    // The resize handler is debounced; 16ms covers one animation frame.
    tick(16);
    expect(autosize.resizeToFitContent).toHaveBeenCalled();
  }));

  it('should not trigger a resize when it is disabled', fakeAsync(() => {
    const fixtureWithoutAutosize = TestBed.createComponent(AutosizeTextareaWithoutAutosize);
    textarea = fixtureWithoutAutosize.nativeElement.querySelector('textarea');
    autosize = fixtureWithoutAutosize.debugElement.query(By.css('textarea'))
        .injector.get<CdkTextareaAutosize>(CdkTextareaAutosize);
    fixtureWithoutAutosize.detectChanges();
    const previousHeight = textarea.clientHeight;
    fixtureWithoutAutosize.componentInstance.content = `
    Line
    Line
    Line
    Line
    Line`;
    // While disabled, change detection alone must not resize the textarea.
    fixtureWithoutAutosize.detectChanges();
    expect(textarea.clientHeight)
        .toEqual(previousHeight, 'Expected textarea to still have the same size.');
    expect(textarea.clientHeight)
        .toBeLessThan(textarea.scrollHeight, 'Expected textarea to a have scrollbar.');
    autosize.enabled = true;
    fixtureWithoutAutosize.detectChanges();
    expect(textarea.clientHeight)
        .toBeGreaterThan(previousHeight,
            'Expected textarea to have grown after enabling autosize.');
    expect(textarea.clientHeight)
        .toBe(textarea.scrollHeight, 'Expected textarea not to have a scrollbar');
    autosize.enabled = false;
    fixtureWithoutAutosize.detectChanges();
    expect(textarea.clientHeight)
        .toEqual(previousHeight, 'Expected textarea to have the original size.');
    expect(textarea.clientHeight)
        .toBeLessThan(textarea.scrollHeight, 'Expected textarea to have a scrollbar.');
  }));
});
// Styles to reset padding and border to make measurement comparisons easier.
// Shared by every test host so clientHeight/scrollHeight reflect content only.
const textareaStyleReset = `
    textarea {
      padding: 0;
      border: none;
      overflow: auto;
    }`;
// Host with projected text content; minRows/maxRows bind to the directive's
// cdkAutosizeMinRows / cdkAutosizeMaxRows inputs.
@Component({
  template: `
    <textarea cdkTextareaAutosize [cdkAutosizeMinRows]="minRows" [cdkAutosizeMaxRows]="maxRows"
        #autosize="cdkTextareaAutosize">{{content}}</textarea>`,
  styles: [textareaStyleReset],
})
class AutosizeTextAreaWithContent {
  // Directive instance grabbed via the #autosize export.
  @ViewChild('autosize') autosize: CdkTextareaAutosize;
  minRows: number | null = null;
  maxRows: number | null = null;
  content: string = '';
}
// Host driving the textarea through the [value] property binding.
@Component({
  template: `<textarea cdkTextareaAutosize [value]="value"></textarea>`,
  styles: [textareaStyleReset],
})
class AutosizeTextAreaWithValue {
  value: string = '';
}
// Host driving the textarea through an ngModel form control.
@Component({
  template: `<textarea cdkTextareaAutosize [(ngModel)]="model"></textarea>`,
  styles: [textareaStyleReset],
})
class AutosizeTextareaWithNgModel {
  model = '';
}
// Host with the directive explicitly disabled via [cdkTextareaAutosize]="false".
@Component({
  template: `<textarea [cdkTextareaAutosize]="false">{{content}}</textarea>`,
  styles: [textareaStyleReset],
})
class AutosizeTextareaWithoutAutosize {
  content: string = '';
}
import {
Circle,
Ellipse,
PI_2,
Point,
Polygon,
Rectangle,
RoundedRectangle,
Matrix,
SHAPES,
} from '@pixi/math';
import { Texture, UniformGroup, State, Renderer, BatchDrawCall, Shader } from '@pixi/core';
import { BezierUtils, QuadraticUtils, ArcUtils } from './utils';
import { hex2rgb } from '@pixi/utils';
import { GraphicsGeometry } from './GraphicsGeometry';
import { FillStyle } from './styles/FillStyle';
import { LineStyle } from './styles/LineStyle';
import { BLEND_MODES } from '@pixi/constants';
import { Container } from '@pixi/display';
import type { IShape, IPointData } from '@pixi/math';
import type { IDestroyOptions } from '@pixi/display';
import { LINE_JOIN, LINE_CAP } from './const';
/**
 * Batch element computed from Graphics geometry; these are what the
 * renderer's batch system consumes when drawing a Graphics object.
 */
export interface IGraphicsBatchElement {
    vertexData: Float32Array;
    blendMode: BLEND_MODES;
    indices: Uint16Array | Uint32Array;
    uvs: Float32Array;
    alpha: number;
    worldAlpha: number;
    _batchRGB: number[];
    _tintRGB: number;
    _texture: Texture;
}
/** Options accepted when setting a fill style (color/alpha, optional texture and texture matrix). */
export interface IFillStyleOptions {
    color?: number;
    alpha?: number;
    texture?: Texture;
    matrix?: Matrix;
}
/** Options accepted when setting a line style; extends fill options with stroke geometry settings. */
export interface ILineStyleOptions extends IFillStyleOptions {
    width?: number;
    // 0 = inner, 0.5 = middle, 1 = outer (see lineStyle docs below).
    alignment?: number;
    native?: boolean;
    cap?: LINE_CAP;
    join?: LINE_JOIN;
    miterLimit?: number;
}
// Reusable 3-component scratch buffer (module-level to avoid per-call allocation).
const temp = new Float32Array(3);
// a default shaders map used by graphics..
const DEFAULT_SHADERS: {[key: string]: Shader} = {};
// Declaration merge: lets other packages augment Graphics through GlobalMixins.
export interface Graphics extends GlobalMixins.Graphics, Container {}
/**
* The Graphics class is primarily used to render primitive shapes such as lines, circles and
* rectangles to the display, and to color and fill them. However, you can also use a Graphics
* object to build a list of primitives to use as a mask, or as a complex hitArea.
*
* Please note that due to legacy naming conventions, the behavior of some functions in this class
* can be confusing. Each call to `drawRect()`, `drawPolygon()`, etc. actually stores that primitive
* in the Geometry class's GraphicsGeometry object for later use in rendering or hit testing - the
* functions do not directly draw anything to the screen. Similarly, the `clear()` function doesn't
* change the screen, it simply resets the list of primitives, which can be useful if you want to
* rebuild the contents of an existing Graphics object.
*
* Once a GraphicsGeometry list is built, you can re-use it in other Geometry objects as
* an optimization, by passing it into a new Geometry object's constructor. Because of this
* ability, it's important to call `destroy()` on Geometry objects once you are done with them, to
* properly dereference each GraphicsGeometry and prevent memory leaks.
*
* @class
* @extends PIXI.Container
* @memberof PIXI
*/
export class Graphics extends Container
{
    /**
     * Temporary point to use for containsPoint
     *
     * @static
     * @private
     */
    static _TEMP_POINT = new Point();
    /**
     * Represents the vertex and fragment shaders that processes the geometry and runs on the GPU.
     * Can be shared between multiple Graphics objects.
     */
    public shader: Shader = null;
    /** Renderer plugin for batching */
    public pluginName = 'batch';
    /**
     * Current path
     *
     * @member {PIXI.Polygon}
     * @readonly
     */
    public currentPath: Polygon = null;
    /**
     * A collections of batches! These can be drawn by the renderer batch system.
     *
     * @member {PIXI.IGraphicsBatchElement[]}
     */
    protected batches: Array<IGraphicsBatchElement> = [];
    /** Update dirty for limiting calculating tints for batches. */
    protected batchTint = -1;
    /** Update dirty for limiting calculating batches. */
    protected batchDirty = -1;
    /** Copy of the object vertex data. */
    protected vertexData: Float32Array = null;
    /**
     * Current fill style
     *
     * @member {PIXI.FillStyle}
     */
    protected _fillStyle: FillStyle = new FillStyle();
    /**
     * Current line style
     *
     * @member {PIXI.LineStyle}
     */
    protected _lineStyle: LineStyle = new LineStyle();
    /**
     * Current shape transform matrix.
     *
     * @member {PIXI.Matrix}
     */
    protected _matrix: Matrix = null;
    /** Current hole mode is enabled. */
    protected _holeMode = false;
    // Transform version stamp; initialized to -1 in the constructor.
    // NOTE(review): consumer not visible in this excerpt — presumably used to
    // detect stale vertexData; confirm against the render path.
    protected _transformID: number;
    // Backing store for the `tint` accessor.
    protected _tint: number;
    /**
     * Represents the WebGL state the Graphics required to render, excludes shader and geometry. E.g.,
     * blend mode, culling, depth testing, direction of rendering triangles, backface, etc.
     *
     * @member {PIXI.State}
     */
    private state: State = State.for2d();
    // Backing geometry; exposed (read-only) through the `geometry` getter below.
    private _geometry: GraphicsGeometry;
    /**
     * Includes vertex positions, face indices, normals, colors, UVs, and
     * custom attributes within buffers, reducing the cost of passing all
     * this data to the GPU. Can be shared between multiple Mesh or Graphics objects.
     *
     * @member {PIXI.GraphicsGeometry}
     * @readonly
     */
    public get geometry(): GraphicsGeometry
    {
        return this._geometry;
    }
    /**
     * @param {PIXI.GraphicsGeometry} [geometry=null] - Geometry to use, if omitted
     *        will create a new GraphicsGeometry instance.
     */
    constructor(geometry: GraphicsGeometry = null)
    {
        super();
        this._geometry = geometry || new GraphicsGeometry();
        // Geometry can be shared across Graphics instances; the reference
        // count tracks how many owners it currently has.
        this._geometry.refCount++;
        /**
         * When cacheAsBitmap is set to true the graphics object will be rendered as if it was a sprite.
         * This is useful if your graphics element does not change often, as it will speed up the rendering
         * of the object in exchange for taking up texture memory. It is also useful if you need the graphics
         * object to be anti-aliased, because it will be rendered using canvas. This is not recommended if
         * you are constantly redrawing the graphics element.
         *
         * @name cacheAsBitmap
         * @member {boolean}
         * @memberof PIXI.Graphics#
         * @default false
         */
        this._transformID = -1;
        // Set default
        this.tint = 0xFFFFFF;
        this.blendMode = BLEND_MODES.NORMAL;
    }
/**
* Creates a new Graphics object with the same values as this one.
* Note that only the geometry of the object is cloned, not its transform (position,scale,etc)
*
* @return {PIXI.Graphics} A clone of the graphics object
*/
public clone(): Graphics
{
this.finishPoly();
return new Graphics(this._geometry);
}
    /**
     * The blend mode to be applied to the graphic shape. Apply a value of
     * `PIXI.BLEND_MODES.NORMAL` to reset the blend mode. Note that, since each
     * primitive in the GraphicsGeometry list is rendered sequentially, modes
     * such as `PIXI.BLEND_MODES.ADD` and `PIXI.BLEND_MODES.MULTIPLY` will
     * be applied per-primitive.
     *
     * Stored on this object's WebGL render state rather than a plain field.
     *
     * @member {number}
     * @default PIXI.BLEND_MODES.NORMAL;
     * @see PIXI.BLEND_MODES
     */
    public set blendMode(value: BLEND_MODES)
    {
        this.state.blendMode = value;
    }
    public get blendMode(): BLEND_MODES
    {
        return this.state.blendMode;
    }
    /**
     * The tint applied to each graphic shape. This is a hex value. A value of
     * 0xFFFFFF will remove any tint effect.
     *
     * Plain backing-field accessor; assignment has no immediate side effects.
     *
     * @member {number}
     * @default 0xFFFFFF
     */
    public get tint(): number
    {
        return this._tint;
    }
    public set tint(value: number)
    {
        this._tint = value;
    }
    /**
     * The current fill style.
     *
     * Returns the live internal style object (not a copy).
     *
     * @member {PIXI.FillStyle}
     * @readonly
     */
    public get fill(): FillStyle
    {
        return this._fillStyle;
    }
    /**
     * The current line style.
     *
     * Returns the live internal style object (not a copy); it is the object
     * that lineStyle()/lineTextureStyle() write into.
     *
     * @member {PIXI.LineStyle}
     * @readonly
     */
    public get line(): LineStyle
    {
        return this._lineStyle;
    }
/**
* Specifies the line style used for subsequent calls to Graphics methods such as the lineTo()
* method or the drawCircle() method.
*
* @param {number} [width=0] - width of the line to draw, will update the objects stored style
* @param {number} [color=0x0] - color of the line to draw, will update the objects stored style
* @param {number} [alpha=1] - alpha of the line to draw, will update the objects stored style
* @param {number} [alignment=0.5] - alignment of the line to draw, (0 = inner, 0.5 = middle, 1 = outer).
* WebGL only.
* @param {boolean} [native=false] - If true the lines will be draw using LINES instead of TRIANGLE_STRIP
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public lineStyle(width: number, color?: number, alpha?: number, alignment?: number, native?: boolean): this;
/**
* Specifies the line style used for subsequent calls to Graphics methods such as the lineTo()
* method or the drawCircle() method.
*
* @param {object} [options] - Line style options
* @param {number} [options.width=0] - width of the line to draw, will update the objects stored style
* @param {number} [options.color=0x0] - color of the line to draw, will update the objects stored style
* @param {number} [options.alpha=1] - alpha of the line to draw, will update the objects stored style
* @param {number} [options.alignment=0.5] - alignment of the line to draw, (0 = inner, 0.5 = middle, 1 = outer).
* WebGL only.
* @param {boolean} [options.native=false] - If true the lines will be draw using LINES instead of TRIANGLE_STRIP
* @param {PIXI.LINE_CAP}[options.cap=PIXI.LINE_CAP.BUTT] - line cap style
* @param {PIXI.LINE_JOIN}[options.join=PIXI.LINE_JOIN.MITER] - line join style
* @param {number}[options.miterLimit=10] - miter limit ratio
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public lineStyle(options?: ILineStyleOptions): this;
public lineStyle(options: ILineStyleOptions | number = null,
color = 0x0, alpha = 1, alignment = 0.5, native = false): this
{
// Support non-object params: (width, color, alpha, alignment, native)
if (typeof options === 'number')
{
options = { width: options, color, alpha, alignment, native } as ILineStyleOptions;
}
return this.lineTextureStyle(options);
}
/**
* Like line style but support texture for line fill.
*
* @param {object} [options] - Collection of options for setting line style.
* @param {number} [options.width=0] - width of the line to draw, will update the objects stored style
* @param {PIXI.Texture} [options.texture=PIXI.Texture.WHITE] - Texture to use
* @param {number} [options.color=0x0] - color of the line to draw, will update the objects stored style.
* Default 0xFFFFFF if texture present.
* @param {number} [options.alpha=1] - alpha of the line to draw, will update the objects stored style
* @param {PIXI.Matrix} [options.matrix=null] - Texture matrix to transform texture
* @param {number} [options.alignment=0.5] - alignment of the line to draw, (0 = inner, 0.5 = middle, 1 = outer).
* WebGL only.
* @param {boolean} [options.native=false] - If true the lines will be draw using LINES instead of TRIANGLE_STRIP
* @param {PIXI.LINE_CAP}[options.cap=PIXI.LINE_CAP.BUTT] - line cap style
* @param {PIXI.LINE_JOIN}[options.join=PIXI.LINE_JOIN.MITER] - line join style
* @param {number}[options.miterLimit=10] - miter limit ratio
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public lineTextureStyle(options: ILineStyleOptions): this
{
// Apply defaults
options = Object.assign({
width: 0,
texture: Texture.WHITE,
color: (options && options.texture) ? 0xFFFFFF : 0x0,
alpha: 1,
matrix: null,
alignment: 0.5,
native: false,
cap: LINE_CAP.BUTT,
join: LINE_JOIN.MITER,
miterLimit: 10,
}, options);
if (this.currentPath)
{
this.startPoly();
}
const visible = options.width > 0 && options.alpha > 0;
if (!visible)
{
this._lineStyle.reset();
}
else
{
if (options.matrix)
{
options.matrix = options.matrix.clone();
options.matrix.invert();
}
Object.assign(this._lineStyle, { visible }, options);
}
return this;
}
/**
* Start a polygon object internally
* @protected
*/
protected startPoly(): void
{
if (this.currentPath)
{
const points = this.currentPath.points;
const len = this.currentPath.points.length;
if (len > 2)
{
this.drawShape(this.currentPath);
this.currentPath = new Polygon();
this.currentPath.closeStroke = false;
this.currentPath.points.push(points[len - 2], points[len - 1]);
}
}
else
{
this.currentPath = new Polygon();
this.currentPath.closeStroke = false;
}
}
/**
* Finish the polygon object.
* @protected
*/
finishPoly(): void
{
if (this.currentPath)
{
if (this.currentPath.points.length > 2)
{
this.drawShape(this.currentPath);
this.currentPath = null;
}
else
{
this.currentPath.points.length = 0;
}
}
}
/**
* Moves the current drawing position to x, y.
*
* @param {number} x - the X coordinate to move to
* @param {number} y - the Y coordinate to move to
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public moveTo(x: number, y: number): this
{
this.startPoly();
this.currentPath.points[0] = x;
this.currentPath.points[1] = y;
return this;
}
/**
* Draws a line using the current line style from the current drawing position to (x, y);
* The current drawing position is then set to (x, y).
*
* @param {number} x - the X coordinate to draw to
* @param {number} y - the Y coordinate to draw to
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public lineTo(x: number, y: number): this
{
if (!this.currentPath)
{
this.moveTo(0, 0);
}
// remove duplicates..
const points = this.currentPath.points;
const fromX = points[points.length - 2];
const fromY = points[points.length - 1];
if (fromX !== x || fromY !== y)
{
points.push(x, y);
}
return this;
}
/**
* Initialize the curve
*
* @param {number} [x=0]
* @param {number} [y=0]
*/
protected _initCurve(x = 0, y = 0): void
{
if (this.currentPath)
{
if (this.currentPath.points.length === 0)
{
this.currentPath.points = [x, y];
}
}
else
{
this.moveTo(x, y);
}
}
/**
* Calculate the points for a quadratic bezier curve and then draws it.
* Based on: https://stackoverflow.com/questions/785097/how-do-i-implement-a-bezier-curve-in-c
*
* @param {number} cpX - Control point x
* @param {number} cpY - Control point y
* @param {number} toX - Destination point x
* @param {number} toY - Destination point y
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public quadraticCurveTo(cpX: number, cpY: number, toX: number, toY: number): this
{
this._initCurve();
const points = this.currentPath.points;
if (points.length === 0)
{
this.moveTo(0, 0);
}
QuadraticUtils.curveTo(cpX, cpY, toX, toY, points);
return this;
}
/**
* Calculate the points for a bezier curve and then draws it.
*
* @param {number} cpX - Control point x
* @param {number} cpY - Control point y
* @param {number} cpX2 - Second Control point x
* @param {number} cpY2 - Second Control point y
* @param {number} toX - Destination point x
* @param {number} toY - Destination point y
* @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
*/
public bezierCurveTo(cpX: number, cpY: number, cpX2: number, cpY2: number, toX: number, toY: number): this
{
this._initCurve();
BezierUtils.curveTo(cpX, cpY, cpX2, cpY2, toX, toY, this.currentPath.points);
return this;
}
/**
 * The arcTo() method creates an arc/curve between two tangents on the canvas.
 *
 * "borrowed" from https://code.google.com/p/fxcanvas/ - thanks google!
 *
 * @param {number} x1 - The x-coordinate of the first tangent point of the arc
 * @param {number} y1 - The y-coordinate of the first tangent point of the arc
 * @param {number} x2 - The x-coordinate of the end of the arc
 * @param {number} y2 - The y-coordinate of the end of the arc
 * @param {number} radius - The radius of the arc
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public arcTo(x1: number, y1: number, x2: number, y2: number, radius: number): this
{
    this._initCurve(x1, y1);

    const points = this.currentPath.points;

    // ArcUtils.curveTo returns null when the arc degenerates to a line;
    // in that case the points array has already been updated.
    const result = ArcUtils.curveTo(x1, y1, x2, y2, radius, points);

    if (result)
    {
        // Bind the result's radius under a different name so it does not
        // shadow the `radius` parameter of this method.
        const { cx, cy, radius: arcRadius, startAngle, endAngle, anticlockwise } = result;

        this.arc(cx, cy, arcRadius, startAngle, endAngle, anticlockwise);
    }

    return this;
}
/**
 * The arc method creates an arc/curve (used to create circles, or parts of circles).
 *
 * @param {number} cx - The x-coordinate of the center of the circle
 * @param {number} cy - The y-coordinate of the center of the circle
 * @param {number} radius - The radius of the circle
 * @param {number} startAngle - The starting angle, in radians (0 is at the 3 o'clock position
 *  of the arc's circle)
 * @param {number} endAngle - The ending angle, in radians
 * @param {boolean} [anticlockwise=false] - Specifies whether the drawing should be
 *  counter-clockwise or clockwise. False is default, and indicates clockwise, while true
 *  indicates counter-clockwise.
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public arc(cx: number, cy: number, radius: number, startAngle: number, endAngle: number, anticlockwise = false): this
{
    // A zero-length arc draws nothing.
    if (startAngle === endAngle)
    {
        return this;
    }
    // Normalize the angles so the sweep always runs in the requested direction.
    if (!anticlockwise && endAngle <= startAngle)
    {
        endAngle += PI_2;
    }
    else if (anticlockwise && startAngle <= endAngle)
    {
        startAngle += PI_2;
    }
    const sweep = endAngle - startAngle;
    if (sweep === 0)
    {
        return this;
    }
    // World-space coordinates of the arc's starting point on the circle.
    const startX = cx + (Math.cos(startAngle) * radius);
    const startY = cy + (Math.sin(startAngle) * radius);
    const eps = this._geometry.closePointEps;
    // If the currentPath exists, take its points. Otherwise call `moveTo` to start a path.
    let points = this.currentPath ? this.currentPath.points : null;
    if (points)
    {
        // TODO: make a better fix.
        // We check how far our start is from the last existing point
        const xDiff = Math.abs(points[points.length - 2] - startX);
        const yDiff = Math.abs(points[points.length - 1] - startY);
        if (xDiff < eps && yDiff < eps)
        {
            // If the point is very close, we don't add it, since this would lead to artifacts
            // during tessellation due to floating point imprecision.
        }
        else
        {
            points.push(startX, startY);
        }
    }
    else
    {
        this.moveTo(startX, startY);
        points = this.currentPath.points;
    }
    ArcUtils.arc(startX, startY, cx, cy, radius, startAngle, endAngle, anticlockwise, points);
    return this;
}
/**
 * Specifies a simple one-color fill that subsequent calls to other Graphics methods
 * (such as lineTo() or drawCircle()) use when drawing.
 *
 * @param {number} [color=0] - the color of the fill
 * @param {number} [alpha=1] - the alpha of the fill
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public beginFill(color = 0, alpha = 1): this
{
    // A solid-color fill is just a texture fill with a plain white texture
    // tinted by `color`.
    const options = { texture: Texture.WHITE, color, alpha };

    return this.beginTextureFill(options);
}
/**
 * Begin the texture fill
 *
 * @param {object} [options] - Object object.
 * @param {PIXI.Texture} [options.texture=PIXI.Texture.WHITE] - Texture to fill
 * @param {number} [options.color=0xffffff] - Background to fill behind texture
 * @param {number} [options.alpha=1] - Alpha of fill
 * @param {PIXI.Matrix} [options.matrix=null] - Transform matrix
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
beginTextureFill(options?: IFillStyleOptions): this
{
    // Apply defaults, letting caller-supplied values win.
    const style = {
        texture: Texture.WHITE,
        color: 0xFFFFFF,
        alpha: 1,
        matrix: null,
        ...options,
    } as IFillStyleOptions;

    // Commit any in-progress path before the fill style changes.
    if (this.currentPath)
    {
        this.startPoly();
    }

    const visible = style.alpha > 0;

    if (visible)
    {
        if (style.matrix)
        {
            // Store an inverted copy so the caller's matrix is never mutated.
            style.matrix = style.matrix.clone();
            style.matrix.invert();
        }

        Object.assign(this._fillStyle, { visible }, style);
    }
    else
    {
        // Fully transparent fill: treat as "no fill".
        this._fillStyle.reset();
    }

    return this;
}
/**
 * Applies a fill to the lines and shapes that were added since the last call to the beginFill() method.
 *
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public endFill(): this
{
    // Commit the pending polygon, then clear the active fill style.
    this.finishPoly();
    this._fillStyle.reset();
    return this;
}
/**
 * Draws a rectangle shape.
 *
 * @param {number} x - The X coord of the top-left of the rectangle
 * @param {number} y - The Y coord of the top-left of the rectangle
 * @param {number} width - The width of the rectangle
 * @param {number} height - The height of the rectangle
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public drawRect(x: number, y: number, width: number, height: number): this
{
    const rect = new Rectangle(x, y, width, height);

    return this.drawShape(rect);
}
/**
 * Draw a rectangle shape with rounded/beveled corners.
 *
 * @param {number} x - The X coord of the top-left of the rectangle
 * @param {number} y - The Y coord of the top-left of the rectangle
 * @param {number} width - The width of the rectangle
 * @param {number} height - The height of the rectangle
 * @param {number} radius - Radius of the rectangle corners
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public drawRoundedRect(x: number, y: number, width: number, height: number, radius: number): this
{
    const roundedRect = new RoundedRectangle(x, y, width, height, radius);

    return this.drawShape(roundedRect);
}
/**
 * Draws a circle.
 *
 * @param {number} x - The X coordinate of the center of the circle
 * @param {number} y - The Y coordinate of the center of the circle
 * @param {number} radius - The radius of the circle
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public drawCircle(x: number, y: number, radius: number): this
{
    const circle = new Circle(x, y, radius);

    return this.drawShape(circle);
}
/**
 * Draws an ellipse.
 *
 * @param {number} x - The X coordinate of the center of the ellipse
 * @param {number} y - The Y coordinate of the center of the ellipse
 * @param {number} width - The half width of the ellipse
 * @param {number} height - The half height of the ellipse
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public drawEllipse(x: number, y: number, width: number, height: number): this
{
    const ellipse = new Ellipse(x, y, width, height);

    return this.drawShape(ellipse);
}
public drawPolygon(...path: Array<number> | Array<Point>): this
public drawPolygon(path: Array<number> | Array<Point> | Polygon): this
/**
 * Draws a polygon using the given path.
 *
 * @param {number[]|PIXI.Point[]|PIXI.Polygon} path - The path data used to construct the polygon.
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public drawPolygon(...path: any[]): this
{
    let points: Array<number> | Array<Point>;
    let closeStroke = true;// !!this._fillStyle;
    const poly = path[0] as Polygon;
    // check if data has points..
    // (a Polygon-like first argument carries its own points and closeStroke flag)
    if (poly.points)
    {
        closeStroke = poly.closeStroke;
        points = poly.points;
    }
    else
    // a single array argument of numbers or Points
    if (Array.isArray(path[0]))
    {
        points = path[0];
    }
    else
    {
        // a flat argument list of numbers or Points
        points = path;
    }
    const shape = new Polygon(points);
    shape.closeStroke = closeStroke;
    this.drawShape(shape);
    return this;
}
/**
 * Draw any shape.
 *
 * @param {PIXI.Circle|PIXI.Ellipse|PIXI.Polygon|PIXI.Rectangle|PIXI.RoundedRectangle} shape - Shape to draw
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public drawShape(shape: IShape): this
{
    if (this._holeMode)
    {
        // In hole mode the shape is punched out of the previous shape.
        this._geometry.drawHole(shape, this._matrix);
    }
    else
    {
        // Snapshot the current styles so later style changes don't affect this shape.
        this._geometry.drawShape(
            shape,
            this._fillStyle.clone(),
            this._lineStyle.clone(),
            this._matrix
        );
    }

    return this;
}
/**
 * Clears the graphics that were drawn to this Graphics object, and resets fill and line style settings.
 *
 * @return {PIXI.Graphics} This Graphics object. Good for chaining method calls
 */
public clear(): this
{
    // Drop all recorded shapes and styles...
    this._geometry.clear();
    this._lineStyle.reset();
    this._fillStyle.reset();

    // ...and reset the drawing state back to its initial values.
    this._boundsID++;
    this._matrix = null;
    this._holeMode = false;
    this.currentPath = null;

    return this;
}
/**
 * True if graphics consists of one rectangle, and thus, can be drawn like a Sprite and
 * masked with gl.scissor.
 *
 * @returns {boolean} True if only 1 rect.
 */
public isFastRect(): boolean
{
    const data = this._geometry.graphicsData;

    if (data.length !== 1)
    {
        return false;
    }

    const first = data[0];

    // A single untransformed rectangle, with no holes and no visible stroke.
    return first.shape.type === SHAPES.RECT
        && !first.matrix
        && !first.holes.length
        && !(first.lineStyle.visible && first.lineStyle.width);
}
/**
 * Renders the object using the WebGL renderer
 *
 * @param {PIXI.Renderer} renderer - The renderer
 */
protected _render(renderer: Renderer): void
{
    this.finishPoly();
    const geometry = this._geometry;
    const hasuint32 = renderer.context.supports.uint32Indices;
    // batch part..
    // batch it!
    geometry.updateBatches(hasuint32);
    if (geometry.batchable)
    {
        // Refresh our cached batch objects if the geometry's batches changed.
        if (this.batchDirty !== geometry.batchDirty)
        {
            this._populateBatches();
        }
        this._renderBatched(renderer);
    }
    else
    {
        // no batching...
        // Flush any pending batched work, then draw this geometry directly.
        renderer.batch.flush();
        this._renderDirect(renderer);
    }
}
/** Populating batches for rendering. */
protected _populateBatches(): void
{
    const geometry = this._geometry;
    const blendMode = this.blendMode;
    const len = geometry.batches.length;
    this.batchTint = -1; // force calculateTints() to recompute on next render
    this._transformID = -1; // force calculateVertices() to recompute on next render
    this.batchDirty = geometry.batchDirty;
    this.batches.length = len;
    this.vertexData = new Float32Array(geometry.points);
    for (let i = 0; i < len; i++)
    {
        const gI = geometry.batches[i];
        const color = gI.style.color;
        // Views into the shared buffers. Offsets are in bytes: 4 bytes per
        // float32, and each vertex appears to use 2 floats — TODO confirm.
        const vertexData = new Float32Array(this.vertexData.buffer,
            gI.attribStart * 4 * 2,
            gI.attribSize * 2);
        const uvs = new Float32Array(geometry.uvsFloat32.buffer,
            gI.attribStart * 4 * 2,
            gI.attribSize * 2);
        // 2 bytes per uint16 index.
        const indices = new Uint16Array(geometry.indicesUint16.buffer,
            gI.start * 2,
            gI.size);
        const batch = {
            vertexData,
            blendMode,
            indices,
            uvs,
            _batchRGB: hex2rgb(color) as Array<number>,
            _tintRGB: color,
            _texture: gI.style.texture,
            alpha: gI.style.alpha,
            worldAlpha: 1 };
        this.batches[i] = batch;
    }
}
/**
 * Renders the batches using the BathedRenderer plugin
 *
 * @param {PIXI.Renderer} renderer - The renderer
 */
protected _renderBatched(renderer: Renderer): void
{
    // Nothing to draw.
    if (!this.batches.length)
    {
        return;
    }

    renderer.batch.setObjectRenderer(renderer.plugins[this.pluginName]);

    this.calculateVertices();
    this.calculateTints();

    for (const batch of this.batches)
    {
        batch.worldAlpha = this.worldAlpha * batch.alpha;

        renderer.plugins[this.pluginName].render(batch);
    }
}
/**
 * Renders the graphics direct
 *
 * @param {PIXI.Renderer} renderer - The renderer
 */
protected _renderDirect(renderer: Renderer): void
{
    const shader = this._resolveDirectShader(renderer);
    const geometry = this._geometry;
    const tint = this.tint;
    const worldAlpha = this.worldAlpha;
    const uniforms = shader.uniforms;
    const drawCalls = geometry.drawCalls;
    // lets set the transform
    uniforms.translationMatrix = this.transform.worldTransform;
    // and then lets set the tint as normalized RGBA scaled by worldAlpha..
    uniforms.tint[0] = (((tint >> 16) & 0xFF) / 255) * worldAlpha;
    uniforms.tint[1] = (((tint >> 8) & 0xFF) / 255) * worldAlpha;
    uniforms.tint[2] = ((tint & 0xFF) / 255) * worldAlpha;
    uniforms.tint[3] = worldAlpha;
    // the first draw call, we can set the uniforms of the shader directly here.
    // this means that we can take advantage of the sync function of pixi!
    // bind and sync uniforms..
    // there is a way to optimise this..
    renderer.shader.bind(shader);
    renderer.geometry.bind(geometry, shader);
    // set state..
    renderer.state.set(this.state);
    // then render the rest of them...
    for (let i = 0, l = drawCalls.length; i < l; i++)
    {
        this._renderDrawCallDirect(renderer, geometry.drawCalls[i]);
    }
}
/**
 * Renders specific DrawCall
 *
 * @param {PIXI.Renderer} renderer
 * @param {PIXI.BatchDrawCall} drawCall
 */
protected _renderDrawCallDirect(renderer: Renderer, drawCall: BatchDrawCall): void
{
    const { texArray, type, size, start } = drawCall;

    // Bind every texture of the group to its sampler slot before drawing.
    for (let slot = 0; slot < texArray.count; slot++)
    {
        renderer.texture.bind(texArray.elements[slot], slot);
    }

    renderer.geometry.draw(type, size, start);
}
/**
 * Resolves shader for direct rendering
 *
 * @param {PIXI.Renderer} renderer - The renderer
 */
protected _resolveDirectShader(renderer: Renderer): Shader
{
    let shader = this.shader;
    const pluginName = this.pluginName;
    if (!shader)
    {
        // if there is no shader here, we can use the default shader.
        // and that only gets created if we actually need it..
        // but may be more than one plugins for graphics
        if (!DEFAULT_SHADERS[pluginName])
        {
            // Build the sampler index array [0, 1, ..., MAX_TEXTURES - 1].
            const MAX_TEXTURES = renderer.plugins.batch.MAX_TEXTURES;
            const sampleValues = new Int32Array(MAX_TEXTURES);
            for (let i = 0; i < MAX_TEXTURES; i++)
            {
                sampleValues[i] = i;
            }
            const uniforms = {
                tint: new Float32Array([1, 1, 1, 1]),
                translationMatrix: new Matrix(),
                default: UniformGroup.from({ uSamplers: sampleValues }, true),
            };
            // Reuse the plugin's program with our own uniforms, and cache the
            // resulting shader per plugin name.
            const program = renderer.plugins[pluginName]._shader.program;
            DEFAULT_SHADERS[pluginName] = new Shader(program, uniforms);
        }
        shader = DEFAULT_SHADERS[pluginName];
    }
    return shader;
}
/** Retrieves the bounds of the graphic shape as a rectangle object. */
protected _calculateBounds(): void
{
    // Make sure any in-progress polygon is committed to the geometry first.
    this.finishPoly();
    const geometry = this._geometry;
    // skipping when graphics is empty, like a container
    if (!geometry.graphicsData.length)
    {
        return;
    }
    const { minX, minY, maxX, maxY } = geometry.bounds;
    this._bounds.addFrame(this.transform, minX, minY, maxX, maxY);
}
/**
 * Tests if a point is inside this graphics object
 *
 * @param {PIXI.IPointData} point - the point to test
 * @return {boolean} the result of the test
 */
public containsPoint(point: IPointData): boolean
{
    // Map the world-space point into local space (reusing a shared temp point),
    // then test it against the geometry.
    this.worldTransform.applyInverse(point, Graphics._TEMP_POINT);
    return this._geometry.containsPoint(Graphics._TEMP_POINT);
}
/** Recalculate the tint by applying tint to batches using Graphics tint. */
protected calculateTints(): void
{
    // Only recompute when the tint changed since the last pass.
    if (this.batchTint !== this.tint)
    {
        this.batchTint = this.tint;
        const tintRGB = hex2rgb(this.tint, temp);
        for (let i = 0; i < this.batches.length; i++)
        {
            const batch = this.batches[i];
            const batchTint = batch._batchRGB;
            // Combine the graphics tint with the batch's own colour, per channel.
            const r = (tintRGB[0] * batchTint[0]) * 255;
            const g = (tintRGB[1] * batchTint[1]) * 255;
            const b = (tintRGB[2] * batchTint[2]) * 255;
            // TODO Ivan, can this be done in one go?
            const color = (r << 16) + (g << 8) + (b | 0);
            // Swap the R and B channels (0xRRGGBB -> 0xBBGGRR).
            batch._tintRGB = (color >> 16)
                + (color & 0xff00)
                + ((color & 0xff) << 16);
        }
    }
}
/**
 * If there's a transform update or a change to the shape of the
 * geometry, recalculate the vertices.
 */
protected calculateVertices(): void
{
    const wtID = this.transform._worldID;

    // Vertices are already up to date for this world transform.
    if (this._transformID === wtID)
    {
        return;
    }

    this._transformID = wtID;

    const { a, b, c, d, tx, ty } = this.transform.worldTransform;
    const data = this._geometry.points;// batch.vertexDataOriginal;
    const vertexData = this.vertexData;

    let count = 0;

    // Transform each (x, y) pair from local space into world space.
    for (let i = 0; i < data.length; i += 2)
    {
        const x = data[i];
        const y = data[i + 1];

        vertexData[count++] = (a * x) + (c * y) + tx;
        vertexData[count++] = (d * y) + (b * x) + ty;
    }
}
/**
 * Closes the current path.
 *
 * @return {PIXI.Graphics} Returns itself.
 */
public closePath(): this
{
    const currentPath = this.currentPath;
    if (currentPath)
    {
        // we don't need to add extra point in the end because buildLine will take care of that
        currentPath.closeStroke = true;
        // ensure that the polygon is completed, and is available for hit detection
        // (even if the graphics is not rendered yet)
        this.finishPoly();
    }
    return this;
}
/**
 * Apply a matrix to the positional data.
 *
 * @param {PIXI.Matrix} matrix - Matrix to use for transform current shape.
 * @return {PIXI.Graphics} Returns itself.
 */
public setMatrix(matrix: Matrix): this
{
    // Stored matrix is handed to the geometry by drawShape for each
    // subsequently drawn shape.
    this._matrix = matrix;
    return this;
}
/**
 * Begin adding holes to the last draw shape
 * IMPORTANT: holes must be fully inside a shape to work
 * Also weirdness ensues if holes overlap!
 * Ellipses, Circles, Rectangles and Rounded Rectangles cannot be holes or host for holes in CanvasRenderer,
 * please use `moveTo` `lineTo`, `quadraticCurveTo` if you rely on pixi-legacy bundle.
 * @return {PIXI.Graphics} Returns itself.
 */
public beginHole(): this
{
    // Commit the pending polygon; shapes drawn from here on are holes.
    this.finishPoly();
    this._holeMode = true;
    return this;
}
/**
 * End adding holes to the last draw shape
 * @return {PIXI.Graphics} Returns itself.
 */
public endHole(): this
{
    // Commit the pending hole polygon and return to normal drawing.
    this.finishPoly();
    this._holeMode = false;
    return this;
}
/**
 * Destroys the Graphics object.
 *
 * @param {object|boolean} [options] - Options parameter. A boolean will act as if all
 *  options have been set to that value
 * @param {boolean} [options.children=false] - if set to true, all the children will have
 *  their destroy method called as well. 'options' will be passed on to those calls.
 * @param {boolean} [options.texture=false] - Only used for child Sprites if options.children is set to true
 *  Should it destroy the texture of the child sprite
 * @param {boolean} [options.baseTexture=false] - Only used for child Sprites if options.children is set to true
 *  Should it destroy the base texture of the child sprite
 */
public destroy(options?: IDestroyOptions|boolean): void
{
    // Geometry is reference-counted (it may be shared between Graphics
    // instances); only dispose it when the last reference is released.
    this._geometry.refCount--;
    if (this._geometry.refCount === 0)
    {
        this._geometry.dispose();
    }
    // Release everything we hold so the object can be garbage collected.
    this._matrix = null;
    this.currentPath = null;
    this._lineStyle.destroy();
    this._lineStyle = null;
    this._fillStyle.destroy();
    this._fillStyle = null;
    this._geometry = null;
    this.shader = null;
    this.vertexData = null;
    this.batches.length = 0;
    this.batches = null;
    super.destroy(options);
}
}
import { distinct } from 'vs/base/common/arrays';
import { IStringDictionary } from 'vs/base/common/collections';
import { Emitter, Event } from 'vs/base/common/event';
import { IJSONSchema } from 'vs/base/common/jsonSchema';
import * as types from 'vs/base/common/types';
import * as nls from 'vs/nls';
import { Extensions as JSONExtensions, IJSONContributionRegistry } from 'vs/platform/jsonschemas/common/jsonContributionRegistry';
import { Registry } from 'vs/platform/registry/common/platform';
/** Presentation formats available for string settings. */
export enum EditPresentationTypes {
    Multiline = 'multilineText',
    Singleline = 'singlelineText'
}
/** Identifiers of the registry extensions contributed by this module. */
export const Extensions = {
    Configuration: 'base.contributions.configuration'
};
/** Registry for contributing and querying configuration (settings) schemas. */
export interface IConfigurationRegistry {
    /**
     * Register a configuration to the registry.
     */
    registerConfiguration(configuration: IConfigurationNode): void;
    /**
     * Register multiple configurations to the registry.
     */
    registerConfigurations(configurations: IConfigurationNode[], validate?: boolean): void;
    /**
     * Deregister multiple configurations from the registry.
     */
    deregisterConfigurations(configurations: IConfigurationNode[]): void;
    /**
     * Update the configuration registry by
     * - registering the configurations to add
     * - deregistering the configurations to remove
     */
    updateConfigurations(configurations: { add: IConfigurationNode[], remove: IConfigurationNode[] }): void;
    /**
     * Register multiple default configurations to the registry.
     */
    registerDefaultConfigurations(defaultConfigurations: IStringDictionary<any>[]): void;
    /**
     * Deregister multiple default configurations from the registry.
     */
    deregisterDefaultConfigurations(defaultConfigurations: IStringDictionary<any>[]): void;
    /**
     * Signal that the schema of a configuration setting has changed. It is currently only supported to change enumeration values.
     * Property or default value changes are not allowed.
     */
    notifyConfigurationSchemaUpdated(...configurations: IConfigurationNode[]): void;
    /**
     * Event that fires whenever the configuration schema changes.
     */
    onDidSchemaChange: Event<void>;
    /**
     * Event that fires with the affected setting keys whenever configurations
     * are registered, deregistered or updated.
     */
    onDidUpdateConfiguration: Event<string[]>;
    /**
     * Returns all configuration nodes contributed to this registry.
     */
    getConfigurations(): IConfigurationNode[];
    /**
     * Returns all configurations settings of all configuration nodes contributed to this registry.
     */
    getConfigurationProperties(): { [qualifiedKey: string]: IConfigurationPropertySchema };
    /**
     * Returns all excluded configurations settings of all configuration nodes contributed to this registry.
     */
    getExcludedConfigurationProperties(): { [qualifiedKey: string]: IConfigurationPropertySchema };
    /**
     * Register the identifiers for editor configurations
     */
    registerOverrideIdentifiers(identifiers: string[]): void;
}
/** The scope of a setting, controlling where it may be configured. */
export const enum ConfigurationScope {
    /**
     * Application specific configuration, which can be configured only in local user settings.
     */
    APPLICATION = 1,
    /**
     * Machine specific configuration, which can be configured only in local and remote user settings.
     */
    MACHINE,
    /**
     * Window specific configuration, which can be configured in the user or workspace settings.
     */
    WINDOW,
    /**
     * Resource specific configuration, which can be configured in the user, workspace or folder settings.
     */
    RESOURCE,
    /**
     * Resource specific configuration that can be configured in language specific settings
     */
    LANGUAGE_OVERRIDABLE,
    /**
     * Machine specific configuration that can also be configured in workspace or folder settings.
     */
    MACHINE_OVERRIDABLE,
}
/**
 * JSON schema of a single setting, extended with registry-specific metadata
 * (scope, trust/sync behaviour, presentation hints).
 */
export interface IConfigurationPropertySchema extends IJSONSchema {
    /** Where the setting may be configured; defaults to the containing node's scope. */
    scope?: ConfigurationScope;
    /**
     * When restricted, value of this configuration will be read only from trusted sources.
     * For eg., If the workspace is not trusted, then the value of this configuration is not read from workspace settings file.
     */
    restricted?: boolean;
    /** When explicitly false, the property is tracked as excluded instead of being registered. */
    included?: boolean;
    /** Free-form tags — presumably used for settings search/filtering; not read in this file. */
    tags?: string[];
    /**
     * When enabled this setting is ignored during sync and user can override this.
     */
    ignoreSync?: boolean;
    /**
     * When enabled this setting is ignored during sync and user cannot override this.
     */
    disallowSyncIgnore?: boolean;
    /** Display labels for `enum` values — presumably shown in the settings UI; not read in this file. */
    enumItemLabels?: string[];
    /**
     * When specified, controls the presentation format of string settings.
     * Otherwise, the presentation format defaults to `singleline`.
     */
    editPresentation?: EditPresentationTypes;
}
/** Information about the extension contributing a configuration node. */
export interface IConfigurationExtensionInfo {
    id: string;
    /** Keys of contributed settings whose values should only be read from trusted sources. */
    restrictedConfigurations?: string[];
}
/** A contributed configuration node: a group of settings plus optional sub-nodes. */
export interface IConfigurationNode {
    id?: string;
    order?: number;
    type?: string | string[];
    title?: string;
    description?: string;
    /** The settings contributed by this node, keyed by qualified setting key. */
    properties?: { [path: string]: IConfigurationPropertySchema; };
    /** Sub-nodes whose properties are registered recursively. */
    allOf?: IConfigurationNode[];
    /** Default scope applied to properties that do not declare their own. */
    scope?: ConfigurationScope;
    extensionInfo?: IConfigurationExtensionInfo;
}
// Aggregated JSON-schema fragments of the registered settings. `allSettings`
// contains every setting; the others contain only settings of the matching
// ConfigurationScope (kept in sync by the registry's updateSchema/removeFromSchema).
export const allSettings: { properties: IStringDictionary<IConfigurationPropertySchema>, patternProperties: IStringDictionary<IConfigurationPropertySchema> } = { properties: {}, patternProperties: {} };
export const applicationSettings: { properties: IStringDictionary<IConfigurationPropertySchema>, patternProperties: IStringDictionary<IConfigurationPropertySchema> } = { properties: {}, patternProperties: {} };
export const machineSettings: { properties: IStringDictionary<IConfigurationPropertySchema>, patternProperties: IStringDictionary<IConfigurationPropertySchema> } = { properties: {}, patternProperties: {} };
export const machineOverridableSettings: { properties: IStringDictionary<IConfigurationPropertySchema>, patternProperties: IStringDictionary<IConfigurationPropertySchema> } = { properties: {}, patternProperties: {} };
export const windowSettings: { properties: IStringDictionary<IConfigurationPropertySchema>, patternProperties: IStringDictionary<IConfigurationPropertySchema> } = { properties: {}, patternProperties: {} };
export const resourceSettings: { properties: IStringDictionary<IConfigurationPropertySchema>, patternProperties: IStringDictionary<IConfigurationPropertySchema> } = { properties: {}, patternProperties: {} };
// Schema id under which the language-overridable resource settings are published.
export const resourceLanguageSettingsSchemaId = 'vscode://schemas/settings/resourceLanguage';
const contributionRegistry = Registry.as<IJSONContributionRegistry>(JSONExtensions.JSONContribution);
class ConfigurationRegistry implements IConfigurationRegistry {
// Default values per setting key, as contributed via registerDefaultConfigurations.
private readonly defaultValues: IStringDictionary<any>;
// Synthetic node collecting '[language]' default overrides.
private readonly defaultLanguageConfigurationOverridesNode: IConfigurationNode;
private readonly configurationContributors: IConfigurationNode[];
// Typed as IConfigurationPropertySchema (not the wider IJSONSchema) so the
// public getters can expose these maps without losing the extra metadata
// (scope, restricted, ...) that registration writes into each entry.
private readonly configurationProperties: { [qualifiedKey: string]: IConfigurationPropertySchema };
private readonly excludedConfigurationProperties: { [qualifiedKey: string]: IConfigurationPropertySchema };
private readonly resourceLanguageSettingsSchema: IJSONSchema;
private readonly overrideIdentifiers = new Set<string>();
private readonly _onDidSchemaChange = new Emitter<void>();
readonly onDidSchemaChange: Event<void> = this._onDidSchemaChange.event;
private readonly _onDidUpdateConfiguration: Emitter<string[]> = new Emitter<string[]>();
readonly onDidUpdateConfiguration: Event<string[]> = this._onDidUpdateConfiguration.event;
constructor() {
    this.defaultValues = {};
    this.defaultLanguageConfigurationOverridesNode = {
        id: 'defaultOverrides',
        title: nls.localize('defaultLanguageConfigurationOverrides.title', "Default Language Configuration Overrides"),
        properties: {}
    };
    this.configurationContributors = [this.defaultLanguageConfigurationOverridesNode];
    this.resourceLanguageSettingsSchema = { properties: {}, patternProperties: {}, additionalProperties: false, errorMessage: 'Unknown editor configuration setting', allowTrailingCommas: true, allowComments: true };
    this.configurationProperties = {};
    this.excludedConfigurationProperties = {};
    // Publish the (initially empty) language-override schema.
    contributionRegistry.registerSchema(resourceLanguageSettingsSchemaId, this.resourceLanguageSettingsSchema);
}
/**
 * Registers a single configuration node.
 * @param validate whether invalid property keys should be validated and dropped.
 */
public registerConfiguration(configuration: IConfigurationNode, validate: boolean = true): void {
    this.registerConfigurations([configuration], validate);
}
/** Registers the given nodes, republishes the language-override schema and notifies listeners. */
public registerConfigurations(configurations: IConfigurationNode[], validate: boolean = true): void {
    const properties = this.doRegisterConfigurations(configurations, validate);
    contributionRegistry.registerSchema(resourceLanguageSettingsSchemaId, this.resourceLanguageSettingsSchema);
    this._onDidSchemaChange.fire();
    this._onDidUpdateConfiguration.fire(properties);
}
/** Deregisters the given nodes, republishes the language-override schema and notifies listeners. */
public deregisterConfigurations(configurations: IConfigurationNode[]): void {
    const properties = this.doDeregisterConfigurations(configurations);
    contributionRegistry.registerSchema(resourceLanguageSettingsSchemaId, this.resourceLanguageSettingsSchema);
    this._onDidSchemaChange.fire();
    this._onDidUpdateConfiguration.fire(properties);
}
public updateConfigurations({ add, remove }: { add: IConfigurationNode[], remove: IConfigurationNode[] }): void {
const properties = [];
properties.push(...this.doDeregisterConfigurations(remove));
properties.push(...this.doRegisterConfigurations(add, false));
contributionRegistry.registerSchema(resourceLanguageSettingsSchemaId, this.resourceLanguageSettingsSchema);
this._onDidSchemaChange.fire();
this._onDidUpdateConfiguration.fire(distinct(properties));
}
/** Registers default values for settings and language-override keys, then notifies listeners. */
public registerDefaultConfigurations(defaultConfigurations: IStringDictionary<any>[]): void {
    const properties: string[] = [];
    const overrideIdentifiers: string[] = [];
    for (const defaultConfiguration of defaultConfigurations) {
        for (const key in defaultConfiguration) {
            properties.push(key);
            if (OVERRIDE_PROPERTY_PATTERN.test(key)) {
                // A language-override key like '[markdown]': merge its value into
                // any previously registered defaults for the same identifier.
                this.defaultValues[key] = { ...(this.defaultValues[key] || {}), ...defaultConfiguration[key] };
                const property: IConfigurationPropertySchema = {
                    type: 'object',
                    default: this.defaultValues[key],
                    description: nls.localize('defaultLanguageConfiguration.description', "Configure settings to be overridden for {0} language.", key),
                    $ref: resourceLanguageSettingsSchemaId
                };
                overrideIdentifiers.push(overrideIdentifierFromKey(key));
                this.configurationProperties[key] = property;
                this.defaultLanguageConfigurationOverridesNode.properties![key] = property;
            } else {
                // A plain setting: the new value replaces the previous default; refresh
                // the registered schema (if the setting is already registered).
                this.defaultValues[key] = defaultConfiguration[key];
                const property = this.configurationProperties[key];
                if (property) {
                    this.updatePropertyDefaultValue(key, property);
                    this.updateSchema(key, property);
                }
            }
        }
    }
    this.registerOverrideIdentifiers(overrideIdentifiers);
    this._onDidSchemaChange.fire();
    this._onDidUpdateConfiguration.fire(properties);
}
/** Removes previously registered default values and notifies listeners. */
public deregisterDefaultConfigurations(defaultConfigurations: IStringDictionary<any>[]): void {
    const properties: string[] = [];
    for (const defaultConfiguration of defaultConfigurations) {
        for (const key in defaultConfiguration) {
            properties.push(key);
            delete this.defaultValues[key];
            if (OVERRIDE_PROPERTY_PATTERN.test(key)) {
                // Language-override keys are fully owned by the defaults mechanism,
                // so drop the whole property.
                delete this.configurationProperties[key];
                delete this.defaultLanguageConfigurationOverridesNode.properties![key];
            } else {
                // Plain settings stay registered; only their default value reverts.
                const property = this.configurationProperties[key];
                if (property) {
                    this.updatePropertyDefaultValue(key, property);
                    this.updateSchema(key, property);
                }
            }
        }
    }
    this.updateOverridePropertyPatternKey();
    this._onDidSchemaChange.fire();
    this._onDidUpdateConfiguration.fire(properties);
}
// NOTE(review): the `configurations` argument is currently unused — the schema
// change event is fired unconditionally.
public notifyConfigurationSchemaUpdated(...configurations: IConfigurationNode[]) {
    this._onDidSchemaChange.fire();
}
/** Records the given language identifiers and refreshes the override-key pattern. */
public registerOverrideIdentifiers(overrideIdentifiers: string[]): void {
    overrideIdentifiers.forEach(identifier => this.overrideIdentifiers.add(identifier));
    this.updateOverridePropertyPatternKey();
}
/** Registers each node and returns the keys of all registered properties. */
private doRegisterConfigurations(configurations: IConfigurationNode[], validate: boolean): string[] {
    const properties: string[] = [];
    for (const configuration of configurations) {
        // fills in defaults
        properties.push(...this.validateAndRegisterProperties(configuration, validate, configuration.extensionInfo));
        this.configurationContributors.push(configuration);
        this.registerJSONConfiguration(configuration);
    }
    return properties;
}
/** Removes each node's properties (recursively through `allOf`) and returns their keys. */
private doDeregisterConfigurations(configurations: IConfigurationNode[]): string[] {
    const properties: string[] = [];
    const deregisterConfiguration = (configuration: IConfigurationNode) => {
        if (configuration.properties) {
            for (const key in configuration.properties) {
                properties.push(key);
                delete this.configurationProperties[key];
                this.removeFromSchema(key, configuration.properties[key]);
            }
        }
        if (configuration.allOf) {
            configuration.allOf.forEach(node => deregisterConfiguration(node));
        }
    };
    for (const configuration of configurations) {
        deregisterConfiguration(configuration);
        // Also drop the node itself from the contributors list, if present.
        const index = this.configurationContributors.indexOf(configuration);
        if (index !== -1) {
            this.configurationContributors.splice(index, 1);
        }
    }
    return properties;
}
/**
 * Validates a node's properties (and those of its `allOf` sub-nodes), fills in
 * default value/scope/restricted metadata, and records them in the property maps.
 * @returns the keys of all registered properties (excluded ones are omitted).
 */
private validateAndRegisterProperties(configuration: IConfigurationNode, validate: boolean = true, extensionInfo?: IConfigurationExtensionInfo, scope: ConfigurationScope = ConfigurationScope.WINDOW): string[] {
    // A node without an explicit scope inherits the scope passed down from its parent.
    scope = types.isUndefinedOrNull(configuration.scope) ? scope : configuration.scope;
    let propertyKeys: string[] = [];
    let properties = configuration.properties;
    if (properties) {
        for (let key in properties) {
            // NOTE(review): validateProperty is defined elsewhere in this file; a
            // truthy result presumably flags an invalid key, which is then dropped.
            if (validate && validateProperty(key)) {
                delete properties[key];
                continue;
            }
            const property = properties[key];
            // update default value
            this.updatePropertyDefaultValue(key, property);
            // update scope
            if (OVERRIDE_PROPERTY_PATTERN.test(key)) {
                property.scope = undefined; // No scope for overridable properties `[${identifier}]`
            } else {
                property.scope = types.isUndefinedOrNull(property.scope) ? scope : property.scope;
                property.restricted = types.isUndefinedOrNull(property.restricted) ? !!extensionInfo?.restrictedConfigurations?.includes(key) : property.restricted;
            }
            // Add to properties maps
            // Property is included by default if 'included' is unspecified
            if (properties[key].hasOwnProperty('included') && !properties[key].included) {
                this.excludedConfigurationProperties[key] = properties[key];
                delete properties[key];
                continue;
            } else {
                this.configurationProperties[key] = properties[key];
            }
            if (!properties[key].deprecationMessage && properties[key].markdownDeprecationMessage) {
                // If not set, default deprecationMessage to the markdown source
                properties[key].deprecationMessage = properties[key].markdownDeprecationMessage;
            }
            propertyKeys.push(key);
        }
    }
    let subNodes = configuration.allOf;
    if (subNodes) {
        for (let node of subNodes) {
            propertyKeys.push(...this.validateAndRegisterProperties(node, validate, extensionInfo, scope));
        }
    }
    return propertyKeys;
}
/** Returns all configuration nodes contributed to this registry. */
getConfigurations(): IConfigurationNode[] {
    return this.configurationContributors;
}
/** Returns all registered settings, keyed by qualified setting key. */
getConfigurationProperties(): { [qualifiedKey: string]: IConfigurationPropertySchema } {
    return this.configurationProperties;
}
/** Returns the settings that were contributed with `included: false`. */
getExcludedConfigurationProperties(): { [qualifiedKey: string]: IConfigurationPropertySchema } {
    return this.excludedConfigurationProperties;
}
/** Adds a node's settings (recursively through `allOf`) to the JSON schemas. */
private registerJSONConfiguration(configuration: IConfigurationNode) {
    const register = (node: IConfigurationNode): void => {
        const { properties, allOf } = node;
        if (properties) {
            for (const key of Object.keys(properties)) {
                this.updateSchema(key, properties[key]);
            }
        }
        allOf?.forEach(register);
    };
    register(configuration);
}
 /**
  * Adds `property` to the aggregate `allSettings` schema and to the
  * scope-specific settings schema matching `property.scope`.
  * Properties with no (or an unknown) scope only appear in `allSettings`.
  */
 private updateSchema(key: string, property: IConfigurationPropertySchema): void {
  allSettings.properties[key] = property;
  switch (property.scope) {
   case ConfigurationScope.APPLICATION:
    applicationSettings.properties[key] = property;
    break;
   case ConfigurationScope.MACHINE:
    machineSettings.properties[key] = property;
    break;
   case ConfigurationScope.MACHINE_OVERRIDABLE:
    machineOverridableSettings.properties[key] = property;
    break;
   case ConfigurationScope.WINDOW:
    windowSettings.properties[key] = property;
    break;
   case ConfigurationScope.RESOURCE:
    resourceSettings.properties[key] = property;
    break;
   case ConfigurationScope.LANGUAGE_OVERRIDABLE:
    // Language-overridable settings are resource-scoped AND additionally
    // allowed inside `[language]` override blocks, so they go into both schemas.
    resourceSettings.properties[key] = property;
    this.resourceLanguageSettingsSchema.properties![key] = property;
    break;
  }
 }
private removeFromSchema(key: string, property: IConfigurationPropertySchema): void {
delete allSettings.properties[key];
switch (property.scope) {
case ConfigurationScope.APPLICATION:
delete applicationSettings.properties[key];
break;
case ConfigurationScope.MACHINE:
delete machineSettings.properties[key];
break;
case ConfigurationScope.MACHINE_OVERRIDABLE:
delete machineOverridableSettings.properties[key];
break;
case ConfigurationScope.WINDOW:
delete windowSettings.properties[key];
break;
case ConfigurationScope.RESOURCE:
case ConfigurationScope.LANGUAGE_OVERRIDABLE:
delete resourceSettings.properties[key];
break;
}
}
 /**
  * (Re)creates a `[languageId]` override entry in every settings schema for
  * each known override identifier, then notifies schema listeners.
  */
 private updateOverridePropertyPatternKey(): void {
  for (const overrideIdentifier of this.overrideIdentifiers.values()) {
   const overrideIdentifierProperty = `[${overrideIdentifier}]`;
   // Each override block is an object whose allowed members are defined by
   // the shared resource-language schema (referenced via $ref).
   const resourceLanguagePropertiesSchema: IJSONSchema = {
    type: 'object',
    description: nls.localize('overrideSettings.defaultDescription', "Configure editor settings to be overridden for a language."),
    errorMessage: nls.localize('overrideSettings.errorMessage', "This setting does not support per-language configuration."),
    $ref: resourceLanguageSettingsSchemaId,
   };
   this.updatePropertyDefaultValue(overrideIdentifierProperty, resourceLanguagePropertiesSchema);
   // Override blocks are valid in every scope's schema.
   allSettings.properties[overrideIdentifierProperty] = resourceLanguagePropertiesSchema;
   applicationSettings.properties[overrideIdentifierProperty] = resourceLanguagePropertiesSchema;
   machineSettings.properties[overrideIdentifierProperty] = resourceLanguagePropertiesSchema;
   machineOverridableSettings.properties[overrideIdentifierProperty] = resourceLanguagePropertiesSchema;
   windowSettings.properties[overrideIdentifierProperty] = resourceLanguagePropertiesSchema;
   resourceSettings.properties[overrideIdentifierProperty] = resourceLanguagePropertiesSchema;
  }
  this._onDidSchemaChange.fire();
 }
 /**
  * Ensures `property.default` is set. Precedence: a default registered in
  * `this.defaultValues` for this key, then the property's own declared
  * default, then the zero-value for the property's declared type.
  */
 private updatePropertyDefaultValue(key: string, property: IConfigurationPropertySchema): void {
  let defaultValue = this.defaultValues[key];
  if (types.isUndefined(defaultValue)) {
   defaultValue = property.default;
  }
  if (types.isUndefined(defaultValue)) {
   // Fall back to the type-based default (false / 0 / '' / [] / {} / null).
   defaultValue = getDefaultValue(property.type);
  }
  property.default = defaultValue;
 }
}
// Matches keys that END with a `[...]` segment, e.g. `[typescript]`.
// NOTE(review): the pattern is not anchored with '^', so any key merely
// ending in bracketed text matches — presumably intentional; TODO confirm.
const OVERRIDE_PROPERTY = '\\[.*\\]$';
export const OVERRIDE_PROPERTY_PATTERN = new RegExp(OVERRIDE_PROPERTY);
/**
 * Extracts the language identifier from an override key by stripping the
 * surrounding brackets, e.g. `[typescript]` -> `typescript`.
 */
export function overrideIdentifierFromKey(key: string): string {
 const start = 1;
 const end = key.length - 1;
 return key.substring(start, end);
}
/**
 * Returns the zero-value default for a JSON-schema type. When a union of
 * types is given, the first entry decides. Unknown or missing types yield
 * `null`. Arrays/objects are freshly created on every call.
 */
export function getDefaultValue(type: string | string[] | undefined): any {
 const primary = Array.isArray(type) ? type[0] : type;
 if (primary === 'boolean') {
  return false;
 }
 if (primary === 'integer' || primary === 'number') {
  return 0;
 }
 if (primary === 'string') {
  return '';
 }
 if (primary === 'array') {
  return [];
 }
 if (primary === 'object') {
  return {};
 }
 return null;
}
// Module-level singleton registry, exposed to the rest of the workbench
// through the platform Registry under the Configuration extension point.
const configurationRegistry = new ConfigurationRegistry();
Registry.add(Extensions.Configuration, configurationRegistry);
/**
 * Validates a configuration property key before registration.
 * Returns a localized error message when the key is empty, matches the
 * language-override pattern, or is already registered — `null` when valid.
 */
export function validateProperty(property: string): string | null {
 const trimmed = property.trim();
 if (trimmed.length === 0) {
  return nls.localize('config.property.empty', "Cannot register an empty property");
 }
 if (OVERRIDE_PROPERTY_PATTERN.test(property)) {
  return nls.localize('config.property.languageDefault', "Cannot register '{0}'. This matches property pattern '\\\\[.*\\\\]$' for describing language specific editor settings. Use 'configurationDefaults' contribution.", property);
 }
 const registered = configurationRegistry.getConfigurationProperties();
 if (registered[property] !== undefined) {
  return nls.localize('config.property.duplicate', "Cannot register '{0}'. This property is already registered.", property);
 }
 return null;
}
export function getScopes(): [string, ConfigurationScope | undefined][] {
const scopes: [string, ConfigurationScope | undefined][] = [];
const configurationProperties = configurationRegistry.getConfigurationProperties();
for (const key of Object.keys(configurationProperties)) {
scopes.push([key, configurationProperties[key].scope]);
}
scopes.push(['launch', ConfigurationScope.RESOURCE]);
scopes.push(['task', ConfigurationScope.RESOURCE]);
return scopes;
} | the_stack |
import { mdiDrag, mdiNotificationClearAll, mdiPlus, mdiSort } from "@mdi/js";
import { UnsubscribeFunc } from "home-assistant-js-websocket";
import {
css,
CSSResultGroup,
html,
LitElement,
PropertyValues,
TemplateResult,
} from "lit";
import { customElement, property, state, query } from "lit/decorators";
import { classMap } from "lit/directives/class-map";
import { guard } from "lit/directives/guard";
import { repeat } from "lit/directives/repeat";
import { applyThemesOnElement } from "../../../common/dom/apply_themes_on_element";
import "../../../components/ha-card";
import "../../../components/ha-svg-icon";
import "../../../components/ha-checkbox";
import "../../../components/ha-textfield";
import {
addItem,
clearItems,
fetchItems,
reorderItems,
ShoppingListItem,
updateItem,
} from "../../../data/shopping-list";
import { SubscribeMixin } from "../../../mixins/subscribe-mixin";
import { HomeAssistant } from "../../../types";
import { LovelaceCard, LovelaceCardEditor } from "../types";
import { SensorCardConfig, ShoppingListCardConfig } from "./types";
import type { HaTextField } from "../../../components/ha-textfield";
let Sortable;
/**
 * Lovelace card showing the Home Assistant shopping list: items can be
 * added, checked off, renamed, cleared (completed ones), and reordered
 * via drag-and-drop (SortableJS, lazily loaded).
 */
@customElement("hui-shopping-list-card")
class HuiShoppingListCard
  extends SubscribeMixin(LitElement)
  implements LovelaceCard
{
  /** Lazily loads and returns this card's configuration editor element. */
  public static async getConfigElement(): Promise<LovelaceCardEditor> {
    await import("../editor/config-elements/hui-shopping-list-editor");
    return document.createElement("hui-shopping-list-card-editor");
  }
  /** Minimal config used when the card is created from the card picker. */
  public static getStubConfig(): ShoppingListCardConfig {
    return { type: "shopping-list" };
  }
  @property({ attribute: false }) public hass?: HomeAssistant;
  @state() private _config?: ShoppingListCardConfig;
  // Items not yet completed, in the order returned by the backend.
  @state() private _uncheckedItems?: ShoppingListItem[];
  // Items already checked off.
  @state() private _checkedItems?: ShoppingListItem[];
  // True while drag-and-drop reorder mode is active.
  @state() private _reordering = false;
  // Temporarily renders the sortable container empty so Sortable's direct
  // DOM changes can be flushed before lit re-renders the list (see _createSortable).
  @state() private _renderEmptySortable = false;
  // Active SortableJS instance; only set while reordering.
  private _sortable?;
  @query("#sortable") private _sortableEl?: HTMLElement;
  /** Estimated card height in rows: 3 for the list (+2 when a title is set). */
  public getCardSize(): number {
    return (this._config ? (this._config.title ? 2 : 0) : 0) + 3;
  }
  public setConfig(config: ShoppingListCardConfig): void {
    this._config = config;
    this._uncheckedItems = [];
    this._checkedItems = [];
  }
  /**
   * Fetches the list once on subscribe and re-fetches whenever the backend
   * fires a `shopping_list_updated` event.
   */
  public hassSubscribe(): Promise<UnsubscribeFunc>[] {
    this._fetchData();
    return [
      this.hass!.connection.subscribeEvents(
        () => this._fetchData(),
        "shopping_list_updated"
      ),
    ];
  }
  /** Re-applies the configured theme when hass themes or the card theme change. */
  protected updated(changedProps: PropertyValues): void {
    super.updated(changedProps);
    if (!this._config || !this.hass) {
      return;
    }
    const oldHass = changedProps.get("hass") as HomeAssistant | undefined;
    const oldConfig = changedProps.get("_config") as
      | SensorCardConfig
      | undefined;
    if (
      (changedProps.has("hass") && oldHass?.themes !== this.hass.themes) ||
      (changedProps.has("_config") && oldConfig?.theme !== this._config.theme)
    ) {
      applyThemesOnElement(this, this.hass.themes, this._config.theme);
    }
  }
  /**
   * Renders the add-row, the unchecked list (inside a #sortable container
   * while reordering) and, when present, the checked-items section.
   */
  protected render(): TemplateResult {
    if (!this._config || !this.hass) {
      return html``;
    }
    return html`
      <ha-card
        .header=${this._config.title}
        class=${classMap({
          "has-header": "title" in this._config,
        })}
      >
        <div class="addRow">
          <ha-svg-icon
            class="addButton"
            .path=${mdiPlus}
            .title=${this.hass!.localize(
              "ui.panel.lovelace.cards.shopping-list.add_item"
            )}
            @click=${this._addItem}
          >
          </ha-svg-icon>
          <ha-textfield
            class="addBox"
            .placeholder=${this.hass!.localize(
              "ui.panel.lovelace.cards.shopping-list.add_item"
            )}
            @keydown=${this._addKeyPress}
          ></ha-textfield>
          <ha-svg-icon
            class="reorderButton"
            .path=${mdiSort}
            .title=${this.hass!.localize(
              "ui.panel.lovelace.cards.shopping-list.reorder_items"
            )}
            @click=${this._toggleReorder}
          >
          </ha-svg-icon>
        </div>
        ${this._reordering
          ? html`
              <div id="sortable">
                ${guard([this._uncheckedItems, this._renderEmptySortable], () =>
                  this._renderEmptySortable
                    ? ""
                    : this._renderItems(this._uncheckedItems!)
                )}
              </div>
            `
          : this._renderItems(this._uncheckedItems!)}
        ${this._checkedItems!.length > 0
          ? html`
              <div class="divider"></div>
              <div class="checked">
                <span>
                  ${this.hass!.localize(
                    "ui.panel.lovelace.cards.shopping-list.checked_items"
                  )}
                </span>
                <ha-svg-icon
                  class="clearall"
                  tabindex="0"
                  .path=${mdiNotificationClearAll}
                  .title=${this.hass!.localize(
                    "ui.panel.lovelace.cards.shopping-list.clear_items"
                  )}
                  @click=${this._clearItems}
                >
                </ha-svg-icon>
              </div>
              ${repeat(
                this._checkedItems!,
                (item) => item.id,
                (item) =>
                  html`
                    <div class="editRow">
                      <ha-checkbox
                        tabindex="0"
                        .checked=${item.complete}
                        .itemId=${item.id}
                        @change=${this._completeItem}
                      ></ha-checkbox>
                      <ha-textfield
                        class="item"
                        .value=${item.name}
                        .itemId=${item.id}
                        @change=${this._saveEdit}
                      ></ha-textfield>
                    </div>
                  `
              )}
            `
          : ""}
      </ha-card>
    `;
  }
  /**
   * Renders one edit row per item; `item-id` carries the id for Sortable's
   * `dataIdAttr`, and a drag handle icon is shown while reordering.
   */
  private _renderItems(items: ShoppingListItem[]) {
    return html`
      ${repeat(
        items,
        (item) => item.id,
        (item) =>
          html`
            <div class="editRow" item-id=${item.id}>
              <ha-checkbox
                tabindex="0"
                .checked=${item.complete}
                .itemId=${item.id}
                @change=${this._completeItem}
              ></ha-checkbox>
              <ha-textfield
                class="item"
                .value=${item.name}
                .itemId=${item.id}
                @change=${this._saveEdit}
              ></ha-textfield>
              ${this._reordering
                ? html`
                    <ha-svg-icon
                      .title=${this.hass!.localize(
                        "ui.panel.lovelace.cards.shopping-list.drag_and_drop"
                      )}
                      class="reorderButton"
                      .path=${mdiDrag}
                    >
                    </ha-svg-icon>
                  `
                : ""}
            </div>
          `
      )}
    `;
  }
  /** Fetches all items and splits them into checked/unchecked lists. */
  private async _fetchData(): Promise<void> {
    if (!this.hass) {
      return;
    }
    const checkedItems: ShoppingListItem[] = [];
    const uncheckedItems: ShoppingListItem[] = [];
    const items = await fetchItems(this.hass);
    for (const key in items) {
      if (items[key].complete) {
        checkedItems.push(items[key]);
      } else {
        uncheckedItems.push(items[key]);
      }
    }
    this._checkedItems = checkedItems;
    this._uncheckedItems = uncheckedItems;
  }
  // Toggle an item's completed state; on failure re-sync from the backend.
  private _completeItem(ev): void {
    updateItem(this.hass!, ev.target.itemId, {
      complete: ev.target.checked,
    }).catch(() => this._fetchData());
  }
  // Persist a rename; on failure re-sync from the backend.
  private _saveEdit(ev): void {
    updateItem(this.hass!, ev.target.itemId, {
      name: ev.target.value,
    }).catch(() => this._fetchData());
    ev.target.blur();
  }
  // Remove all completed items from the list.
  private _clearItems(): void {
    if (this.hass) {
      clearItems(this.hass).catch(() => this._fetchData());
    }
  }
  // The "add item" text field.
  private get _newItem(): HaTextField {
    return this.shadowRoot!.querySelector(".addBox") as HaTextField;
  }
  // Adds the text field's contents as a new item and clears the field.
  // `ev` is null when triggered via Enter key (focus is kept only on click).
  private _addItem(ev): void {
    const newItem = this._newItem;
    if (newItem.value!.length > 0) {
      addItem(this.hass!, newItem.value!).catch(() => this._fetchData());
    }
    newItem.value = "";
    if (ev) {
      newItem.focus();
    }
  }
  // 13 === Enter (keyCode is deprecated but still widely supported).
  private _addKeyPress(ev): void {
    if (ev.keyCode === 13) {
      this._addItem(null);
    }
  }
  /**
   * Enters/leaves reorder mode, lazily importing SortableJS on first use
   * and tearing the instance down when leaving.
   */
  private async _toggleReorder() {
    if (!Sortable) {
      const sortableImport = await import(
        "sortablejs/modular/sortable.core.esm"
      );
      Sortable = sortableImport.Sortable;
    }
    this._reordering = !this._reordering;
    await this.updateComplete;
    if (this._reordering) {
      this._createSortable();
    } else {
      this._sortable?.destroy();
      this._sortable = undefined;
    }
  }
  /** Attaches SortableJS to the #sortable container and wires drop handling. */
  private _createSortable() {
    const sortableEl = this._sortableEl;
    this._sortable = new Sortable(sortableEl, {
      animation: 150,
      fallbackClass: "sortable-fallback",
      dataIdAttr: "item-id",
      handle: "ha-svg-icon",
      onEnd: async (evt) => {
        // Since this is `onEnd` event, it's possible that
        // an item was dragged away and was put back to its original position.
        if (evt.oldIndex !== evt.newIndex) {
          reorderItems(this.hass!, this._sortable.toArray()).catch(() =>
            this._fetchData()
          );
          // Move the shopping list item in memory.
          this._uncheckedItems!.splice(
            evt.newIndex,
            0,
            this._uncheckedItems!.splice(evt.oldIndex, 1)[0]
          );
        }
        // Render the container empty, strip Sortable's leftover DOM nodes,
        // then let lit re-render the list from state.
        this._renderEmptySortable = true;
        await this.updateComplete;
        while (sortableEl?.lastElementChild) {
          sortableEl.removeChild(sortableEl.lastElementChild);
        }
        this._renderEmptySortable = false;
      },
    });
  }
  static get styles(): CSSResultGroup {
    return css`
      ha-card {
        padding: 16px;
        height: 100%;
        box-sizing: border-box;
      }
      .has-header {
        padding-top: 0;
      }
      .editRow,
      .addRow,
      .checked {
        display: flex;
        flex-direction: row;
        align-items: center;
      }
      .item {
        margin-top: 8px;
      }
      .addButton {
        padding-right: 16px;
        padding-inline-end: 16px;
        cursor: pointer;
        direction: var(--direction);
      }
      .reorderButton {
        padding-left: 16px;
        padding-inline-start: 16px;
        cursor: pointer;
        direction: var(--direction);
      }
      ha-checkbox {
        margin-left: -12px;
        margin-inline-start: -12px;
        direction: var(--direction);
      }
      ha-textfield {
        flex-grow: 1;
      }
      .checked {
        margin: 12px 0;
        justify-content: space-between;
      }
      .checked span {
        color: var(--primary-text-color);
        font-weight: 500;
      }
      .divider {
        height: 1px;
        background-color: var(--divider-color);
        margin: 10px 0;
      }
      .clearall {
        cursor: pointer;
      }
    `;
  }
}
declare global {
  // Register the custom element in the DOM typings so
  // `document.createElement("hui-shopping-list-card")` is correctly typed.
  interface HTMLElementTagNameMap {
    "hui-shopping-list-card": HuiShoppingListCard;
  }
}
import { OAuth2AuthenticateOptions } from "./definitions";
// import sha256 from "fast-sha256";
/**
 * Static helpers for the web implementation of the OAuth2 flow: option
 * resolution, URL construction, redirect-URL parsing and random strings.
 */
export class WebUtils {
    /**
     * Resolves the OAuth client id ("appId"), honoring a web-platform override.
     *
     * Public only for testing
     */
    static getAppId(options: OAuth2AuthenticateOptions): string {
        return this.getOverwritableValue(options, "appId");
    }
    /**
     * Reads `options[key]`, letting `options.web[key]` override the base value
     * when present.
     */
    static getOverwritableValue<T>(options: OAuth2AuthenticateOptions | any, key: string): T {
        let base = options[key];
        if (options.web && key in options.web) {
            base = options.web[key];
        }
        return base;
    }
    /**
     * Builds the authorization endpoint URL with client id, response type,
     * redirect, scope, state, additional parameters and (optionally) the
     * PKCE challenge.
     *
     * Public only for testing
     */
    static getAuthorizationUrl(options: WebOptions): string {
        let url = options.authorizationBaseUrl + "?client_id=" + options.appId;
        url += "&response_type=" + options.responseType;
        if (options.redirectUrl) {
            url += "&redirect_uri=" + options.redirectUrl;
        }
        if (options.scope) {
            url += "&scope=" + options.scope;
        }
        url += "&state=" + options.state;
        if (options.additionalParameters) {
            for (const key in options.additionalParameters) {
                url += "&" + key + "=" + options.additionalParameters[key];
            }
        }
        if (options.pkceCodeChallenge) {
            url += "&code_challenge=" + options.pkceCodeChallenge;
            url += "&code_challenge_method=" + options.pkceCodeChallengeMethod;
        }
        // NOTE(review): encodeURI does not escape reserved characters such as
        // '&' or '=' inside values; callers must pre-encode such values.
        return encodeURI(url);
    }
    /**
     * Builds the x-www-form-urlencoded body for the access-token request
     * (authorization_code grant with the PKCE code verifier).
     */
    static getTokenEndpointData(options: WebOptions, code: string): string {
        let body = '';
        body += encodeURIComponent('grant_type') + '=' + encodeURIComponent('authorization_code') + '&';
        body += encodeURIComponent('client_id') + '=' + encodeURIComponent(options.appId) + '&';
        body += encodeURIComponent('redirect_uri') + '=' + encodeURIComponent(options.redirectUrl) + '&';
        body += encodeURIComponent('code') + '=' + encodeURIComponent(code) + '&';
        body += encodeURIComponent('code_verifier') + '=' + encodeURIComponent(options.pkceCodeVerifier);
        return body;
    }
    /**
     * Extracts key/value parameters from a redirect URL's fragment or query
     * part. Returns `undefined` when the URL is empty or carries no usable
     * parameters.
     *
     * Public only for testing
     */
    static getUrlParams(url: string): { [x: string]: string; } | undefined {
        const urlString = `${url}`.trim();
        if (urlString.length === 0) {
            return;
        }
        // #132: parameters may arrive in the fragment (implicit flow) or the
        // query (code flow); use whichever delimiter appears first.
        let hashIndex = urlString.indexOf("#");
        let queryIndex = urlString.indexOf("?");
        if (hashIndex === -1 && queryIndex === -1) {
            return;
        }
        let paramsIndex: number;
        if (hashIndex > -1 && queryIndex === -1) {
            paramsIndex = hashIndex;
        } else if (queryIndex > -1 && hashIndex === -1) {
            paramsIndex = queryIndex;
        } else {
            paramsIndex = hashIndex > -1 && hashIndex < queryIndex ? hashIndex : queryIndex;
        }
        if (urlString.length <= paramsIndex + 1) {
            return;
        }
        const urlParamStr = urlString.slice(paramsIndex + 1);
        const keyValuePairs: string[] = urlParamStr.split(`&`);
        // Fix: the previous reduce-based implementation returned `undefined`
        // from its callback for pairs with an empty key, clobbering the
        // accumulator and silently dropping every parameter collected so far.
        const params: { [x: string]: string; } = {};
        for (const pair of keyValuePairs) {
            const [key, val] = pair.split(`=`);
            if (key && key.length > 0) {
                params[key] = decodeURIComponent(val);
            }
        }
        return Object.keys(params).length > 0 ? params : undefined;
    }
    /**
     * Generates a random string of `length` characters from [A-Za-z0-9],
     * preferring the cryptographically secure RNG when available.
     */
    static randomString(length: number = 10) {
        const haystack = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        let randomStr;
        // Fix: guard `window` itself so this does not throw a ReferenceError in
        // non-browser environments (consistent with CryptoUtils.HAS_SUBTLE_CRYPTO).
        if (typeof window !== 'undefined' && window.crypto) {
            let numberArray: Uint32Array = new Uint32Array(length);
            window.crypto.getRandomValues(numberArray);
            numberArray = numberArray.map(x => haystack.charCodeAt(x % haystack.length));
            let stringArray: string[] = [];
            numberArray.forEach(x => {
                stringArray.push(haystack.charAt(x % haystack.length));
            })
            randomStr = stringArray.join("");
        } else {
            // Math.random fallback — NOT cryptographically secure.
            randomStr = "";
            for (let i = 0; i < length; i++) {
                randomStr += haystack.charAt(Math.floor(Math.random() * haystack.length));
            }
        }
        return randomStr;
    }
    /**
     * Resolves the plugin configuration into a concrete `WebOptions` object:
     * applies web overrides, derives the PKCE verifier/challenge and fills in
     * defaults (responseType "token", random state).
     */
    static async buildWebOptions(configOptions: OAuth2AuthenticateOptions): Promise<WebOptions> {
        const webOptions = new WebOptions();
        webOptions.appId = this.getAppId(configOptions);
        webOptions.authorizationBaseUrl = this.getOverwritableValue(configOptions, "authorizationBaseUrl");
        webOptions.responseType = this.getOverwritableValue(configOptions, "responseType");
        if (!webOptions.responseType) {
            webOptions.responseType = "token";
        }
        webOptions.redirectUrl = this.getOverwritableValue(configOptions, "redirectUrl");
        // controlling parameters
        webOptions.resourceUrl = this.getOverwritableValue(configOptions, "resourceUrl");
        webOptions.accessTokenEndpoint = this.getOverwritableValue(configOptions, "accessTokenEndpoint");
        webOptions.pkceEnabled = this.getOverwritableValue(configOptions, "pkceEnabled");
        if (webOptions.pkceEnabled) {
            webOptions.pkceCodeVerifier = this.randomString(64);
            if (CryptoUtils.HAS_SUBTLE_CRYPTO) {
                // S256 challenge when WebCrypto is available, otherwise fall
                // back to the "plain" method.
                await CryptoUtils.deriveChallenge(webOptions.pkceCodeVerifier).then(c => {
                    webOptions.pkceCodeChallenge = c;
                    webOptions.pkceCodeChallengeMethod = "S256";
                });
            } else {
                webOptions.pkceCodeChallenge = webOptions.pkceCodeVerifier;
                webOptions.pkceCodeChallengeMethod = "plain";
            }
        }
        webOptions.scope = this.getOverwritableValue(configOptions, "scope");
        webOptions.state = this.getOverwritableValue(configOptions, "state");
        if (!webOptions.state || webOptions.state.length === 0) {
            webOptions.state = this.randomString(20);
        }
        // Copy only non-blank additional parameters / resource headers.
        let parametersMapHelper = this.getOverwritableValue<{ [key: string]: string }>(configOptions, "additionalParameters");
        if (parametersMapHelper) {
            webOptions.additionalParameters = {};
            for (const key in parametersMapHelper) {
                if (key && key.trim().length > 0) {
                    let value = parametersMapHelper[key];
                    if (value && value.trim().length > 0) {
                        webOptions.additionalParameters[key] = value;
                    }
                }
            }
        }
        let headersMapHelper = this.getOverwritableValue<{ [key: string]: string }>(configOptions, "additionalResourceHeaders");
        if (headersMapHelper) {
            webOptions.additionalResourceHeaders = {};
            for (const key in headersMapHelper) {
                if (key && key.trim().length > 0) {
                    let value = headersMapHelper[key];
                    if (value && value.trim().length > 0) {
                        webOptions.additionalResourceHeaders[key] = value;
                    }
                }
            }
        }
        webOptions.logsEnabled = this.getOverwritableValue(configOptions, "logsEnabled");
        if (configOptions.web) {
            if (configOptions.web.windowOptions) {
                webOptions.windowOptions = configOptions.web.windowOptions;
            }
            if (configOptions.web.windowTarget) {
                webOptions.windowTarget = configOptions.web.windowTarget;
            }
            webOptions.windowReplace = configOptions.web.windowReplace;
        }
        return webOptions;
    }
}
/**
 * PKCE helpers: byte conversion, base64(-url) encoding and SHA-256
 * code-challenge derivation via WebCrypto.
 */
export class CryptoUtils {
    static BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    // SubtleCrypto only exists in (secure) browser contexts.
    static HAS_SUBTLE_CRYPTO: boolean = typeof window !== 'undefined' && !!(window.crypto as any) && !!(window.crypto.subtle as any);
    /**
     * Encodes a string's char codes (one byte each) into a Uint8Array.
     */
    static toUint8Array(str: string): Uint8Array {
        const bytes = new Uint8Array(str.length);
        for (let i = 0; i < str.length; i++) {
            bytes[i] = str.charCodeAt(i);
        }
        return bytes;
    }
    /**
     * Converts standard base64 to the URL-safe alphabet and strips padding.
     */
    static toBase64Url(base64: string): string {
        return base64
            .replace(/\+/g, '-')
            .replace(/\//g, '_')
            .replace(/=/g, '');
    }
    /**
     * Base64-encodes raw bytes (standard alphabet with '=' padding).
     */
    static toBase64(bytes: Uint8Array): string {
        const chars = this.BASE64_CHARS;
        const len = bytes.length;
        let base64 = "";
        for (let i = 0; i < len; i += 3) {
            // Pack three bytes into four 6-bit groups. Reads past the end
            // coerce to 0 and are overwritten by the padding fix-up below.
            const b0 = bytes[i];
            const b1 = bytes[i + 1];
            const b2 = bytes[i + 2];
            base64 += chars[b0 >> 2];
            base64 += chars[((b0 & 3) << 4) | (b1 >> 4)];
            base64 += chars[((b1 & 15) << 2) | (b2 >> 6)];
            base64 += chars[b2 & 63];
        }
        const remainder = len % 3;
        if (remainder === 2) {
            base64 = base64.slice(0, -1) + "=";
        } else if (remainder === 1) {
            base64 = base64.slice(0, -2) + "==";
        }
        return base64;
    }
    /**
     * Derives the PKCE S256 code challenge: base64url(SHA-256(verifier)).
     * Rejects when the verifier length is outside 43..128 or when
     * SubtleCrypto is unavailable.
     */
    static async deriveChallenge(codeVerifier: string): Promise<string> {
        if (codeVerifier.length < 43 || codeVerifier.length > 128) {
            throw new Error('ERR_PKCE_CODE_VERIFIER_INVALID_LENGTH');
        }
        if (!CryptoUtils.HAS_SUBTLE_CRYPTO) {
            throw new Error('ERR_PKCE_CRYPTO_NOTSUPPORTED');
        }
        const arrayBuffer = await crypto.subtle.digest('SHA-256', this.toUint8Array(codeVerifier));
        return this.toBase64Url(this.toBase64(new Uint8Array(arrayBuffer)));
    }
}
/**
 * Fully resolved configuration for the web OAuth flow, produced by
 * `WebUtils.buildWebOptions` from the raw plugin options.
 */
export class WebOptions {
    // OAuth client id.
    appId: string;
    // Authorization endpoint base URL (query parameters are appended).
    authorizationBaseUrl: string;
    // Token endpoint for the authorization_code grant.
    accessTokenEndpoint: string;
    // Optional protected-resource URL fetched after authentication.
    resourceUrl: string;
    // OAuth response_type; defaults to "token" when unset.
    responseType: string;
    scope: string;
    // CSRF protection value; randomly generated when not configured.
    state: string;
    redirectUrl: string;
    logsEnabled: boolean;
    // window.open() feature string for the auth popup.
    windowOptions: string;
    windowTarget: string = "_blank";
    windowReplace: boolean | undefined;
    // PKCE (RFC 7636) parameters, populated only when pkceEnabled is true.
    pkceEnabled: boolean;
    pkceCodeVerifier: string;
    pkceCodeChallenge: string;
    // "S256" when SubtleCrypto is available, otherwise "plain".
    pkceCodeChallengeMethod: string;
    additionalParameters: { [key: string]: string };
    additionalResourceHeaders: { [key: string]: string };
}
import every from 'lodash/every'
import { combineEpics, ofType } from 'redux-observable'
import { of, interval, concat, EMPTY } from 'rxjs'
import {
filter,
map,
mapTo,
switchMap,
mergeMap,
takeUntil,
withLatestFrom,
} from 'rxjs/operators'
// imported directly to avoid circular dependencies between discovery and shell
import { getAllRobots, getRobotApiVersion } from '../discovery'
import {
startDiscovery,
finishDiscovery,
removeRobot,
} from '../discovery/actions'
import { GET, POST, fetchRobotApi } from '../robot-api'
import {
RESTART_PATH,
RESTART_STATUS_CHANGED,
RESTART_SUCCEEDED_STATUS,
RESTART_TIMED_OUT_STATUS,
restartRobotSuccess,
} from '../robot-admin'
import { actions as robotActions } from '../robot'
import {
getBuildrootTargetVersion,
getBuildrootSession,
getBuildrootRobotName,
getBuildrootRobot,
} from './selectors'
import {
startBuildrootUpdate,
startBuildrootPremigration,
readUserBuildrootFile,
createSession,
createSessionSuccess,
buildrootStatus,
uploadBuildrootFile,
setBuildrootSessionStep,
unexpectedBuildrootError,
} from './actions'
import {
PREMIGRATION_RESTART,
GET_TOKEN,
PROCESS_FILE,
COMMIT_UPDATE,
RESTART,
FINISHED,
AWAITING_FILE,
DONE,
READY_FOR_RESTART,
BR_START_UPDATE,
BR_USER_FILE_INFO,
BR_CREATE_SESSION,
BR_CREATE_SESSION_SUCCESS,
} from './constants'
import type { Observable } from 'rxjs'
import type { State, Action, Epic } from '../types'
import type { ViewableRobot } from '../discovery/types'
import type { RobotApiResponse } from '../robot-api/types'
import type { RestartStatusChangedAction } from '../robot-admin/types'
import type {
BuildrootAction,
StartBuildrootUpdateAction,
CreateSessionAction,
CreateSessionSuccessAction,
BuildrootUpdateSession,
BuildrootStatusAction,
} from './types'
// How often the update-session status endpoint is polled.
export const POLL_INTERVAL_MS = 2000
// How long (20 minutes) to run rediscovery after the robot restarts.
export const REDISCOVERY_TIME_MS = 1200000
// TODO(mc, 2020-01-08): i18n
const UNABLE_TO_FIND_ROBOT_WITH_NAME = 'Unable to find online robot with name'
const ROBOT_HAS_BAD_CAPABILITIES = 'Robot has incorrect capabilities shape'
const UNABLE_TO_START_UPDATE_SESSION = 'Unable to start update session'
const UNABLE_TO_CANCEL_UPDATE_SESSION =
  'Unable to cancel in-progress update session'
const UNABLE_TO_COMMIT_UPDATE = 'Unable to commit update'
const UNABLE_TO_RESTART_ROBOT = 'Unable to restart robot'
const ROBOT_RECONNECTED_WITH_VERSION = 'Robot reconnected with version'
const ROBOT_DID_NOT_RECONNECT = 'Robot did not successfully reconnect'
const BUT_WE_EXPECTED = 'but we expected'
const UNKNOWN = 'unknown'
const CHECK_TO_VERIFY_UPDATE =
  "Check your robot's settings page to verify whether or not the update was successful."
// listen for the kickoff action and:
// if not ready for buildroot, kickoff premigration
// if not migrated, kickoff migration
// if migrated, kickoff regular buildroot update
export const startUpdateEpic: Epic = (action$, state$) =>
  action$.pipe(
    ofType<Action, StartBuildrootUpdateAction>(BR_START_UPDATE),
    withLatestFrom(state$),
    map<[StartBuildrootUpdateAction, State], any>(([action, state]) => {
      // BR_START_UPDATE will set the active updating robot in state
      const { robotName, systemFile } = action.payload
      const host = getBuildrootRobot(state)
      const serverHealth = host?.serverHealth || null
      // we need the target robot's update server to be up to do anything
      if (host === null || serverHealth === null) {
        return unexpectedBuildrootError(
          `${UNABLE_TO_FIND_ROBOT_WITH_NAME} ${robotName}`
        )
      }
      const capabilities = serverHealth.capabilities || null
      // if action passed a system file, we need to read that file
      if (systemFile !== null) {
        return readUserBuildrootFile(systemFile)
      }
      // if capabilities is empty, the robot requires premigration
      if (capabilities === null) {
        // @ts-expect-error TODO: host is actually of type Robot|ReachableRobot but this action expects a RobotHost
        return startBuildrootPremigration(host)
      }
      // otherwise robot is ready for migration or update, so get token
      // capabilities response has the correct request path to use
      const sessionPath =
        capabilities.buildrootUpdate || capabilities.buildrootMigration
      if (sessionPath == null) {
        // capabilities exists but carries neither session path: bad shape
        return unexpectedBuildrootError(
          `${ROBOT_HAS_BAD_CAPABILITIES}: ${JSON.stringify(capabilities)}`
        )
      }
      // @ts-expect-error TODO: host is actually of type Robot|ReachableRobot but this action expects a RobotHost
      return createSession(host, sessionPath)
    })
  )
// listen for the active robot to come back with capabilities after
// premigration, then restart the update flow from the top
export const retryAfterPremigrationEpic: Epic = (_, state$) => {
  return state$.pipe(
    switchMap(state => {
      const session = getBuildrootSession(state)
      const robot = getBuildrootRobot(state)
      // only retry once the robot has reported capabilities again
      return robot !== null &&
        session?.step === PREMIGRATION_RESTART &&
        robot.serverHealth?.capabilities != null
        ? of(startBuildrootUpdate(robot.name))
        : EMPTY
    })
  )
}
// Once user-provided update-file info has been recorded, restart the update
// flow for the robot currently being updated (if any).
export const retryAfterUserFileInfoEpic: Epic = (action$, state$) => {
  return action$.pipe(
    ofType(BR_USER_FILE_INFO),
    // Pull the currently-updating robot's name out of the latest state.
    withLatestFrom(state$, (_, state) => getBuildrootRobotName(state)),
    // Drop the event when no robot is actively updating.
    filter((name): name is string => name !== null),
    map<string, any>(name => startBuildrootUpdate(name))
  )
}
// create a buildroot update session
// if unable to create because of 409 conflict, cancel session and retry
export const createSessionEpic: Epic = action$ => {
  return action$.pipe(
    ofType(BR_CREATE_SESSION),
    // POST to the session-begin endpoint provided by capabilities
    switchMap<CreateSessionAction, ReturnType<typeof fetchRobotApi>>(
      createAction => {
        const { host, sessionPath } = createAction.payload
        return fetchRobotApi(host, { method: POST, path: sessionPath })
      }
    ),
    switchMap(resp => {
      const { host, path, ok, status } = resp
      // the cancel/other endpoints live next to `/begin` under the same prefix
      const pathPrefix = path.replace('/begin', '')
      if (ok) {
        return of(createSessionSuccess(host, resp.body.token, pathPrefix))
      }
      if (!ok && status === 409) {
        // a session already exists: cancel it and re-dispatch session creation
        return fetchRobotApi(host, {
          method: POST,
          path: `${pathPrefix}/cancel`,
        }).pipe(
          map(cancelResp => {
            return cancelResp.ok
              ? createSession(host, path)
              : unexpectedBuildrootError(UNABLE_TO_CANCEL_UPDATE_SESSION)
          })
        )
      }
      return of(unexpectedBuildrootError(UNABLE_TO_START_UPDATE_SESSION))
    })
  )
}
// epic to listen for token creation success and start a
// status poll until the status switches to 'ready-for-restart'
export const statusPollEpic: Epic = (action$, state$) => {
  return action$.pipe(
    ofType(BR_CREATE_SESSION_SUCCESS),
    // mergeMap: allow polls for multiple sessions to coexist
    mergeMap<CreateSessionSuccessAction, Observable<BuildrootStatusAction>>(
      action => {
        const { host, token, pathPrefix } = action.payload
        const request = { method: GET, path: `${pathPrefix}/${token}/status` }
        return interval(POLL_INTERVAL_MS).pipe(
          // stop polling once the session is restart-ready, errored, or gone
          takeUntil(
            state$.pipe(
              filter(state => {
                const session = getBuildrootSession(state)
                return (
                  session?.stage === READY_FOR_RESTART ||
                  // @ts-expect-error TODO: `session?.error === true` always returns false, remove it?
                  session?.error === true ||
                  session === null
                )
              })
            )
          ),
          switchMap(() => fetchRobotApi(host, request)),
          // failed polls are simply dropped; the next tick retries
          filter(resp => resp.ok),
          map<RobotApiResponse, BuildrootStatusAction>(successResp =>
            buildrootStatus(
              successResp.body.stage,
              successResp.body.message,
              // server progress is a 0..1 fraction; store as whole percent
              successResp.body.progress != null
                ? Math.round(successResp.body.progress * 100)
                : null
            )
          )
        )
      }
    )
  )
}
// Filter predicate factory: passes states with an active, error-free
// buildroot session whose fields match every entry in `props`.
const passActiveSession = (props: Partial<BuildrootUpdateSession>) => (
  state: State
): boolean => {
  const robot = getBuildrootRobot(state)
  const session = getBuildrootSession(state)
  if (robot === null || session == null || session.error) {
    return false
  }
  // the session must be fully established (token + path prefix present)
  if (
    typeof session.pathPrefix !== 'string' ||
    typeof session.token !== 'string'
  ) {
    return false
  }
  // every requested property (e.g. stage/step) must match exactly
  return Object.entries(props).every(
    ([key, value]) => session[key as keyof BuildrootUpdateSession] === value
  )
}
// upload the update file to the robot when it switches to `awaiting-file`
export const uploadFileEpic: Epic = (_, state$) => {
  return state$.pipe(
    filter(passActiveSession({ stage: AWAITING_FILE, step: GET_TOKEN })),
    map<State, ReturnType<typeof uploadBuildrootFile>>(stateWithSession => {
      const host: ViewableRobot = getBuildrootRobot(stateWithSession) as any
      const session = getBuildrootSession(stateWithSession)
      const pathPrefix: string = session?.pathPrefix as any
      const token: string = session?.token as any
      // user-provided system file, if any (null otherwise)
      const systemFile = session?.userFileInfo?.systemFile || null
      return uploadBuildrootFile(
        // @ts-expect-error TODO: host is actually of type Robot|ReachableRobot but this action expects a RobotHost
        host,
        `${pathPrefix}/${token}/file`,
        systemFile
      )
    })
  )
}
// commit the update file on the robot when it switches to `done`
export const commitUpdateEpic: Epic = (_, state$) => {
  return state$.pipe(
    filter(passActiveSession({ stage: DONE, step: PROCESS_FILE })),
    switchMap<State, Observable<BuildrootAction>>(stateWithSession => {
      const host: ViewableRobot = getBuildrootRobot(stateWithSession) as any
      const session = getBuildrootSession(stateWithSession)
      const pathPrefix: string = session?.pathPrefix as any
      const token: string = session?.token as any
      const path = `${pathPrefix}/${token}/commit`
      // only emit a follow-up action if the commit request fails;
      // success is observed via the ongoing status poll
      // @ts-expect-error TODO: host is actually of type Robot|ReachableRobot but this action expects a RobotHost
      const request$ = fetchRobotApi(host, { method: POST, path }).pipe(
        filter(resp => !resp.ok),
        map(resp => {
          return unexpectedBuildrootError(
            `${UNABLE_TO_COMMIT_UPDATE}: ${resp.body.message}`
          )
        })
      )
      // record the step transition before firing the request
      return concat(of(setBuildrootSessionStep(COMMIT_UPDATE)), request$)
    })
  )
}
// restart the robot when it switches to `ready-for-restart`
export const restartAfterCommitEpic: Epic = (_, state$) => {
  return state$.pipe(
    filter(
      passActiveSession({ stage: READY_FOR_RESTART, step: COMMIT_UPDATE })
    ),
    switchMap<State, Observable<any>>(stateWithSession => {
      const host: ViewableRobot = getBuildrootRobot(stateWithSession) as any
      // prefer the restart path advertised by capabilities, else the default
      const path = host.serverHealth?.capabilities?.restart || RESTART_PATH
      // @ts-expect-error TODO: host is actually of type Robot|ReachableRobot but this action expects a RobotHost
      const request$ = fetchRobotApi(host, { method: POST, path }).pipe(
        switchMap(resp => {
          return resp.ok
            ? // restart accepted: rediscover the robot while it reboots
              of(
                startDiscovery(REDISCOVERY_TIME_MS),
                restartRobotSuccess(host.name, {})
              )
            : of(
                unexpectedBuildrootError(
                  `${UNABLE_TO_RESTART_ROBOT}: ${resp.body.message}`
                )
              )
        })
      )
      // record the step transition before firing the request
      return concat(of(setBuildrootSessionStep(RESTART)), request$)
    })
  )
}
// Once the restart finishes (or times out), compare the reconnected robot's
// version against the update target and finish the session with either a
// FINISHED step or an explanatory error.
export const finishAfterRestartEpic: Epic = (action$, state$) => {
  return action$.pipe(
    ofType(RESTART_STATUS_CHANGED),
    withLatestFrom<
      RestartStatusChangedAction,
      [RestartStatusChangedAction, State]
    >(state$),
    filter(([action, state]: [RestartStatusChangedAction, State]) => {
      const session = getBuildrootSession(state)
      const robot = getBuildrootRobot(state)
      const restartDone =
        action.payload.restartStatus === RESTART_SUCCEEDED_STATUS ||
        action.payload.restartStatus === RESTART_TIMED_OUT_STATUS
      // only react to the restart of the robot we are updating, while the
      // session is still in its RESTART step without errors
      return (
        restartDone &&
        robot?.name === action.payload.robotName &&
        !session?.error &&
        session?.step === RESTART
      )
    }),
    switchMap(([action, stateWithRobot]) => {
      const targetVersion = getBuildrootTargetVersion(stateWithRobot)
      const robot: ViewableRobot = getBuildrootRobot(stateWithRobot) as any
      const robotVersion = getRobotApiVersion(robot)
      const timedOut = action.payload.restartStatus === RESTART_TIMED_OUT_STATUS
      const actual = robotVersion ?? UNKNOWN
      const expected = targetVersion ?? UNKNOWN
      let finishAction
      if (
        targetVersion != null &&
        robotVersion != null &&
        robotVersion === targetVersion
      ) {
        // reconnected at exactly the target version: success
        finishAction = setBuildrootSessionStep(FINISHED)
      } else if (timedOut) {
        finishAction = unexpectedBuildrootError(
          `${ROBOT_DID_NOT_RECONNECT}. ${CHECK_TO_VERIFY_UPDATE}.`
        )
      } else {
        // reconnected, but at an unexpected version
        finishAction = unexpectedBuildrootError(
          `${ROBOT_RECONNECTED_WITH_VERSION} ${actual}, ${BUT_WE_EXPECTED} ${expected}. ${CHECK_TO_VERIFY_UPDATE}.`
        )
      }
      // either way, stop the extended discovery window
      return of(finishAction, finishDiscovery())
    })
  )
}
// if robot was renamed as part of migration, remove old robot name, balena
// robots have name opentrons-robot-name, BR robots have robot-name
// getBuildrootRobot will handle that logic, so we can compare name in state
// vs the actual robot we're interacting with
export const removeMigratedRobotsEpic: Epic = (_, state$) => {
return state$.pipe(
filter(state => {
const robotName = getBuildrootRobotName(state)
const robot = getBuildrootRobot(state)
const allRobots = getAllRobots(state)
return (
robot !== null &&
robotName !== null &&
robot.name !== robotName &&
allRobots.some(r => r.name === robotName)
)
}),
map<State, ReturnType<typeof removeRobot>>(stateWithRobotName => {
const robotName: string = getBuildrootRobotName(stateWithRobotName) as any
return removeRobot(robotName)
})
)
}
// drop any existing legacy RPC connection as soon as an update starts
export const disconnectRpcOnStartEpic: Epic = incoming$ => {
  return incoming$.pipe(
    ofType(BR_START_UPDATE),
    mapTo(robotActions.disconnect())
  )
}
// root epic for the buildroot update flow: combines every epic defined in
// this module into a single epic
export const buildrootEpic = combineEpics<Epic>(
  startUpdateEpic,
  retryAfterPremigrationEpic,
  retryAfterUserFileInfoEpic,
  createSessionEpic,
  statusPollEpic,
  uploadFileEpic,
  commitUpdateEpic,
  restartAfterCommitEpic,
  finishAfterRestartEpic,
  removeMigratedRobotsEpic,
  disconnectRpcOnStartEpic
)
import { AccessLevelList } from "../shared/access-level";
import { PolicyStatement, Operator } from "../shared";
/**
* Statement provider for service [a4b](https://docs.aws.amazon.com/service-authorization/latest/reference/list_alexaforbusiness.html).
*
* @param sid [SID](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_sid.html) of the statement
*/
export class A4b extends PolicyStatement {
public servicePrefix = 'a4b';
/**
* Statement provider for service [a4b](https://docs.aws.amazon.com/service-authorization/latest/reference/list_alexaforbusiness.html).
*
* @param sid [SID](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_sid.html) of the statement
*/
constructor (sid?: string) {
super(sid);
}
/**
* Grants permission to associate a skill with the organization under the customer's AWS account
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ApproveSkill.html
*/
public toApproveSkill() {
return this.to('ApproveSkill');
}
/**
* Grants permission to associate a contact with a given address book
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_AssociateContactWithAddressBook.html
*/
public toAssociateContactWithAddressBook() {
return this.to('AssociateContactWithAddressBook');
}
/**
* Grants permission to associate a device with the specified network profile
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_AssociateDeviceWithNetworkProfile.html
*/
public toAssociateDeviceWithNetworkProfile() {
return this.to('AssociateDeviceWithNetworkProfile');
}
/**
* Grants permission to associate device with given room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_AssociateDeviceWithRoom.html
*/
public toAssociateDeviceWithRoom() {
return this.to('AssociateDeviceWithRoom');
}
/**
* Grants permission to associate the skill group with given room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_AssociateSkillGroupWithRoom.html
*/
public toAssociateSkillGroupWithRoom() {
return this.to('AssociateSkillGroupWithRoom');
}
/**
* Grants permission to associate a skill with a skill group
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_AssociateSkillWithSkillGroup.html
*/
public toAssociateSkillWithSkillGroup() {
return this.to('AssociateSkillWithSkillGroup');
}
/**
* Grants permission to make a private skill available for enrolled users to enable on their devices
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_AssociateSkillWithUsers.html
*/
public toAssociateSkillWithUsers() {
return this.to('AssociateSkillWithUsers');
}
/**
* Grants permission to complete the operation of registering an Alexa device
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/ag/manage-devices.html
*/
public toCompleteRegistration() {
return this.to('CompleteRegistration');
}
/**
* Grants permission to create an address book with the specified details
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateAddressBook.html
*/
public toCreateAddressBook() {
return this.to('CreateAddressBook');
}
/**
* Grants permission to create a recurring schedule for usage reports to deliver to the specified S3 location with a specified daily or weekly interval
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateBusinessReportSchedule.html
*/
public toCreateBusinessReportSchedule() {
return this.to('CreateBusinessReportSchedule');
}
/**
* Grants permission to add a new conference provider under the user's AWS account
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateConferenceProvider.html
*/
public toCreateConferenceProvider() {
return this.to('CreateConferenceProvider');
}
/**
* Grants permission to create a contact with the specified details
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateContact.html
*/
public toCreateContact() {
return this.to('CreateContact');
}
/**
* Grants permission to create a gateway group with the specified details
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateGatewayGroup.html
*/
public toCreateGatewayGroup() {
return this.to('CreateGatewayGroup');
}
/**
* Grants permission to create a network profile with the specified details
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateNetworkProfile.html
*/
public toCreateNetworkProfile() {
return this.to('CreateNetworkProfile');
}
/**
* Grants permission to create a new profile
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateProfile.html
*/
public toCreateProfile() {
return this.to('CreateProfile');
}
/**
* Grants permission to create room with the specified details
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateRoom.html
*/
public toCreateRoom() {
return this.to('CreateRoom');
}
/**
* Grants permission to create a skill group with given name and description
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateSkillGroup.html
*/
public toCreateSkillGroup() {
return this.to('CreateSkillGroup');
}
/**
* Grants permission to create a user
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_CreateUser.html
*/
public toCreateUser() {
return this.to('CreateUser');
}
/**
* Grants permission to delete an address book by the address book ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteAddressBook.html
*/
public toDeleteAddressBook() {
return this.to('DeleteAddressBook');
}
/**
* Grants permission to delete the recurring report delivery schedule with the specified schedule ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteBusinessReportSchedule.html
*/
public toDeleteBusinessReportSchedule() {
return this.to('DeleteBusinessReportSchedule');
}
/**
* Grants permission to delete a conference provider
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteConferenceProvider.html
*/
public toDeleteConferenceProvider() {
return this.to('DeleteConferenceProvider');
}
/**
* Grants permission to delete a contact by the contact ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteContact.html
*/
public toDeleteContact() {
return this.to('DeleteContact');
}
/**
* Grants permission to remove a device from Alexa For Business
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteDevice.html
*/
public toDeleteDevice() {
return this.to('DeleteDevice');
}
/**
* Grants permission to delete the device's entire previous history of voice input data and associated response data
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteDeviceUsageData.html
*/
public toDeleteDeviceUsageData() {
return this.to('DeleteDeviceUsageData');
}
/**
* Grants permission to delete a gateway group
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteGatewayGroup.html
*/
public toDeleteGatewayGroup() {
return this.to('DeleteGatewayGroup');
}
/**
* Grants permission to delete a network profile by the network profile ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteNetworkProfile.html
*/
public toDeleteNetworkProfile() {
return this.to('DeleteNetworkProfile');
}
/**
* Grants permission to delete profile by profile ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteProfile.html
*/
public toDeleteProfile() {
return this.to('DeleteProfile');
}
/**
* Grants permission to delete room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteRoom.html
*/
public toDeleteRoom() {
return this.to('DeleteRoom');
}
/**
* Grants permission to delete a parameter from a skill and room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteRoomSkillParameter.html
*/
public toDeleteRoomSkillParameter() {
return this.to('DeleteRoomSkillParameter');
}
/**
* Grants permission to unlink a third-party account from a skill
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteSkillAuthorization.html
*/
public toDeleteSkillAuthorization() {
return this.to('DeleteSkillAuthorization');
}
/**
* Grants permission to delete skill group with skill group ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteSkillGroup.html
*/
public toDeleteSkillGroup() {
return this.to('DeleteSkillGroup');
}
/**
* Grants permission to delete a user
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DeleteUser.html
*/
public toDeleteUser() {
return this.to('DeleteUser');
}
/**
* Grants permission to disassociate a contact from a given address book
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DisassociateContactFromAddressBook.html
*/
public toDisassociateContactFromAddressBook() {
return this.to('DisassociateContactFromAddressBook');
}
/**
* Grants permission to disassociate device from its current room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DisassociateDeviceFromRoom.html
*/
public toDisassociateDeviceFromRoom() {
return this.to('DisassociateDeviceFromRoom');
}
/**
* Grants permission to disassociate a skill from a skill group
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DisassociateSkillFromSkillGroup.html
*/
public toDisassociateSkillFromSkillGroup() {
return this.to('DisassociateSkillFromSkillGroup');
}
/**
* Grants permission to make a private skill unavailable for enrolled users and prevent them from enabling it on their devices
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DisassociateSkillFromUsers.html
*/
public toDisassociateSkillFromUsers() {
return this.to('DisassociateSkillFromUsers');
}
/**
* Grants permission to disassociate the skill group from given room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_DisassociateSkillGroupFromRoom.html
*/
public toDisassociateSkillGroupFromRoom() {
return this.to('DisassociateSkillGroupFromRoom');
}
/**
* Grants permission to forget smart home appliances associated to a room
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ForgetSmartHomeAppliances.html
*/
public toForgetSmartHomeAppliances() {
return this.to('ForgetSmartHomeAppliances');
}
/**
* Grants permission to get the address book details by the address book ARN
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetAddressBook.html
*/
public toGetAddressBook() {
return this.to('GetAddressBook');
}
/**
* Grants permission to retrieve the existing conference preferences
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetConferencePreference.html
*/
public toGetConferencePreference() {
return this.to('GetConferencePreference');
}
/**
* Grants permission to get details about a specific conference provider
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetConferenceProvider.html
*/
public toGetConferenceProvider() {
return this.to('GetConferenceProvider');
}
/**
* Grants permission to get the contact details by the contact ARN
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetContact.html
*/
public toGetContact() {
return this.to('GetContact');
}
/**
* Grants permission to get device details
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetDevice.html
*/
public toGetDevice() {
return this.to('GetDevice');
}
/**
* Grants permission to retrieve the details of a gateway
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetGateway.html
*/
public toGetGateway() {
return this.to('GetGateway');
}
/**
* Grants permission to retrieve the details of a gateway group
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetGatewayGroup.html
*/
public toGetGatewayGroup() {
return this.to('GetGatewayGroup');
}
/**
* Grants permission to retrieve the configured values for the user enrollment invitation email template
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetInvitationConfiguration.html
*/
public toGetInvitationConfiguration() {
return this.to('GetInvitationConfiguration');
}
/**
* Grants permission to get the network profile details by the network profile ARN
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetNetworkProfile.html
*/
public toGetNetworkProfile() {
return this.to('GetNetworkProfile');
}
/**
* Grants permission to get profile when provided with Profile ARN
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetProfile.html
*/
public toGetProfile() {
return this.to('GetProfile');
}
/**
* Grants permission to get room details
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetRoom.html
*/
public toGetRoom() {
return this.to('GetRoom');
}
/**
* Grants permission to get an existing parameter that has been set for a skill and room
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetRoomSkillParameter.html
*/
public toGetRoomSkillParameter() {
return this.to('GetRoomSkillParameter');
}
/**
* Grants permission to get skill group details with skill group ARN
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_GetSkillGroup.html
*/
public toGetSkillGroup() {
return this.to('GetSkillGroup');
}
/**
* Grants permission to list the details of the schedules that a user configured
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListBusinessReportSchedules.html
*/
public toListBusinessReportSchedules() {
return this.to('ListBusinessReportSchedules');
}
/**
* Grants permission to list conference providers under a specific AWS account
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListConferenceProviders.html
*/
public toListConferenceProviders() {
return this.to('ListConferenceProviders');
}
/**
* Grants permission to list the device event history, including device connection status, for up to 30 days
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListDeviceEvents.html
*/
public toListDeviceEvents() {
return this.to('ListDeviceEvents');
}
/**
* Grants permission to list gateway group summaries
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListGatewayGroups.html
*/
public toListGatewayGroups() {
return this.to('ListGatewayGroups');
}
/**
* Grants permission to list gateway summaries
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListGateways.html
*/
public toListGateways() {
return this.to('ListGateways');
}
/**
* Grants permission to list skills
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListSkills.html
*/
public toListSkills() {
return this.to('ListSkills');
}
/**
* Grants permission to list all categories in the Alexa skill store
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListSkillsStoreCategories.html
*/
public toListSkillsStoreCategories() {
return this.to('ListSkillsStoreCategories');
}
/**
* Grants permission to list all skills in the Alexa skill store by category
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListSkillsStoreSkillsByCategory.html
*/
public toListSkillsStoreSkillsByCategory() {
return this.to('ListSkillsStoreSkillsByCategory');
}
/**
* Grants permission to list all of the smart home appliances associated with a room
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListSmartHomeAppliances.html
*/
public toListSmartHomeAppliances() {
return this.to('ListSmartHomeAppliances');
}
/**
* Grants permission to list all tags on a resource
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ListTags.html
*/
public toListTags() {
return this.to('ListTags');
}
/**
* Grants permission to set the conference preferences on a specific conference provider at the account level
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_PutConferencePreference.html
*/
public toPutConferencePreference() {
return this.to('PutConferencePreference');
}
/**
* Grants permission to publish Alexa device setup events
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/ag/manage-devices.html
*/
public toPutDeviceSetupEvents() {
return this.to('PutDeviceSetupEvents');
}
/**
* Grants permission to configure the email template for the user enrollment invitation with the specified attributes
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_PutInvitationConfiguration.html
*/
public toPutInvitationConfiguration() {
return this.to('PutInvitationConfiguration');
}
/**
* Grants permission to put a room specific parameter for a skill
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_PutRoomSkillParameter.html
*/
public toPutRoomSkillParameter() {
return this.to('PutRoomSkillParameter');
}
/**
* Grants permission to link a user's account to a third-party skill provider
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_PutSkillAuthorization.html
*/
public toPutSkillAuthorization() {
return this.to('PutSkillAuthorization');
}
/**
* Grants permission to register an Alexa-enabled device built by an Original Equipment Manufacturer (OEM) using Alexa Voice Service (AVS)
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_RegisterAVSDevice.html
*/
public toRegisterAVSDevice() {
return this.to('RegisterAVSDevice');
}
/**
* Grants permission to register an Alexa device
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/ag/manage-devices.html
*/
public toRegisterDevice() {
return this.to('RegisterDevice');
}
/**
* Grants permission to disassociate a skill from the organization under a user's AWS account
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_RejectSkill.html
*/
public toRejectSkill() {
return this.to('RejectSkill');
}
/**
* Grants permission to resolve room information
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_ResolveRoom.html
*/
public toResolveRoom() {
return this.to('ResolveRoom');
}
/**
* Grants permission to revoke an invitation
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_RevokeInvitation.html
*/
public toRevokeInvitation() {
return this.to('RevokeInvitation');
}
/**
* Grants permission to search address books and list the ones that meet a set of filter and sort criteria
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchAddressBooks.html
*/
public toSearchAddressBooks() {
return this.to('SearchAddressBooks');
}
/**
* Grants permission to search contacts and list the ones that meet a set of filter and sort criteria
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchContacts.html
*/
public toSearchContacts() {
return this.to('SearchContacts');
}
/**
* Grants permission to search for devices
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchDevices.html
*/
public toSearchDevices() {
return this.to('SearchDevices');
}
/**
* Grants permission to search network profiles and list the ones that meet a set of filter and sort criteria
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchNetworkProfiles.html
*/
public toSearchNetworkProfiles() {
return this.to('SearchNetworkProfiles');
}
/**
* Grants permission to search for profiles
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchProfiles.html
*/
public toSearchProfiles() {
return this.to('SearchProfiles');
}
/**
* Grants permission to search for rooms
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchRooms.html
*/
public toSearchRooms() {
return this.to('SearchRooms');
}
/**
* Grants permission to search for skill groups
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchSkillGroups.html
*/
public toSearchSkillGroups() {
return this.to('SearchSkillGroups');
}
/**
* Grants permission to search for users
*
* Access Level: List
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchUsers.html
*/
public toSearchUsers() {
return this.to('SearchUsers');
}
/**
* Grants permission to trigger an asynchronous flow to send text, SSML, or audio announcements to rooms that are identified by a search or filter
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SendAnnouncement.html
*/
public toSendAnnouncement() {
return this.to('SendAnnouncement');
}
/**
* Grants permission to send an invitation to a user
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_SendInvitation.html
*/
public toSendInvitation() {
return this.to('SendInvitation');
}
/**
* Grants permission to restore the device and its account to its known, default settings by clearing all information and settings set by its previous users
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_StartDeviceSync.html
*/
public toStartDeviceSync() {
return this.to('StartDeviceSync');
}
/**
* Grants permission to initiate the discovery of any smart home appliances associated with the room
*
* Access Level: Read
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_StartSmartHomeApplianceDiscovery.html
*/
public toStartSmartHomeApplianceDiscovery() {
return this.to('StartSmartHomeApplianceDiscovery');
}
/**
* Grants permission to add metadata tags to a resource
*
* Access Level: Tagging
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_TagResource.html
*/
public toTagResource() {
return this.to('TagResource');
}
/**
* Grants permission to remove metadata tags from a resource
*
* Access Level: Tagging
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UntagResource.html
*/
public toUntagResource() {
return this.to('UntagResource');
}
/**
* Grants permission to update address book details by the address book ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateAddressBook.html
*/
public toUpdateAddressBook() {
return this.to('UpdateAddressBook');
}
/**
* Grants permission to update the configuration of the report delivery schedule with the specified schedule ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateBusinessReportSchedule.html
*/
public toUpdateBusinessReportSchedule() {
return this.to('UpdateBusinessReportSchedule');
}
/**
* Grants permission to update an existing conference provider's settings
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateConferenceProvider.html
*/
public toUpdateConferenceProvider() {
return this.to('UpdateConferenceProvider');
}
/**
* Grants permission to update the contact details by the contact ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateContact.html
*/
public toUpdateContact() {
return this.to('UpdateContact');
}
/**
* Grants permission to update device name
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateDevice.html
*/
public toUpdateDevice() {
return this.to('UpdateDevice');
}
/**
* Grants permission to update the details of a gateway
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateGateway.html
*/
public toUpdateGateway() {
return this.to('UpdateGateway');
}
/**
* Grants permission to update the details of a gateway group
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateGatewayGroup.html
*/
public toUpdateGatewayGroup() {
return this.to('UpdateGatewayGroup');
}
/**
* Grants permission to update a network profile by the network profile ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateNetworkProfile.html
*/
public toUpdateNetworkProfile() {
return this.to('UpdateNetworkProfile');
}
/**
* Grants permission to update an existing profile
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateProfile.html
*/
public toUpdateProfile() {
return this.to('UpdateProfile');
}
/**
* Grants permission to update room details
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateRoom.html
*/
public toUpdateRoom() {
return this.to('UpdateRoom');
}
/**
* Grants permission to update skill group details with skill group ARN
*
* Access Level: Write
*
* https://docs.aws.amazon.com/a4b/latest/APIReference/API_UpdateSkillGroup.html
*/
public toUpdateSkillGroup() {
return this.to('UpdateSkillGroup');
}
protected accessLevelList: AccessLevelList = {
"Write": [
"ApproveSkill",
"AssociateContactWithAddressBook",
"AssociateDeviceWithNetworkProfile",
"AssociateDeviceWithRoom",
"AssociateSkillGroupWithRoom",
"AssociateSkillWithSkillGroup",
"AssociateSkillWithUsers",
"CompleteRegistration",
"CreateAddressBook",
"CreateBusinessReportSchedule",
"CreateConferenceProvider",
"CreateContact",
"CreateGatewayGroup",
"CreateNetworkProfile",
"CreateProfile",
"CreateRoom",
"CreateSkillGroup",
"CreateUser",
"DeleteAddressBook",
"DeleteBusinessReportSchedule",
"DeleteConferenceProvider",
"DeleteContact",
"DeleteDevice",
"DeleteDeviceUsageData",
"DeleteGatewayGroup",
"DeleteNetworkProfile",
"DeleteProfile",
"DeleteRoom",
"DeleteRoomSkillParameter",
"DeleteSkillAuthorization",
"DeleteSkillGroup",
"DeleteUser",
"DisassociateContactFromAddressBook",
"DisassociateDeviceFromRoom",
"DisassociateSkillFromSkillGroup",
"DisassociateSkillFromUsers",
"DisassociateSkillGroupFromRoom",
"ForgetSmartHomeAppliances",
"PutConferencePreference",
"PutDeviceSetupEvents",
"PutInvitationConfiguration",
"PutRoomSkillParameter",
"PutSkillAuthorization",
"RegisterAVSDevice",
"RegisterDevice",
"RejectSkill",
"RevokeInvitation",
"SendAnnouncement",
"SendInvitation",
"StartDeviceSync",
"UpdateAddressBook",
"UpdateBusinessReportSchedule",
"UpdateConferenceProvider",
"UpdateContact",
"UpdateDevice",
"UpdateGateway",
"UpdateGatewayGroup",
"UpdateNetworkProfile",
"UpdateProfile",
"UpdateRoom",
"UpdateSkillGroup"
],
"Read": [
"GetAddressBook",
"GetConferencePreference",
"GetConferenceProvider",
"GetContact",
"GetDevice",
"GetGateway",
"GetGatewayGroup",
"GetInvitationConfiguration",
"GetNetworkProfile",
"GetProfile",
"GetRoom",
"GetRoomSkillParameter",
"GetSkillGroup",
"ListTags",
"ResolveRoom",
"StartSmartHomeApplianceDiscovery"
],
"List": [
"ListBusinessReportSchedules",
"ListConferenceProviders",
"ListDeviceEvents",
"ListGatewayGroups",
"ListGateways",
"ListSkills",
"ListSkillsStoreCategories",
"ListSkillsStoreSkillsByCategory",
"ListSmartHomeAppliances",
"SearchAddressBooks",
"SearchContacts",
"SearchDevices",
"SearchNetworkProfiles",
"SearchProfiles",
"SearchRooms",
"SearchSkillGroups",
"SearchUsers"
],
"Tagging": [
"TagResource",
"UntagResource"
]
};
/**
 * Adds a resource of type profile to the statement
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_Profile.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to `*`: all accounts.
 * @param region - Region of the resource; defaults to `*`: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onProfile(resourceId: string, account?: string, region?: string, partition?: string) {
  // build the ARN in a single template literal instead of mutating a `var`
  // through repeated replace() calls; `||` keeps the original falsy-string
  // fallback behavior (empty string -> wildcard / default partition)
  const arn = `arn:${partition || 'aws'}:a4b:${region || '*'}:${account || '*'}:profile/${resourceId}`;
  return this.on(arn);
}
/**
 * Adds a resource of type room to the statement
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_Room.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to `*`: all accounts.
 * @param region - Region of the resource; defaults to `*`: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 *
 * Possible conditions:
 * - .ifAwsResourceTag()
 */
public onRoom(resourceId: string, account?: string, region?: string, partition?: string) {
  // single template literal replaces the mutable `var` + replace() chain;
  // `||` keeps the original falsy-string fallbacks intact
  const arn = `arn:${partition || 'aws'}:a4b:${region || '*'}:${account || '*'}:room/${resourceId}`;
  return this.on(arn);
}
/**
 * Adds a resource of type device to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_Device.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 *
 * Possible conditions:
 * - .ifAwsResourceTag()
 */
public onDevice(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:device/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type skillgroup to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_SkillGroup.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onSkillgroup(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:skill-group/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type user to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_UserData.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 *
 * Possible conditions:
 * - .ifAwsResourceTag()
 */
public onUser(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:user/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type addressbook to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_AddressBook.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onAddressbook(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:address-book/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type conferenceprovider to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_ConferenceProvider.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onConferenceprovider(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:conference-provider/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type contact to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_Contact.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onContact(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:contact/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type schedule to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_BusinessReportSchedule.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onSchedule(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:schedule/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type networkprofile to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_NetworkProfile.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onNetworkprofile(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:network-profile/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type gateway to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_Gateway.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onGateway(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:gateway/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Adds a resource of type gatewaygroup to the statement.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_GatewayGroup.html
 *
 * @param resourceId - Identifier for the resourceId.
 * @param account - Account of the resource; defaults to empty string: all accounts.
 * @param region - Region of the resource; defaults to empty string: all regions.
 * @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
 */
public onGatewaygroup(resourceId: string, account?: string, region?: string, partition?: string) {
  // Fill the ARN template in placeholder order, wildcarding unset segments.
  const arn = 'arn:${Partition}:a4b:${Region}:${Account}:gateway-group/${Resource_id}'
    .replace('${Resource_id}', resourceId)
    .replace('${Account}', account || '*')
    .replace('${Region}', region || '*')
    .replace('${Partition}', partition || 'aws');
  return this.on(arn);
}
/**
 * Filters actions based on the Amazon Id in the request.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_RegisterAVSDevice.html
 *
 * @param value The value(s) to check
 * @param operator Works with [string operators](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition_operators.html#Conditions_String). **Default:** `StringLike`
 */
public ifAmazonId(value: string | string[], operator?: Operator | string) {
  const op = operator || 'StringLike';
  return this.if(`amazonId`, value, op);
}
/**
 * Filters actions based on the device type in the request.
 *
 * https://docs.aws.amazon.com/a4b/latest/APIReference/API_SearchDevices.html
 *
 * @param value The value(s) to check
 * @param operator Works with [string operators](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition_operators.html#Conditions_String). **Default:** `StringLike`
 */
public ifFiltersDeviceType(value: string | string[], operator?: Operator | string) {
  const op = operator || 'StringLike';
  return this.if(`filters_deviceType`, value, op);
}
} | the_stack |
import { ConcreteRequest } from "relay-runtime";
import { FragmentRefs } from "relay-runtime";
// This operation takes no variables.
export type MyProfileQueryVariables = {};
// Shape of the response data: `me` carries an opaque spread of the
// `MyProfile_me` fragment (read via Relay, not directly) and may be null.
export type MyProfileQueryResponse = {
readonly me: {
readonly " $fragmentRefs": FragmentRefs<"MyProfile_me">;
} | null;
};
// Pairing of response + variables consumed by Relay's typed query APIs.
export type MyProfileQuery = {
readonly response: MyProfileQueryResponse;
readonly variables: MyProfileQueryVariables;
};
/*
query MyProfileQuery {
me @optionalField {
...MyProfile_me
id
}
}
fragment MyProfile_me on Me {
name
createdAt
followsAndSaves {
artworksConnection(first: 10, private: true) {
edges {
node {
id
...SmallTileRail_artworks
}
}
}
}
}
fragment SmallTileRail_artworks on Artwork {
href
saleMessage
artistNames
slug
internalID
sale {
isAuction
isClosed
displayTimelyAt
endAt
id
}
saleArtwork {
counts {
bidderPositions
}
currentBid {
display
}
id
}
partner {
name
id
}
image {
imageURL
}
}
*/
// Auto-generated Relay artifact for the `MyProfileQuery` operation shown in
// the comment above. Do not edit by hand; regenerate with the Relay compiler.
const node: ConcreteRequest = (function(){
// Selections shared across fields, hoisted by the compiler:
// v0 selects the scalar `name`, v1 selects the scalar `id`.
var v0 = {
  "alias": null,
  "args": null,
  "kind": "ScalarField",
  "name": "name",
  "storageKey": null
},
v1 = {
  "alias": null,
  "args": null,
  "kind": "ScalarField",
  "name": "id",
  "storageKey": null
};
return {
  // Reader fragment: the shape components read from the Relay store
  // (fragment spreads remain unresolved here).
  "fragment": {
    "argumentDefinitions": [],
    "kind": "Fragment",
    "metadata": null,
    "name": "MyProfileQuery",
    "selections": [
      {
        "alias": null,
        "args": null,
        "concreteType": "Me",
        "kind": "LinkedField",
        "name": "me",
        "plural": false,
        "selections": [
          {
            "args": null,
            "kind": "FragmentSpread",
            "name": "MyProfile_me"
          }
        ],
        "storageKey": null
      }
    ],
    "type": "Query",
    "abstractKey": null
  },
  "kind": "Request",
  // Normalization AST: fragments inlined; used to write the network payload
  // into the store.
  "operation": {
    "argumentDefinitions": [],
    "kind": "Operation",
    "name": "MyProfileQuery",
    "selections": [
      {
        "alias": null,
        "args": null,
        "concreteType": "Me",
        "kind": "LinkedField",
        "name": "me",
        "plural": false,
        "selections": [
          (v0/*: any*/),
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "createdAt",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "FollowsAndSaves",
            "kind": "LinkedField",
            "name": "followsAndSaves",
            "plural": false,
            "selections": [
              {
                "alias": null,
                "args": [
                  {
                    "kind": "Literal",
                    "name": "first",
                    "value": 10
                  },
                  {
                    "kind": "Literal",
                    "name": "private",
                    "value": true
                  }
                ],
                "concreteType": "SavedArtworksConnection",
                "kind": "LinkedField",
                "name": "artworksConnection",
                "plural": false,
                "selections": [
                  {
                    "alias": null,
                    "args": null,
                    "concreteType": "SavedArtworksEdge",
                    "kind": "LinkedField",
                    "name": "edges",
                    "plural": true,
                    "selections": [
                      {
                        "alias": null,
                        "args": null,
                        "concreteType": "Artwork",
                        "kind": "LinkedField",
                        "name": "node",
                        "plural": false,
                        "selections": [
                          (v1/*: any*/),
                          {
                            "alias": null,
                            "args": null,
                            "kind": "ScalarField",
                            "name": "href",
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "kind": "ScalarField",
                            "name": "saleMessage",
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "kind": "ScalarField",
                            "name": "artistNames",
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "kind": "ScalarField",
                            "name": "slug",
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "kind": "ScalarField",
                            "name": "internalID",
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "concreteType": "Sale",
                            "kind": "LinkedField",
                            "name": "sale",
                            "plural": false,
                            "selections": [
                              {
                                "alias": null,
                                "args": null,
                                "kind": "ScalarField",
                                "name": "isAuction",
                                "storageKey": null
                              },
                              {
                                "alias": null,
                                "args": null,
                                "kind": "ScalarField",
                                "name": "isClosed",
                                "storageKey": null
                              },
                              {
                                "alias": null,
                                "args": null,
                                "kind": "ScalarField",
                                "name": "displayTimelyAt",
                                "storageKey": null
                              },
                              {
                                "alias": null,
                                "args": null,
                                "kind": "ScalarField",
                                "name": "endAt",
                                "storageKey": null
                              },
                              (v1/*: any*/)
                            ],
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "concreteType": "SaleArtwork",
                            "kind": "LinkedField",
                            "name": "saleArtwork",
                            "plural": false,
                            "selections": [
                              {
                                "alias": null,
                                "args": null,
                                "concreteType": "SaleArtworkCounts",
                                "kind": "LinkedField",
                                "name": "counts",
                                "plural": false,
                                "selections": [
                                  {
                                    "alias": null,
                                    "args": null,
                                    "kind": "ScalarField",
                                    "name": "bidderPositions",
                                    "storageKey": null
                                  }
                                ],
                                "storageKey": null
                              },
                              {
                                "alias": null,
                                "args": null,
                                "concreteType": "SaleArtworkCurrentBid",
                                "kind": "LinkedField",
                                "name": "currentBid",
                                "plural": false,
                                "selections": [
                                  {
                                    "alias": null,
                                    "args": null,
                                    "kind": "ScalarField",
                                    "name": "display",
                                    "storageKey": null
                                  }
                                ],
                                "storageKey": null
                              },
                              (v1/*: any*/)
                            ],
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "concreteType": "Partner",
                            "kind": "LinkedField",
                            "name": "partner",
                            "plural": false,
                            "selections": [
                              (v0/*: any*/),
                              (v1/*: any*/)
                            ],
                            "storageKey": null
                          },
                          {
                            "alias": null,
                            "args": null,
                            "concreteType": "Image",
                            "kind": "LinkedField",
                            "name": "image",
                            "plural": false,
                            "selections": [
                              {
                                "alias": null,
                                "args": null,
                                "kind": "ScalarField",
                                "name": "imageURL",
                                "storageKey": null
                              }
                            ],
                            "storageKey": null
                          }
                        ],
                        "storageKey": null
                      }
                    ],
                    "storageKey": null
                  }
                ],
                "storageKey": "artworksConnection(first:10,private:true)"
              }
            ],
            "storageKey": null
          },
          (v1/*: any*/)
        ],
        "storageKey": null
      }
    ]
  },
  // Persisted-query metadata: `text` is null because the operation is
  // fetched by its persisted `id`.
  "params": {
    "id": "e8de7c8abc1c8f039996326abc785179",
    "metadata": {},
    "name": "MyProfileQuery",
    "operationKind": "query",
    "text": null
  }
};
})();
// Hash of the operation, emitted by the Relay compiler for artifact tracking.
(node as any).hash = 'd7b45c8cb29ba0ad56f812f112f4665b';
export default node;
import {applyPatch, Operation} from 'fast-json-patch';
import stringify from 'json-stringify-pretty-compact';
import {satisfies} from 'semver';
import * as vegaImport from 'vega';
import {
AutoSize,
Config as VgConfig,
EncodeEntryName,
isBoolean,
isString,
Loader,
LoaderOptions,
mergeConfig,
Renderers,
Spec as VgSpec,
TooltipHandler,
View,
} from 'vega';
import {expressionInterpreter} from 'vega-interpreter';
import * as vegaLiteImport from 'vega-lite';
import {Config as VlConfig, TopLevelSpec as VlSpec} from 'vega-lite';
import schemaParser from 'vega-schema-url-parser';
import * as themes from 'vega-themes';
import {Handler, Options as TooltipOptions} from 'vega-tooltip';
import post from './post';
import embedStyle from './style';
import {Config, Mode} from './types';
import {mergeDeep} from './util';
export * from './types';
// Re-export the imported Vega and Vega-Lite namespaces for consumers.
export const vega = vegaImport;
export let vegaLite = vegaLiteImport;
// For backwards compatibility with Vega-Lite before v4.
// Older setups exposed Vega-Lite as a global `vl`; fall back to it when the
// bundled import resolved to undefined but a usable global compiler exists.
const w = (typeof window !== 'undefined' ? window : undefined) as any;
if (vegaLite === undefined && w?.['vl']?.compile) {
vegaLite = w['vl'];
}
// Which entries appear in the action menu; `export` can be toggled per format.
export interface Actions {
export?: boolean | {svg?: boolean; png?: boolean};
source?: boolean;
compiled?: boolean;
editor?: boolean;
}
// All actions (and both export formats) enabled by default.
export const DEFAULT_ACTIONS = {export: {svg: true, png: true}, source: true, compiled: true, editor: true};
// Names of the Vega encode sets used when hover highlighting is enabled.
export interface Hover {
hoverSet?: EncodeEntryName;
updateSet?: EncodeEntryName;
}
// A function that transforms the compiled Vega spec before rendering.
export type PatchFunc = (spec: VgSpec) => VgSpec;
// Default UI strings; individual entries can be overridden via `EmbedOptions.i18n`.
const I18N = {
CLICK_TO_VIEW_ACTIONS: 'Click to view actions',
COMPILED_ACTION: 'View Compiled Vega',
EDITOR_ACTION: 'Open in Vega Editor',
PNG_ACTION: 'Save as PNG',
SOURCE_ACTION: 'View Source',
SVG_ACTION: 'Save as SVG',
};
/**
 * Options accepted by `embed`. `S` is the type used for URL-valued options
 * (`config`/`patch` given as URLs are resolved by `loadOpts`); `R` is the
 * renderer type.
 */
export interface EmbedOptions<S = string, R = Renderers> {
/** Element (or selector) for input-element binding; passed to `view.initialize`. */
bind?: HTMLElement | string;
actions?: boolean | Actions;
/** Force 'vega' or 'vega-lite'; otherwise inferred by `guessMode`. */
mode?: Mode;
theme?: 'excel' | 'ggplot2' | 'quartz' | 'vox' | 'dark';
/** `true`/undefined injects the bundled stylesheet; a string supplies custom CSS. */
defaultStyle?: boolean | string;
logLevel?: number;
loader?: Loader | LoaderOptions;
renderer?: R;
tooltip?: TooltipHandler | TooltipOptions | boolean;
/** JSON-patch operations, a patch function, or (as `S`) a URL to load them from. */
patch?: S | PatchFunc | Operation[];
width?: number;
height?: number;
padding?: number | {left?: number; right?: number; top?: number; bottom?: number};
/** Scale multiplier forwarded to `view.toImageURL` for PNG/SVG export. */
scaleFactor?: number;
config?: S | Config;
sourceHeader?: string;
sourceFooter?: string;
editorUrl?: string;
hover?: boolean | Hover;
i18n?: Partial<typeof I18N>;
downloadFileName?: string;
formatLocale?: Record<string, unknown>;
timeFormatLocale?: Record<string, unknown>;
/** Parse with AST output and evaluate via the expression interpreter. */
ast?: boolean;
expr?: typeof expressionInterpreter;
/** Alternative View subclass to instantiate instead of `vega.View`. */
viewClass?: typeof View;
}
// Human-readable library names per mode (used in warnings and window titles).
const NAMES: {[key in Mode]: string} = {
vega: 'Vega',
'vega-lite': 'Vega-Lite',
};
// Library versions, used for $schema compatibility warnings.
const VERSION = {
vega: vega.version,
'vega-lite': vegaLite ? vegaLite.version : 'not available',
};
// Per-mode conversion to a plain Vega spec; Vega-Lite specs get compiled.
const PREPROCESSOR: {[mode in Mode]: (spec: any, config?: Config) => VgSpec} = {
vega: (vgSpec: VgSpec) => vgSpec,
'vega-lite': (vlSpec, config) => vegaLite.compile(vlSpec as VlSpec, {config: config as VlConfig}).spec,
};
// Three-dot icon markup for the action-menu toggle. NOTE: the template's
// whitespace is part of the emitted markup; do not reformat.
const SVG_CIRCLES = `
<svg viewBox="0 0 16 16" fill="currentColor" stroke="none" stroke-width="1" stroke-linecap="round" stroke-linejoin="round">
<circle r="2" cy="8" cx="2"></circle>
<circle r="2" cy="8" cx="8"></circle>
<circle r="2" cy="8" cx="14"></circle>
</svg>`;
// CSS class for the wrapper that separates the chart from the action menu.
const CHART_WRAPPER_CLASS = 'chart-wrapper';
export type VisualizationSpec = VlSpec | VgSpec;
export interface Result {
/** The Vega view. */
view: View;
/** The input specification. */
spec: VisualizationSpec;
/** The compiled and patched Vega specification. */
vgSpec: VgSpec;
/** Removes references to unwanted behaviors and memory leaks. Calls Vega's `view.finalize`. */
finalize: () => void;
}
/** Distinguishes a custom tooltip handler function from booleans/options. */
function isTooltipHandler(h?: boolean | TooltipOptions | TooltipHandler): h is TooltipHandler {
  const kind = typeof h;
  return kind === 'function';
}
/**
 * Open a new browser window showing the given JSON source wrapped in
 * minimal HTML, titled after the spec's mode.
 *
 * @param source Stringified spec to display.
 * @param sourceHeader Extra HTML injected into the page's <head>.
 * @param sourceFooter Extra HTML appended after the source listing.
 * @param mode 'vega' or 'vega-lite'; used for the window title.
 */
function viewSource(source: string, sourceHeader: string, sourceFooter: string, mode: Mode) {
  const header = `<html><head>${sourceHeader}</head><body><pre><code class="json">`;
  const footer = `</code></pre>${sourceFooter}</body></html>`;
  const win = window.open('');
  if (!win) {
    // window.open returns null when a pop-up blocker intervenes; the previous
    // non-null assertion turned that into an uncaught TypeError.
    console.error('Cannot open a new window to view the source: the pop-up may have been blocked.');
    return;
  }
  win.document.write(header + source + footer);
  win.document.title = `${NAMES[mode]} JSON Source`;
}
/**
 * Try to guess the type of spec.
 *
 * Precedence: an explicit `$schema` URL wins (with warnings on mode/version
 * mismatch); otherwise grammar-specific top-level properties are sniffed;
 * otherwise the provided mode (or 'vega') is used.
 *
 * @param spec Vega or Vega-Lite spec.
 * @param providedMode Caller-supplied mode hint, if any.
 */
export function guessMode(spec: VisualizationSpec, providedMode?: Mode): Mode {
  if (spec.$schema) {
    const parsed = schemaParser(spec.$schema);
    if (providedMode && providedMode !== parsed.library) {
      console.warn(
        `The given visualization spec is written in ${NAMES[parsed.library]}, but mode argument sets ${NAMES[providedMode] ?? providedMode}.`
      );
    }
    const mode = parsed.library as Mode;
    if (!satisfies(VERSION[mode], `^${parsed.version.slice(1)}`)) {
      console.warn(
        `The input spec uses ${NAMES[mode]} ${parsed.version}, but the current version of ${NAMES[mode]} is v${VERSION[mode]}.`
      );
    }
    return mode;
  }

  // No $schema: look for properties that only exist in one of the grammars.
  const vegaLiteOnly = ['mark', 'encoding', 'layer', 'hconcat', 'vconcat', 'facet', 'repeat'];
  if (vegaLiteOnly.some((prop) => prop in spec)) {
    return 'vega-lite';
  }

  const vegaOnly = ['marks', 'signals', 'scales', 'axes'];
  if (vegaOnly.some((prop) => prop in spec)) {
    return 'vega';
  }

  return providedMode ?? 'vega';
}
/** A fully-built Loader exposes a `load` member; plain options objects do not. */
function isLoader(o?: LoaderOptions | Loader): o is Loader {
  if (!o) {
    return false;
  }
  return 'load' in o;
}
/** Reuse a ready-made loader if given one; otherwise build one from options. */
function createLoader(opts?: Loader | LoaderOptions) {
  if (isLoader(opts)) {
    return opts;
  }
  return vega.loader(opts);
}
/** Extract embed options a spec may carry under `usermeta.embedOptions`. */
function embedOptionsFromUsermeta(parsedSpec: VisualizationSpec) {
  const meta = parsedSpec.usermeta as any;
  return (meta && meta['embedOptions']) ?? {};
}
/**
* Embed a Vega visualization component in a web page. This function returns a promise.
*
* @param el DOM element in which to place component (DOM node or CSS selector).
* @param spec String : A URL string from which to load the Vega specification.
* Object : The Vega/Vega-Lite specification as a parsed JSON object.
* @param opts A JavaScript object containing options for embedding.
*/
export default async function embed(
el: HTMLElement | string,
spec: VisualizationSpec | string,
opts: EmbedOptions = {}
): Promise<Result> {
let parsedSpec: VisualizationSpec;
let loader: Loader | undefined;
if (isString(spec)) {
loader = createLoader(opts.loader);
parsedSpec = JSON.parse(await loader.load(spec));
} else {
parsedSpec = spec;
}
const usermetaLoader = embedOptionsFromUsermeta(parsedSpec).loader;
// either create the loader for the first time or create a new loader if the spec has new loader options
if (!loader || usermetaLoader) {
loader = createLoader(opts.loader ?? usermetaLoader);
}
const usermetaOpts = await loadOpts(embedOptionsFromUsermeta(parsedSpec), loader);
const parsedOpts = await loadOpts(opts, loader);
const mergedOpts = {
...mergeDeep(parsedOpts, usermetaOpts),
config: mergeConfig(parsedOpts.config ?? {}, usermetaOpts.config ?? {}),
};
return await _embed(el, parsedSpec, mergedOpts, loader);
}
/**
 * Resolve URL-valued `config` and `patch` options by fetching them through
 * the loader; all other options pass through unchanged.
 */
async function loadOpts(opt: EmbedOptions, loader: Loader): Promise<EmbedOptions<never>> {
  let config: Config;
  if (isString(opt.config)) {
    config = JSON.parse(await loader.load(opt.config));
  } else {
    config = opt.config ?? {};
  }

  let patch: PatchFunc | Operation[];
  if (isString(opt.patch)) {
    patch = JSON.parse(await loader.load(opt.patch));
  } else {
    patch = opt.patch;
  }

  return {
    ...(opt as any),
    ...(patch ? {patch} : {}),
    ...(config ? {config} : {}),
  };
}
/**
 * Find where styles for `el` should be injected: the element's shadow root
 * when it lives inside one, otherwise the main document's head (or body).
 */
function getRoot(el: Element) {
  const rootNode = el.getRootNode ? el.getRootNode() : document;
  if (rootNode instanceof ShadowRoot) {
    return {root: rootNode, rootContainer: rootNode};
  }
  return {root: document, rootContainer: document.head ?? document.body};
}
/**
 * Core embedding routine: compiles Vega-Lite if necessary, applies patches,
 * parses the Vega spec, renders it into `el`, and wires up tooltips, hover
 * behaviour and the optional action menu.
 *
 * @param el DOM element or CSS selector to render into.
 * @param spec Parsed Vega or Vega-Lite specification.
 * @param opts Fully-resolved embed options (URL-valued options already loaded).
 * @param loader Loader used by the Vega view for data and images.
 */
async function _embed(
  el: HTMLElement | string,
  spec: VisualizationSpec,
  opts: EmbedOptions<never> = {},
  loader: Loader
): Promise<Result> {
  const config = opts.theme ? mergeConfig(themes[opts.theme], opts.config ?? {}) : opts.config;
  const actions = isBoolean(opts.actions) ? opts.actions : mergeDeep<Actions>({}, DEFAULT_ACTIONS, opts.actions ?? {});
  const i18n = {...I18N, ...opts.i18n};
  const renderer = opts.renderer ?? 'canvas';
  const logLevel = opts.logLevel ?? vega.Warn;
  const downloadFileName = opts.downloadFileName ?? 'visualization';
  const element = typeof el === 'string' ? document.querySelector(el) : el;
  if (!element) {
    throw new Error(`${el} does not exist`);
  }
  if (opts.defaultStyle !== false) {
    // Add a default stylesheet to the head of the document (or shadow root).
    const ID = 'vega-embed-style';
    const {root, rootContainer} = getRoot(element);
    if (!root.getElementById(ID)) {
      const style = document.createElement('style');
      style.id = ID;
      style.innerText =
        opts.defaultStyle === undefined || opts.defaultStyle === true
          ? (embedStyle ?? '').toString()
          : opts.defaultStyle;
      rootContainer.appendChild(style);
    }
  }
  // Determine the grammar and compile Vega-Lite down to Vega if needed.
  const mode = guessMode(spec, opts.mode);
  let vgSpec: VgSpec = PREPROCESSOR[mode](spec, config);
  if (mode === 'vega-lite') {
    if (vgSpec.$schema) {
      const parsed = schemaParser(vgSpec.$schema);
      if (!satisfies(VERSION.vega, `^${parsed.version.slice(1)}`)) {
        console.warn(`The compiled spec uses Vega ${parsed.version}, but current version is v${VERSION.vega}.`);
      }
    }
  }
  element.classList.add('vega-embed');
  if (actions) {
    element.classList.add('has-actions');
  }
  element.innerHTML = ''; // clear container
  let container = element;
  if (actions) {
    // Wrap the chart so the action menu can sit alongside it.
    const chartWrapper = document.createElement('div');
    chartWrapper.classList.add(CHART_WRAPPER_CLASS);
    element.appendChild(chartWrapper);
    container = chartWrapper;
  }
  // Apply the user's patch (function or JSON-patch operations) to the
  // compiled Vega spec before parsing.
  const patch = opts.patch;
  if (patch) {
    if (patch instanceof Function) {
      vgSpec = patch(vgSpec);
    } else {
      vgSpec = applyPatch(vgSpec, patch, true, false).newDocument;
    }
  }
  // Set locale. Note that this is a global setting.
  if (opts.formatLocale) {
    vega.formatLocale(opts.formatLocale);
  }
  if (opts.timeFormatLocale) {
    vega.timeFormatLocale(opts.timeFormatLocale);
  }
  const {ast} = opts;
  // Do not apply the config to Vega when we have already applied it to Vega-Lite.
  // This call may throw an Error if parsing fails.
  const runtime = vega.parse(vgSpec, mode === 'vega-lite' ? {} : (config as VgConfig), {ast});
  const view = new (opts.viewClass || vega.View)(runtime, {
    loader,
    logLevel,
    renderer,
    ...(ast ? {expr: (vega as any).expressionInterpreter ?? opts.expr ?? expressionInterpreter} : {}),
  });
  // Keep the container's fit-x/fit-y CSS classes in sync with the spec's
  // autosize signal.
  view.addSignalListener('autosize', (_, autosize: Exclude<AutoSize, string>) => {
    const {type} = autosize;
    if (type == 'fit-x') {
      container.classList.add('fit-x');
      container.classList.remove('fit-y');
    } else if (type == 'fit-y') {
      container.classList.remove('fit-x');
      container.classList.add('fit-y');
    } else if (type == 'fit') {
      container.classList.add('fit-x', 'fit-y');
    } else {
      container.classList.remove('fit-x', 'fit-y');
    }
  });
  // Tooltips are enabled by default; accept a custom handler function or
  // options for vega-tooltip's Handler.
  if (opts.tooltip !== false) {
    let handler: TooltipHandler;
    if (isTooltipHandler(opts.tooltip)) {
      handler = opts.tooltip;
    } else {
      // user provided boolean true or tooltip options
      handler = new Handler(opts.tooltip === true ? {} : opts.tooltip).call;
    }
    view.tooltip(handler);
  }
  let {hover} = opts;
  // Hover defaults to enabled only for plain Vega specs.
  if (hover === undefined) {
    hover = mode === 'vega';
  }
  if (hover) {
    const {hoverSet, updateSet} = (typeof hover === 'boolean' ? {} : hover) as Hover;
    view.hover(hoverSet, updateSet);
  }
  if (opts) {
    if (opts.width != null) {
      view.width(opts.width);
    }
    if (opts.height != null) {
      view.height(opts.height);
    }
    if (opts.padding != null) {
      view.padding(opts.padding);
    }
  }
  await view.initialize(container, opts.bind).runAsync();
  let documentClickHandler: ((this: Document, ev: MouseEvent) => void) | undefined;
  // Build the action menu (export / view source / view compiled / editor).
  if (actions !== false) {
    let wrapper = element;
    if (opts.defaultStyle !== false) {
      // With the default style, actions live in a <details> popup that is
      // closed when the user clicks anywhere outside of it.
      const details = document.createElement('details');
      details.title = i18n.CLICK_TO_VIEW_ACTIONS;
      element.append(details);
      wrapper = details;
      const summary = document.createElement('summary');
      summary.innerHTML = SVG_CIRCLES;
      details.append(summary);
      documentClickHandler = (ev: MouseEvent) => {
        if (!details.contains(ev.target as any)) {
          details.removeAttribute('open');
        }
      };
      document.addEventListener('click', documentClickHandler);
    }
    const ctrl = document.createElement('div');
    wrapper.append(ctrl);
    ctrl.classList.add('vega-actions');
    // add 'Export' action
    if (actions === true || actions.export !== false) {
      for (const ext of ['svg', 'png'] as const) {
        if (actions === true || actions.export === true || (actions.export as {svg?: boolean; png?: boolean})[ext]) {
          const i18nExportAction = (i18n as {[key: string]: string})[`${ext.toUpperCase()}_ACTION`];
          const exportLink = document.createElement('a');
          exportLink.text = i18nExportAction;
          exportLink.href = '#';
          exportLink.target = '_blank';
          exportLink.download = `${downloadFileName}.${ext}`;
          // add link on mousedown so that it's correct when the click happens
          exportLink.addEventListener('mousedown', async function (this, e) {
            e.preventDefault();
            const url = await view.toImageURL(ext, opts.scaleFactor);
            this.href = url;
          });
          ctrl.append(exportLink);
        }
      }
    }
    // add 'View Source' action
    if (actions === true || actions.source !== false) {
      const viewSourceLink = document.createElement('a');
      viewSourceLink.text = i18n.SOURCE_ACTION;
      viewSourceLink.href = '#';
      viewSourceLink.addEventListener('click', function (this, e) {
        viewSource(stringify(spec), opts.sourceHeader ?? '', opts.sourceFooter ?? '', mode);
        e.preventDefault();
      });
      ctrl.append(viewSourceLink);
    }
    // add 'View Compiled' action
    if (mode === 'vega-lite' && (actions === true || actions.compiled !== false)) {
      const compileLink = document.createElement('a');
      compileLink.text = i18n.COMPILED_ACTION;
      compileLink.href = '#';
      compileLink.addEventListener('click', function (this, e) {
        viewSource(stringify(vgSpec), opts.sourceHeader ?? '', opts.sourceFooter ?? '', 'vega');
        e.preventDefault();
      });
      ctrl.append(compileLink);
    }
    // add 'Open in Vega Editor' action
    if (actions === true || actions.editor !== false) {
      const editorUrl = opts.editorUrl ?? 'https://vega.github.io/editor/';
      const editorLink = document.createElement('a');
      editorLink.text = i18n.EDITOR_ACTION;
      editorLink.href = '#';
      editorLink.addEventListener('click', function (this, e) {
        post(window, editorUrl, {
          config: config as Config,
          mode,
          renderer,
          spec: stringify(spec),
        });
        e.preventDefault();
      });
      ctrl.append(editorLink);
    }
  }
  // Detach the outside-click listener and tear down the Vega view.
  function finalize() {
    if (documentClickHandler) {
      document.removeEventListener('click', documentClickHandler);
    }
    view.finalize();
  }
  return {view, spec, vgSpec, finalize};
}
import { PassThrough, Stream } from 'stream';
import type { IncomingMessage, ServerResponse } from 'http';
/******************************************************************************/
// Really we want:
//
// import type { FastifyReply, FastifyRequest } from 'fastify';
//
// however, we don't want people to have to install fastify to get these types,
// so we're going to do rough approximations of them. Care should be taken to
// keep these compatible with the official fastify types.
// Rough structural approximation of fastify's FastifyReply (see note above);
// only the members this package uses. Keep compatible with the official types.
export interface CompatFastifyReply {
raw: ServerResponse; // TODO:v5: | Http2ServerResponse;
status(statusCode: number): CompatFastifyReply;
headers(values: { [key: string]: any }): CompatFastifyReply;
send(payload?: any): CompatFastifyReply;
}
// Rough structural approximation of fastify's FastifyRequest (see note above);
// only the members this package uses. Keep compatible with the official types.
export interface CompatFastifyRequest {
raw: IncomingMessage; // TODO:v5: | Http2ServerRequest;
body: unknown;
readonly headers: { [key: string]: unknown };
}
/******************************************************************************/
/******************************************************************************/
// Really we want:
//
// import type { Context as KoaContext } from 'koa';
//
// however, we don't want people to have to install koa to get these types,
// so we're going to do rough approximations of them. Care should be taken to
// keep these compatible with the official koa types.
// Rough structural approximation of Koa's Context (see note above); exposes
// the raw Node request/response plus Koa's arbitrary extra properties.
export interface CompatKoaContext {
[key: string]: any;
req: IncomingMessage;
res: ServerResponse;
}
/******************************************************************************/
// Augment Node's IncomingMessage with optional per-framework context and
// body-parsing fields that framework adaptors may attach to the raw request.
declare module 'http' {
interface IncomingMessage {
_koaCtx?: CompatKoaContext;
_fastifyRequest?: CompatFastifyRequest;
// NOTE(review): `_body`/`body` presumably mirror body-parser conventions
// (parsed flag + parsed payload) — confirm against the adaptor code.
_body?: boolean;
body?: any;
originalUrl?: string;
}
}
/* TODO:v5:
declare module 'http2' {
interface Http2ServerRequest {
_koaCtx?: CompatKoaContext;
_fastifyRequest?: CompatFastifyRequest;
_body?: boolean;
body?: any;
originalUrl?: string;
}
}
*/
// Map of header name to header value, buffered until the response flushes.
type Headers = { [header: string]: string };
/**
 * The base class for PostGraphile responses; collects headers, status code and
 * body, and then hands to the relevant adaptor at the correct time.
 *
 * Invariant: the adaptor hook `setHeaders` is invoked exactly once, lazily,
 * immediately before the first body write; after that, `setHeader` throws.
 */
export abstract class PostGraphileResponse {
  // Headers buffered until the response is flushed.
  private _headers: Headers = {};
  // Buffered payload (string/Buffer) or the live stream from `endWithStream`;
  // undefined until `end`/`endWithStream` is called.
  private _body: Buffer | string | PassThrough | undefined;
  // Set once the status code and headers have been handed to the adaptor.
  private _setHeaders = false;
  public statusCode = 200;
  // Flush the status code and buffered headers to the adaptor, at most once.
  private _setHeadersOnce() {
    if (!this._setHeaders) {
      this._setHeaders = true;
      this.setHeaders(this.statusCode, this._headers);
    }
  }
  public setHeader(header: string, value: string): void {
    if (this._setHeaders) {
      throw new Error(`Cannot set a header '${header}' when headers already sent`);
    }
    this._headers[header] = value;
  }
  /**
   * Use `endWithStream` or `end`; not both.
   *
   * Flushes headers immediately and returns a PassThrough the caller writes
   * the streamed body into.
   */
  public endWithStream(): PassThrough {
    if (this._body != null) {
      throw new Error("Cannot return a stream when there's already a response body");
    }
    this._setHeadersOnce();
    this._body = new PassThrough();
    this.setBody(this._body);
    return this._body;
  }
  /**
   * Use `endWithStream` or `end`; not both
   */
  public end(moreBody?: Buffer | string | null) {
    if (moreBody) {
      if (this._body != null) {
        // Append to the buffered body; string and Buffer bodies cannot be
        // mixed, and a streaming body cannot be appended to.
        if (typeof this._body === 'string') {
          if (Buffer.isBuffer(moreBody)) {
            throw new Error('Cannot mix string and buffer');
          }
          this._body = this._body + moreBody;
        } else if (Buffer.isBuffer(this._body)) {
          if (typeof moreBody === 'string') {
            throw new Error('Cannot mix buffer and string');
          }
          this._body = Buffer.concat([this._body, moreBody]);
        } else {
          throw new Error("Can't `.end(string)` when body is a stream");
        }
      } else {
        this._body = moreBody;
      }
    }
    // If possible, set Content-Length to avoid unnecessary chunked encoding
    if (typeof this._body === 'string') {
      // String length is not reliable due to multi-byte characters; calculate via Buffer
      this.setHeader('Content-Length', String(Buffer.byteLength(this._body, 'utf8')));
    } else if (Buffer.isBuffer(this._body)) {
      this.setHeader('Content-Length', String(this._body.byteLength));
    }
    this._setHeadersOnce();
    this.setBody(this._body);
  }
  /**
   * Returns the `res` object that the underlying HTTP server would have.
   */
  public abstract getNodeServerRequest(): IncomingMessage; // TODO:v5: | Http2ServerRequest;
  public abstract getNodeServerResponse(): ServerResponse; // TODO:v5: | Http2ServerResponse;
  // Adaptor hooks implemented per framework (node http, koa, fastify, ...).
  public abstract setHeaders(statusCode: number, headers: Headers): void;
  public abstract setBody(body: Stream | Buffer | string | undefined): void;
}
/**
* Suitable for Node's HTTP server, but also connect, express, restify and fastify v2.
*/
export class PostGraphileResponseNode extends PostGraphileResponse {
  private _req: IncomingMessage;
  private _res: ServerResponse;
  private _next: (e?: 'route' | Error) => void;

  /**
   * @param req - the underlying Node server request
   * @param res - the underlying Node server response
   * @param next - connect/express-style continuation callback
   */
  constructor(req: IncomingMessage, res: ServerResponse, next: (e?: 'route' | Error) => void) {
    super();
    this._req = req;
    this._res = res;
    this._next = next;
  }

  getNodeServerRequest() {
    return this._req;
  }

  getNodeServerResponse() {
    return this._res;
  }

  /** The middleware `next` callback supplied to the constructor. */
  getNextCallback() {
    return this._next;
  }

  setHeaders(statusCode: number, headers: Headers) {
    for (const key in headers) {
      if (Object.hasOwnProperty.call(headers, key)) {
        this._res.setHeader(key, headers[key]);
      }
    }
    this._res.statusCode = statusCode;
  }

  setBody(body: Stream | Buffer | string | undefined) {
    if (typeof body === 'string') {
      this._res.end(body);
    } else if (Buffer.isBuffer(body)) {
      this._res.end(body);
    } else if (!body) {
      this._res.end();
    } else {
      // Must be a stream
      // It'd be really nice if we could just:
      //
      //   body.pipe(this._res);
      //
      // however we need to support running within the compression middleware
      // which requires special handling for server-sent events:
      // https://github.com/expressjs/compression#server-sent-events
      //
      // Because of this, we must handle the data streaming manually so we can
      // flush:
      const writeData = (data: Buffer | string) => {
        this._res.write(data);
        // Technically we should see if `.write()` returned false, and if so we
        // should pause the stream. However, since our stream is coming from
        // watch mode, we find it unlikely that a significant amount of data
        // will be buffered (and we don't recommend watch mode in production),
        // so it doesn't feel like we need this currently. If it turns out you
        // need this, a PR would be welcome.
        if (typeof (this._res as any).flush === 'function') {
          // https://github.com/expressjs/compression#server-sent-events
          (this._res as any).flush();
        } else if (typeof (this._res as any).flushHeaders === 'function') {
          (this._res as any).flushHeaders();
        }
      };
      // Named (rather than an inline anonymous listener) so that `cleanup` can
      // actually detach it; previously `removeListener('end', cleanup)` was a
      // no-op because `cleanup` was never the registered 'end' listener, so
      // after a client abort the anonymous handler stayed attached and still
      // called `this._res.end()` on a connection that had already gone away.
      const endStream = () => {
        cleanup();
        this._res.end();
      };
      let clean = false;
      const cleanup = () => {
        if (clean) return;
        clean = true;
        body.removeListener('data', writeData);
        body.removeListener('end', endStream);
        this._req.removeListener('close', cleanup);
        this._req.removeListener('end', cleanup);
        this._req.removeListener('error', cleanup);
      };
      body.on('data', writeData);
      body.on('end', endStream);
      this._req.on('close', cleanup);
      this._req.on('end', cleanup);
      this._req.on('error', cleanup);
    }
  }
}
// Koa-style `next` continuation; may optionally be passed an error.
export type CompatKoaNext = (error?: Error) => Promise<any>;
/**
* Suitable for Koa.
*/
export class PostGraphileResponseKoa extends PostGraphileResponse {
  /**
   * @param koaCtx - the Koa context for this request
   * @param koaNext - the Koa `next` continuation
   */
  constructor(
    private readonly koaCtx: CompatKoaContext,
    private readonly koaNext: CompatKoaNext
  ) {
    super();
    const nodeReq = this.getNodeServerRequest();
    // For backwards compatibility, and to allow getting "back" to the Koa
    // context from pgSettings, etc (this is a documented interface)
    nodeReq._koaCtx = koaCtx;
    // Make `koa-bodyparser` trigger skipping of our `body-parser`
    if (koaCtx.request.body) {
      nodeReq._body = true;
      nodeReq.body = koaCtx.request.body;
    }
    // In case you're using koa-mount or similar
    nodeReq.originalUrl = koaCtx.request.originalUrl;
  }

  getNodeServerRequest() {
    return this.koaCtx.req;
  }

  getNodeServerResponse() {
    return this.koaCtx.res;
  }

  /** The Koa `next` continuation supplied to the constructor. */
  getNextCallback() {
    return this.koaNext;
  }

  setHeaders(statusCode: number, headers: Headers) {
    this.koaCtx.status = statusCode;
    this.koaCtx.set(headers);
    // DO NOT `this.koaCtx.flushHeaders()` as it will interfere with the
    // compress middleware.
  }

  endWithStream() {
    // We're going to assume this is the EventStream which we want to
    // be realtime for watch mode, and there's no value in compressing it.
    this.koaCtx.compress = false;
    // TODO: find a better way of flushing the event stream on write.
    return super.endWithStream();
  }

  setBody(body: Stream | Buffer | string | undefined) {
    this.koaCtx.body = body || '';
    this.koaNext();
  }
}
/**
* Suitable for Fastify v3 (use PostGraphileResponseNode and middleware
* approach for Fastify v2)
*/
export class PostGraphileResponseFastify3 extends PostGraphileResponse {
private _request: CompatFastifyRequest;
private _reply: CompatFastifyReply;
constructor(request: CompatFastifyRequest, reply: CompatFastifyReply) {
super();
this._request = request;
this._reply = reply;
// For backwards compatibility, and to allow getting "back" to the Fastify
// request from pgSettings, etc
const req = this.getNodeServerRequest();
req._fastifyRequest = this._request;
// Make Fastify's body parsing trigger skipping of our `body-parser`
if (this._request.body) {
req._body = true;
req.body = this._request.body;
}
}
getNodeServerRequest() {
return this._request.raw;
}
getNodeServerResponse() {
return this._reply.raw;
}
setHeaders(statusCode: number, headers: Headers) {
this._reply.status(statusCode);
this._reply.headers(headers);
}
endWithStream() {
// We're going to assume this is the EventStream which we want to
// be realtime for watch mode, and there's no value in compressing it.
// Fastify will disable compression if we set the relevant request header
// (see:
// https://github.com/fastify/fastify-compress/blob/068c673fc0bd50da1f4d9f3fd2423b482c364a89/index.js#L217-L218)
this._request.headers['x-no-compression'] = '1';
// TODO: find a better way of flushing the event stream on write.
return super.endWithStream();
}
setBody(body: Stream | Buffer | string | undefined) {
this._reply.send(body);
}
} | the_stack |